hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acec20279a716b90b3ec2c735bf72ce18ec0b46d | 872 | py | Python | tifa/contrib/kafka.py | twocucao/tifa | f703fd27f54000e7d51f06d2456d09cc79e0ab72 | [
"MIT"
] | 71 | 2020-04-16T04:28:45.000Z | 2022-03-31T22:45:11.000Z | tifa/contrib/kafka.py | twocucao/tifa | f703fd27f54000e7d51f06d2456d09cc79e0ab72 | [
"MIT"
] | 6 | 2021-05-13T06:32:38.000Z | 2022-03-04T01:18:34.000Z | tifa/contrib/kafka.py | twocucao/tifa | f703fd27f54000e7d51f06d2456d09cc79e0ab72 | [
"MIT"
] | 12 | 2021-05-01T08:43:11.000Z | 2022-03-29T00:58:54.000Z | import asyncio
import json
import logging
from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
from tifa.conf import setting
# NOTE(review): calling asyncio.get_event_loop() at import time is deprecated
# on Python 3.10+ — confirm the target runtime still provides a loop here.
loop = asyncio.get_event_loop()
logger = logging.getLogger(__name__)
class MyKafka:
    """Application-wide wrapper around aiokafka producer/consumer clients.

    Both clients are bound to the module-level event loop and configured
    from ``setting``; callers must await :meth:`start_producer` /
    :meth:`start_consumer` before producing or consuming.
    """

    producer: AIOKafkaProducer
    consumer: AIOKafkaConsumer

    def __init__(self):
        self.producer = AIOKafkaProducer(
            loop=loop, bootstrap_servers=setting.KAFKA_BOOTSTRAP_SERVERS
        )
        self.consumer = AIOKafkaConsumer(
            setting.KAFKA_TOPIC,
            loop=loop,
            bootstrap_servers=setting.KAFKA_BOOTSTRAP_SERVERS,
        )

    async def start_producer(self):
        """Connect the producer to the brokers (must run inside the loop)."""
        await self.producer.start()

    async def start_consumer(self):
        """Connect the consumer and subscribe to the configured topic."""
        await self.consumer.start()

    async def send(self, data):
        """Serialize *data* as JSON and publish it to the configured topic.

        AIOKafkaProducer.send requires ``bytes`` when no ``value_serializer``
        is configured; the previous code passed the ``str`` returned by
        ``json.dumps``, which raises at runtime, so we encode explicitly.
        """
        await self.producer.send(
            setting.KAFKA_TOPIC, json.dumps(data).encode("utf-8")
        )
# Module-level singleton shared by the rest of the application.
kafka = MyKafka()
| 22.358974 | 72 | 0.693807 |
acec215fb244b81f724440a6035f328a01da617c | 2,852 | py | Python | common/datasets/wiki_test_dataset.py | Ben-Louis/ImageTextMatching-Pytorch | 4d95308fe3ba9f1fd8a0266a98300cd7210fdb56 | [
"Apache-2.0"
] | null | null | null | common/datasets/wiki_test_dataset.py | Ben-Louis/ImageTextMatching-Pytorch | 4d95308fe3ba9f1fd8a0266a98300cd7210fdb56 | [
"Apache-2.0"
] | null | null | null | common/datasets/wiki_test_dataset.py | Ben-Louis/ImageTextMatching-Pytorch | 4d95308fe3ba9f1fd8a0266a98300cd7210fdb56 | [
"Apache-2.0"
] | null | null | null | import os
import pickle
import torch
import urllib
import re
from PIL import Image
import numpy as np
from torchvision import transforms
from torch.utils.data import Dataset, DataLoader
import random
def expand_to_three_channel(ts, size=224):
    """Return *ts* as a float tensor expanded to shape ``(3, size, size)``.

    A two-channel input is treated as grey + alpha and only its first
    channel is kept; inputs with more than three channels are truncated
    to the first three before expansion.
    """
    channels = ts[:1] if ts.size(0) == 2 else ts
    return channels[:3].float().expand(3, size, size)
class WikiTestSet(Dataset):
    """Wikipedia image/text retrieval test set.

    Depending on the active domain (see :meth:`set_domain`) items are
    either image samples or text samples; pickled sample lists are loaded
    from *data_root*.
    """
    def __init__(self, data_root, use_image=True, use_name=True):
        self.data_root = data_root
        # At least one image-side modality (pixels or file-name text) is
        # required for the image domain to produce useful output.
        assert use_image or use_name
        self.use_image = use_image
        self.use_name = use_name
        with open(os.path.join(data_root, "samples_image.pkl"), 'rb') as f:
            self.samples_image = pickle.load(f)
        with open(os.path.join(data_root, "samples_text.pkl"), 'rb') as f:
            self.samples_text = pickle.load(f)
        # Standard ImageNet preprocessing; expand_to_three_channel copes
        # with grayscale / alpha inputs before normalisation.
        self.transform = transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Lambda(expand_to_three_channel),
            transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225]),
        ])
        self.set_domain("image")
    def __len__(self):
        # NOTE(review): length is always the image-sample count, even in the
        # "text" domain — assumes both lists have equal length; confirm.
        return len(self.samples_image)
    def set_domain(self, domain="image"):
        """Select which modality __getitem__ serves: "image" or "text"."""
        assert domain in ("image", "text")
        self.domain = domain
    def __getitem__(self, item):
        if self.domain == "image":
            idx, image_url, path = self.samples_image[item]
            output = {"id": idx}
            if self.use_image:
                img_pil = Image.open(os.path.join(self.data_root, path))
                img = self.transform(img_pil)
                # Close the PIL handle explicitly to avoid fd exhaustion.
                if hasattr(img_pil, "close"):
                    img_pil.close()
                output["image"] = img
            if self.use_name:
                # Derive a human-readable name from the URL's final path
                # segment: drop the 4-char extension, unescape, and turn
                # separator characters into spaces.
                image_name = re.sub(r"[_\-\%\.]", r" ", urllib.parse.unquote(image_url).split('/')[-1][:-4])
                image_name = re.sub(r" +", r" ", image_name.strip())
                if len(image_name) == 0:
                    image_name = "random"
                output["image_name"] = image_name
            return output
        elif self.domain == "text":
            idx, caption = self.samples_text[item]
            output = {"id": idx}
            # Replace BERT-style " [SEP]" separators with plain periods and
            # collapse runs of whitespace.
            caption = re.sub(r" \[SEP\]", r".", caption)
            caption = re.sub(r" +", r" ", caption.strip())
            if len(caption) == 0:
                caption = "random"
            output["caption"] = caption
            return output
        else:
            raise ValueError
    def get_loader(self, *args, **kwargs):
        """Convenience wrapper building a DataLoader over this dataset."""
        return DataLoader(self, *args, **kwargs)
def idx_to_caption(self, index):
return self.samples_text[index][1] | 34.361446 | 109 | 0.541024 |
acec22a46a5532ff0d173ae0db6e54f9ac889515 | 11,212 | py | Python | sdks/python/apache_beam/coders/standard_coders_test.py | aaltay/incubator-beam | b150ace0884c88bc93da21f6dfe3b7684f886e94 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 5,279 | 2016-12-29T04:00:44.000Z | 2022-03-31T22:56:45.000Z | sdks/python/apache_beam/coders/standard_coders_test.py | aaltay/incubator-beam | b150ace0884c88bc93da21f6dfe3b7684f886e94 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 14,149 | 2016-12-28T00:43:50.000Z | 2022-03-31T23:50:22.000Z | sdks/python/apache_beam/coders/standard_coders_test.py | aaltay/incubator-beam | b150ace0884c88bc93da21f6dfe3b7684f886e94 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 3,763 | 2016-12-29T04:06:10.000Z | 2022-03-31T22:25:49.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Unit tests for coders that must be consistent across all Beam SDKs.
"""
# pytype: skip-file
import json
import logging
import math
import os.path
import sys
import unittest
from copy import deepcopy
from typing import Dict
from typing import Tuple
import yaml
from apache_beam.coders import coder_impl
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.portability.api import schema_pb2
from apache_beam.runners import pipeline_context
from apache_beam.transforms import userstate
from apache_beam.transforms import window
from apache_beam.transforms.window import IntervalWindow
from apache_beam.typehints import schemas
from apache_beam.utils import windowed_value
from apache_beam.utils.sharded_key import ShardedKey
from apache_beam.utils.timestamp import Timestamp
from apache_beam.utils.windowed_value import PaneInfo
from apache_beam.utils.windowed_value import PaneInfoTiming
# Path to the cross-SDK coder test vectors, resolved relative to this file.
STANDARD_CODERS_YAML = os.path.normpath(
    os.path.join(
        os.path.dirname(__file__), '../portability/api/standard_coders.yaml'))
def _load_test_cases(test_yaml):
  """Load test data from yaml file and return an iterable of test cases.

  See ``standard_coders.yaml`` for more details.

  Yields ``[name, spec]`` pairs; each spec is annotated with its document
  index ('index') so --fix can locate and rewrite it later.
  """
  if not os.path.exists(test_yaml):
    raise ValueError('Could not find the test spec: %s' % test_yaml)
  with open(test_yaml, 'rb') as coder_spec:
    for ix, spec in enumerate(
        yaml.load_all(coder_spec, Loader=yaml.SafeLoader)):
      spec['index'] = ix
      # Default name: the penultimate segment of the coder URN.
      name = spec.get('name', spec['coder']['urn'].split(':')[-2])
      yield [name, spec]
def parse_float(s):
  """Parse *s* as a float, normalising NaN to a positive sign.

  On Windows, float('NaN') carries the opposite sign bit from other
  platforms; these tests only need a consistent value, not the sign.
  """
  value = float(s)
  return abs(value) if math.isnan(value) else value
def value_parser_from_schema(schema):
  """Build a function converting YAML-decoded dicts into instances of the
  named tuple type generated from *schema* (a ``schema_pb2.Schema``)."""
  def attribute_parser_from_type(type_):
    parser = nonnull_attribute_parser_from_type(type_)
    if type_.nullable:
      # Nullable fields pass None through unchanged.
      return lambda x: None if x is None else parser(x)
    else:
      return parser
  def nonnull_attribute_parser_from_type(type_):
    # TODO: This should be exhaustive
    type_info = type_.WhichOneof("type_info")
    if type_info == "atomic_type":
      if type_.atomic_type == schema_pb2.BYTES:
        # YAML represents bytes fields as text; encode back to bytes.
        return lambda x: x.encode("utf-8")
      else:
        return schemas.ATOMIC_TYPE_TO_PRIMITIVE[type_.atomic_type]
    elif type_info == "array_type":
      element_parser = attribute_parser_from_type(type_.array_type.element_type)
      return lambda x: list(map(element_parser, x))
    elif type_info == "map_type":
      key_parser = attribute_parser_from_type(type_.map_type.key_type)
      value_parser = attribute_parser_from_type(type_.map_type.value_type)
      return lambda x: dict(
          (key_parser(k), value_parser(v)) for k, v in x.items())
    elif type_info == "row_type":
      # Nested rows recurse into a fresh parser for the inner schema.
      return value_parser_from_schema(type_.row_type.schema)
    elif type_info == "logical_type":
      # In YAML logical types are represented with their representation types.
      to_language_type = schemas.LogicalType.from_runner_api(
          type_.logical_type).to_language_type
      parse_representation = attribute_parser_from_type(
          type_.logical_type.representation)
      return lambda x: to_language_type(parse_representation(x))
  parsers = [(field.name, attribute_parser_from_type(field.type))
             for field in schema.fields]
  constructor = schemas.named_tuple_from_schema(schema)
  def value_parser(x):
    result = []
    # Copy so popping attributes does not mutate the caller's test data.
    x = deepcopy(x)
    for name, parser in parsers:
      value = x.pop(name)
      result.append(None if value is None else parser(value))
    if len(x):
      raise ValueError(
          "Test data contains attributes that don't exist in the schema: {}".
          format(', '.join(x.keys())))
    return constructor(*result)
  return value_parser
class StandardCodersTest(unittest.TestCase):
  """Round-trips every case in standard_coders.yaml through this SDK's
  coders, ensuring the encodings agree across all Beam SDKs."""
  # Maps a coder URN to a function turning the YAML representation of a
  # value into the Python value the coder should encode; component
  # parsers are passed positionally for composite coders.
  _urn_to_json_value_parser = {
      'beam:coder:bytes:v1': lambda x: x.encode('utf-8'),
      'beam:coder:bool:v1': lambda x: x,
      'beam:coder:string_utf8:v1': lambda x: x,
      'beam:coder:varint:v1': lambda x: x,
      'beam:coder:kv:v1': lambda x,
      key_parser,
      value_parser: (key_parser(x['key']), value_parser(x['value'])),
      'beam:coder:interval_window:v1': lambda x: IntervalWindow(
          start=Timestamp(micros=(x['end'] - x['span']) * 1000),
          end=Timestamp(micros=x['end'] * 1000)),
      'beam:coder:iterable:v1': lambda x,
      parser: list(map(parser, x)),
      'beam:coder:global_window:v1': lambda x: window.GlobalWindow(),
      'beam:coder:windowed_value:v1': lambda x,
      value_parser,
      window_parser: windowed_value.create(
          value_parser(x['value']),
          x['timestamp'] * 1000,
          tuple(window_parser(w) for w in x['windows'])),
      'beam:coder:param_windowed_value:v1': lambda x,
      value_parser,
      window_parser: windowed_value.create(
          value_parser(x['value']),
          x['timestamp'] * 1000,
          tuple(window_parser(w) for w in x['windows']),
          PaneInfo(
              x['pane']['is_first'],
              x['pane']['is_last'],
              PaneInfoTiming.from_string(x['pane']['timing']),
              x['pane']['index'],
              x['pane']['on_time_index'])),
      'beam:coder:timer:v1': lambda x,
      value_parser,
      window_parser: userstate.Timer(
          user_key=value_parser(x['userKey']),
          dynamic_timer_tag=x['dynamicTimerTag'],
          clear_bit=x['clearBit'],
          windows=tuple(window_parser(w) for w in x['windows']),
          fire_timestamp=None,
          hold_timestamp=None,
          paneinfo=None) if x['clearBit'] else userstate.Timer(
          user_key=value_parser(x['userKey']),
          dynamic_timer_tag=x['dynamicTimerTag'],
          clear_bit=x['clearBit'],
          fire_timestamp=Timestamp(micros=x['fireTimestamp'] * 1000),
          hold_timestamp=Timestamp(micros=x['holdTimestamp'] * 1000),
          windows=tuple(window_parser(w) for w in x['windows']),
          paneinfo=PaneInfo(
              x['pane']['is_first'],
              x['pane']['is_last'],
              PaneInfoTiming.from_string(x['pane']['timing']),
              x['pane']['index'],
              x['pane']['on_time_index'])),
      'beam:coder:double:v1': parse_float,
      'beam:coder:sharded_key:v1': lambda x,
      value_parser: ShardedKey(
          key=value_parser(x['key']), shard_id=x['shardId'].encode('utf-8')),
      'beam:coder:custom_window:v1': lambda x,
      window_parser: window_parser(x['window'])
  }
  def test_standard_coders(self):
    for name, spec in _load_test_cases(STANDARD_CODERS_YAML):
      logging.info('Executing %s test.', name)
      self._run_standard_coder(name, spec)
  def _run_standard_coder(self, name, spec):
    """Run one YAML spec: encode/decode each example and compare."""
    def assert_equal(actual, expected):
      """Handle nan values which self.assertEqual fails on."""
      if (isinstance(actual, float) and isinstance(expected, float) and
          math.isnan(actual) and math.isnan(expected)):
        return
      self.assertEqual(actual, expected)
    coder = self.parse_coder(spec['coder'])
    parse_value = self.json_value_parser(spec['coder'])
    nested_list = [spec['nested']] if 'nested' in spec else [True, False]
    for nested in nested_list:
      for expected_encoded, json_value in spec['examples'].items():
        value = parse_value(json_value)
        # Encodings are stored as latin1 text so they survive YAML.
        expected_encoded = expected_encoded.encode('latin1')
        if not spec['coder'].get('non_deterministic', False):
          actual_encoded = encode_nested(coder, value, nested)
          if self.fix and actual_encoded != expected_encoded:
            self.to_fix[spec['index'], expected_encoded] = actual_encoded
          else:
            self.assertEqual(expected_encoded, actual_encoded)
            decoded = decode_nested(coder, expected_encoded, nested)
            assert_equal(decoded, value)
        else:
          # Only verify decoding for a non-deterministic coder
          self.assertEqual(
              decode_nested(coder, expected_encoded, nested), value)
  def parse_coder(self, spec):
    """Construct a coder object (with components) from its YAML spec."""
    context = pipeline_context.PipelineContext()
    coder_id = str(hash(str(spec)))
    component_ids = [
        context.coders.get_id(self.parse_coder(c))
        for c in spec.get('components', ())
    ]
    context.coders.put_proto(
        coder_id,
        beam_runner_api_pb2.Coder(
            spec=beam_runner_api_pb2.FunctionSpec(
                urn=spec['urn'],
                payload=spec.get('payload', '').encode('latin1')),
            component_coder_ids=component_ids))
    return context.coders.get_by_id(coder_id)
  def json_value_parser(self, coder_spec):
    # TODO: integrate this with the logic for the other parsers
    if coder_spec['urn'] == 'beam:coder:row:v1':
      schema = schema_pb2.Schema.FromString(
          coder_spec['payload'].encode('latin1'))
      return value_parser_from_schema(schema)
    component_parsers = [
        self.json_value_parser(c) for c in coder_spec.get('components', ())
    ]
    return lambda x: self._urn_to_json_value_parser[coder_spec['urn']](
        x, *component_parsers)
  # Used when --fix is passed.
  fix = False
  to_fix = {}  # type: Dict[Tuple[int, bytes], bytes]
  @classmethod
  def tearDownClass(cls):
    # When --fix was passed, rewrite standard_coders.yaml in place,
    # replacing the recorded encodings with the ones this SDK produced.
    if cls.fix and cls.to_fix:
      print("FIXING", len(cls.to_fix), "TESTS")
      doc_sep = '\n---\n'
      docs = open(STANDARD_CODERS_YAML).read().split(doc_sep)
      def quote(s):
        return json.dumps(s.decode('latin1')).replace(r'\u0000', r'\0')
      for (doc_ix, expected_encoded), actual_encoded in cls.to_fix.items():
        print(quote(expected_encoded), "->", quote(actual_encoded))
        docs[doc_ix] = docs[doc_ix].replace(
            quote(expected_encoded) + ':', quote(actual_encoded) + ':')
      open(STANDARD_CODERS_YAML, 'w').write(doc_sep.join(docs))
def encode_nested(coder, value, nested=True):
  """Encode *value* with *coder* in its nested or unnested context."""
  out = coder_impl.create_OutputStream()
  coder.get_impl().encode_to_stream(value, out, nested)
  return out.get()
def decode_nested(coder, encoded, nested=True):
  """Decode *encoded* bytes with *coder*; inverse of encode_nested."""
  return coder.get_impl().decode_from_stream(
      coder_impl.create_InputStream(encoded), nested)
if __name__ == '__main__':
  # Passing --fix rewrites standard_coders.yaml with the encodings this
  # SDK actually produces (see StandardCodersTest.tearDownClass).
  if '--fix' in sys.argv:
    StandardCodersTest.fix = True
    sys.argv.remove('--fix')
  unittest.main()
| 37.750842 | 80 | 0.671959 |
acec2382718c3347c7d0321f0f83f2e7db7e3aa0 | 2,732 | py | Python | aydin/it/demo/n2s/lgbm/2D_camera_tiledinf.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | aydin/it/demo/n2s/lgbm/2D_camera_tiledinf.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | aydin/it/demo/n2s/lgbm/2D_camera_tiledinf.py | royerloic/aydin | f9c61a24030891d008c318b250da5faec69fcd7d | [
"BSD-3-Clause"
] | null | null | null | # flake8: noqa
import time
import numpy
import numpy as np
import skimage
from skimage.data import camera
from skimage.metrics import peak_signal_noise_ratio as psnr
from skimage.metrics import structural_similarity as ssim
from skimage.morphology import disk
from skimage.restoration import denoise_nl_means, estimate_sigma
from skimage.util import random_noise
from aydin.features.standard_features import StandardFeatureGenerator
from aydin.io.datasets import normalise
from aydin.it.fgr import ImageTranslatorFGR
from aydin.regression.lgbm import LGBMRegressor
from aydin.util.log.log import Log
def demo():
    """
    Demo for self-supervised denoising using camera image with synthetic noise
    """
    Log.enable_output = True
    Log.set_log_max_depth(5)

    image = camera().astype(np.float32)  # newyork()[256:-256, 256:-256]
    image = normalise(image)

    # Simulate shot (Poisson) noise followed by additive Gaussian noise,
    # seeded for reproducibility.
    intensity = 5
    np.random.seed(0)
    noisy = np.random.poisson(image * intensity) / intensity
    noisy = random_noise(noisy, mode='gaussian', var=0.01, seed=0)
    noisy = noisy.astype(np.float32)

    # Classic baselines for visual comparison.
    median1 = skimage.filters.median(noisy, disk(1))
    median2 = skimage.filters.median(noisy, disk(2))
    median5 = skimage.filters.median(noisy, disk(5))
    nlm = denoise_nl_means(noisy, patch_size=11, sigma=estimate_sigma(noisy))

    generator = StandardFeatureGenerator(max_level=10)
    regressor = LGBMRegressor()
    # The tiny memory budget deliberately forces tiled inference, which is
    # what this demo exercises.
    it = ImageTranslatorFGR(
        feature_generator=generator, regressor=regressor, max_memory_usage_ratio=0.00001
    )

    start = time.time()
    it.train(noisy, noisy)
    stop = time.time()
    print(f"Training: elapsed time: {stop - start} ")

    # in case of batching we have to do this:
    start = time.time()
    denoised = it.translate(noisy, tile_size=256)
    stop = time.time()
    print(f"inference: elapsed time: {stop - start} ")

    # Clip everything to [0, 1] before computing metrics.  (The original
    # code repeated this identical block twice; once is enough.)
    image = numpy.clip(image, 0, 1)
    noisy = numpy.clip(noisy, 0, 1)
    denoised = numpy.clip(denoised, 0, 1)

    print("noisy :", psnr(image, noisy), ssim(noisy, image))
    print("denoised :", psnr(image, denoised), ssim(denoised, image))

    import napari

    with napari.gui_qt():
        viewer = napari.Viewer()
        viewer.add_image(normalise(image), name='image')
        viewer.add_image(normalise(noisy), name='noisy')
        viewer.add_image(normalise(nlm), name='nlm')
        viewer.add_image(normalise(median1), name='median1')
        viewer.add_image(normalise(median2), name='median2')
        viewer.add_image(normalise(median5), name='median5')
        viewer.add_image(normalise(denoised), name='denoised')
# This module is a runnable demo script: executing it runs the demo directly.
demo()
| 31.767442 | 88 | 0.700586 |
acec239db8d30f9d13cc6ecc4f139e6466907b91 | 299 | py | Python | section/serializers.py | abdukhashimov/django-base2 | 87161fa7eea79634093d325414b63edd9f620f62 | [
"MIT"
] | null | null | null | section/serializers.py | abdukhashimov/django-base2 | 87161fa7eea79634093d325414b63edd9f620f62 | [
"MIT"
] | 4 | 2020-06-05T20:32:58.000Z | 2021-09-22T18:25:39.000Z | section/serializers.py | abdukhashimov/django-base2 | 87161fa7eea79634093d325414b63edd9f620f62 | [
"MIT"
] | null | null | null | from rest_framework.serializers import ModelSerializer
from core.models import Service
class ServiceSerializer(ModelSerializer):
    """Serializer for service objects"""
    class Meta:
        model = Service
        # 'id' is database-generated, hence also listed as read-only below.
        fields = ('id','name', 'title', 'body')
        read_only_fields = ('id', )
| 21.357143 | 54 | 0.672241 |
acec247df4a9973b952358b7ca1d69a53b677e35 | 24,451 | py | Python | tests/hazmat/backends/test_openssl.py | odidev/cryptography | 88dad5a20599bef1820217e08fa7b065cf9ea6a6 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/hazmat/backends/test_openssl.py | odidev/cryptography | 88dad5a20599bef1820217e08fa7b065cf9ea6a6 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/hazmat/backends/test_openssl.py | odidev/cryptography | 88dad5a20599bef1820217e08fa7b065cf9ea6a6 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import itertools
import os
import subprocess
import sys
import textwrap
import pytest
from cryptography import x509
from cryptography.exceptions import InternalError, _Reasons
from cryptography.hazmat.backends.interfaces import DHBackend, RSABackend
from cryptography.hazmat.backends.openssl.backend import Backend, backend
from cryptography.hazmat.backends.openssl.ec import _sn_to_elliptic_curve
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dh, dsa, padding
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import CBC
from ..primitives.fixtures_rsa import RSA_KEY_2048, RSA_KEY_512
from ...doubles import (
DummyAsymmetricPadding,
DummyCipherAlgorithm,
DummyHashAlgorithm,
DummyMode,
)
from ...utils import (
load_nist_vectors,
load_vectors_from_file,
raises_unsupported_algorithm,
)
from ...x509.test_x509 import _load_cert
def skip_if_libre_ssl(openssl_version):
    """Skip the calling test when *openssl_version* identifies LibreSSL.

    Returns None (without skipping) for any other OpenSSL-alike.
    """
    if u"LibreSSL" not in openssl_version:
        return
    pytest.skip("LibreSSL hard-codes RAND_bytes to use arc4random.")
class TestLibreSkip(object):
    """Self-tests for the skip_if_libre_ssl helper above."""
    def test_skip_no(self):
        assert skip_if_libre_ssl(u"OpenSSL 1.0.2h 3 May 2016") is None
    def test_skip_yes(self):
        with pytest.raises(pytest.skip.Exception):
            skip_if_libre_ssl(u"LibreSSL 2.1.6")
class DummyMGF(object):
    # Stand-in MGF used to exercise the "unsupported MGF" code paths; only
    # the attribute the backend inspects is provided.
    _salt_length = 0
class TestOpenSSL(object):
    """Smoke tests for the OpenSSL backend's low-level plumbing."""
    def test_backend_exists(self):
        assert backend
    def test_openssl_version_text(self):
        """
        This test checks the value of OPENSSL_VERSION_TEXT.
        Unfortunately, this define does not appear to have a
        formal content definition, so for now we'll test to see
        if it starts with OpenSSL or LibreSSL as that appears
        to be true for every OpenSSL-alike.
        """
        assert backend.openssl_version_text().startswith(
            "OpenSSL"
        ) or backend.openssl_version_text().startswith("LibreSSL")
    def test_openssl_version_number(self):
        assert backend.openssl_version_number() > 0
    def test_supports_cipher(self):
        assert backend.cipher_supported(None, None) is False
    def test_register_duplicate_cipher_adapter(self):
        with pytest.raises(ValueError):
            backend.register_cipher_adapter(AES, CBC, None)
    @pytest.mark.parametrize("mode", [DummyMode(), None])
    def test_nonexistent_cipher(self, mode):
        # Use a fresh Backend so the dummy adapter does not pollute the
        # shared global backend used by other tests.
        b = Backend()
        b.register_cipher_adapter(
            DummyCipherAlgorithm,
            type(mode),
            lambda backend, cipher, mode: backend._ffi.NULL,
        )
        cipher = Cipher(DummyCipherAlgorithm(), mode, backend=b,)
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_CIPHER):
            cipher.encryptor()
    def test_openssl_assert(self):
        backend.openssl_assert(True)
        with pytest.raises(InternalError):
            backend.openssl_assert(False)
    def test_consume_errors(self):
        # Push ten synthetic errors onto OpenSSL's error queue and verify
        # that _consume_errors drains all of them.
        for i in range(10):
            backend._lib.ERR_put_error(
                backend._lib.ERR_LIB_EVP, 0, 0, b"test_openssl.py", -1
            )
        assert backend._lib.ERR_peek_error() != 0
        errors = backend._consume_errors()
        assert backend._lib.ERR_peek_error() == 0
        assert len(errors) == 10
    def test_ssl_ciphers_registered(self):
        meth = backend._lib.SSLv23_method()
        ctx = backend._lib.SSL_CTX_new(meth)
        assert ctx != backend._ffi.NULL
        backend._lib.SSL_CTX_free(ctx)
    def test_evp_ciphers_registered(self):
        cipher = backend._lib.EVP_get_cipherbyname(b"aes-256-cbc")
        assert cipher != backend._ffi.NULL
    def test_error_strings_loaded(self):
        buf = backend._ffi.new("char[]", 256)
        backend._lib.ERR_error_string_n(101183626, buf, len(buf))
        assert b"data not multiple of block length" in backend._ffi.string(buf)
    def test_unknown_error_in_cipher_finalize(self):
        cipher = Cipher(AES(b"\0" * 16), CBC(b"\0" * 16), backend=backend)
        enc = cipher.encryptor()
        enc.update(b"\0")
        # Inject an unexpected error so finalize surfaces it as InternalError.
        backend._lib.ERR_put_error(0, 0, 1, b"test_openssl.py", -1)
        with pytest.raises(InternalError):
            enc.finalize()
    def test_large_key_size_on_new_openssl(self):
        parameters = dsa.generate_parameters(2048, backend)
        param_num = parameters.parameter_numbers()
        assert param_num.p.bit_length() == 2048
        parameters = dsa.generate_parameters(3072, backend)
        param_num = parameters.parameter_numbers()
        assert param_num.p.bit_length() == 3072
    def test_int_to_bn(self):
        value = (2 ** 4242) - 4242
        bn = backend._int_to_bn(value)
        assert bn != backend._ffi.NULL
        bn = backend._ffi.gc(bn, backend._lib.BN_clear_free)
        assert bn
        assert backend._bn_to_int(bn) == value
    def test_int_to_bn_inplace(self):
        value = (2 ** 4242) - 4242
        bn_ptr = backend._lib.BN_new()
        assert bn_ptr != backend._ffi.NULL
        bn_ptr = backend._ffi.gc(bn_ptr, backend._lib.BN_free)
        bn = backend._int_to_bn(value, bn_ptr)
        assert bn == bn_ptr
        assert backend._bn_to_int(bn_ptr) == value
    def test_bn_to_int(self):
        bn = backend._int_to_bn(0)
        assert backend._bn_to_int(bn) == 0
@pytest.mark.skipif(
    not backend._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE,
    reason="Requires OpenSSL with ENGINE support and OpenSSL < 1.1.1d",
)
@pytest.mark.skip_fips(reason="osrandom engine disabled for FIPS")
class TestOpenSSLRandomEngine(object):
    """Tests for activating/deactivating the custom osrandom RAND ENGINE."""

    def setup(self):
        # The default RAND engine is global and shared between
        # tests. We make sure that the default engine is osrandom
        # before we start each test and restore the global state to
        # that engine in teardown.
        current_default = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(current_default)
        assert name == backend._lib.Cryptography_osrandom_engine_name

    def teardown(self):
        # we need to reset state to being default. backend is a shared global
        # for all these tests.
        backend.activate_osrandom_engine()
        current_default = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(current_default)
        assert name == backend._lib.Cryptography_osrandom_engine_name

    @pytest.mark.skipif(
        sys.executable is None, reason="No Python interpreter available."
    )
    def test_osrandom_engine_is_default(self, tmpdir):
        engine_printer = textwrap.dedent(
            """
            import sys
            from cryptography.hazmat.backends.openssl.backend import backend
            e = backend._lib.ENGINE_get_default_RAND()
            name = backend._lib.ENGINE_get_name(e)
            sys.stdout.write(backend._ffi.string(name).decode('ascii'))
            res = backend._lib.ENGINE_free(e)
            assert res == 1
            """
        )
        engine_name = tmpdir.join("engine_name")
        # If we're running tests via ``python setup.py test`` in a clean
        # environment then all of our dependencies are going to be installed
        # into either the current directory or the .eggs directory. However the
        # subprocess won't know to activate these dependencies, so we'll get it
        # to do so by passing our entire sys.path into the subprocess via the
        # PYTHONPATH environment variable.
        env = os.environ.copy()
        env["PYTHONPATH"] = os.pathsep.join(sys.path)
        with engine_name.open("w") as out:
            subprocess.check_call(
                [sys.executable, "-c", engine_printer],
                env=env,
                stdout=out,
                stderr=subprocess.PIPE,
            )
        osrandom_engine_name = backend._ffi.string(
            backend._lib.Cryptography_osrandom_engine_name
        )
        assert engine_name.read().encode("ascii") == osrandom_engine_name

    def test_osrandom_sanity_check(self):
        # This test serves as a check against catastrophic failure.
        buf = backend._ffi.new("unsigned char[]", 500)
        res = backend._lib.RAND_bytes(buf, 500)
        assert res == 1
        # Compare against bytes, not str: on Python 3 a bytes != str
        # comparison is always True, which made this assertion vacuous.
        assert backend._ffi.buffer(buf)[:] != b"\x00" * 500

    def test_activate_osrandom_no_default(self):
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL
        backend.activate_osrandom_engine()
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._lib.Cryptography_osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1

    def test_activate_builtin_random(self):
        e = backend._lib.ENGINE_get_default_RAND()
        assert e != backend._ffi.NULL
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._lib.Cryptography_osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL

    def test_activate_builtin_random_already_active(self):
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL

    def test_osrandom_engine_implementation(self):
        # The engine reports which entropy source it selected at build time.
        name = backend.osrandom_engine_implementation()
        assert name in [
            "/dev/urandom",
            "CryptGenRandom",
            "getentropy",
            "getrandom",
        ]
        if sys.platform.startswith("linux"):
            assert name in ["getrandom", "/dev/urandom"]
        if sys.platform == "darwin":
            assert name in ["getentropy", "/dev/urandom"]
        if sys.platform == "win32":
            assert name == "CryptGenRandom"

    def test_activate_osrandom_already_default(self):
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._lib.Cryptography_osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
        backend.activate_osrandom_engine()
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._lib.Cryptography_osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
@pytest.mark.skipif(
    backend._lib.CRYPTOGRAPHY_HAS_ENGINE if False else backend._lib.CRYPTOGRAPHY_NEEDS_OSRANDOM_ENGINE,
    reason="Requires OpenSSL without ENGINE support or OpenSSL >=1.1.1d",
)
class TestOpenSSLNoEngine(object):
    """When ENGINE support is absent, the osrandom hooks must be no-ops."""
    def test_no_engine_support(self):
        assert (
            backend._ffi.string(backend._lib.Cryptography_osrandom_engine_id)
            == b"no-engine-support"
        )
        assert (
            backend._ffi.string(backend._lib.Cryptography_osrandom_engine_name)
            == b"osrandom_engine disabled"
        )
    def test_activate_builtin_random_does_nothing(self):
        backend.activate_builtin_random()
    def test_activate_osrandom_does_nothing(self):
        backend.activate_osrandom_engine()
class TestOpenSSLRSA(object):
    """Backend-level checks of RSA parameter and padding support."""
    def test_generate_rsa_parameters_supported(self):
        # Public exponent must be odd and >= 3; key size must be >= 512 bits.
        assert backend.generate_rsa_parameters_supported(1, 1024) is False
        assert backend.generate_rsa_parameters_supported(4, 1024) is False
        assert backend.generate_rsa_parameters_supported(3, 1024) is True
        assert backend.generate_rsa_parameters_supported(3, 511) is False
    def test_generate_bad_public_exponent(self):
        with pytest.raises(ValueError):
            backend.generate_rsa_private_key(public_exponent=1, key_size=2048)
        with pytest.raises(ValueError):
            backend.generate_rsa_private_key(public_exponent=4, key_size=2048)
    def test_cant_generate_insecure_tiny_key(self):
        with pytest.raises(ValueError):
            backend.generate_rsa_private_key(
                public_exponent=65537, key_size=511
            )
        with pytest.raises(ValueError):
            backend.generate_rsa_private_key(
                public_exponent=65537, key_size=256
            )
    def test_rsa_padding_unsupported_pss_mgf1_hash(self):
        assert (
            backend.rsa_padding_supported(
                padding.PSS(
                    mgf=padding.MGF1(DummyHashAlgorithm()), salt_length=0
                )
            )
            is False
        )
    def test_rsa_padding_unsupported(self):
        assert backend.rsa_padding_supported(DummyAsymmetricPadding()) is False
    def test_rsa_padding_supported_pkcs1v15(self):
        assert backend.rsa_padding_supported(padding.PKCS1v15()) is True
    def test_rsa_padding_supported_pss(self):
        assert (
            backend.rsa_padding_supported(
                padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
            )
            is True
        )
    def test_rsa_padding_supported_oaep(self):
        assert (
            backend.rsa_padding_supported(
                padding.OAEP(
                    mgf=padding.MGF1(algorithm=hashes.SHA1()),
                    algorithm=hashes.SHA1(),
                    label=None,
                ),
            )
            is True
        )
    @pytest.mark.skipif(
        backend._lib.Cryptography_HAS_RSA_OAEP_MD == 0,
        reason="Requires OpenSSL with rsa_oaep_md (1.0.2+)",
    )
    def test_rsa_padding_supported_oaep_sha2_combinations(self):
        # Every (MGF1 hash, OAEP hash) pairing must be accepted.
        hashalgs = [
            hashes.SHA1(),
            hashes.SHA224(),
            hashes.SHA256(),
            hashes.SHA384(),
            hashes.SHA512(),
        ]
        for mgf1alg, oaepalg in itertools.product(hashalgs, hashalgs):
            assert (
                backend.rsa_padding_supported(
                    padding.OAEP(
                        mgf=padding.MGF1(algorithm=mgf1alg),
                        algorithm=oaepalg,
                        label=None,
                    ),
                )
                is True
            )
    def test_rsa_padding_unsupported_mgf(self):
        assert (
            backend.rsa_padding_supported(
                padding.OAEP(
                    mgf=DummyMGF(), algorithm=hashes.SHA1(), label=None
                ),
            )
            is False
        )
        assert (
            backend.rsa_padding_supported(
                padding.PSS(mgf=DummyMGF(), salt_length=0)
            )
            is False
        )
    @pytest.mark.skipif(
        backend._lib.Cryptography_HAS_RSA_OAEP_MD == 1,
        reason="Requires OpenSSL without rsa_oaep_md (< 1.0.2)",
    )
    def test_unsupported_mgf1_hash_algorithm_decrypt(self):
        private_key = RSA_KEY_512.private_key(backend)
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
            private_key.decrypt(
                b"0" * 64,
                padding.OAEP(
                    mgf=padding.MGF1(algorithm=hashes.SHA256()),
                    algorithm=hashes.SHA1(),
                    label=None,
                ),
            )
    @pytest.mark.skipif(
        backend._lib.Cryptography_HAS_RSA_OAEP_MD == 1,
        reason="Requires OpenSSL without rsa_oaep_md (< 1.0.2)",
    )
    def test_unsupported_oaep_hash_algorithm_decrypt(self):
        private_key = RSA_KEY_512.private_key(backend)
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
            private_key.decrypt(
                b"0" * 64,
                padding.OAEP(
                    mgf=padding.MGF1(algorithm=hashes.SHA1()),
                    algorithm=hashes.SHA256(),
                    label=None,
                ),
            )
    def test_unsupported_mgf1_hash_algorithm_md5_decrypt(self):
        # MD5 is never acceptable for OAEP, regardless of OpenSSL version.
        private_key = RSA_KEY_512.private_key(backend)
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_PADDING):
            private_key.decrypt(
                b"0" * 64,
                padding.OAEP(
                    mgf=padding.MGF1(algorithm=hashes.MD5()),
                    algorithm=hashes.MD5(),
                    label=None,
                ),
            )
class TestOpenSSLCMAC(object):
    """CMAC context creation error handling in the OpenSSL backend."""
    def test_unsupported_cipher(self):
        """Creating a CMAC context with an unknown cipher must fail."""
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_CIPHER):
            backend.create_cmac_ctx(DummyCipherAlgorithm())
class TestOpenSSLSignX509Certificate(object):
    """Input validation for backend.create_x509_certificate."""
    def test_requires_certificate_builder(self):
        """Passing anything but a CertificateBuilder raises TypeError."""
        signing_key = RSA_KEY_2048.private_key(backend)
        not_a_builder = object()
        with pytest.raises(TypeError):
            backend.create_x509_certificate(
                not_a_builder, signing_key, DummyHashAlgorithm()
            )
class TestOpenSSLSignX509CSR(object):
    """Input validation for backend.create_x509_csr."""
    def test_requires_csr_builder(self):
        """Passing anything but a CertificateSigningRequestBuilder fails."""
        signing_key = RSA_KEY_2048.private_key(backend)
        not_a_builder = object()
        with pytest.raises(TypeError):
            backend.create_x509_csr(
                not_a_builder, signing_key, DummyHashAlgorithm()
            )
class TestOpenSSLSignX509CertificateRevocationList(object):
    """Input validation for backend.create_x509_crl."""
    def test_invalid_builder(self):
        """A non-CertificateRevocationListBuilder raises TypeError."""
        signing_key = RSA_KEY_2048.private_key(backend)
        with pytest.raises(TypeError):
            backend.create_x509_crl(object(), signing_key, hashes.SHA256())
class TestOpenSSLCreateRevokedCertificate(object):
    """Input validation for backend.create_x509_revoked_certificate."""
    def test_invalid_builder(self):
        """A non-RevokedCertificateBuilder argument raises TypeError."""
        with pytest.raises(TypeError):
            backend.create_x509_revoked_certificate(object())
class TestOpenSSLSerializationWithOpenSSL(object):
    """Low-level (cffi) tests for the PEM password callback and key
    (de)serialization guard rails in the OpenSSL backend.
    """
    def test_pem_password_cb(self):
        """The C password callback copies the password and reports status."""
        userdata = backend._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
        pw = b"abcdefg"
        password = backend._ffi.new("char []", pw)
        userdata.password = password
        userdata.length = len(pw)
        buflen = 10
        buf = backend._ffi.new("char []", buflen)
        res = backend._lib.Cryptography_pem_password_cb(
            buf, buflen, 0, userdata
        )
        # Callback returns the number of password bytes written into buf.
        assert res == len(pw)
        assert userdata.called == 1
        assert backend._ffi.buffer(buf, len(pw))[:] == pw
        assert userdata.maxsize == buflen
        assert userdata.error == 0
    def test_pem_password_cb_no_password(self):
        """With no password set the callback writes nothing and flags -1."""
        userdata = backend._ffi.new("CRYPTOGRAPHY_PASSWORD_DATA *")
        buflen = 10
        buf = backend._ffi.new("char []", buflen)
        res = backend._lib.Cryptography_pem_password_cb(
            buf, buflen, 0, userdata
        )
        assert res == 0
        assert userdata.error == -1
    def test_unsupported_evp_pkey_type(self):
        """An EVP_PKEY of unknown type cannot be converted to a key object."""
        key = backend._create_evp_pkey_gc()
        with raises_unsupported_algorithm(None):
            backend._evp_pkey_to_private_key(key)
        with raises_unsupported_algorithm(None):
            backend._evp_pkey_to_public_key(key)
    def test_very_long_pem_serialization_password(self):
        # Passwords of 1024 bytes or more exceed the callback buffer limit.
        password = b"x" * 1024
        with pytest.raises(ValueError):
            load_vectors_from_file(
                os.path.join(
                    "asymmetric",
                    "Traditional_OpenSSL_Serialization",
                    "key1.pem",
                ),
                lambda pemfile: (
                    backend.load_pem_private_key(
                        pemfile.read().encode(), password
                    )
                ),
            )
class TestOpenSSLEllipticCurve(object):
    """Elliptic-curve short-name resolution in the OpenSSL backend."""
    def test_sn_to_elliptic_curve_not_supported(self):
        """An unknown curve short name must raise UnsupportedAlgorithm."""
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_ELLIPTIC_CURVE):
            _sn_to_elliptic_curve(backend, b"fake")
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAPEMSerialization(object):
    """Limits on PEM serialization of RSA private keys."""
    def test_password_length_limit(self):
        """Encrypting a PKCS8 PEM export with a >=1024-byte password fails."""
        overlong_password = b"x" * 1024
        private_key = RSA_KEY_2048.private_key(backend)
        encryption = serialization.BestAvailableEncryption(overlong_password)
        with pytest.raises(ValueError):
            private_key.private_bytes(
                serialization.Encoding.PEM,
                serialization.PrivateFormat.PKCS8,
                encryption,
            )
class TestGOSTCertificate(object):
    """Parsing a GOST certificate whose subject contains a NumericString
    X.509 name entry, which older OpenSSL cannot decode.
    """
    def test_numeric_string_x509_name_entry(self):
        cert = _load_cert(
            os.path.join("x509", "e-trust.ru.der"),
            x509.load_der_x509_certificate,
            backend,
        )
        if backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102I:
            # Pre-1.0.2i OpenSSL cannot represent the NumericString entry.
            with pytest.raises(ValueError) as exc:
                cert.subject
            # We assert on the message in this case because if the certificate
            # fails to load it will also raise a ValueError and this test could
            # erroneously pass.
            assert str(exc.value) == "Unsupported ASN1 string type. Type: 18"
        else:
            assert (
                cert.subject.get_attributes_for_oid(
                    x509.ObjectIdentifier("1.2.643.3.131.1.1")
                )[0].value
                == "007710474375"
            )
@pytest.mark.skipif(
    backend._lib.Cryptography_HAS_EVP_PKEY_DHX == 1,
    reason="Requires OpenSSL without EVP_PKEY_DHX (< 1.0.2)",
)
@pytest.mark.requires_backend_interface(interface=DHBackend)
class TestOpenSSLDHSerialization(object):
    """DH key (de)serialization limits on OpenSSL builds that lack
    EVP_PKEY_DHX: keys/parameters carrying a q value (RFC 5114 / DHX)
    cannot be serialized or loaded.
    """
    @pytest.mark.parametrize(
        "vector",
        load_vectors_from_file(
            os.path.join("asymmetric", "DH", "RFC5114.txt"), load_nist_vectors
        ),
    )
    def test_dh_serialization_with_q_unsupported(self, backend, vector):
        """Serializing DH material that includes q must be unsupported."""
        parameters = dh.DHParameterNumbers(
            int(vector["p"], 16), int(vector["g"], 16), int(vector["q"], 16)
        )
        public = dh.DHPublicNumbers(int(vector["ystatcavs"], 16), parameters)
        private = dh.DHPrivateNumbers(int(vector["xstatcavs"], 16), public)
        private_key = private.private_key(backend)
        public_key = private_key.public_key()
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_SERIALIZATION):
            private_key.private_bytes(
                serialization.Encoding.PEM,
                serialization.PrivateFormat.PKCS8,
                serialization.NoEncryption(),
            )
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_SERIALIZATION):
            public_key.public_bytes(
                serialization.Encoding.PEM,
                serialization.PublicFormat.SubjectPublicKeyInfo,
            )
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_SERIALIZATION):
            parameters.parameters(backend).parameter_bytes(
                serialization.Encoding.PEM, serialization.ParameterFormat.PKCS3
            )
    @pytest.mark.parametrize(
        ("key_path", "loader_func"),
        [
            (
                os.path.join("asymmetric", "DH", "dhkey_rfc5114_2.pem"),
                serialization.load_pem_private_key,
            ),
            (
                os.path.join("asymmetric", "DH", "dhkey_rfc5114_2.der"),
                serialization.load_der_private_key,
            ),
        ],
    )
    def test_private_load_dhx_unsupported(
        self, key_path, loader_func, backend
    ):
        """Loading a DHX private key must raise ValueError."""
        key_bytes = load_vectors_from_file(
            key_path, lambda pemfile: pemfile.read(), mode="rb"
        )
        with pytest.raises(ValueError):
            loader_func(key_bytes, None, backend)
    @pytest.mark.parametrize(
        ("key_path", "loader_func"),
        [
            (
                os.path.join("asymmetric", "DH", "dhpub_rfc5114_2.pem"),
                serialization.load_pem_public_key,
            ),
            (
                os.path.join("asymmetric", "DH", "dhpub_rfc5114_2.der"),
                serialization.load_der_public_key,
            ),
        ],
    )
    def test_public_load_dhx_unsupported(self, key_path, loader_func, backend):
        """Loading a DHX public key must raise ValueError."""
        key_bytes = load_vectors_from_file(
            key_path, lambda pemfile: pemfile.read(), mode="rb"
        )
        with pytest.raises(ValueError):
            loader_func(key_bytes, backend)
| 35.799414 | 79 | 0.635557 |
acec2592cf0434f3fa234b6ef584a646e05972e1 | 40,265 | py | Python | nova/tests/unit/api/openstack/compute/test_volumes.py | sarafraj-singh/nova | 677594480ecb9c093a3d81ae49dead120798a5c4 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api/openstack/compute/test_volumes.py | sarafraj-singh/nova | 677594480ecb9c093a3d81ae49dead120798a5c4 | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api/openstack/compute/test_volumes.py | sarafraj-singh/nova | 677594480ecb9c093a3d81ae49dead120798a5c4 | [
"Apache-2.0"
] | 1 | 2020-07-24T01:18:44.000Z | 2020-07-24T01:18:44.000Z | # Copyright 2013 Josh Durgin
# Copyright 2013 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_serialization import jsonutils
from six.moves import urllib
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack.compute import assisted_volume_snapshots \
as assisted_snaps_v21
from nova.api.openstack.compute import volumes as volumes_v21
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import vm_states
import nova.conf
from nova import context
from nova import exception
from nova import objects
from nova.objects import base
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.volume import cinder
CONF = nova.conf.CONF
# Canned UUIDs shared by the stubs and tests below. FAKE_UUID is the
# instance, FAKE_UUID_A/B are attached volumes (see
# fake_bdm_list_get_by_instance_uuid), FAKE_UUID_C/D are never attached.
FAKE_UUID = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
FAKE_UUID_A = '00000000-aaaa-aaaa-aaaa-000000000000'
FAKE_UUID_B = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
FAKE_UUID_C = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
FAKE_UUID_D = 'dddddddd-dddd-dddd-dddd-dddddddddddd'
IMAGE_UUID = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
def fake_get_instance(self, context, instance_id, want_objects=False,
                      expected_attrs=None):
    """Stub for compute_api.API.get: return a fake Instance with the
    requested uuid (other arguments are ignored)."""
    return fake_instance.fake_instance_obj(context, **{'uuid': instance_id})
def fake_get_volume(self, context, id):
    """Stub for cinder.API.get: always report volume FAKE_UUID_A as
    available and detached, regardless of the requested id.
    """
    volume = {
        'id': FAKE_UUID_A,
        'status': 'available',
        'attach_status': 'detached',
    }
    return volume
def fake_attach_volume(self, context, instance, volume_id, device):
    """No-op stub for compute_api.API.attach_volume."""
    pass
def fake_detach_volume(self, context, instance, volume):
    """No-op stub for compute_api.API.detach_volume."""
    pass
def fake_swap_volume(self, context, instance,
                     old_volume_id, new_volume_id):
    """No-op stub for compute_api.API.swap_volume."""
    pass
def fake_create_snapshot(self, context, volume, name, description):
    """Stub for cinder.API.create_snapshot: ignore all arguments and
    return a fixed, canned snapshot dict.
    """
    snapshot = {
        'id': 123,
        'volume_id': 'fakeVolId',
        'status': 'available',
        'volume_size': 123,
        'created_at': '2013-01-01 00:00:01',
        'display_name': 'myVolumeName',
        'display_description': 'myVolumeDescription',
    }
    return snapshot
def fake_delete_snapshot(self, context, snapshot_id):
    """No-op stub for cinder.API.delete_snapshot."""
    pass
def fake_compute_volume_snapshot_delete(self, context, volume_id, snapshot_id,
                                        delete_info):
    """No-op stub for compute_api.API.volume_snapshot_delete."""
    pass
def fake_compute_volume_snapshot_create(self, context, volume_id,
                                        create_info):
    """No-op stub for compute_api.API.volume_snapshot_create."""
    pass
@classmethod
def fake_bdm_list_get_by_instance_uuid(cls, context, instance_uuid):
    """Stub for BlockDeviceMappingList.get_by_instance_uuid: return two
    volume-backed BDMs (volumes FAKE_UUID_A and FAKE_UUID_B).

    Decorated with @classmethod at module level because it is stubbed in
    to replace a classmethod on BlockDeviceMappingList.
    """
    db_list = [fake_block_device.FakeDbBlockDeviceDict(
            {'id': 1,
             'instance_uuid': instance_uuid,
             'device_name': '/dev/fake0',
             'delete_on_termination': 'False',
             'source_type': 'volume',
             'destination_type': 'volume',
             'snapshot_id': None,
             'volume_id': FAKE_UUID_A,
             'volume_size': 1}),
           fake_block_device.FakeDbBlockDeviceDict(
            {'id': 2,
             'instance_uuid': instance_uuid,
             'device_name': '/dev/fake1',
             'delete_on_termination': 'False',
             'source_type': 'volume',
             'destination_type': 'volume',
             'snapshot_id': None,
             'volume_id': FAKE_UUID_B,
             'volume_size': 1})]
    item_cls = objects.BlockDeviceMapping
    return base.obj_make_list(context, cls(), item_cls, db_list)
class BootFromVolumeTest(test.TestCase):
    """Boot-from-volume server creation through the os-volumes_boot API,
    checking that the block device mappings (legacy and v2 format) are
    passed through to compute_api.API.create.
    """
    def setUp(self):
        super(BootFromVolumeTest, self).setUp()
        self.stubs.Set(compute_api.API, 'create',
                       self._get_fake_compute_api_create())
        fakes.stub_out_nw_api(self)
        # Captured by the fake create() so tests can assert on what the
        # API layer handed to compute.
        self._block_device_mapping_seen = None
        self._legacy_bdm_seen = True
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Volumes', 'Block_device_mapping_v2_boot'])
    def _get_fake_compute_api_create(self):
        """Build a compute_api.API.create stub that records the BDM args."""
        def _fake_compute_api_create(cls, context, instance_type,
                                     image_href, **kwargs):
            self._block_device_mapping_seen = kwargs.get(
                'block_device_mapping')
            self._legacy_bdm_seen = kwargs.get('legacy_bdm')
            inst_type = flavors.get_flavor_by_flavor_id(2)
            resv_id = None
            return ([{'id': 1,
                      'display_name': 'test_server',
                      'uuid': FAKE_UUID,
                      'instance_type': inst_type,
                      'access_ip_v4': '1.2.3.4',
                      'access_ip_v6': 'fead::1234',
                      'image_ref': IMAGE_UUID,
                      'user_id': 'fake',
                      'project_id': 'fake',
                      'created_at': datetime.datetime(2010, 10, 10, 12, 0, 0),
                      'updated_at': datetime.datetime(2010, 11, 11, 11, 0, 0),
                      'progress': 0,
                      'fixed_ips': []
                      }], resv_id)
        return _fake_compute_api_create
    def test_create_root_volume(self):
        """Legacy block_device_mapping format reaches compute unchanged."""
        body = dict(server=dict(
            name='test_server', imageRef=IMAGE_UUID,
            flavorRef=2, min_count=1, max_count=1,
            block_device_mapping=[dict(
                volume_id='1',
                device_name='/dev/vda',
                virtual='root',
                delete_on_termination=False,
            )]
        ))
        req = fakes.HTTPRequest.blank('/v2/fake/os-volumes_boot')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes(body)
        req.headers['content-type'] = 'application/json'
        res = req.get_response(fakes.wsgi_app(
            init_only=('os-volumes_boot', 'servers')))
        self.assertEqual(202, res.status_int)
        server = jsonutils.loads(res.body)['server']
        self.assertEqual(FAKE_UUID, server['id'])
        self.assertEqual(CONF.password_length, len(server['adminPass']))
        self.assertEqual(1, len(self._block_device_mapping_seen))
        self.assertTrue(self._legacy_bdm_seen)
        self.assertEqual('1', self._block_device_mapping_seen[0]['volume_id'])
        self.assertEqual('/dev/vda',
                         self._block_device_mapping_seen[0]['device_name'])
    def test_create_root_volume_bdm_v2(self):
        """block_device_mapping_v2 format is passed with legacy_bdm=False."""
        body = dict(server=dict(
            name='test_server', imageRef=IMAGE_UUID,
            flavorRef=2, min_count=1, max_count=1,
            block_device_mapping_v2=[dict(
                source_type='volume',
                uuid='1',
                device_name='/dev/vda',
                boot_index=0,
                delete_on_termination=False,
            )]
        ))
        req = fakes.HTTPRequest.blank('/v2/fake/os-volumes_boot')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes(body)
        req.headers['content-type'] = 'application/json'
        res = req.get_response(fakes.wsgi_app(
            init_only=('os-volumes_boot', 'servers')))
        self.assertEqual(202, res.status_int)
        server = jsonutils.loads(res.body)['server']
        self.assertEqual(FAKE_UUID, server['id'])
        self.assertEqual(CONF.password_length, len(server['adminPass']))
        self.assertEqual(1, len(self._block_device_mapping_seen))
        self.assertFalse(self._legacy_bdm_seen)
        self.assertEqual('1', self._block_device_mapping_seen[0]['volume_id'])
        self.assertEqual(0, self._block_device_mapping_seen[0]['boot_index'])
        self.assertEqual('/dev/vda',
                         self._block_device_mapping_seen[0]['device_name'])
class VolumeApiTestV21(test.NoDBTestCase):
    """Tests for the v2.1 os-volumes API: create, index, detail, show
    and delete, with the cinder API stubbed out.
    """
    url_prefix = '/v2/fake'
    def setUp(self):
        super(VolumeApiTestV21, self).setUp()
        fakes.stub_out_networking(self)
        fakes.stub_out_rate_limiting(self.stubs)
        self.stubs.Set(cinder.API, "delete", fakes.stub_volume_delete)
        self.stubs.Set(cinder.API, "get", fakes.stub_volume_get)
        self.stubs.Set(cinder.API, "get_all", fakes.stub_volume_get_all)
        self.flags(
            osapi_compute_extension=[
                'nova.api.openstack.compute.contrib.select_extensions'],
            osapi_compute_ext_list=['Volumes'])
        self.context = context.get_admin_context()
    @property
    def app(self):
        return fakes.wsgi_app_v21(init_only=('os-volumes', 'servers'))
    def test_volume_create(self):
        """POSTing a volume echoes its properties back in the response."""
        self.stubs.Set(cinder.API, "create", fakes.stub_volume_create)
        vol = {"size": 100,
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes(body)
        req.headers['content-type'] = 'application/json'
        resp = req.get_response(self.app)
        self.assertEqual(200, resp.status_int)
        resp_dict = jsonutils.loads(resp.body)
        self.assertIn('volume', resp_dict)
        self.assertEqual(vol['size'], resp_dict['volume']['size'])
        self.assertEqual(vol['display_name'],
                         resp_dict['volume']['displayName'])
        self.assertEqual(vol['display_description'],
                         resp_dict['volume']['displayDescription'])
        self.assertEqual(vol['availability_zone'],
                         resp_dict['volume']['availabilityZone'])
    def _test_volume_translate_exception(self, cinder_exc, api_exc):
        """Tests that cinder exceptions are correctly translated"""
        def fake_volume_create(self, context, size, name, description,
                               snapshot, **param):
            raise cinder_exc
        self.stubs.Set(cinder.API, "create", fake_volume_create)
        vol = {"size": '10',
               "display_name": "Volume Test Name",
               "display_description": "Volume Test Desc",
               "availability_zone": "zone1:host1"}
        body = {"volume": vol}
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes')
        self.assertRaises(api_exc,
                          volumes_v21.VolumeController().create, req,
                          body=body)
    @mock.patch.object(cinder.API, 'get_snapshot')
    @mock.patch.object(cinder.API, 'create')
    def test_volume_create_bad_snapshot_id(self, mock_create, mock_get):
        vol = {"snapshot_id": '1', "size": 10}
        body = {"volume": vol}
        mock_get.side_effect = exception.SnapshotNotFound(snapshot_id='1')
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes')
        self.assertRaises(webob.exc.HTTPNotFound,
                          volumes_v21.VolumeController().create, req,
                          body=body)
    def test_volume_create_bad_input(self):
        self._test_volume_translate_exception(
            exception.InvalidInput(reason='fake'), webob.exc.HTTPBadRequest)
    def test_volume_create_bad_quota(self):
        self._test_volume_translate_exception(
            exception.OverQuota(overs='fake'), webob.exc.HTTPForbidden)
    def test_volume_index(self):
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes')
        resp = req.get_response(self.app)
        self.assertEqual(200, resp.status_int)
    def test_volume_detail(self):
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes/detail')
        resp = req.get_response(self.app)
        self.assertEqual(200, resp.status_int)
    def test_volume_show(self):
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes/123')
        resp = req.get_response(self.app)
        self.assertEqual(200, resp.status_int)
    def test_volume_show_no_volume(self):
        self.stubs.Set(cinder.API, "get", fakes.stub_volume_notfound)
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes/456')
        resp = req.get_response(self.app)
        self.assertEqual(404, resp.status_int)
        # NOTE: resp.body is bytes (webob); decode before the substring
        # check so this works on Python 3 as well as Python 2.
        self.assertIn('Volume 456 could not be found.',
                      resp.body.decode('utf-8'))
    def test_volume_delete(self):
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes/123')
        req.method = 'DELETE'
        resp = req.get_response(self.app)
        self.assertEqual(202, resp.status_int)
    def test_volume_delete_no_volume(self):
        self.stubs.Set(cinder.API, "delete", fakes.stub_volume_notfound)
        req = fakes.HTTPRequest.blank(self.url_prefix + '/os-volumes/456')
        req.method = 'DELETE'
        resp = req.get_response(self.app)
        self.assertEqual(404, resp.status_int)
        # NOTE: decode bytes body for py2/py3 compatibility (see above).
        self.assertIn('Volume 456 could not be found.',
                      resp.body.decode('utf-8'))
class VolumeAttachTestsV21(test.NoDBTestCase):
    """Tests for the v2.1 os-volume_attachments API: show, attach
    (create), detach (delete) and swap (update), including the 2.19/2.20
    microversion behavior for shelved instances. The instance always has
    volumes FAKE_UUID_A and FAKE_UUID_B attached via the stubbed BDM list.
    """
    validation_error = exception.ValidationError
    def setUp(self):
        super(VolumeAttachTestsV21, self).setUp()
        self.stub_out('nova.objects.BlockDeviceMappingList'
                      '.get_by_instance_uuid',
                      fake_bdm_list_get_by_instance_uuid)
        self.stubs.Set(compute_api.API, 'get', fake_get_instance)
        self.stubs.Set(cinder.API, 'get', fake_get_volume)
        self.context = context.get_admin_context()
        self.expected_show = {'volumeAttachment':
                              {'device': '/dev/fake0',
                               'serverId': FAKE_UUID,
                               'id': FAKE_UUID_A,
                               'volumeId': FAKE_UUID_A
                              }}
        self._set_up_controller()
    def _set_up_controller(self):
        # Split out so subclasses can substitute a different controller.
        self.attachments = volumes_v21.VolumeAttachmentController()
    def test_show(self):
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.show(req, FAKE_UUID, FAKE_UUID_A)
        self.assertEqual(self.expected_show, result)
    @mock.patch.object(compute_api.API, 'get',
        side_effect=exception.InstanceNotFound(instance_id=FAKE_UUID))
    def test_show_no_instance(self, mock_mr):
        """show() returns 404 when the instance does not exist."""
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.show,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_A)
    @mock.patch.object(objects.BlockDeviceMappingList,
                       'get_by_instance_uuid', return_value=None)
    def test_show_no_bdms(self, mock_mr):
        """show() returns 404 when the instance has no BDMs at all."""
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.show,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_A)
    def test_show_bdms_no_mountpoint(self):
        """show() returns 404 for a volume id not in the BDM list."""
        FAKE_UUID_NOTEXIST = '00000000-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.show,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_NOTEXIST)
    def test_detach(self):
        self.stubs.Set(compute_api.API,
                       'detach_volume',
                       fake_detach_volume)
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.delete(req, FAKE_UUID, FAKE_UUID_A)
        # NOTE: on v2.1, http status code is set as wsgi_code of API
        # method instead of status_int in a response object.
        if isinstance(self.attachments,
                      volumes_v21.VolumeAttachmentController):
            status_int = self.attachments.delete.wsgi_code
        else:
            status_int = result.status_int
        self.assertEqual(202, status_int)
    @mock.patch.object(common, 'get_instance')
    def test_detach_vol_shelved_not_supported(self, mock_get_instance):
        """Before microversion 2.20, detach from SHELVED conflicts (409)."""
        inst = fake_instance.fake_instance_obj(self.context,
                                               **{'uuid': FAKE_UUID})
        inst.vm_state = vm_states.SHELVED
        mock_get_instance.return_value = inst
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid', version='2.19')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(webob.exc.HTTPConflict,
                          self.attachments.delete,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_A)
    @mock.patch.object(compute_api.API, 'detach_volume')
    @mock.patch.object(common, 'get_instance')
    def test_detach_vol_shelved_supported(self,
                                          mock_get_instance,
                                          mock_detach):
        """From microversion 2.20 on, detach works on SHELVED instances."""
        inst = fake_instance.fake_instance_obj(self.context,
                                               **{'uuid': FAKE_UUID})
        inst.vm_state = vm_states.SHELVED
        mock_get_instance.return_value = inst
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid', version='2.20')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.attachments.delete(req, FAKE_UUID, FAKE_UUID_A)
        self.assertTrue(mock_detach.called)
    def test_detach_vol_not_found(self):
        self.stubs.Set(compute_api.API,
                       'detach_volume',
                       fake_detach_volume)
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(exc.HTTPNotFound,
                          self.attachments.delete,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_C)
    @mock.patch('nova.objects.BlockDeviceMapping.is_root',
                new_callable=mock.PropertyMock)
    def test_detach_vol_root(self, mock_isroot):
        """Detaching the root volume is forbidden (403)."""
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        mock_isroot.return_value = True
        self.assertRaises(exc.HTTPForbidden,
                          self.attachments.delete,
                          req,
                          FAKE_UUID,
                          FAKE_UUID_A)
    def test_detach_volume_from_locked_server(self):
        """InstanceIsLocked from compute maps to 409 Conflict."""
        def fake_detach_volume_from_locked_server(self, context,
                                                  instance, volume):
            raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
        self.stubs.Set(compute_api.API,
                       'detach_volume',
                       fake_detach_volume_from_locked_server)
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'DELETE'
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(webob.exc.HTTPConflict, self.attachments.delete,
                          req, FAKE_UUID, FAKE_UUID_A)
    def test_attach_volume(self):
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume)
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.create(req, FAKE_UUID, body=body)
        self.assertEqual('00000000-aaaa-aaaa-aaaa-000000000000',
                         result['volumeAttachment']['id'])
    @mock.patch.object(common, 'get_instance')
    def test_attach_vol_shelved_not_supported(self, mock_get_instance):
        """Before microversion 2.20, attach to SHELVED conflicts (409)."""
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        inst = fake_instance.fake_instance_obj(self.context,
                                               **{'uuid': FAKE_UUID})
        inst.vm_state = vm_states.SHELVED
        mock_get_instance.return_value = inst
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments',
                                      version='2.19')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(webob.exc.HTTPConflict,
                          self.attachments.create,
                          req,
                          FAKE_UUID,
                          body=body)
    @mock.patch.object(compute_api.API, 'attach_volume',
                       return_value='/dev/myfake')
    @mock.patch.object(common, 'get_instance')
    def test_attach_vol_shelved_supported(self,
                                          mock_get_instance,
                                          mock_attach):
        """From microversion 2.20 on, attach works on SHELVED instances."""
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        inst = fake_instance.fake_instance_obj(self.context,
                                               **{'uuid': FAKE_UUID})
        inst.vm_state = vm_states.SHELVED
        mock_get_instance.return_value = inst
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments',
                                      version='2.20')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.create(req, FAKE_UUID, body=body)
        self.assertEqual('00000000-aaaa-aaaa-aaaa-000000000000',
                         result['volumeAttachment']['id'])
        self.assertEqual('/dev/myfake', result['volumeAttachment']['device'])
    @mock.patch.object(compute_api.API, 'attach_volume',
                       return_value='/dev/myfake')
    def test_attach_volume_with_auto_device(self, mock_attach):
        """device=None lets compute pick the device name."""
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': None}}
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        result = self.attachments.create(req, FAKE_UUID, body=body)
        self.assertEqual('00000000-aaaa-aaaa-aaaa-000000000000',
                         result['volumeAttachment']['id'])
        self.assertEqual('/dev/myfake', result['volumeAttachment']['device'])
    def test_attach_volume_to_locked_server(self):
        """InstanceIsLocked from compute maps to 409 Conflict."""
        def fake_attach_volume_to_locked_server(self, context, instance,
                                                volume_id, device=None):
            raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume_to_locked_server)
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(webob.exc.HTTPConflict, self.attachments.create,
                          req, FAKE_UUID, body=body)
    def test_attach_volume_bad_id(self):
        """A non-UUID volumeId fails schema validation."""
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume)
        body = {
            'volumeAttachment': {
                'device': None,
                'volumeId': 'TESTVOLUME',
            }
        }
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(self.validation_error, self.attachments.create,
                          req, FAKE_UUID, body=body)
    def test_attach_volume_without_volumeId(self):
        """volumeId is required by the schema."""
        self.stubs.Set(compute_api.API,
                       'attach_volume',
                       fake_attach_volume)
        body = {
            'volumeAttachment': {
                'device': None
            }
        }
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(self.validation_error, self.attachments.create,
                          req, FAKE_UUID, body=body)
    def test_attach_volume_with_extra_arg(self):
        """Unknown properties in the body fail schema validation."""
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake',
                                     'extra': 'extra_arg'}}
        req = fakes.HTTPRequest.blank('/v2/servers/id/os-volume_attachments')
        req.method = 'POST'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        self.assertRaises(self.validation_error, self.attachments.create,
                          req, FAKE_UUID, body=body)
    def _test_swap(self, attachments, uuid=FAKE_UUID_A,
                   fake_func=None, body=None):
        """Issue a PUT (swap) for ``uuid`` and return the controller result."""
        fake_func = fake_func or fake_swap_volume
        self.stubs.Set(compute_api.API,
                       'swap_volume',
                       fake_func)
        body = body or {'volumeAttachment': {'volumeId': FAKE_UUID_B}}
        req = fakes.HTTPRequest.blank(
            '/v2/servers/id/os-volume_attachments/uuid')
        req.method = 'PUT'
        req.body = jsonutils.dump_as_bytes({})
        req.headers['content-type'] = 'application/json'
        req.environ['nova.context'] = self.context
        return attachments.update(req, FAKE_UUID, uuid, body=body)
    def test_swap_volume_for_locked_server(self):
        def fake_swap_volume_for_locked_server(self, context, instance,
                                               old_volume, new_volume):
            raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
        self.assertRaises(webob.exc.HTTPConflict, self._test_swap,
                          self.attachments,
                          fake_func=fake_swap_volume_for_locked_server)
    def test_swap_volume(self):
        result = self._test_swap(self.attachments)
        # NOTE: on v2.1, http status code is set as wsgi_code of API
        # method instead of status_int in a response object.
        if isinstance(self.attachments,
                      volumes_v21.VolumeAttachmentController):
            status_int = self.attachments.update.wsgi_code
        else:
            status_int = result.status_int
        self.assertEqual(202, status_int)
    def test_swap_volume_no_attachment(self):
        self.assertRaises(exc.HTTPNotFound, self._test_swap,
                          self.attachments, FAKE_UUID_C)
    def test_swap_volume_without_volumeId(self):
        body = {'volumeAttachment': {'device': '/dev/fake'}}
        self.assertRaises(self.validation_error,
                          self._test_swap,
                          self.attachments,
                          body=body)
    def test_swap_volume_with_extra_arg(self):
        # 'device' is not allowed by the update (swap) schema, so it is
        # the "extra" argument here.
        body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
                                     'device': '/dev/fake'}}
        self.assertRaises(self.validation_error,
                          self._test_swap,
                          self.attachments,
                          body=body)
class CommonBadRequestTestCase(object):
    """Mixin collecting the places we throw 400 Bad Request from.

    Subclasses set ``resource``, ``entity_name`` and ``controller_cls``
    (and may override ``kwargs``/``bad_request``) and mix this into a
    TestCase.

    NOTE: the original description string sat *after* the class
    attributes, where it was a discarded expression rather than the
    class docstring; it has been moved here so it is actually exposed
    as ``__doc__``.
    """
    resource = None
    entity_name = None
    controller_cls = None
    kwargs = {}
    bad_request = exc.HTTPBadRequest
    def setUp(self):
        super(CommonBadRequestTestCase, self).setUp()
        self.controller = self.controller_cls()
    def _bad_request_create(self, body):
        """Assert that POSTing ``body`` to the resource raises bad_request."""
        req = fakes.HTTPRequest.blank('/v2/fake/' + self.resource)
        req.method = 'POST'
        kwargs = self.kwargs.copy()
        kwargs['body'] = body
        self.assertRaises(self.bad_request,
                          self.controller.create, req, **kwargs)
    def test_create_no_body(self):
        self._bad_request_create(body=None)
    def test_create_missing_volume(self):
        body = {'foo': {'a': 'b'}}
        self._bad_request_create(body=body)
    def test_create_malformed_entity(self):
        body = {self.entity_name: 'string'}
        self._bad_request_create(body=body)
class BadRequestVolumeTestCaseV21(CommonBadRequestTestCase,
test.NoDBTestCase):
resource = 'os-volumes'
entity_name = 'volume'
controller_cls = volumes_v21.VolumeController
bad_request = exception.ValidationError
class BadRequestSnapshotTestCaseV21(CommonBadRequestTestCase,
test.NoDBTestCase):
resource = 'os-snapshots'
entity_name = 'snapshot'
controller_cls = volumes_v21.SnapshotController
bad_request = exception.ValidationError
class AssistedSnapshotCreateTestCaseV21(test.NoDBTestCase):
assisted_snaps = assisted_snaps_v21
bad_request = exception.ValidationError
def setUp(self):
super(AssistedSnapshotCreateTestCaseV21, self).setUp()
self.controller = \
self.assisted_snaps.AssistedVolumeSnapshotsController()
self.stubs.Set(compute_api.API, 'volume_snapshot_create',
fake_compute_volume_snapshot_create)
def test_assisted_create(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
body = {'snapshot':
{'volume_id': '1',
'create_info': {'type': 'qcow2',
'new_file': 'new_file',
'snapshot_id': 'snapshot_id'}}}
req.method = 'POST'
self.controller.create(req, body=body)
def test_assisted_create_missing_create_info(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
body = {'snapshot': {'volume_id': '1'}}
req.method = 'POST'
self.assertRaises(self.bad_request, self.controller.create,
req, body=body)
def test_assisted_create_with_unexpected_attr(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
body = {
'snapshot': {
'volume_id': '1',
'create_info': {
'type': 'qcow2',
'new_file': 'new_file',
'snapshot_id': 'snapshot_id'
}
},
'unexpected': 0,
}
req.method = 'POST'
self.assertRaises(self.bad_request, self.controller.create,
req, body=body)
class AssistedSnapshotDeleteTestCaseV21(test.NoDBTestCase):
assisted_snaps = assisted_snaps_v21
def _check_status(self, expected_status, res, controller_method):
self.assertEqual(expected_status, controller_method.wsgi_code)
def setUp(self):
super(AssistedSnapshotDeleteTestCaseV21, self).setUp()
self.controller = \
self.assisted_snaps.AssistedVolumeSnapshotsController()
self.stubs.Set(compute_api.API, 'volume_snapshot_delete',
fake_compute_volume_snapshot_delete)
def test_assisted_delete(self):
params = {
'delete_info': jsonutils.dumps({'volume_id': '1'}),
}
req = fakes.HTTPRequest.blank(
'/v2/fake/os-assisted-volume-snapshots?%s' %
urllib.parse.urlencode(params))
req.method = 'DELETE'
result = self.controller.delete(req, '5')
self._check_status(204, result, self.controller.delete)
def test_assisted_delete_missing_delete_info(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-assisted-volume-snapshots')
req.method = 'DELETE'
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, '5')
class TestAssistedVolumeSnapshotsPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(TestAssistedVolumeSnapshotsPolicyEnforcementV21, self).setUp()
self.controller = (
assisted_snaps_v21.AssistedVolumeSnapshotsController())
self.req = fakes.HTTPRequest.blank('')
def test_create_assisted_volumes_snapshots_policy_failed(self):
rule_name = "os_compute_api:os-assisted-volume-snapshots:create"
self.policy.set_rules({rule_name: "project:non_fake"})
body = {'snapshot':
{'volume_id': '1',
'create_info': {'type': 'qcow2',
'new_file': 'new_file',
'snapshot_id': 'snapshot_id'}}}
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.create, self.req, body=body)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_assisted_volumes_snapshots_policy_failed(self):
rule_name = "os_compute_api:os-assisted-volume-snapshots:delete"
self.policy.set_rules({rule_name: "project:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.delete, self.req, '5')
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class TestVolumeAttachPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(TestVolumeAttachPolicyEnforcementV21, self).setUp()
self.controller = volumes_v21.VolumeAttachmentController()
self.req = fakes.HTTPRequest.blank('')
def _common_policy_check(self, rules, rule_name, func, *arg, **kwarg):
self.policy.set_rules(rules)
exc = self.assertRaises(
exception.PolicyNotAuthorized, func, *arg, **kwarg)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_index_volume_attach_policy_failed(self):
rule_name = "os_compute_api:os-volumes-attachments:index"
rules = {rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name,
self.controller.index, self.req, FAKE_UUID)
def test_show_volume_attach_policy_failed(self):
rule_name = "os_compute_api:os-volumes"
rules = {"os_compute_api:os-volumes-attachments:show": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.show,
self.req, FAKE_UUID, FAKE_UUID_A)
rule_name = "os_compute_api:os-volumes-attachments:show"
rules = {"os_compute_api:os-volumes": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.show,
self.req, FAKE_UUID, FAKE_UUID_A)
def test_create_volume_attach_policy_failed(self):
rule_name = "os_compute_api:os-volumes"
rules = {"os_compute_api:os-volumes-attachments:create": "@",
rule_name: "project:non_fake"}
body = {'volumeAttachment': {'volumeId': FAKE_UUID_A,
'device': '/dev/fake'}}
self._common_policy_check(rules, rule_name, self.controller.create,
self.req, FAKE_UUID, body=body)
rule_name = "os_compute_api:os-volumes-attachments:create"
rules = {"os_compute_api:os-volumes": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.create,
self.req, FAKE_UUID, body=body)
def test_update_volume_attach_policy_failed(self):
rule_name = "os_compute_api:os-volumes"
rules = {"os_compute_api:os-volumes-attachments:update": "@",
rule_name: "project:non_fake"}
body = {'volumeAttachment': {'volumeId': FAKE_UUID_B}}
self._common_policy_check(rules, rule_name, self.controller.update,
self.req, FAKE_UUID, FAKE_UUID_A, body=body)
rule_name = "os_compute_api:os-volumes-attachments:update"
rules = {"os_compute_api:os-volumes": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.update,
self.req, FAKE_UUID, FAKE_UUID_A, body=body)
def test_delete_volume_attach_policy_failed(self):
rule_name = "os_compute_api:os-volumes"
rules = {"os_compute_api:os-volumes-attachments:delete": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.delete,
self.req, FAKE_UUID, FAKE_UUID_A)
rule_name = "os_compute_api:os-volumes-attachments:delete"
rules = {"os_compute_api:os-volumes": "@",
rule_name: "project:non_fake"}
self._common_policy_check(rules, rule_name, self.controller.delete,
self.req, FAKE_UUID, FAKE_UUID_A)
| 40.878173 | 79 | 0.599727 |
acec276a22fab2eb9049519b4ab2b68719eed013 | 1,389 | py | Python | medicpro/hook/patient.py | Lewinta/MedicPro | 992340f8d09f48094b7588d326531059d7d7ab72 | [
"MIT"
] | null | null | null | medicpro/hook/patient.py | Lewinta/MedicPro | 992340f8d09f48094b7588d326531059d7d7ab72 | [
"MIT"
] | null | null | null | medicpro/hook/patient.py | Lewinta/MedicPro | 992340f8d09f48094b7588d326531059d7d7ab72 | [
"MIT"
] | null | null | null | import frappe
from frappe import _
def after_insert(doc, event):
create_customer(doc)
def on_update(doc, event):
sync_customer(doc)
def create_customer(doc):
customer_group = frappe.get_value("Selling Settings", None, "customer_group")
territory = frappe.get_value("Selling Settings", None, "territory")
if not (customer_group and territory):
customer_group = "Commercial"
territory = "Rest Of The World"
frappe.msgprint(_("Please set default customer group and territory in Selling Settings"), alert=True)
customer = frappe.get_doc({"doctype": "Customer",
"customer_name": doc.patient_name,
"customer_group": customer_group,
"territory" : territory,
"tax_id" : doc.tax_id,
"ars" : doc.ars,
"ars_name" : doc.ars_name,
"nss" : doc.nss,
"customer_type": "Individual"
}).insert(ignore_permissions=True)
frappe.db.set_value("Patient", doc.name, "customer", customer.name)
frappe.msgprint(_("Customer {0} is created.").format(customer.name), alert=True)
def sync_customer(doc):
if not frappe.db.exists("Customer", doc.customer):
return
customer = frappe.get_doc("Customer", doc.customer)
customer.update({
"customer_name": doc.patient_name,
"tax_id" : doc.tax_id,
"ars" : doc.ars,
"gender" : doc.sex,
"salutation" : "Sra." if doc.sex == "Femenino" else "Sr.",
"ars_name" : doc.ars_name,
"nss" : doc.nss,
})
customer.save(ignore_permissions=True)
| 31.568182 | 103 | 0.722102 |
acec27833eb3b8fdea8847c225333ce180225935 | 11,735 | py | Python | CarND-test/src/train/train.py | powerfulwang/decision-making-CarND | ee720b41fc57a7f5146ffd58dfd1197d273880d1 | [
"MIT"
] | 2 | 2022-03-14T15:29:18.000Z | 2022-03-19T05:49:02.000Z | CarND-test/src/train/train.py | powerfulwang/decision-making-CarND | ee720b41fc57a7f5146ffd58dfd1197d273880d1 | [
"MIT"
] | null | null | null | CarND-test/src/train/train.py | powerfulwang/decision-making-CarND | ee720b41fc57a7f5146ffd58dfd1197d273880d1 | [
"MIT"
] | null | null | null | # coding=utf-8
import socket # socket模块
import json
import random
import numpy as np
from collections import deque
from keras.models import Sequential, Model
from keras.layers import Input, Dense, Conv2D, Flatten, concatenate
from keras.optimizers import Adam
from math import floor, sqrt
import tensorflow as tf
import subprocess
import time
import psutil
import pyautogui
import os
import pickle
from multiprocessing import Pool
from keras.backend.tensorflow_backend import set_session
config = tf.ConfigProto()
config.gpu_options.per_process_gpu_memory_fraction = 0.3
set_session(tf.Session(config=config))
# import os
# os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
# os.environ["CUDA_VISIBLE_DEVICES"] = "4,5,6,7"
class DQNAgent:
def __init__(self, state_height, state_width, action_size):
# self.state_size = state_size
self.state_height = state_height
self.state_width = state_width
self.action_size = action_size
self.memory1 = deque(maxlen=20000)
self.memory2 = deque(maxlen=20000)
# self.memory3 = deque(maxlen=20000)
self.gamma = 0.90 # discount rate
self.epsilon = 1.0 # exploration rate
self.epsilon_min = 0.3
# self.epsilon_decay = 0.99985
self.epsilon_decay = 1.0
self.learning_rate = 0.00025
self.model = self._build_model()
self.target_model = self._build_model()
self.update_target_model()
def _build_model(self):
# Neural Net for Deep-Q learning Model
input1 = Input(shape=(1, self.state_height, self.state_width))
conv1 = Conv2D(64, (4, 2), strides=1, activation='relu', padding='valid', data_format='channels_first',
input_shape=(1, self.state_height, self.state_width))(input1)
conv2 = Conv2D(64, (4, 2), strides=1, activation='relu', padding='valid')(conv1)
conv3 = Conv2D(3, 1, strides=1, activation='relu', padding='valid')(conv2)
state1 = Flatten()(conv3)
input2 = Input(shape=(3,))
state2 = concatenate([input2, state1])
state2 = Dense(256, activation='relu')(state2)
state2 = Dense(64, activation='relu')(state2)
out_put = Dense(self.action_size, activation='linear')(state2)
model = Model(inputs=[input1, input2], outputs=out_put)
model.compile(loss='mse',
optimizer=Adam(lr=self.learning_rate))
return model
def update_target_model(self):
# copy weights from model to target_model
self.target_model.set_weights(self.model.get_weights())
def remember1(self, state, action, reward, next_state):
self.memory1.append((state, action, reward, next_state))
def remember2(self, state, action, reward, next_state):
self.memory2.append((state, action, reward, next_state))
def remember3(self, state, action, reward, next_state):
self.memory3.append((state, action, reward, next_state))
def act(self, state):
if np.random.rand() <= self.epsilon:
print('random')
return random.randrange(self.action_size)
act_values = self.model.predict(state)
return np.argmax(act_values[0]) # returns action
def replay(self, batch_size):
minibatch1 = random.sample(self.memory1, int(batch_size / 2))
minibatch2 = random.sample(self.memory2, batch_size - int(batch_size / 2))
minibatch = minibatch1 + minibatch2
for state, action, reward, next_state in minibatch:
target = self.model.predict(state)
t = self.target_model.predict(next_state)[0]
target[0][action] = reward + self.gamma * np.amax(t)
self.model.fit(state, target, epochs=1, verbose=0)
if self.epsilon > self.epsilon_min:
self.epsilon = max(self.epsilon*self.epsilon_decay, self.epsilon_min)
def load(self, name):
self.model.load_weights(name)
def save(self, name):
self.model.save_weights(name)
def connect(ser):
conn, addr = ser.accept() # 接受TCP连接,并返回新的套接字与IP地址
print('Connected by', addr) # 输出客户端的IP地址
return conn
def open_ter(loc):
os.system("gnome-terminal -e 'bash -c \"cd " + loc + " && ./path_planning; exec bash\"'")
time.sleep(1)
# return sim
def kill_terminal():
pids = psutil.pids()
for pid in pids:
p = psutil.Process(pid)
if p.name() == "gnome-terminal-server":
os.kill(pid, 9)
def close_all(sim):
if sim.poll() is None:
sim.terminate()
sim.wait()
time.sleep(2)
kill_terminal()
EPISODES = 100
location = "path_to/lane_changing/decision_making/build"
HOST = '127.0.0.1'
PORT = 1234
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # 定义socket类型,网络通信,TCP
server.bind((HOST, PORT)) # 套接字绑定的IP与端口
server.listen(1) # 开始TCP监听
state_height = 45
state_width = 3
action_size = 3
agent = DQNAgent(state_height, state_width, action_size)
batch_size = 16
episode = 1
while episode <= EPISODES:
pool = Pool(processes=2)
result = []
result.append(pool.apply_async(connect, (server,)))
pool.apply_async(open_ter, (location,))
pool.close()
pool.join()
conn = result[0].get()
sim = subprocess.Popen('path_to/term3_sim_linux/term3_sim.x86_64')
time.sleep(2)
pyautogui.click(x=1164, y=864, button='left')
time.sleep(6)
pyautogui.click(x=465, y=535, button='left')
try:
data = conn.recv(2000) # 把接收的数据实例化
except Exception as e:
close_all(sim)
continue
while not data:
try:
data = conn.recv(2000) # 把接收的数据实例化
except Exception as e:
close_all(sim)
continue
data = bytes.decode(data)
# print(data)
j = json.loads(data)
# Main car's localization Data
# car_x = j[1]['x']
# car_y = j[1]['y']
car_s = j[1]['s']
car_d = j[1]['d']
car_yaw = j[1]['yaw']
car_speed = j[1]['speed']
# Sensor Fusion Data, a list of all other cars on the same side of the road.\
sensor_fusion = j[1]['sensor_fusion']
grid = np.ones((51, 3))
ego_car_lane = int(floor(car_d/4))
grid[31:35, ego_car_lane] = car_speed / 100.0
# sensor_fusion_array = np.array(sensor_fusion)
for i in range(len(sensor_fusion)):
vx = sensor_fusion[i][3]
vy = sensor_fusion[i][4]
s = sensor_fusion[i][5]
d = sensor_fusion[i][6]
check_speed = sqrt(vx * vx + vy * vy)
car_lane = int(floor(d / 4))
if 0 <= car_lane < 3:
s_dis = s - car_s
if -36 < s_dis < 66:
pers = - int(floor(s_dis / 2.0)) + 30
grid[pers:pers + 4, car_lane] = - check_speed / 100.0 * 2.237
state = np.zeros((state_height, state_width))
state[:, :] = grid[3:48, :]
state = np.reshape(state, [-1, 1, state_height, state_width])
pos = [car_speed / 50, 0, 0]
if ego_car_lane == 0:
pos = [car_speed / 50, 0, 1]
elif ego_car_lane == 1:
pos = [car_speed / 50, 1, 1]
elif ego_car_lane == 2:
pos = [car_speed / 50, 1, 0]
pos = np.reshape(pos, [1, 3])
# print(state)
action = 0
mess_out = str(action)
mess_out = str.encode(mess_out)
conn.sendall(mess_out)
count = 0
start = time.time()
while True:
# now = time.time()
# if (now - start) / 60 > 15:
# close_all(sim)
# break
try:
data = conn.recv(2000) # 把接收的数据实例化
except Exception as e:
pass
while not data:
try:
data = conn.recv(2000) # 把接收的数据实例化
except Exception as e:
pass
data = bytes.decode(data)
if data == "over":
agent.save("episode" + str(episode) + ".h5")
print("weight saved")
print("episode:{}, epsilon:{}".format(episode, agent.epsilon))
with open('.train.txt', 'a') as f:
f.write(" episode {} epsilon {}\n".format(episode, agent.epsilon))
close_all(sim)
conn.close() # 关闭连接
with open('exp1.pkl', 'wb') as exp1:
pickle.dump(agent.memory1, exp1)
with open('exp2.pkl', 'wb') as exp2:
pickle.dump(agent.memory2, exp2)
episode = episode + 1
if episode == 41:
agent.epsilon_min = 0.10
if episode == 71:
agent.epsilon_min = 0.03
if episode == 6:
agent.epsilon_decay = 0.99985
break
try:
j = json.loads(data)
except Exception as e:
close_all(sim)
break
last_state = state
last_pos = pos
last_act = action
last_lane = ego_car_lane
# Main car's localization Data
# car_x = j[1]['x']
# car_y = j[1]['y']
car_s = j[1]['s']
car_d = j[1]['d']
car_yaw = j[1]['yaw']
car_speed = j[1]['speed']
print(car_s)
if car_speed == 0:
mess_out = str(0)
mess_out = str.encode(mess_out)
conn.sendall(mess_out)
continue
# Sensor Fusion Data, a list of all other cars on the same side of the road.
sensor_fusion = j[1]['sensor_fusion']
ego_car_lane = int(floor(car_d / 4))
if last_act == 0:
last_reward = (2 * ((j[3] - 25.0) / 5.0)) # - abs(ego_car_lane - 1))
else:
last_reward = (2 * ((j[3] - 25.0) / 5.0)) - 10.0
if grid[3:31, last_lane].sum() > 27 and last_act != 0:
last_reward = -30.0
grid = np.ones((51, 3))
grid[31:35, ego_car_lane] = car_speed / 100.0
# sensor_fusion_array = np.array(sensor_fusion)
for i in range(len(sensor_fusion)):
vx = sensor_fusion[i][3]
vy = sensor_fusion[i][4]
s = sensor_fusion[i][5]
d = sensor_fusion[i][6]
check_speed = sqrt(vx * vx + vy * vy)
car_lane = int(floor(d / 4))
if 0 <= car_lane < 3:
s_dis = s - car_s
if -36 < s_dis < 66:
pers = - int(floor(s_dis / 2.0)) + 30
grid[pers:pers + 4, car_lane] = - check_speed / 100.0 * 2.237
if j[2] < -10:
last_reward = float(j[2]) # reward -50, -100
last_reward = last_reward / 10.0
state = np.zeros((state_height, state_width))
state[:, :] = grid[3:48, :]
state = np.reshape(state, [-1, 1, state_height, state_width])
# print(state)
pos = [car_speed / 50, 0, 0]
if ego_car_lane == 0:
pos = [car_speed / 50, 0, 1]
elif ego_car_lane == 1:
pos = [car_speed / 50, 1, 1]
elif ego_car_lane == 2:
pos = [car_speed / 50, 1, 0]
pos = np.reshape(pos, [1, 3])
# print(state)
print("last_action:{}, last_reward:{:.4}, speed:{:.3}".format(last_act, last_reward, float(car_speed)))
if last_act != 0:
agent.remember1([last_state, last_pos], last_act, last_reward, [state, pos])
else:
agent.remember2([last_state, last_pos], last_act, last_reward, [state, pos])
action = agent.act([state, pos])
count += 1
if count == 10:
agent.update_target_model()
print("target model updated")
count = 0
# action = agent.act(state)
if len(agent.memory1) > batch_size and len(agent.memory2) > batch_size:
agent.replay(batch_size)
mess_out = str(action)
mess_out = str.encode(mess_out)
conn.sendall(mess_out)
| 34.41349 | 111 | 0.577929 |
acec28f49c3dabb3cda4bd1f4e1a58d47cbc2c9d | 614 | py | Python | Examples/07_ColorTerm.py | JoelStienlet/g3logPython | eacb1fb47e575b3af815079bb6f0d26d863c3e2b | [
"Unlicense"
] | 4 | 2020-02-29T09:37:12.000Z | 2021-05-21T23:01:03.000Z | Examples/07_ColorTerm.py | JoelStienlet/g3logPython | eacb1fb47e575b3af815079bb6f0d26d863c3e2b | [
"Unlicense"
] | null | null | null | Examples/07_ColorTerm.py | JoelStienlet/g3logPython | eacb1fb47e575b3af815079bb6f0d26d863c3e2b | [
"Unlicense"
] | 2 | 2020-03-07T18:27:47.000Z | 2020-04-07T08:14:47.000Z | #!/usr/bin/env python3
#
# full color terminal example
#
import g3logPython as log
import time
logger = log.get_ifaceLogWorker(False)
colorTermSink = logger.ClrTermSinks.new_Sink("color term")
log.debug("hello world! from python")
# this is required to make sure the text was sent for display before muting:
time.sleep(1)
future = colorTermSink.mute()
future.join()
log.debug("This is not displayed in the terminal")
# this is required to prevent the sink to be unmuted before the text in sent for display:
time.sleep(1)
future = colorTermSink.unmute()
future.join()
log.debug("this in displayed again")
| 21.928571 | 90 | 0.757329 |
acec291592cba295811c8f8fa9df089571df5c47 | 27,496 | py | Python | tests/test_cli.py | buildvoc/Annif | 0ed8c02c0ea9577caba530f5952a70f0b45c21f0 | [
"Apache-2.0"
] | null | null | null | tests/test_cli.py | buildvoc/Annif | 0ed8c02c0ea9577caba530f5952a70f0b45c21f0 | [
"Apache-2.0"
] | null | null | null | tests/test_cli.py | buildvoc/Annif | 0ed8c02c0ea9577caba530f5952a70f0b45c21f0 | [
"Apache-2.0"
] | null | null | null | """Unit test module for Annif CLI commands"""
import contextlib
import random
import re
import os.path
import pytest
from click.testing import CliRunner
import annif.cli
runner = CliRunner(env={'ANNIF_CONFIG': 'annif.default_config.TestingConfig'})
# Generate a random project name to use in tests
TEMP_PROJECT = ''.join(
random.choice('abcdefghiklmnopqrstuvwxyz') for _ in range(8))
PROJECTS_FILE_OPTION = 'tests/projects_for_config_path_option.cfg'
def test_list_projects():
result = runner.invoke(annif.cli.cli, ["list-projects"])
assert not result.exception
assert result.exit_code == 0
# public project should be visible
assert 'dummy-fi' in result.output
# hidden project should be visible
assert 'dummy-en' in result.output
# private project should be visible
assert 'dummydummy' in result.output
# project with no access setting should be visible
assert 'ensemble' in result.output
def test_list_projects_bad_arguments():
# The listprojects function does not accept any arguments, it should fail
# if such are provided.
assert runner.invoke(
annif.cli.cli, [
'list-projects', 'moi']).exit_code != 0
assert runner.invoke(
annif.cli.run_list_projects, ['moi', '--debug', 'y']).exit_code != 0
def test_list_projects_config_path_option():
result = runner.invoke(
annif.cli.cli, ["list-projects", "--projects", PROJECTS_FILE_OPTION])
assert not result.exception
assert result.exit_code == 0
assert 'dummy_for_projects_option' in result.output
assert 'dummy-fi' not in result.output
assert 'dummy-en' not in result.output
def test_list_projects_config_path_option_nonexistent():
failed_result = runner.invoke(
annif.cli.cli, ["list-projects", "--projects", "nonexistent.cfg"])
assert failed_result.exception
assert failed_result.exit_code != 0
assert "Error: Invalid value for '-p' / '--projects': " \
"File 'nonexistent.cfg' does not exist." in failed_result.output
def test_show_project():
result = runner.invoke(annif.cli.cli, ['show-project', 'dummy-en'])
assert not result.exception
project_id = re.search(r'Project ID:\s+(.+)', result.output)
assert project_id.group(1) == 'dummy-en'
project_name = re.search(r'Project Name:\s+(.+)', result.output)
assert project_name.group(1) == 'Dummy English'
project_lang = re.search(r'Language:\s+(.+)', result.output)
assert project_lang.group(1) == 'en'
access = re.search(r'Access:\s+(.+)', result.output)
assert access.group(1) == 'hidden'
is_trained = re.search(r'Trained:\s+(.+)', result.output)
assert is_trained.group(1) == 'True'
modification_time = re.search(r'Modification time:\s+(.+)', result.output)
assert modification_time.group(1) == 'None'
def test_show_project_nonexistent():
assert runner.invoke(
annif.cli.cli,
['show-project', TEMP_PROJECT]).exit_code != 0
# Test should not fail even if the user queries for a non-existent project.
failed_result = runner.invoke(
annif.cli.cli, [
'show-project', 'nonexistent'])
assert failed_result.exception
def test_clear_project(testdatadir):
dirpath = os.path.join(str(testdatadir), 'projects', 'dummy-fi')
fpath = os.path.join(str(dirpath), 'test_clear_project_datafile')
os.makedirs(dirpath)
open(fpath, 'a').close()
assert runner.invoke(
annif.cli.cli,
['clear', 'dummy-fi']).exit_code == 0
assert not os.path.isdir(dirpath)
def test_clear_project_nonexistent_data(testdatadir, caplog):
logger = annif.logger
logger.propagate = True
runner.invoke(
annif.cli.cli,
['clear', 'dummy-fi']).exit_code != 0
assert len(caplog.records) == 1
expected_msg = 'No model data to remove for project dummy-fi.'
assert expected_msg == caplog.records[0].message
def test_loadvoc_tsv(testdatadir):
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects')))
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects.ttl')))
subjectfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'subjects.tsv')
result = runner.invoke(annif.cli.cli, ['loadvoc', 'tfidf-fi', subjectfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('vocabs/yso-fi/subjects').exists()
assert testdatadir.join('vocabs/yso-fi/subjects').size() > 0
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').exists()
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').size() > 0
def test_loadvoc_tsv_with_bom(testdatadir):
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects')))
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects.ttl')))
subjectfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'subjects-bom.tsv')
result = runner.invoke(annif.cli.cli, ['loadvoc', 'tfidf-fi', subjectfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('vocabs/yso-fi/subjects').exists()
assert testdatadir.join('vocabs/yso-fi/subjects').size() > 0
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').exists()
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').size() > 0
def test_loadvoc_rdf(testdatadir):
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects')))
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects.ttl')))
subjectfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'yso-archaeology.rdf')
result = runner.invoke(annif.cli.cli, ['loadvoc', 'tfidf-fi', subjectfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('vocabs/yso-fi/subjects').exists()
assert testdatadir.join('vocabs/yso-fi/subjects').size() > 0
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').exists()
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').size() > 0
def test_loadvoc_ttl(testdatadir):
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects')))
with contextlib.suppress(FileNotFoundError):
os.remove(str(testdatadir.join('projects/tfidf-fi/subjects.ttl')))
subjectfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'yso-archaeology.ttl')
result = runner.invoke(annif.cli.cli, ['loadvoc', 'tfidf-fi', subjectfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('vocabs/yso-fi/subjects').exists()
assert testdatadir.join('vocabs/yso-fi/subjects').size() > 0
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').exists()
assert testdatadir.join('vocabs/yso-fi/subjects.ttl').size() > 0
def test_loadvoc_nonexistent_path():
failed_result = runner.invoke(
annif.cli.cli, [
'loadvoc', 'dummy-fi', 'nonexistent_path'])
assert failed_result.exception
assert failed_result.exit_code != 0
assert "Invalid value for 'SUBJECTFILE': " \
"File 'nonexistent_path' does not exist." in failed_result.output
def test_train(testdatadir):
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(annif.cli.cli, ['train', 'tfidf-fi', docfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('projects/tfidf-fi/vectorizer').exists()
assert testdatadir.join('projects/tfidf-fi/vectorizer').size() > 0
assert testdatadir.join('projects/tfidf-fi/tfidf-index').exists()
assert testdatadir.join('projects/tfidf-fi/tfidf-index').size() > 0
def test_train_multiple(testdatadir):
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(annif.cli.cli,
['train', 'tfidf-fi', docfile, docfile])
assert not result.exception
assert result.exit_code == 0
assert testdatadir.join('projects/tfidf-fi/vectorizer').exists()
assert testdatadir.join('projects/tfidf-fi/vectorizer').size() > 0
assert testdatadir.join('projects/tfidf-fi/tfidf-index').exists()
assert testdatadir.join('projects/tfidf-fi/tfidf-index').size() > 0
def test_train_cached(testdatadir):
result = runner.invoke(annif.cli.cli,
['train', '--cached', 'tfidf-fi'])
assert result.exception
assert result.exit_code == 1
assert 'Training tfidf project from cached data not supported.' \
in result.output
def test_train_cached_with_corpus(testdatadir):
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(annif.cli.cli,
['train', '--cached', 'tfidf-fi', docfile])
assert result.exception
assert result.exit_code == 2
assert 'Corpus paths cannot be given when using --cached option.' \
in result.output
def test_train_param_override_algo_notsupported():
pytest.importorskip('annif.backend.vw_multi')
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(
annif.cli.cli,
['train', 'vw-multi-fi', docfile,
'--backend-param', 'vw_multi.algorithm=oaa'])
assert result.exception
assert result.exit_code == 1
assert 'Algorithm overriding not supported.' in result.output
def test_train_nonexistent_path():
failed_result = runner.invoke(
annif.cli.cli, [
'train', 'dummy-fi', 'nonexistent_path'])
assert failed_result.exception
assert failed_result.exit_code != 0
assert "Invalid value for '[PATHS]...': " \
"Path 'nonexistent_path' does not exist." in failed_result.output
def test_train_no_path(caplog):
logger = annif.logger
logger.propagate = True
result = runner.invoke(
annif.cli.cli, [
'train', 'dummy-fi'])
assert not result.exception
assert result.exit_code == 0
assert 'Reading empty file' == caplog.records[0].message
def test_learn(testdatadir):
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(annif.cli.cli, ['learn', 'dummy-fi', docfile])
assert not result.exception
assert result.exit_code == 0
def test_learn_notsupported(testdatadir):
docfile = os.path.join(
os.path.dirname(__file__),
'corpora',
'archaeology',
'documents.tsv')
result = runner.invoke(annif.cli.cli, ['learn', 'tfidf-fi', docfile])
assert result.exit_code != 0
assert 'Learning not supported' in result.output
def test_learn_nonexistent_path():
failed_result = runner.invoke(
annif.cli.cli, [
'learn', 'dummy-fi', 'nonexistent_path'])
assert failed_result.exception
assert failed_result.exit_code != 0
assert "Invalid value for '[PATHS]...': " \
"Path 'nonexistent_path' does not exist." in failed_result.output
def test_suggest():
result = runner.invoke(
annif.cli.cli,
['suggest', 'dummy-fi'],
input='kissa')
assert not result.exception
assert result.output == "<http://example.org/dummy>\tdummy\t1.0\n"
assert result.exit_code == 0
def test_suggest_with_notations():
result = runner.invoke(
annif.cli.cli,
['suggest', '--backend-param', 'dummy.notation=42.42', 'dummy-fi'],
input='kissa')
assert not result.exception
assert result.output == "<http://example.org/dummy>\tdummy\t42.42\t1.0\n"
assert result.exit_code == 0
def test_suggest_nonexistent():
result = runner.invoke(
annif.cli.cli,
['suggest', TEMP_PROJECT],
input='kissa')
assert result.exception
assert result.output == "No projects found with id '{}'.\n".format(
TEMP_PROJECT)
assert result.exit_code != 0
def test_suggest_param():
result = runner.invoke(
annif.cli.cli,
['suggest', '--backend-param', 'dummy.score=0.8', 'dummy-fi'],
input='kissa')
assert not result.exception
assert result.output == "<http://example.org/dummy>\tdummy\t0.8\n"
assert result.exit_code == 0
def test_suggest_param_backend_nonexistent():
result = runner.invoke(
annif.cli.cli,
['suggest', '--backend-param', 'not_a_backend.score=0.8', 'dummy-fi'],
input='kissa')
assert result.exception
assert 'The backend not_a_backend in CLI option ' + \
'"-b not_a_backend.score=0.8" not matching the project backend ' + \
'dummy.' in result.output
assert result.exit_code != 0
def test_suggest_ensemble():
    """An ensemble project forwards suggestions from its source projects."""
    res = runner.invoke(annif.cli.cli, ['suggest', 'ensemble'],
                        input='the cat sat on the mat')
    assert not res.exception
    assert res.exit_code == 0
    assert res.output == "<http://example.org/dummy>\tdummy\t1.0\n"
def test_index(tmpdir):
    """Index a directory: create .annif files, respect and override --force."""
    tmpdir.join('doc1.txt').write('nothing special')
    result = runner.invoke(
        annif.cli.cli, ['index', 'dummy-en', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    assert tmpdir.join('doc1.annif').exists()
    assert tmpdir.join('doc1.annif').read_text(
        'utf-8') == "<http://example.org/dummy>\tdummy\t1.0\n"
    # make sure that preexisting subject files are not overwritten
    result = runner.invoke(
        annif.cli.cli, ['index', 'dummy-en', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    assert "Not overwriting" in result.output
    # check that the --force parameter forces overwriting
    result = runner.invoke(
        annif.cli.cli, ['index', 'dummy-fi', '--force', str(tmpdir)])
    assert tmpdir.join('doc1.annif').exists()
    assert "Not overwriting" not in result.output
    assert tmpdir.join('doc1.annif').read_text(
        'utf-8') == "<http://example.org/dummy>\tdummy\t1.0\n"
def test_index_nonexistent_path():
    """The index command must reject a directory that does not exist."""
    res = runner.invoke(
        annif.cli.cli, ['index', 'dummy-fi', 'nonexistent_path'])
    assert res.exception
    assert res.exit_code != 0
    assert ("Invalid value for 'DIRECTORY': "
            "Directory 'nonexistent_path' does not exist.") in res.output
def test_eval_label(tmpdir):
    """Evaluate a directory corpus whose gold standard uses plain labels.

    Two of the three documents have .key files, so two documents are
    evaluated; one true positive, one false positive and one false
    negative make all the basic metrics come out as 0.5.
    """
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    tmpdir.join('doc3.txt').write('doc3')
    result = runner.invoke(annif.cli.cli, ['eval', 'dummy-en', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # The decimal point is escaped (\.) so the pattern cannot accept a
    # digit as the separator (the old r'\d.\d+' also matched e.g. '123').
    precision = re.search(r'Precision .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(precision.group(1)) == 0.5
    recall = re.search(r'Recall .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(recall.group(1)) == 0.5
    f_measure = re.search(r'F1 score .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(f_measure.group(1)) == 0.5
    precision1 = re.search(r'Precision@1:\s+(\d\.\d+)', result.output)
    assert float(precision1.group(1)) == 0.5
    precision3 = re.search(r'Precision@3:\s+(\d\.\d+)', result.output)
    assert float(precision3.group(1)) == 0.5
    precision5 = re.search(r'Precision@5:\s+(\d\.\d+)', result.output)
    assert float(precision5.group(1)) == 0.5
    lrap = re.search(r'LRAP:\s+(\d\.\d+)', result.output)
    assert float(lrap.group(1)) == 0.75
    true_positives = re.search(r'True positives:\s+(\d+)', result.output)
    assert int(true_positives.group(1)) == 1
    false_positives = re.search(r'False positives:\s+(\d+)', result.output)
    assert int(false_positives.group(1)) == 1
    false_negatives = re.search(r'False negatives:\s+(\d+)', result.output)
    assert int(false_negatives.group(1)) == 1
    ndocs = re.search(r'Documents evaluated:\s+(\d+)', result.output)
    assert int(ndocs.group(1)) == 2
def test_eval_uri(tmpdir):
    """Evaluate a directory corpus whose gold standard uses URI+label keys.

    Same fixture layout as test_eval_label, but the .key files carry
    <uri>\\t<label> pairs instead of bare labels; the metrics are identical.
    """
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write("<http://example.org/dummy>\tdummy\n")
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write("<http://example.org/none>\tnone\n")
    tmpdir.join('doc3.txt').write('doc3')
    result = runner.invoke(annif.cli.cli, ['eval', 'dummy-en', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # Decimal points are escaped so only true float values match
    # (unescaped '.' also matched a digit, e.g. '123').
    precision = re.search(r'Precision .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(precision.group(1)) == 0.5
    recall = re.search(r'Recall .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(recall.group(1)) == 0.5
    f_measure = re.search(r'F1 score .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(f_measure.group(1)) == 0.5
    precision1 = re.search(r'Precision@1:\s+(\d\.\d+)', result.output)
    assert float(precision1.group(1)) == 0.5
    precision3 = re.search(r'Precision@3:\s+(\d\.\d+)', result.output)
    assert float(precision3.group(1)) == 0.5
    precision5 = re.search(r'Precision@5:\s+(\d\.\d+)', result.output)
    assert float(precision5.group(1)) == 0.5
    lrap = re.search(r'LRAP:\s+(\d\.\d+)', result.output)
    assert float(lrap.group(1)) == 0.75
    true_positives = re.search(r'True positives:\s+(\d+)', result.output)
    assert int(true_positives.group(1)) == 1
    false_positives = re.search(r'False positives:\s+(\d+)', result.output)
    assert int(false_positives.group(1)) == 1
    false_negatives = re.search(r'False negatives:\s+(\d+)', result.output)
    assert int(false_negatives.group(1)) == 1
    ndocs = re.search(r'Documents evaluated:\s+(\d+)', result.output)
    assert int(ndocs.group(1)) == 2
def test_eval_param(tmpdir):
    """A zero-score backend parameter suppresses all hits during evaluation."""
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    tmpdir.join('doc3.txt').write('doc3')
    result = runner.invoke(
        annif.cli.cli, [
            'eval', '--backend-param', 'dummy.score=0.0', 'dummy-en',
            str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # since zero scores were set with the parameter, there should be no hits
    # at all; the decimal point is escaped so only a real float value matches
    recall = re.search(r'Recall .*doc.*:\s+(\d\.\d+)', result.output)
    assert float(recall.group(1)) == 0.0
def test_eval_resultsfile(tmpdir):
    """--results-file writes a per-subject TSV consistent with the summary.

    The subject-averaged precision/recall/F1 printed to the console must
    equal the arithmetic mean of the per-subject columns in the file.
    """
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    tmpdir.join('doc3.txt').write('doc3')
    resultfile = tmpdir.join('results.tsv')
    result = runner.invoke(
        annif.cli.cli, [
            'eval', '--results-file', str(resultfile), 'dummy-en',
            str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # subject average should equal average of all subject scores in outputfile
    # (decimal points escaped so the regex cannot match a stray digit)
    precision = float(
        re.search(r'Precision .*subj.*:\s+(\d\.\d+)', result.output).group(1))
    recall = float(
        re.search(r'Recall .*subj.*:\s+(\d\.\d+)', result.output).group(1))
    f_measure = float(
        re.search(r'F1 score .*subj.*:\s+(\d\.\d+)', result.output).group(1))
    precision_numerator = 0
    recall_numerator = 0
    f_measure_numerator = 0
    denominator = 0
    with resultfile.open() as f:
        header = next(f)
        assert header.strip('\n') == '\t'.join(['URI',
                                                'Label',
                                                'Support',
                                                'True_positives',
                                                'False_positives',
                                                'False_negatives',
                                                'Precision',
                                                'Recall',
                                                'F1_score'])
        for line in f:
            assert line.strip() != ''
            parts = line.split('\t')
            if parts[1] == 'dummy':
                assert int(parts[2]) == 1
                assert int(parts[3]) == 1
                assert int(parts[4]) == 1
                assert int(parts[5]) == 0
            if parts[1] == 'none':
                assert int(parts[2]) == 1
                assert int(parts[3]) == 0
                assert int(parts[4]) == 0
                assert int(parts[5]) == 1
            precision_numerator += float(parts[6])
            recall_numerator += float(parts[7])
            f_measure_numerator += float(parts[8])
            denominator += 1
    assert precision_numerator / denominator == precision
    assert recall_numerator / denominator == recall
    assert f_measure_numerator / denominator == f_measure
def test_eval_badresultsfile(tmpdir):
    """Evaluation must fail cleanly when the results file cannot be created."""
    for fname, content in (('doc1.txt', 'doc1'), ('doc1.key', 'dummy'),
                           ('doc2.txt', 'doc2'), ('doc2.key', 'none'),
                           ('doc3.txt', 'doc3')):
        tmpdir.join(fname).write(content)
    res = runner.invoke(
        annif.cli.cli,
        ['eval', '--results-file', 'newdir/test_file.txt', 'dummy-en',
         str(tmpdir)])
    assert res.exception
    assert res.exit_code != 0
    assert 'cannot open results-file for writing' in res.output
def test_eval_docfile():
    """Evaluation accepts a TSV document file as its corpus."""
    doc_path = os.path.join(
        os.path.dirname(__file__), 'corpora', 'archaeology', 'documents.tsv')
    res = runner.invoke(annif.cli.cli, ['eval', 'dummy-fi', doc_path])
    assert not res.exception
    assert res.exit_code == 0
def test_eval_empty_file(tmpdir):
    """Evaluating an empty corpus file must fail with an explanatory error."""
    empty = tmpdir.ensure('empty.tsv')
    res = runner.invoke(annif.cli.cli, ['eval', 'dummy-fi', str(empty)])
    assert res.exception
    assert res.exit_code != 0
    assert 'cannot evaluate empty corpus' in res.output
def test_eval_nonexistent_path():
    """The eval command must reject a path that does not exist."""
    res = runner.invoke(
        annif.cli.cli, ['eval', 'dummy-fi', 'nonexistent_path'])
    assert res.exception
    assert res.exit_code != 0
    assert ("Invalid value for '[PATHS]...': "
            "Path 'nonexistent_path' does not exist.") in res.output
def test_eval_single_process(tmpdir):
    """Evaluation succeeds when forced to run in a single process."""
    for fname, content in (('doc1.txt', 'doc1'), ('doc1.key', 'dummy'),
                           ('doc2.txt', 'doc2'), ('doc2.key', 'none'),
                           ('doc3.txt', 'doc3')):
        tmpdir.join(fname).write(content)
    res = runner.invoke(
        annif.cli.cli, ['eval', '--jobs', '1', 'dummy-en', str(tmpdir)])
    assert not res.exception
    assert res.exit_code == 0
def test_eval_two_jobs(tmpdir):
    """Evaluation succeeds when parallelized over two jobs."""
    for fname, content in (('doc1.txt', 'doc1'), ('doc1.key', 'dummy'),
                           ('doc2.txt', 'doc2'), ('doc2.key', 'none'),
                           ('doc3.txt', 'doc3')):
        tmpdir.join(fname).write(content)
    res = runner.invoke(
        annif.cli.cli, ['eval', '--jobs', '2', 'dummy-en', str(tmpdir)])
    assert not res.exception
    assert res.exit_code == 0
def test_optimize_dir(tmpdir):
    """Optimize over a directory corpus and report the best metric values."""
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    tmpdir.join('doc3.txt').write('doc3')
    result = runner.invoke(
        annif.cli.cli, [
            'optimize', 'dummy-en', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # Decimal points escaped (\.) so a digit cannot act as the separator;
    # the document counter uses \d+ so counts >= 10 would also be captured.
    precision = re.search(r'Best\s+Precision .*?doc.*?:\s+(\d\.\d+)',
                          result.output)
    assert float(precision.group(1)) == 0.5
    recall = re.search(r'Best\s+Recall .*?doc.*?:\s+(\d\.\d+)', result.output)
    assert float(recall.group(1)) == 0.5
    f_measure = re.search(r'Best\s+F1 score .*?doc.*?:\s+(\d\.\d+)',
                          result.output)
    assert float(f_measure.group(1)) == 0.5
    ndocs = re.search(r'Documents evaluated:\s+(\d+)', result.output)
    assert int(ndocs.group(1)) == 2
def test_optimize_docfile(tmpdir):
    """Optimization accepts a TSV document file as its corpus."""
    docfile = tmpdir.join('documents.tsv')
    docfile.write("""Läntinen\t<http://www.yso.fi/onto/yso/p2557>
Oulunlinnan\t<http://www.yso.fi/onto/yso/p7346>
Harald Hirmuinen\t<http://www.yso.fi/onto/yso/p6479>""")
    res = runner.invoke(
        annif.cli.cli, ['optimize', 'dummy-fi', str(docfile)])
    assert not res.exception
    assert res.exit_code == 0
def test_optimize_nonexistent_path():
    """The optimize command must reject a path that does not exist."""
    res = runner.invoke(
        annif.cli.cli, ['optimize', 'dummy-fi', 'nonexistent_path'])
    assert res.exception
    assert res.exit_code != 0
    assert ("Invalid value for '[PATHS]...': "
            "Path 'nonexistent_path' does not exist.") in res.output
def test_hyperopt_ensemble(tmpdir):
    """Hyperopt on an ensemble reports tuned per-source weights."""
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    result = runner.invoke(
        annif.cli.cli, [
            'hyperopt', 'ensemble', str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    # Decimal points escaped so only real float weights (0.xxx) match.
    assert re.search(
        r'sources=dummy-en:0\.\d+,dummydummy:0\.\d+',
        result.output) is not None
def test_hyperopt_ensemble_resultsfile(tmpdir):
    """--results-file writes a TSV with one sequentially numbered trial per row."""
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    resultfile = tmpdir.join('results.tsv')
    result = runner.invoke(
        annif.cli.cli, [
            'hyperopt', '--results-file', str(resultfile), 'ensemble',
            str(tmpdir)])
    assert not result.exception
    assert result.exit_code == 0
    with resultfile.open() as f:
        header = next(f)
        assert header.strip('\n') == '\t'.join(['trial',
                                                'value',
                                                'dummy-en',
                                                'dummydummy'])
        # each data row: trial index, objective value, one weight per source
        for idx, line in enumerate(f):
            assert line.strip() != ''
            parts = line.split('\t')
            assert len(parts) == 4
            assert int(parts[0]) == idx
def test_hyperopt_not_supported(tmpdir):
    """Hyperopt must fail for backends without hyperparameter optimization."""
    tmpdir.join('doc1.txt').write('doc1')
    tmpdir.join('doc1.key').write('dummy')
    tmpdir.join('doc2.txt').write('doc2')
    tmpdir.join('doc2.key').write('none')
    res = runner.invoke(
        annif.cli.cli, ['hyperopt', 'tfidf-en', str(tmpdir)])
    assert res.exception
    assert res.exit_code != 0
    assert 'Hyperparameter optimization not supported' in res.output
| 36.661333 | 79 | 0.631583 |
acec299144ff63db648cd6425dba704391ea831b | 1,033 | py | Python | commands/files.py | wikiq/routeros-scanner | f50a9e9e692378e58bb6ce7a243f82f50255b96f | [
"MIT"
] | 693 | 2022-03-16T02:20:18.000Z | 2022-03-31T20:29:21.000Z | commands/files.py | wikiq/routeros-scanner | f50a9e9e692378e58bb6ce7a243f82f50255b96f | [
"MIT"
] | 20 | 2022-03-18T02:21:20.000Z | 2022-03-29T13:34:23.000Z | commands/files.py | wikiq/routeros-scanner | f50a9e9e692378e58bb6ce7a243f82f50255b96f | [
"MIT"
] | 102 | 2022-03-11T05:59:04.000Z | 2022-03-31T14:49:30.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import traceback
import sys
from commands.basecommand import BaseCommand
class Files(BaseCommand):
    """Collect the RouterOS file listing and flag suspicious file contents."""

    def __init__(self):
        self.__name__ = 'Files'

    def run_ssh(self, sshc):
        """Fetch '/file print detail' over SSH and analyse the parsed result."""
        raw = self._ssh_data_with_header(sshc, '/file print detail')
        suspicious, recommendation = self.check_results_ssh(raw)
        return {
            'raw_data': raw,
            'suspicious': suspicious,
            'recommendation': recommendation,
        }

    def check_results_ssh(self, res):
        """Return (suspicious file notes, recommendations) for parsed entries.

        An entry is suspicious when its 'contents' field shows a fetch
        command or an embedded http:// URL.
        """
        suspicious = []
        recommendation = []
        try:
            for entry in res:
                if 'contents' not in entry:
                    continue
                if ('/tool fetch' in entry['contents']) or ('http://' in entry['contents']):
                    suspicious.append(
                        f'File name: {entry["name"]}, content: {entry["contents"]} - severity: high')
        except Exception:
            print(traceback.format_exc(), file=sys.stderr)
        return suspicious, recommendation
| 26.487179 | 116 | 0.595353 |
acec2a2438dc2dc563eb13c66cd5e808286e62e1 | 22,561 | py | Python | tools/SDKTool/src/ui/tree/applications_tree/ui_explore_tree/train_sample.py | Passer-D/GameAISDK | a089330a30b7bfe1f6442258a12d8c0086240606 | [
"Apache-2.0"
] | 1,210 | 2020-08-18T07:57:36.000Z | 2022-03-31T15:06:05.000Z | tools/SDKTool/src/ui/tree/applications_tree/ui_explore_tree/train_sample.py | guokaiSama/GameAISDK | a089330a30b7bfe1f6442258a12d8c0086240606 | [
"Apache-2.0"
] | 37 | 2020-08-24T02:48:38.000Z | 2022-01-30T06:41:52.000Z | tools/SDKTool/src/ui/tree/applications_tree/ui_explore_tree/train_sample.py | guokaiSama/GameAISDK | a089330a30b7bfe1f6442258a12d8c0086240606 | [
"Apache-2.0"
] | 275 | 2020-08-18T08:35:16.000Z | 2022-03-31T15:06:07.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making GameAISDK available.
This source code file is licensed under the GNU General Public License Version 3.
For full details, please refer to the file "LICENSE.txt" which is provided as part of this source code package.
Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
"""
import os
import sys
import time
import shutil
import logging
import threading
import json
import matplotlib.pyplot as plot
import zmq
from PyQt5.QtWidgets import QApplication
from ....canvas.ui_canvas import canvas
from ....canvas.canvas_signal import canvas_signal_inst
from ....dialog.tip_dialog import show_warning_tips, show_message_tips
from ....tree.applications_tree.ui_explore_tree.json_to_refine_det_txt import json_to_refine_det_txt
from ....utils import del_files, get_files_count, set_log_text
from .....subprocess_service.subprocess_service_manager import backend_service_manager as bsa
from .....subprocess_service.process_timer import ProcessTimer
from .....project.project_manager import g_project_manager
from .....config_manager.ui_auto_explore.ui_explore_api import explore_train_get_params
from .define import TRAIN_PARAMS
# Host platform tag, e.g. 'win32' or 'linux'.
platform = sys.platform
# Epochs already covered by the pre-trained base model; the user-configured
# fine-tune epoch count is added on top of this offset.
BASE_TRAIN_EPOCH = 55
# File names (stored under <project>/data) holding the test/train sample lists.
_test_dataset_file = "test_dataset.txt"
_train_dataset_file = "train_dataset.txt"
# File name of the RefineDet weight snapshot copied into the project data dir.
REFINE_HC2NET_DATA_PTH = "Final_Refine_hc2net_version3_self_dataset.pth"
class TrainLogParam(object):
    """Mutable progress/exit state shared by the log-receive and log-paint threads."""

    def __init__(self):
        # per-epoch loss curve and running totals
        self.loss = []
        self.sum_loss = 0
        # exit flags for the paint thread and the receive thread
        self.exit_paint = False
        self.exit_recv = False
        # training schedule information
        self.epoch = 0
        self.epoch_size = 0
        self.step = 0
        self.batch_size = 0
        # live progress counters
        self.current_epoch = 0
        self.current_step = 0
        self.notify_num = 0
        self.total_time = 0

    def set_exit_paint_log(self, flag):
        """Set (or clear) the paint-thread exit flag."""
        self.exit_paint = flag

    def set_exit_recv_log(self, flag):
        """Set (or clear) the receive-thread exit flag."""
        self.exit_recv = flag

    def set_exit(self, flag):
        """Set both exit flags at once."""
        self.set_exit_paint_log(flag)
        self.set_exit_recv_log(flag)

    def is_valid(self):
        """The state remains valid until the receive thread is told to exit."""
        return not self.exit_recv
class CNNTrainSample(object):
    """Drive RefineDet fine-tuning and mAP analysis as background services.

    Training and analysis run as external processes managed by the backend
    service manager (bsa); progress is received over a ZMQ PULL socket and
    rendered to the tool canvas via matplotlib.
    """
    TRAIN_SAMPLE_SERVICE_NAME = 'train_sample'
    ANALYZE_SERVICE_NAME = 'analyze_result'
    def __init__(self, sample_path):
        """Remember the sample directory and derive RefineDet paths from AI_SDK_PATH."""
        self.__train_sample_path = sample_path
        self.__logger = logging.getLogger('sdktool')
        self.__canvas = canvas
        # self.__ui = ui
        # NOTE(review): os.path.join fails with TypeError if AI_SDK_PATH is unset — confirm
        ai_sdk_path = os.environ.get('AI_SDK_PATH')
        self.__refine_det_path = os.path.join(ai_sdk_path, "Modules", "RefineDet")
        self.__run_result_path = None
        self.__pre_dir = None
        self.__grd_dir = None
        self.__results_dir = None
        self.__map_path = None
        self.__weight_file = \
            os.path.join(self.__refine_det_path,
                         "weights/Refine_hc2net_version3_320/model/Final_Refine_hc2net_version3_self_dataset.pth")
        # weight_file is composed of a directory part and a file-name part.
        # directory: --save_folder('weights'), --version('Refine_hc2net_version3') + '_' + --size(320), --date('model')
        # file name: 'Final_' + --version('Refine_hc2net_version3') + '_' + --dataset(self_dataset) + '.pth'
        self.__train_param_dict = {}
        self.__paint_log_thread = None
        self.__recvLogThread = None
        self.__mapThread = None
        self.__lock = threading.Lock()
        self.__train_log_param = None
        self.__process_running = False
        self.__socket = None
    def __init_test_map(self):
        """Create the test_map output directories (pre/grd/results) under run_result.
        :return:
        """
        self.__run_result_path = os.path.join(g_project_manager.get_project_path(), "data", "run_result", "test_map")
        self.__pre_dir = os.path.join(self.__run_result_path, "pre")
        if not os.path.exists(self.__pre_dir):
            os.makedirs(self.__pre_dir)
        self.__grd_dir = os.path.join(self.__run_result_path, "grd")
        if not os.path.exists(self.__grd_dir):
            os.makedirs(self.__grd_dir)
        self.__results_dir = os.path.join(self.__run_result_path, "results")
        if not os.path.exists(self.__results_dir):
            os.makedirs(self.__results_dir)
        self.__map_path = os.path.join(self.__results_dir, "mAP.jpg")
    def set_sample_path(self, path):
        """Set the directory holding the training samples."""
        self.__train_sample_path = path
    def get_sample_path(self):
        """Return the directory holding the training samples."""
        return self.__train_sample_path
    def _clear_files(self):
        """Remove previous analysis output (pre/grd/results dirs and mAP image)."""
        if self.__pre_dir and os.path.isdir(self.__pre_dir):
            del_files(self.__pre_dir)
        if self.__grd_dir and os.path.isdir(self.__grd_dir):
            del_files(self.__grd_dir)
        if self.__results_dir and os.path.isdir(self.__results_dir):
            del_files(self.__results_dir)
        if self.__map_path and os.path.exists(self.__map_path):
            os.remove(self.__map_path)
    def _pre_load_model(self):
        """Copy the project's saved model into the RefineDet weight location.

        Returns False when either the saved model or the test dataset file
        is missing; True on a successful copy.
        """
        src_test_data_file = self._get_dataset_path(_test_dataset_file)
        dst_model_file = self._get_model_path()
        if not os.path.exists(dst_model_file) or not os.path.exists(src_test_data_file):
            self.__logger.error("weight file or test data not exist, model_file: {}, "
                                "test_data_file: {}".format(dst_model_file, _test_dataset_file))
            return False
        else:
            shutil.copyfile(dst_model_file, self.__weight_file)
            return True
    def _get_model_path(self):
        """Return the path of the model snapshot stored in the project data dir."""
        model_file = os.path.join(g_project_manager.get_project_path(), "data", REFINE_HC2NET_DATA_PTH)
        return model_file
    def _get_dataset_path(self, dataset_file):
        """Return the full path of a dataset list file in the project data dir.
        :param dataset_file: train dataset or test dataset
        :return:
        """
        dataset_path = os.path.join(g_project_manager.get_project_path(), "data", dataset_file)
        return dataset_path
    def run(self):
        """Start training unless a training service is already running."""
        if not bsa.exist_service(service_name=self.TRAIN_SAMPLE_SERVICE_NAME):
            self.__logger.info("start train")
            self.__init_test_map()
            self._clear_files()
            is_ok, desc = self._train_sample()
            if is_ok:
                self._paint_train_log()
            else:
                show_warning_tips(desc)
        else:
            show_warning_tips('正在训练中,请先停止训练后再启动')
    def _train_sample(self):
        """Prepare datasets and launch the train_val.py service.

        Returns (is_ok, desc) from the service manager.
        """
        if not self.__train_sample_path or not os.path.exists(self.__train_sample_path):
            raise ValueError('train sample path(%s) is not valid' % self.__train_sample_path)
        image_count = get_files_count(self.__train_sample_path)
        if image_count <= 0:
            text = "failed, no image in {}, please check".format(self.__train_sample_path)
            show_warning_tips(text)
            # NOTE(review): returns None here while run() unpacks a tuple — confirm
            return
        current_path = os.getcwd()
        if not os.path.exists(self.__refine_det_path):
            raise Exception("refineDetPath: {} is not exist".format(self.__refine_det_path))
        # remove the previous training result file before starting a new run
        if os.path.exists(self.__weight_file):
            os.remove(self.__weight_file)
        canvas_signal_inst.reset_state()
        self.__train_log_param = TrainLogParam()
        os.chdir(self.__refine_det_path)
        time.sleep(1)
        # merge user parameters with the defaults (user values win)
        train_param_dict = explore_train_get_params().copy()
        for k, v in TRAIN_PARAMS.items():
            if k not in train_param_dict:
                train_param_dict[k] = v
        self.__train_param_dict = train_param_dict
        # only these keys are forwarded to train_val.py on the command line
        run_program_param_keys = ['batch_size', 'num_workers']
        run_program_params = dict()
        for param_key, param_value in train_param_dict.items():
            if param_key in run_program_param_keys:
                run_program_params[param_key] = param_value
        is_debug = bool(train_param_dict.get("is_debug", False))
        train_dataset_path = self._get_dataset_path(_train_dataset_file)
        test_dataset_path = self._get_dataset_path(_test_dataset_file)
        json_to_refine_det_txt(self.__train_sample_path, train_dataset_path, test_dataset_path, is_debug)
        # "微调次数" = user-configured fine-tune epoch count, added to the base offset
        max_epoch = train_param_dict.get("微调次数", 5)
        max_epoch = int(max_epoch) + BASE_TRAIN_EPOCH
        run_program = "python train_val.py --max_epoch {} --train_label_list {}".format(max_epoch, train_dataset_path)
        for k, v in run_program_params.items():
            run_program = " {} --{} {}".format(run_program, k, str(v))
        self.__logger.info(run_program)
        self.__process_running = True
        is_ok, desc = bsa.start_service(service_name=self.TRAIN_SAMPLE_SERVICE_NAME,
                                        run_programs=run_program,
                                        process_param_type=bsa.SUBPROCESS_SHELL_TYPE,
                                        callback_func=self._process_monitor_callback)
        if not is_ok:
            self.__logger.error("start train sample failed: %s", desc)
        else:
            self.__logger.info('start train sample success '
                               'pid: %s', bsa.get_pids(service_name=self.TRAIN_SAMPLE_SERVICE_NAME))
        time.sleep(1)
        os.chdir(current_path)
        return is_ok, desc
    def _save_weight(self):
        """Copy the freshly trained weight file into the project data dir."""
        src_test_data_file = self._get_dataset_path(_test_dataset_file)
        if not os.path.exists(self.__weight_file) or not os.path.exists(src_test_data_file):
            raise Exception("weight file or test data not exist, weight_file: {}, "
                            "test_data_file: {}".format(self.__weight_file, src_test_data_file))
        else:
            dst_model_file = self._get_model_path()
            shutil.copyfile(self.__weight_file, dst_model_file)
    def __build_socket(self):
        """(Re)create the ZMQ PULL socket bound to port 5558."""
        self.__delete_socket()
        context = zmq.Context()
        self.__socket = context.socket(zmq.PULL)
        self.__socket.bind("tcp://*:5558")
    def __delete_socket(self):
        """Close the ZMQ socket, ignoring close failures, and drop the reference."""
        try:
            if self.__socket:
                self.__socket.close()
        except BlockingIOError:
            self.__logger.error('failed to close socket')
        finally:
            self.__socket = None
    def _paint_train_log(self):
        """Start the paint and receive threads that visualize training progress."""
        set_log_text("训练网络模型")
        max_epoch = int(self.__train_param_dict.get("微调次数", 5))
        self.__train_log_param.epoch = max_epoch
        def _paint_log(train_log_param, lock):
            # Periodically render the loss curve to a jpg and show it on the canvas.
            font1 = {'family': 'Times New Roman', 'weight': 'normal', 'size': 12}
            self.__logger.info("************start paint log************")
            while not train_log_param.exit_paint:
                if not self.__process_running:
                    self.__logger.error('remote process quit')
                    break
                time.sleep(3)
                speed = 0 if train_log_param.notify_num == 0 else \
                    train_log_param.total_time / train_log_param.notify_num
                avg_loss = 0 if train_log_param.notify_num == 0 else \
                    train_log_param.sum_loss / train_log_param.notify_num
                msg = 'epoch: %s/%s, step: %s/%s, speed: %.3f(s), avg_loss:%.3f' % (train_log_param.current_epoch,
                                                                                    train_log_param.epoch,
                                                                                    train_log_param.current_step,
                                                                                    train_log_param.step,
                                                                                    speed, avg_loss)
                self.__logger.info(msg)
                plot.figure(num=1, clear=True)
                plot.xlabel('epoch', font1)
                plot.ylabel('loss', font1)
                plot.plot(range(len(train_log_param.loss)), train_log_param.loss, '', c='g')
                if train_log_param.current_epoch < max_epoch or \
                        (train_log_param.current_epoch == train_log_param.epoch and
                         train_log_param.current_step < train_log_param.step):
                    plot.title('train ' + msg)
                else:
                    plot.title('train over, max epoch: {}'.format(max_epoch), font1)
                name = './test2.jpg'
                plot.savefig(name)
                # display the rendered image on the canvas
                canvas_signal_inst.canvas_show_img(name)
                if len(train_log_param.loss) > 0 and \
                        train_log_param.current_epoch == train_log_param.epoch and \
                        train_log_param.current_step == train_log_param.step:
                    self.__logger.info("reach max epoch")
                    break
            train_log_param.set_exit_paint_log(False)
            self.__logger.info("************exit paint log************")
            self.__logger.info('stop service(%s)', self.TRAIN_SAMPLE_SERVICE_NAME)
            # self.finish_train()
        self.__paint_log_thread = threading.Thread(target=_paint_log, args=(self.__train_log_param, self.__lock))
        self.__paint_log_thread.start()
        def _recv_log(socket, train_log_param, lock, paint_log_thread):
            """ if state = running, data format:
            {
                'state': 'running',
                'epoch': epoch,
                'epoch_size': epoch_size,
                'epoch_iter': iteration % epoch_size,
                'iteration': iteration,
                'al': al,
                'ac': ac,
                'ol': ol,
                'oc': oc,
                'batch_time': batch_time,
                'lr': lr
            }
            :param socket:
            :param train_log_param:
            :param lock:
            :param paint_log_thread:
            :return:
            """
            self.__logger.info("************start recv log************")
            all_finished = False
            while not train_log_param.exit_recv:
                if not self.__process_running:
                    self.__logger.error('remote process quit')
                    break
                self.__logger.debug("recv loss %s", train_log_param.loss)
                try:
                    # non-blocking receive; back off 5s on an empty queue
                    data = socket.recv(flags=zmq.NOBLOCK)
                    if not data:
                        continue
                    # NOTE(review): bytes used as the log format string — confirm intended
                    self.__logger.info(b"recv log data is: %s", data)
                    data = json.loads(data.decode('utf-8'))
                except zmq.ZMQError as err:
                    self.__logger.warning("zmq receive warning: {}".format(err))
                    time.sleep(5)
                    continue
                self.__logger.debug("recv log data is %s", data)
                state = data.get('state')
                if state == 'over':
                    self._save_weight()
                    self.__logger.info("************recv over************")
                    all_finished = True
                    train_log_param.current_epoch = train_log_param.epoch
                    train_log_param.current_step = train_log_param.step
                    break
                cur_loss = data.get('al')
                cur_epoch = data.get('epoch')
                epoch_size = data.get('epoch_size', 1)
                epoch_iter = data.get('epoch_iter', 0)
                batch_time = data.get('batch_time', 0)
                train_log_param.total_time += batch_time
                # convert absolute epoch into a fine-tune-relative epoch number
                train_log_param.current_epoch = int(cur_epoch - BASE_TRAIN_EPOCH)
                train_log_param.sum_loss += cur_loss
                train_log_param.notify_num += 1
                train_log_param.current_step = epoch_iter + 1
                train_log_param.step = epoch_size
                # record one loss value per completed epoch
                if (epoch_iter + 1) == epoch_size:
                    train_log_param.loss.append(cur_loss)
                self.__logger.info("cur_epoch:%s, BASE_TRAIN_EPOCH:%s, epoch_size:%s, epoch_iter:%s",
                                   cur_epoch, BASE_TRAIN_EPOCH, epoch_size, epoch_iter)
            self.__logger.info("************exit recv log************")
            train_log_param.set_exit_recv_log(False)
            # wait for the paint thread to exit
            paint_log_thread.join(10)
            # mark training as finished
            self.finish_train()
            if not all_finished:
                self.__logger.error('train task is not finished!')
        self.__socket = None
        self.__build_socket()
        self.__recvLogThread = threading.Thread(target=_recv_log,
                                                args=(self.__socket,
                                                      self.__train_log_param,
                                                      self.__lock,
                                                      self.__paint_log_thread))
        self.__recvLogThread.start()
    def _compute_map(self):
        """Wait for the analysis process to signal 'over', then show the mAP image."""
        self.__logger.info("********start compute map************")
        while self.__process_running:
            data = self.__socket.recv().decode('utf-8')
            self.__logger.info("recv log data is %s", data)
            if data == 'over':
                self.__logger.info("recv over")
                break
            else:
                time.sleep(1)
        if os.path.exists(self.__map_path):
            self.__logger.info("process success")
            # display the rendered image on the canvas
            canvas_signal_inst.canvas_show_img(self.__map_path)
        else:
            raise Exception("image not exist {}".format(self.__map_path))
        self.__logger.info("********exit compute map********")
    def _process_monitor_callback(self, service_state, desc, *args, **kwargs):
        """Service-manager callback: clear the running flag on any non-running state."""
        self.__logger.info("service state(%s), desc(%s), args: %s, kwargs:%s", service_state, desc, args, kwargs)
        if service_state != ProcessTimer.SERVICE_STATE_RUNING:
            self.__process_running = False
    def _show_compute_process(self, target_count=0):
        """Drive a progress bar by counting .txt result files until target_count."""
        if target_count == 0:
            show_warning_tips('target count is 0')
            return
        if not self.__pre_dir or not os.path.exists(self.__pre_dir):
            show_warning_tips('target dir(%s) is empty' % self.__pre_dir)
            return
        txt_count = get_files_count(self.__pre_dir, ".txt")
        self.__canvas.create_process_bar("计算map", "处理中", 0, target_count)
        self.__logger.info("********start show compute information************")
        all_finished = False
        while self.__process_running:
            if txt_count >= target_count:
                all_finished = True
                break
            self.__logger.debug("txt count is %s, target_count %s", txt_count, target_count)
            self.__canvas.set_bar_cur_value(txt_count)
            QApplication.processEvents()
            time.sleep(0.5)
            txt_count = get_files_count(self.__pre_dir, ".txt")
        self.__canvas.close_bar()
        if all_finished:
            show_message_tips("处理完成")
        else:
            show_warning_tips('分析进程异常退出')
        self.__logger.info("********exit show compute information************")
    def analyze_result(self):
        """Run detectmap.py over the test dataset and display the resulting mAP."""
        if bsa.has_service_running():
            show_warning_tips('已有服务在运行,请先停止')
            return
        current_path = os.getcwd()
        if not os.path.exists(self.__refine_det_path):
            show_warning_tips("refineDetPath: {} is not exist".format(self.__refine_det_path))
            return
            # raise Exception("refineDetPath: {} is not exist".format(self.__refine_det_path))
        os.chdir(self.__refine_det_path)
        self.__init_test_map()
        self._clear_files()
        if not self._pre_load_model():
            show_warning_tips("预加载model失败")
            return
        # a finished training result file is required before analysis can run
        if not self.__weight_file or not os.path.exists(self.__weight_file):
            self.__logger.warning('taret file(%s) is not found', self.__weight_file)
            show_warning_tips('请先完成训练!')
            return
        target_count = 0
        test_dataset_path = self._get_dataset_path(_test_dataset_file)
        with open(test_dataset_path) as fd:
            content = fd.read()
            lines = content.strip().strip('\n').strip('\r').split('\n')
            target_count = len(lines)
        run_program = "python detectmap.py --save_folder {} --label_list {}".format(self.__run_result_path,
                                                                                    test_dataset_path)
        self.__logger.info("**********detectmap**********")
        self.__process_running = True
        self.__socket = None
        self.__build_socket()
        is_ok, desc = bsa.start_service(service_name=self.ANALYZE_SERVICE_NAME,
                                        run_programs=run_program,
                                        process_param_type=bsa.SUBPROCESS_SHELL_TYPE,
                                        callback_func=self._process_monitor_callback)
        if not is_ok:
            self.__logger.error("start %s service failed, %s", self.ANALYZE_SERVICE_NAME, desc)
            show_warning_tips(desc)
            self.__delete_socket()
            return
        self.__logger.info('start %s service success, pid: %s', self.ANALYZE_SERVICE_NAME,
                           bsa.get_pids(service_name=self.ANALYZE_SERVICE_NAME))
        os.chdir(current_path)
        set_log_text("计算mAP")
        self.__mapThread = threading.Thread(target=self._compute_map)
        self.__mapThread.start()
        # move function ShowCompProcess to thread, may be wrong
        self._show_compute_process(target_count)
        self.__mapThread.join(5)
        self.__mapThread = None
        self.__delete_socket()
        self.finish_map()
    def finish_map(self):
        """Stop the analysis service."""
        self.__logger.info("finish map process pid: %s", bsa.get_pids(service_name=self.ANALYZE_SERVICE_NAME))
        is_ok, _ = bsa.stop_service(service_name=self.ANALYZE_SERVICE_NAME)
        if not is_ok:
            self.__logger.error("stop train sample failed")
        else:
            self.__logger.info("stop train sample success")
    def finish_train(self):
        """Stop the training service and signal the worker threads to exit."""
        self.__delete_socket()
        self.__logger.info("finish train process pid: %s", bsa.get_pids(service_name=self.TRAIN_SAMPLE_SERVICE_NAME))
        is_ok, desc = bsa.stop_service(service_name=self.TRAIN_SAMPLE_SERVICE_NAME)
        if not is_ok:
            self.__logger.error("stop train sample failed,desc:%s", desc)
        else:
            self.__logger.info("stop train sample success")
        if self.__train_log_param is not None:
            self.__lock.acquire()
            self.__train_log_param.set_exit(True)
            self.__lock.release()
        time.sleep(2)
| 40.215686 | 118 | 0.590887 |
acec2a2994e5ef953134ab9282053f5a0b430db3 | 3,751 | py | Python | uhd_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/type_b3f7b13bb03ac01972b75a01e4f61712.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/type_b3f7b13bb03ac01972b75a01e4f61712.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | uhd_restpy/testplatform/sessions/ixnetwork/topology/tlvprofile/type_b3f7b13bb03ac01972b75a01e4f61712.py | rfrye-github/ixnetwork_restpy | 23eeb24b21568a23d3f31bbd72814ff55eb1af44 | [
"MIT"
] | null | null | null | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Type(Base):
    """Tlv type container.

    The Type class encapsulates a required type resource which will be retrieved
    from the server every time the property is accessed.

    NOTE: this is auto-generated SDM binding code. Attribute reads/writes are
    routed through ``Base._get_attribute`` / ``Base._set_attribute`` using the
    ``_SDM_ATT_MAP`` below, and ``update()`` passes ``locals()`` to
    ``_map_locals`` — so the Python-facing names must match the map keys exactly.
    """
    __slots__ = ()
    # REST resource name on the server side.
    _SDM_NAME = 'type'
    # Maps Python-facing attribute names to the server's REST attribute names.
    _SDM_ATT_MAP = {
        'IsEditable': 'isEditable',
        'IsRequired': 'isRequired',
        'Name': 'name',
    }
    def __init__(self, parent):
        super(Type, self).__init__(parent)
    @property
    def Object(self):
        """Child Object resource accessor.

        Returns
        -------
        - obj(uhd_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.object_1ba6063c8cfb61359d0cafa499ed49e4.Object): An instance of the Object class
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # Imported lazily to avoid circular imports between generated modules.
        from uhd_restpy.testplatform.sessions.ixnetwork.topology.tlvprofile.object_1ba6063c8cfb61359d0cafa499ed49e4 import Object
        return Object(self)
    @property
    def IsEditable(self):
        """
        Returns
        -------
        - bool: Indicates whether this is editable or not
        """
        return self._get_attribute(self._SDM_ATT_MAP['IsEditable'])
    @IsEditable.setter
    def IsEditable(self, value):
        self._set_attribute(self._SDM_ATT_MAP['IsEditable'], value)
    @property
    def IsRequired(self):
        """
        Returns
        -------
        - bool: Indicates whether this is required or not
        """
        return self._get_attribute(self._SDM_ATT_MAP['IsRequired'])
    @IsRequired.setter
    def IsRequired(self, value):
        self._set_attribute(self._SDM_ATT_MAP['IsRequired'], value)
    @property
    def Name(self):
        """
        Returns
        -------
        - str: Name of the node
        """
        return self._get_attribute(self._SDM_ATT_MAP['Name'])
    @Name.setter
    def Name(self, value):
        self._set_attribute(self._SDM_ATT_MAP['Name'], value)
    def update(self, IsEditable=None, IsRequired=None, Name=None):
        """Updates type resource on the server.

        Args
        ----
        - IsEditable (bool): Indicates whether this is editable or not
        - IsRequired (bool): Indicates whether this is required or not
        - Name (str): Name of the node
        Raises
        ------
        - ServerError: The server has encountered an uncategorized error condition
        """
        # _map_locals picks the non-None keyword arguments out of locals() by
        # the _SDM_ATT_MAP keys, so parameter names must not be changed.
        return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
| 35.386792 | 158 | 0.651293 |
acec2abeb8e695c7923f1a9b54eb5ee15fc8ff27 | 440 | py | Python | modules/updates.py | 5225225/bar | cc72eb45f21ac2b2e070c6d9f66b306ed51aef35 | [
"MIT"
] | 1 | 2015-09-05T17:07:59.000Z | 2015-09-05T17:07:59.000Z | modules/updates.py | 5225225/bar | cc72eb45f21ac2b2e070c6d9f66b306ed51aef35 | [
"MIT"
] | null | null | null | modules/updates.py | 5225225/bar | cc72eb45f21ac2b2e070c6d9f66b306ed51aef35 | [
"MIT"
] | 2 | 2015-09-05T17:08:02.000Z | 2019-02-22T21:14:08.000Z | import signal
import linelib
import subprocess
def handler(x, y):
pass
signal.signal(signal.SIGUSR1, handler)
signal.signal(signal.SIGALRM, handler)
ID = "updates"
while True:
updates = subprocess.check_output(["/usr/bin/pacaur", "-Qua"]).decode(
"ASCII").strip().split("\n")
linelib.sendblock(ID, {"full_text": str(len(updates))})
linelib.sendPID(ID)
linelib.waitsig(1)
| 22 | 74 | 0.627273 |
acec2ac6d4270ec3a01a64ba3c51ac4aeaeefd31 | 1,981 | py | Python | tests/unit/test_enums.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 15,179 | 2020-04-28T10:23:56.000Z | 2022-03-31T14:35:25.000Z | tests/unit/test_enums.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 3,912 | 2020-04-28T13:01:29.000Z | 2022-03-31T14:36:46.000Z | tests/unit/test_enums.py | Rohitpandit021/jina | f3db4d5e480375d8dc3bceda814ac1963dee76d7 | [
"Apache-2.0"
] | 1,955 | 2020-04-28T10:50:49.000Z | 2022-03-31T12:28:34.000Z | from jina.enums import EmbeddingClsType
def test_embedding_cls_type():
dense = EmbeddingClsType.DENSE
assert dense.is_dense
assert not dense.is_sparse
assert not dense.is_torch
assert not dense.is_tf
assert not dense.is_scipy
assert not dense.is_scipy_stackable
assert dense.scipy_cls_type is None
scipy_coo = EmbeddingClsType.SCIPY_COO
assert not scipy_coo.is_dense
assert not scipy_coo.is_torch
assert not scipy_coo.is_tf
assert scipy_coo.is_sparse
assert scipy_coo.is_scipy
assert scipy_coo.is_scipy_stackable
assert scipy_coo.scipy_cls_type == 'coo'
scipy_csr = EmbeddingClsType.SCIPY_CSR
assert not scipy_csr.is_dense
assert not scipy_csr.is_torch
assert not scipy_csr.is_tf
assert scipy_csr.is_sparse
assert scipy_csr.is_scipy
assert scipy_csr.is_scipy_stackable
assert scipy_csr.scipy_cls_type == 'csr'
scipy_bsr = EmbeddingClsType.SCIPY_BSR
assert not scipy_bsr.is_dense
assert not scipy_bsr.is_torch
assert not scipy_bsr.is_tf
assert scipy_bsr.is_sparse
assert scipy_bsr.is_scipy
assert not scipy_bsr.is_scipy_stackable
assert scipy_bsr.scipy_cls_type == 'bsr'
scipy_csc = EmbeddingClsType.SCIPY_CSC
assert not scipy_csc.is_dense
assert not scipy_csc.is_torch
assert not scipy_csc.is_tf
assert scipy_csc.is_sparse
assert scipy_csc.is_scipy
assert not scipy_csc.is_scipy_stackable
assert scipy_csc.scipy_cls_type == 'csc'
torch = EmbeddingClsType.TORCH
assert torch.is_sparse
assert torch.is_torch
assert not torch.is_scipy
assert not torch.is_dense
assert not torch.is_scipy_stackable
assert not torch.is_tf
assert torch.scipy_cls_type is None
tf = EmbeddingClsType.TF
assert tf.is_sparse
assert tf.is_tf
assert not tf.is_scipy
assert not tf.is_dense
assert not tf.is_scipy_stackable
assert not tf.is_torch
assert tf.scipy_cls_type is None
| 29.567164 | 44 | 0.757193 |
acec2b100e1e69b07aacbeae7a34526224013dfd | 5,239 | py | Python | aws/dynamodb.py | robertcsapo/cisco-devnet-community-webex-bot | 95b442a10da99447dceac4d2c68bcb2b4b76342b | [
"BSD-Source-Code"
] | 2 | 2020-08-02T17:28:55.000Z | 2021-02-21T21:52:29.000Z | aws/dynamodb.py | robertcsapo/cisco-devnet-community-webex-bot | 95b442a10da99447dceac4d2c68bcb2b4b76342b | [
"BSD-Source-Code"
] | 7 | 2020-04-27T17:34:27.000Z | 2021-03-22T06:57:00.000Z | aws/dynamodb.py | robertcsapo/cisco-devnet-community-webex-bot | 95b442a10da99447dceac4d2c68bcb2b4b76342b | [
"BSD-Source-Code"
] | null | null | null | import boto3
import os
import time
import yaml
def latest(table, type, link, args):
    """Look up a previously stored feed entry in DynamoDB.

    Returns a dict with key "link": the stored link when the (link, type)
    pair exists, or False when it was never stored (i.e. the entry is new).
    """
    settings = config(args)
    dynamodb = boto3.resource(
        "dynamodb",
        region_name=os.environ["AWS_DEFAULT_REGION"],
        endpoint_url=settings["AWS_ENDPOINT_URL"]
    )
    feed_table = dynamodb.Table(table)
    try:
        response = feed_table.get_item(
            Key={
                "link": link,
                "type": type
            }
        )
    except Exception as e:
        raise Exception("ERROR: Problem reading DynamoDB - {}".format(e))
    item = response.get("Item")
    if item is None:
        # No stored entry for this URL/type yet.
        return {"link": False}
    return {"link": item["link"]}
def add(table, type, title, link, args):
    """Persist a new feed entry (title/link/type + current timestamp) in DynamoDB."""
    # Capture the timestamp before any network calls.
    timestamp = int(time.time())
    settings = config(args)
    dynamodb = boto3.resource(
        "dynamodb",
        region_name=os.environ["AWS_DEFAULT_REGION"],
        endpoint_url=settings["AWS_ENDPOINT_URL"]
    )
    feed_table = dynamodb.Table(table)
    item = {
        "title": title,
        "link": link,
        "type": type,
        "timestamp": str(timestamp),
    }
    try:
        feed_table.put_item(Item=item)
    except Exception as e:
        raise Exception("ERROR: Problem updating DynamoDB - {}".format(e))
def dbInit(table, args):
    """Create the DynamoDB table keyed on (type, link).

    Returns {"success": True} when table creation did not raise.
    """
    settings = config(args)
    dynamodb = boto3.resource(
        "dynamodb",
        region_name=os.environ["AWS_DEFAULT_REGION"],
        endpoint_url=settings["AWS_ENDPOINT_URL"]
    )
    # Composite primary key: "type" as partition key, "link" as sort key.
    key_schema = [
        {"AttributeName": "type", "KeyType": "HASH"},
        {"AttributeName": "link", "KeyType": "RANGE"},
    ]
    attribute_definitions = [
        {"AttributeName": "type", "AttributeType": "S"},
        {"AttributeName": "link", "AttributeType": "S"},
    ]
    throughput = {
        "ReadCapacityUnits": 2,
        "WriteCapacityUnits": 2
    }
    try:
        dynamodb.create_table(
            TableName=table,
            KeySchema=key_schema,
            AttributeDefinitions=attribute_definitions,
            ProvisionedThroughput=throughput
        )
    except Exception as e:
        raise Exception("ERROR: {}".format(e))
    return {"success": True}
def config(args):
    """Resolve the AWS/DynamoDB connection settings.

    Reads the YAML file at ``args.config``; environment variables take
    priority over the file. Resolved values from the file are also exported
    into ``os.environ`` so boto3 picks them up.

    Returns a dict with AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY,
    AWS_DEFAULT_REGION and AWS_ENDPOINT_URL (endpoint may be None).

    Raises
    ------
    Exception: when credentials or region cannot be resolved from either
        the environment or the config file.
    """
    result = {}
    with open(args.config, "r") as file:
        config = yaml.load(file, Loader=yaml.FullLoader)
    dynamodb_cfg = config["dynamodb"]

    # --- Credentials: env pair first, then config file; required. ---
    if "AWS_ACCESS_KEY_ID" in os.environ and "AWS_SECRET_ACCESS_KEY" in os.environ:
        result["AWS_ACCESS_KEY_ID"] = os.environ["AWS_ACCESS_KEY_ID"]
        result["AWS_SECRET_ACCESS_KEY"] = os.environ["AWS_SECRET_ACCESS_KEY"]
    elif (dynamodb_cfg["AWS_ACCESS_KEY_ID"] is not None and
          dynamodb_cfg["AWS_SECRET_ACCESS_KEY"] is not None):
        result["AWS_ACCESS_KEY_ID"] = dynamodb_cfg["AWS_ACCESS_KEY_ID"]
        result["AWS_SECRET_ACCESS_KEY"] = dynamodb_cfg["AWS_SECRET_ACCESS_KEY"]
        os.environ["AWS_ACCESS_KEY_ID"] = dynamodb_cfg["AWS_ACCESS_KEY_ID"]
        os.environ["AWS_SECRET_ACCESS_KEY"] = dynamodb_cfg["AWS_SECRET_ACCESS_KEY"]
    else:
        # (The original had an unreachable `return` after this raise.)
        raise Exception("AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY missing")

    # --- Region: env first, then config file; required. ---
    if "AWS_DEFAULT_REGION" in os.environ:
        result["AWS_DEFAULT_REGION"] = os.environ["AWS_DEFAULT_REGION"]
    else:
        try:
            region = dynamodb_cfg["AWS_DEFAULT_REGION"]
            if region is not None:
                os.environ["AWS_DEFAULT_REGION"] = region
            # Still raises KeyError (-> "missing") if the env var is unset.
            result["AWS_DEFAULT_REGION"] = os.environ["AWS_DEFAULT_REGION"]
        except KeyError:
            raise Exception("AWS Region missing in config")

    # --- Endpoint URL: optional; None means the AWS default endpoint. ---
    if "AWS_ENDPOINT_URL" in os.environ:
        result["AWS_ENDPOINT_URL"] = os.environ["AWS_ENDPOINT_URL"]
    else:
        endpoint = dynamodb_cfg.get("AWS_ENDPOINT_URL")
        if endpoint is not None:
            os.environ["AWS_ENDPOINT_URL"] = endpoint
            result["AWS_ENDPOINT_URL"] = endpoint
        else:
            result["AWS_ENDPOINT_URL"] = None
    return result
acec2caaabed029117b8b3d3bceb308270812de0 | 17,875 | py | Python | EEG_Lightning/predict.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | 23 | 2021-10-14T02:31:06.000Z | 2022-01-25T16:26:44.000Z | EEG_Lightning/predict.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | null | null | null | EEG_Lightning/predict.py | mcd4874/NeurIPS_competition | 4df1f222929e9824a55c9c4ae6634743391b0fe9 | [
"MIT"
] | 1 | 2022-03-05T06:54:11.000Z | 2022-03-05T06:54:11.000Z | import argparse
import torch
import os
from dassl.utils import setup_logger, set_random_seed, collect_env_info
from dassl.config import get_cfg_default
from dassl.engine import build_trainer
import numpy as np
import pandas as pd
from torch.utils.data import DataLoader
import pytorch_lightning as pl
from submission.NeurIPS_2.util.support import (
expand_data_dim, normalization_time,normalization_channels, generate_common_chan_test_data, load_Cho2017, load_Physionet, load_BCI_IV,
correct_EEG_data_order, relabel, process_target_data, relabel_target, load_dataset_A, load_dataset_B, modify_data,reformat,
filterBank
)
from train_util import (
setup_cfg,print_args,reset_cfg,convert_to_dict,CustomModelCheckPoint,CustomeCSVLogger,CustomExperimentWriter,generate_excel_report,
generate_model_info_config,trainer_setup,generate_setup
)
from dassl.data.datasets.data_util import EuclideanAlignment
from collections import defaultdict
from numpy.random import RandomState
def generate_pred_MI_label(fold_predict_results, output_dir, predict_folder="predict_folder",
                           relabel=False):
    """Majority-vote across fold predictions and write the final MI labels.

    Each entry of fold_predict_results carries "preds" (one-hot votes) and
    "probs" (class probabilities), both of shape (n_trials, n_classes).
    Votes are summed across folds; per trial the class with most votes wins,
    ties are broken by the summed probability (and remaining ties by the
    lowest class index). The result is written to
    ``<output_dir>/<predict_folder>/pred_MI_label.txt``.

    FIX: the vote/probability accumulators were initialized from each
    other's shapes (swapped); they now use their matching arrays.
    """
    final_pred = np.zeros(fold_predict_results[0]["preds"].shape)
    final_prob = np.zeros(fold_predict_results[0]["probs"].shape)
    for predict_result in fold_predict_results:
        final_pred = final_pred + predict_result["preds"]
        final_prob = final_prob + predict_result["probs"]

    # max() keeps the first maximal index, matching the original strict-">"
    # scan (ties on both vote count and probability pick the lowest index).
    pred_output = []
    for trial_pred, trial_prob in zip(final_pred, final_prob):
        best_idx = max(range(len(trial_pred)),
                       key=lambda idx: (trial_pred[idx], trial_prob[idx]))
        pred_output.append(best_idx)
    pred_output = np.array(pred_output)
    if relabel:
        # Map to the 3-category label space used by the competition.
        pred_output = np.array([relabel_target(l) for l in pred_output])
        print("update pred output : ", pred_output)
    combine_folder = os.path.join(output_dir, predict_folder)
    print("save folder : ", combine_folder)
    np.savetxt(os.path.join(combine_folder, "pred_MI_label.txt"), pred_output, delimiter=',', fmt="%d")
def generate_assemble_result(fold_predict_results, output_dir,
                             predict_folder="predict_folder", relabel=False):
    """Ensemble fold predictions per test fold and report accuracies.

    Results are grouped by their "test_fold" key; within each group the
    one-hot votes ("preds") and probabilities ("probs") are summed and the
    winning class per trial is chosen by vote count, with ties broken by
    probability (same rule as generate_pred_MI_label). Accuracy against
    "labels" is printed per fold and written to
    ``<output_dir>/<predict_folder>/ensemble_result.xlsx``.

    FIX: the vote/probability accumulators were initialized from each
    other's shapes (swapped); they now use their matching arrays.
    """
    group_test_folds = defaultdict(list)
    for fold_result in fold_predict_results:
        group_test_folds[fold_result["test_fold"]].append(fold_result)

    final_fold_result = list()
    for test_fold, test_fold_result in group_test_folds.items():
        final_pred = np.zeros(test_fold_result[0]["preds"].shape)
        final_prob = np.zeros(test_fold_result[0]["probs"].shape)
        final_label = test_fold_result[0]["labels"]
        for predict_result in test_fold_result:
            final_pred = final_pred + predict_result["preds"]
            final_prob = final_prob + predict_result["probs"]

        # First maximal index wins, matching the original strict-">" scan.
        pred_output = np.array([
            max(range(len(trial_pred)),
                key=lambda idx: (trial_pred[idx], trial_prob[idx]))
            for trial_pred, trial_prob in zip(final_pred, final_prob)
        ])
        if relabel:
            pred_output = np.array([relabel_target(l) for l in pred_output])
            final_label = np.array([relabel_target(l) for l in final_label])
        acc = np.mean(pred_output == final_label)
        print("test fold {} has acc {} ".format(test_fold, acc))
        final_fold_result.append({
            "test_fold": test_fold,
            "test_acc": acc
        })

    result = pd.DataFrame.from_dict(final_fold_result)
    result_output_dir = os.path.join(output_dir, predict_folder)
    if not os.path.isdir(result_output_dir):
        os.makedirs(result_output_dir)
    result_filename = 'ensemble_result.xlsx'
    result.to_excel(os.path.join(result_output_dir, result_filename), index=False)
#
from scipy.io import loadmat
def load_test_data_from_file(provide_path, dataset_type):
    """Load one dataset's test trials (and optional alignment matrices) from a .mat file.

    The file at ``provide_path`` holds a MATLAB struct array ``datasets``;
    the entry whose ``dataset_name`` equals ``dataset_type`` is selected.
    A sibling file ``<dataset_type>_r_op.mat`` in the same directory, when
    present, supplies per-subject reference operators for Euclidean
    Alignment.

    Returns (test_data, list_r_op) where list_r_op is None when no r_op
    file exists.

    NOTE(review): the indexing below ([0][0], ['field'][0]) assumes the
    exact nesting scipy.io.loadmat produces for this project's export
    format — confirm against the file writer before changing it.
    """
    temp = loadmat(provide_path)
    datasets = temp['datasets'][0]
    target_dataset = None
    list_r_op = None
    # Pick the struct entry matching the requested dataset name.
    for dataset in datasets:
        dataset = dataset[0][0]
        dataset_name = dataset['dataset_name'][0]
        if dataset_name == dataset_type:
            target_dataset = dataset
    # data = target_dataset['data'].astype(np.float32)
    data = target_dataset['data'].astype(np.float32)
    label = np.squeeze(target_dataset['label']).astype(int)
    # Rebuild the subject/session/run metadata table from the mat struct.
    meta_data = target_dataset['meta_data'][0][0]
    new_meta_data = {}
    new_meta_data['subject'] = meta_data['subject'][0]
    new_meta_data['session'] = [session[0] for session in meta_data['session'][0]]
    new_meta_data['run'] = [run[0] for run in meta_data['run'][0]]
    meta_data = pd.DataFrame.from_dict(new_meta_data)
    test_data, test_label, meta_data = reformat(data, label, meta_data)
    # Look for an optional "<dataset_type>_r_op.mat" next to the data file.
    potential_r_op = dataset_type + '_r_op.mat'
    # NOTE: uses "/" as the path separator, so a Windows-style path in
    # provide_path would not be split correctly.
    provide_path = provide_path.split("/")[:-1]
    provide_path = "/".join(provide_path)
    exist_r_op_file = os.path.join(provide_path, potential_r_op)
    print("current r_op file : ", exist_r_op_file)
    if os.path.exists(exist_r_op_file):
        print("path {} exist ".format(exist_r_op_file))
        temp = loadmat(exist_r_op_file)
        dataset = temp['datasets'][0]
        dataset = list(dataset)
        dataset = dataset[0][0]
        # Only use the file if it actually carries an r_op_list field.
        if 'r_op_list' in list(dataset.dtype.names):
            r_op = dataset['r_op_list'][0]
            list_r_op = np.array(r_op).astype(np.float32)
            print("use the r-op list")
    return test_data, list_r_op
def print_info(source_data, dataset_name):
    """Log the shape and min/max value range of each subject's data array."""
    print("current dataset {}".format(dataset_name))
    for subject_idx, subject_data in enumerate(source_data):
        print("source_data subject_idx {} has shape : {}, with range scale ({},{}) ".format(
            subject_idx, subject_data.shape,
            np.max(subject_data), np.min(subject_data)))
def get_test_data(dataset_type, norm, provide_data_path=None, EA=False):
    """Assemble the test tensor for dataset A or B, ready for prediction.

    Either loads pre-exported trials from ``provide_data_path`` (see
    load_test_data_from_file) or downloads/builds them via the dataset
    loaders, optionally applies Euclidean Alignment and the requested
    normalization, and returns the data with an expanded channel dimension.
    """
    os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
    cuda = torch.cuda.is_available()  # NOTE(review): unused local
    # Seed every RNG involved so preprocessing is reproducible.
    seed = 42
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    np.random.seed(seed)
    torch.backends.cudnn.deterministic = True
    rng = RandomState(seed)
    list_r_op = None
    # get correct chans order
    if provide_data_path is None:
        # Derive the common channel ordering from a Cho2017 reference subject.
        target_channels = generate_common_chan_test_data()
        fmin, fmax = 4, 36
        epoch_X_src1, label_src1, m_src1 = load_Cho2017(fmin=fmin, fmax=fmax, selected_chans=target_channels,
                                                        subjects=[1])
        print("cho2017 current chans : ", epoch_X_src1.ch_names)
        print("size : ", len(epoch_X_src1.ch_names))
        target_channels = epoch_X_src1.ch_names
        if dataset_type == 'dataset_B':
            test_data = load_dataset_B(train=False, norm=norm, selected_chans=target_channels)
            n_subjects = 3
        else:
            test_data = load_dataset_A(train=False, norm=norm, selected_chans=target_channels)
            n_subjects = 2
        # if EA:
        print("{} subjects to split : ".format(n_subjects))
        # Split the trial block into per-subject arrays (needed for EA).
        test_data = np.split(test_data, n_subjects)
    else:
        print("load test data from file {}".format(provide_data_path))
        test_data, list_r_op = load_test_data_from_file(provide_data_path, dataset_type=dataset_type)
        if list_r_op is None:
            print("generate new list r op")
        else:
            print("use r_op")
    if EA:
        # Align every subject's trials; list_r_op=None lets EA compute its own.
        test_EA = EuclideanAlignment(list_r_op=list_r_op)
        test_data = test_EA.convert_subjects_data_with_EA(test_data)
    print("load test predict data ------------------")
    print_info(test_data, dataset_type)
    test_data = np.concatenate(test_data)
    if norm == 'cross_channel_norm':
        print("normalize across channels ")
        test_data = normalization_channels(test_data)
    elif norm == 'time_norm':
        print("normalize across time in each channel ")
        test_data = normalization_time(test_data)
    # Add the singleton "channel" axis expected by the CNN models.
    test_data = expand_data_dim(test_data)
    print("data shape before predict : ", test_data.shape)
    return test_data
def generate_ensemble_predict(cfg,experiments_setup,benchmark=False,deterministic=True,generate_predict=False,use_assemble_test_dataloader=False,relabel=False,seed=42):
    """Run (or reload) per-experiment predictions and aggregate them.

    When generate_predict is True, every experiment's model is restored from
    its ``last.ckpt`` and run over the test data, saving predictions/probs
    as text files. When False, those text files are loaded back and the
    aggregated output is produced by generate_pred_MI_label (submission
    labels) or generate_assemble_result (ensemble accuracy), depending on
    use_assemble_test_dataloader.

    NOTE(review): reads the module-level ``args`` (set in __main__) for
    ``args.test_data`` — this function cannot be called before argument
    parsing.
    """
    if len(cfg.INPUT.TRANSFORMS) > 0:
        norm = cfg.INPUT.TRANSFORMS[0]
    else:
        norm = "none"
    EA = cfg.DATAMANAGER.DATASET.USE_Euclidean_Aligment
    print("use cross channel norm : ", norm)
    print("generate predict : ", generate_predict)
    dataset_type = cfg.DATAMANAGER.DATASET.SETUP.TARGET_DATASET_NAME
    test_file_path = args.test_data if args.test_data != '' else None
    if generate_predict and not use_assemble_test_dataloader:
        # Competition test set: one shared dataset reused for every experiment.
        dataset = get_test_data(dataset_type, norm, EA=EA, provide_data_path=test_file_path)
    fold_predict_results = list()
    for experiment in experiments_setup:
        sub_exp_path = experiment["generate_sub_exp_path"]
        output_dir = experiment["output_dir"]
        combine_prefix = experiment["sub_exp_prefix"]
        cfg = experiment["cfg"]
        predict_folder = "predict_folder"
        combine_predict_folder = os.path.join(cfg.OUTPUT_DIR, predict_folder, sub_exp_path)
        if not os.path.exists(combine_predict_folder):
            os.makedirs(combine_predict_folder)
        if generate_predict:
            trainer_model, trainer_lightning, data_manager = trainer_setup(output_dir, cfg, benchmark, deterministic,
                                                                           seed=seed)
            #predict with best epoch
            # model_state = torch.load(os.path.join(output_dir, 'checkpoint.ckpt'), map_location='cuda:0')
            #predict with lastest epoch
            print("use last.ckpt for prediction" )
            model_state = torch.load(os.path.join(output_dir, 'last.ckpt'), map_location='cuda:0')
            print("save checkpoint keys : ", model_state.keys())
            trainer_model.load_state_dict(model_state['state_dict'])
            trainer_model.eval()
            probs_list = []
            preds_list = []
            label_list = []
            # parser() adapts each batch to (input, label); labeled batches
            # come from the data manager, unlabeled ones from the raw array.
            if use_assemble_test_dataloader:
                def parser(test_input):
                    input, label, domain = test_input
                    label = label.numpy()
                    return input, label
                test_dataloader = data_manager.predict_dataloader()
            else:
                test_data = dataset
                test_dataloader = DataLoader(test_data, batch_size=64, shuffle=False)
                def parser(test_input):
                    input = test_input
                    label = np.array([None])
                    return input, label
            for step, test_input in enumerate(test_dataloader):
                input, label = parser(test_input)
                input = input.float()
                probs = trainer_model(input)
                probs = probs.detach().numpy()
                # One-hot encode the argmax class as this model's "vote".
                new_probs = np.zeros_like(probs)
                new_probs[np.arange(len(probs)), probs.argmax(1)] = 1
                probs_list.append(probs)
                preds_list.append(new_probs)
                label_list.append(label)
            label_list = np.concatenate(label_list)
            probs_list = np.concatenate(probs_list)
            probs_list = np.around(probs_list, decimals=4)
            preds_list = np.concatenate(preds_list).astype(int)
            predict_info = {
                "probs": probs_list,
                "preds": preds_list
            }
            # NOTE(review): `!= None` relies on element comparison; labels are
            # only attached when the dataloader provided real ones.
            if label_list[0] != None:
                predict_info.update({"label": label_list})
            predict_info.update(combine_prefix)
            fold_predict_results.append(predict_info)
            print("combine predict folder : ", combine_predict_folder)
            if use_assemble_test_dataloader:
                np.savetxt(os.path.join(combine_predict_folder, 'ensemble_label.txt'), label_list, delimiter=',',
                           fmt="%d")
                np.savetxt(os.path.join(combine_predict_folder, 'ensemble_pred.txt'), preds_list, delimiter=',',
                           fmt="%d")
                np.savetxt(os.path.join(combine_predict_folder, 'ensemble_prob.txt'), probs_list, delimiter=',',
                           fmt='%1.4f')
            else:
                np.savetxt(os.path.join(combine_predict_folder, 'pred.txt'), preds_list, delimiter=',', fmt="%d")
                np.savetxt(os.path.join(combine_predict_folder, 'prob.txt'), probs_list, delimiter=',', fmt='%1.4f')
        else:
            # Reload predictions saved by a previous generate_predict run.
            if use_assemble_test_dataloader:
                pred = np.loadtxt(os.path.join(combine_predict_folder, 'ensemble_pred.txt'), delimiter=',')
                probs = np.loadtxt(os.path.join(combine_predict_folder, 'ensemble_prob.txt'), delimiter=',')
                labels = np.loadtxt(os.path.join(combine_predict_folder, 'ensemble_label.txt'), delimiter=',')
                predict_info = {
                    "labels": labels,
                    "probs": probs,
                    "preds": pred
                }
            else:
                pred = np.loadtxt(os.path.join(combine_predict_folder, 'pred.txt'), delimiter=',')
                probs = np.loadtxt(os.path.join(combine_predict_folder, 'prob.txt'), delimiter=',')
                predict_info = {
                    "probs": probs,
                    "preds": pred
                }
            predict_info.update(combine_prefix)
            fold_predict_results.append(predict_info)
    if not generate_predict:
        if not use_assemble_test_dataloader:
            generate_pred_MI_label(fold_predict_results, output_dir=cfg.OUTPUT_DIR, relabel=relabel)
        else:
            generate_assemble_result(fold_predict_results, output_dir=cfg.OUTPUT_DIR,
                                     relabel=relabel)
def main(args):
    """Build the config, seed all RNGs and launch ensemble prediction."""
    cfg = setup_cfg(args)
    setup_logger(cfg.OUTPUT_DIR)
    seed = 42
    pl.seed_everything(seed)
    use_cuda = torch.cuda.is_available() and cfg.USE_CUDA
    if use_cuda:
        print("use determinstic ")
    # Deterministic cuDNN kernels only matter (and only apply) on GPU runs.
    benchmark = False
    deterministic = use_cuda
    experiments_setup = generate_setup(cfg)
    generate_ensemble_predict(
        cfg,
        experiments_setup,
        benchmark=benchmark,
        deterministic=deterministic,
        generate_predict=args.generate_predict,
        use_assemble_test_dataloader=args.use_assemble_test_dataloader,
        relabel=args.relabel,
        seed=seed,
    )
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser()
    # Input/output locations.
    arg_parser.add_argument('--root', type=str, default='', help='path to dataset')
    arg_parser.add_argument('--test-data', type=str, default='', help='path to test data')
    arg_parser.add_argument('--output-dir', type=str, default='', help='output directory')
    arg_parser.add_argument('--main-config-file', type=str, default='',
                            help='path to main config file for full setup')
    # Behaviour switches.
    arg_parser.add_argument('--generate-predict', action='store_true',
                            help='generate predict result ')
    arg_parser.add_argument('--use-assemble-test-dataloader', action='store_true',
                            help='use ensemble of multi model to make prediction')
    arg_parser.add_argument('--relabel', action='store_true',
                            help='relabel predict to be 3 categories')
    arg_parser.add_argument('--gpu-id', type=int, default=0, help='gpu ')
    arg_parser.add_argument('opts', default=None, nargs=argparse.REMAINDER,
                            help='modify config options using the command-line')
    # `args` must stay module-level: generate_ensemble_predict reads it globally.
    args = arg_parser.parse_args()
    main(args)
| 40.625 | 168 | 0.631217 |
acec2cbf68c028f31073bcb015d4b56280209bcd | 261 | py | Python | ex021.py | danieldf0l/exercicios-curso-python3 | 8f8a0e2467efee855da47ea47a4c2f4fda152c41 | [
"MIT"
] | null | null | null | ex021.py | danieldf0l/exercicios-curso-python3 | 8f8a0e2467efee855da47ea47a4c2f4fda152c41 | [
"MIT"
] | null | null | null | ex021.py | danieldf0l/exercicios-curso-python3 | 8f8a0e2467efee855da47ea47a4c2f4fda152c41 | [
"MIT"
] | null | null | null | import pygame
pygame.mixer.init()
pygame.mixer.music.load('ex021.mp3')
pygame.mixer.music.play(loops=0, start=0.0)
pygame.event.wait()
### Autor: Prod. Smurph
# Vídeo original: https://www.youtube.com/watch?v=ikMLT729ArM&list=TLPQMjgwMTIwMjK1IRHq1MFulg&index=4 | 37.285714 | 102 | 0.773946 |
acec2f2074076d78916a441ce08c7c483d21633e | 3,149 | py | Python | examples/gin_rummy_dqn.py | drunkpig/rlcard | db8a410bbfefb7f9fd958239aae8d79a8bfb29d3 | [
"MIT"
] | null | null | null | examples/gin_rummy_dqn.py | drunkpig/rlcard | db8a410bbfefb7f9fd958239aae8d79a8bfb29d3 | [
"MIT"
] | null | null | null | examples/gin_rummy_dqn.py | drunkpig/rlcard | db8a410bbfefb7f9fd958239aae8d79a8bfb29d3 | [
"MIT"
] | 1 | 2020-11-20T16:38:37.000Z | 2020-11-20T16:38:37.000Z | '''
File name: rlcard.examples.gin_rummy_dqn.py
Author: William Hale
Date created: 2/12/2020
An example of learning a Deep-Q Agent on GinRummy
'''
import tensorflow as tf
import os
import rlcard
from rlcard.agents.dqn_agent import DQNAgent
from rlcard.agents.random_agent import RandomAgent
from rlcard.utils.utils import set_global_seed, tournament
from rlcard.utils.logger import Logger
# Make the training and (separate) evaluation environments.
env = rlcard.make('gin-rummy')
eval_env = rlcard.make('gin-rummy')
env.game.settings.print_settings()
# Set the iteration counts and how frequently we evaluate/save the plot.
evaluate_every = 100
evaluate_num = 100  # mahjong_dqn has 1000
episode_num = 1000  # mahjong_dqn has 100000
# The initial memory size
memory_init_size = 1000
# Train the agent every X steps
train_every = 1
# The paths for saving the logs and learning curves
log_dir = './experiments/gin_rummy_dqn_result/'
# Set a global seed
set_global_seed(0)
with tf.Session() as sess:
    # DQN agent plays seat 0; a random agent fills seat 1 in both envs.
    global_step = tf.Variable(0, name='global_step', trainable=False)
    agent = DQNAgent(sess,
                     scope='dqn',
                     action_num=env.action_num,
                     replay_memory_size=20000,
                     replay_memory_init_size=memory_init_size,
                     train_every=train_every,
                     state_shape=env.state_shape,
                     mlp_layers=[512, 512])
    random_agent = RandomAgent(action_num=eval_env.action_num)
    sess.run(tf.global_variables_initializer())
    env.set_agents([agent, random_agent])
    eval_env.set_agents([agent, random_agent])
    # Init a Logger to plot the learning curve
    logger = Logger(log_dir)
    for episode in range(episode_num):
        # Generate data from the environment
        trajectories, _ = env.run(is_training=True)
        # Feed transitions into agent memory, and train the agent
        for ts in trajectories[0]:
            agent.feed(ts)
        # Extra logging: average raw payoffs for both seats every N episodes.
        if episode % evaluate_every == 0:
            reward = 0
            reward2 = 0
            for eval_episode in range(evaluate_num):
                _, payoffs = eval_env.run(is_training=False)
                reward += payoffs[0]
                reward2 += payoffs[1]
            logger.log("\n\n########## Evaluation {} ##########".format(episode))
            reward_text = "{}".format(float(reward)/evaluate_num)
            reward2_text = "{}".format(float(reward2)/evaluate_num)
            info = "Timestep: {} Average reward is {}, reward2 is {}".format(env.timestep, reward_text, reward2_text)
            logger.log(info)
        # Evaluate the performance. Play with random agents.
        if episode % evaluate_every == 0:
            logger.log_performance(env.timestep, tournament(eval_env, evaluate_num)[0])
    # Close files in the logger
    logger.close_files()
    # Plot the learning curve
    logger.plot('DQN')
    # Save the trained TF1 model checkpoint.
    save_dir = 'models/gin_rummy_dqn'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    saver = tf.train.Saver()
    saver.save(sess, os.path.join(save_dir, 'model'))
| 30.872549 | 117 | 0.651 |
acec305270eeea263e05d6cabf3324c884a42dd0 | 945 | py | Python | hash_retail/database.py | rfayan/hash_retail | 280f0396fc2bf83099a7ba9a8952a0ddc77822b0 | [
"MIT"
] | null | null | null | hash_retail/database.py | rfayan/hash_retail | 280f0396fc2bf83099a7ba9a8952a0ddc77822b0 | [
"MIT"
] | null | null | null | hash_retail/database.py | rfayan/hash_retail | 280f0396fc2bf83099a7ba9a8952a0ddc77822b0 | [
"MIT"
] | null | null | null | from os import environ
from typing import Iterator
from urllib.parse import quote
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import Session, sessionmaker
# Connection settings come from the environment; a missing variable raises
# KeyError at import time, failing fast on misconfiguration.
PG_USER = environ["DATABASE_USERNAME"]
# quote() percent-encodes the password so special characters survive the URL.
PG_PASS = quote(environ["DATABASE_PASSWORD"])
PG_HOST = environ["DATABASE_HOST"]
PG_PORT = environ["DATABASE_PORT"]
PG_DB = environ["DATABASE_NAME"]
SQLALCHEMY_DATABASE_URL = f"postgresql://{PG_USER}:{PG_PASS}@{PG_HOST}:{PG_PORT}/{PG_DB}"
engine = create_engine(SQLALCHEMY_DATABASE_URL)
# Session factory shared by the app; sessions are handed out via get_db().
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
# Declarative base all ORM models inherit from.
Base = declarative_base()
# Verify if all tables exist. If they do not, create them
def verify_and_create_db_tables() -> None:
    """Create every table declared on Base's metadata (no-op for existing ones)."""
    Base.metadata.create_all(bind=engine)
def get_db() -> Iterator[Session]:
    """Yield a database session, guaranteeing it is closed afterwards.

    Generator-style dependency/context: callers iterate it (or let the web
    framework drive it) and the session is closed even on error.
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
| 27.794118 | 89 | 0.769312 |
acec3090e1236b52e7b03762da08a71c0bfe74e1 | 2,511 | py | Python | api/predict.py | xuhdev/MAX-Audio-Classifier | 26f3905a1699bac3500cf9a3be50c9cd68bc40e1 | [
"Apache-2.0"
] | null | null | null | api/predict.py | xuhdev/MAX-Audio-Classifier | 26f3905a1699bac3500cf9a3be50c9cd68bc40e1 | [
"Apache-2.0"
] | null | null | null | api/predict.py | xuhdev/MAX-Audio-Classifier | 26f3905a1699bac3500cf9a3be50c9cd68bc40e1 | [
"Apache-2.0"
] | null | null | null | from core.model import ModelWrapper
from flask_restplus import fields
from werkzeug.datastructures import FileStorage
from werkzeug.exceptions import BadRequest
from maxfw.core import MAX_API, PredictAPI
import os
# set up parser for audio input data: a required WAV upload plus an optional
# start offset (seconds) for the prediction window
input_parser = MAX_API.parser()
input_parser.add_argument('audio', type=FileStorage, location='files', required=True,
                          help="signed 16-bit PCM WAV audio file")
input_parser.add_argument('start_time', type=float, default=0,
                          help='The number of seconds into the audio file the prediction should start at.')
# Response schema: one (label_id, label, probability) entry per predicted class.
label_prediction = MAX_API.model('LabelPrediction', {
    'label_id': fields.String(required=False, description='Label identifier'),
    'label': fields.String(required=True, description='Audio class label'),
    'probability': fields.Float(required=True)
})
predict_response = MAX_API.model('ModelPredictResponse', {
    'status': fields.String(required=True, description='Response status message'),
    'predictions': fields.List(fields.Nested(label_prediction), description='Predicted audio classes and probabilities')
})
class ModelPredictAPI(PredictAPI):
    """REST endpoint that classifies an uploaded WAV clip with the wrapped model."""

    # Shared across requests: the model is loaded once at import time.
    model_wrapper = ModelWrapper()

    @MAX_API.doc('predict')
    @MAX_API.expect(input_parser)
    @MAX_API.marshal_with(predict_response)
    def post(self):
        """Predict audio classes from input data"""
        result = {'status': 'error'}

        args = input_parser.parse_args()
        audio_data = args['audio'].read()

        # clean up from earlier runs
        if os.path.exists("/audio.wav"):
            os.remove("/audio.wav")

        # Guard clause: reject anything that does not look like a WAV upload.
        if '.wav' not in str(args['audio']):
            e = BadRequest()
            e.data = {'status': 'error', 'message': 'Invalid file type/extension'}
            raise e

        # FIX: context manager closes the handle even if the write fails
        # (the original left the file object open).
        with open("/audio.wav", "wb") as file:
            file.write(audio_data)

        # Getting the predictions
        try:
            preds = self.model_wrapper.predict("/audio.wav", args['start_time'])
        except ValueError:
            e = BadRequest()
            e.data = {'status': 'error', 'message': 'Invalid start time: value outside audio clip'}
            raise e
        finally:
            # FIX: remove the scratch file on failure too, not only on success.
            if os.path.exists("/audio.wav"):
                os.remove("/audio.wav")

        # Aligning the predictions to the required API format
        result['predictions'] = [{'label_id': p[0], 'label': p[1], 'probability': p[2]}
                                 for p in preds]
        result['status'] = 'ok'
        return result
| 34.875 | 120 | 0.644763 |
acec30afe74aca51170de9fa754f3cf87b88350c | 7,285 | py | Python | PyFlow/Packages/PyFlowBase/FunctionLibraries/MathAbstractLib.py | liaokongVFX/PyFlow | 337462746acf087432f4dd3248e3a1349c3a3c79 | [
"Apache-2.0"
] | null | null | null | PyFlow/Packages/PyFlowBase/FunctionLibraries/MathAbstractLib.py | liaokongVFX/PyFlow | 337462746acf087432f4dd3248e3a1349c3a3c79 | [
"Apache-2.0"
] | null | null | null | PyFlow/Packages/PyFlowBase/FunctionLibraries/MathAbstractLib.py | liaokongVFX/PyFlow | 337462746acf087432f4dd3248e3a1349c3a3c79 | [
"Apache-2.0"
] | 1 | 2019-08-21T07:36:20.000Z | 2019-08-21T07:36:20.000Z | ## Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from PyFlow.Core import(
FunctionLibraryBase,
IMPLEMENT_NODE
)
from PyFlow.Core.Common import *
class MathAbstractLib(FunctionLibraryBase):
    """Basic math nodes (comparisons, arithmetic, range mapping) for the
    PyFlow node graph. Inputs are abstract "AnyPin" values constrained to
    share a single concrete type (``"constraint": "1"``)."""

    def __init__(self, packageName):
        super(MathAbstractLib, self).__init__(packageName)

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': ["=", "operator"]})
    ## Is a equal b
    def isEqual(a=("AnyPin", None, {"constraint": "1"}),
                b=("AnyPin", None, {"constraint": "1"})):
        """Is a equal b."""
        return a == b

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': [">", "operator"]})
    def isGreater(a=("AnyPin", None, {"constraint": "1"}),
                  b=("AnyPin", None, {"constraint": "1"}),
                  result=("Reference", ("BoolPin", False))):
        """Operator **>**."""
        return a > b

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': [">", "operator"]})
    def isGreaterOrEqual(a=("AnyPin", None, {"constraint": "1"}),
                         b=("AnyPin", None, {"constraint": "1"}),
                         result=("Reference", ("BoolPin", False))):
        """Operator **>=**."""
        return a >= b

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': ["<", "operator"]})
    def isLess(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"}),
               result=("Reference", ("BoolPin", False))):
        """Operator **<**."""
        return a < b

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': ["<", "operator"]})
    def isLessOrEqual(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"})):
        """Operator **<=**."""
        return a <= b

    @staticmethod
    @IMPLEMENT_NODE(returns=(("AnyPin", None, {"constraint": "1"})), meta={'Category': 'Math|Basic', 'Keywords': ['+', 'append', "sum", "operator"]})
    def add(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"})):
        """Operator **+**."""
        return a + b

    @staticmethod
    @IMPLEMENT_NODE(returns=(("AnyPin", None, {"constraint": "1"})), meta={'Category': 'Math|Basic', 'Keywords': ['-', "operator", "minus"]})
    def subtract(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"})):
        """Operator **-**."""
        return a - b

    @staticmethod
    @IMPLEMENT_NODE(returns=("AnyPin", None, {"constraint": "1"}), meta={'Category': 'Math|Basic', 'Keywords': ['/', "divide", "operator"]})
    def divide(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"})):
        """Operator **/**."""
        return a / b

    @staticmethod
    @IMPLEMENT_NODE(returns=(("AnyPin", None, {"constraint": "1"})), meta={'Category': 'Math|Basic', 'Keywords': ['*', "multiply", "operator"]})
    def multiply(a=("AnyPin", None, {"constraint": "1"}), b=("AnyPin", None, {"constraint": "1"})):
        """Operator *****."""
        return a * b

    @staticmethod
    @IMPLEMENT_NODE(returns=("BoolPin", False), meta={'Category': 'Math|Basic', 'Keywords': ["in", "range"]})
    def inRange(Value=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}),
                RangeMin=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}),
                RangeMax=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}),
                InclusiveMin=("BoolPin", False),
                InclusiveMax=("BoolPin", False)):
        """Returns true if value is between Min and Max (V >= Min && V <= Max) If InclusiveMin is true, value needs to be equal or larger than Min,\
        else it needs to be larger If InclusiveMax is true, value needs to be smaller or equal than Max, else it needs to be smaller
        """
        return ((Value >= RangeMin) if InclusiveMin else (Value > RangeMin)) and ((Value <= RangeMax) if InclusiveMax else (Value < RangeMax))

    @staticmethod
    @IMPLEMENT_NODE(returns=('FloatPin', 0.0), meta={'Category': 'Math|Basic', 'Keywords': []})
    def mapRangeClamped(Value=("FloatPin", 0.0),
                        InRangeA=("FloatPin", 0.0),
                        InRangeB=("FloatPin", 0.0),
                        OutRangeA=("FloatPin", 0.0),
                        OutRangeB=("FloatPin", 0.0)):
        """Returns Value mapped from one range into another where the Value is clamped to the Input Range.\
        (e.g. 0.5 normalized from the range 0->1 to 0->50 would result in 25)"""
        # GetRangePct/clamp/lerp come from PyFlow.Core.Common (star import above).
        ClampedPct = clamp(GetRangePct(InRangeA, InRangeB, Value), 0.0, 1.0)
        return lerp(OutRangeA, OutRangeB, ClampedPct)

    @staticmethod
    @IMPLEMENT_NODE(returns=('FloatPin', 0.0), meta={'Category': 'Math|Basic', 'Keywords': []})
    def mapRangeUnclamped(Value=("FloatPin", 0.0),
                          InRangeA=("FloatPin", 0.0),
                          InRangeB=("FloatPin", 0.0),
                          OutRangeA=("FloatPin", 0.0),
                          OutRangeB=("FloatPin", 0.0)):
        """Returns Value mapped from one range into another WITHOUT clamping to the Input Range.\
        (e.g. 0.5 normalized from the range 0->1 to 0->50 would result in 25)"""
        return lerp(OutRangeA, OutRangeB, GetRangePct(InRangeA, InRangeB, Value))

    @staticmethod
    @IMPLEMENT_NODE(returns=("FloatPin", None), meta={'Category': 'Math|Basic', 'Keywords': ['clamp']})
    def clamp(i=("FloatPin", None),
              imin=("FloatPin", 0.0),
              imax=("FloatPin", 0)):
        """Clamp i into [imin, imax]."""
        # Resolves to the module-level clamp() from PyFlow.Core.Common: the
        # staticmethod name is a class attribute and is not in function scope.
        return clamp(i, imin, imax)

    @staticmethod
    @IMPLEMENT_NODE(returns=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}), meta={'Category': 'Math|Basic', 'Keywords': ["operator"]})
    def modulo(a=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}),
               b=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]})):
        """Modulo (A % B)."""
        return a % b

    @staticmethod
    @IMPLEMENT_NODE(returns=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]}), meta={'Category': 'Math|Basic', 'Keywords': []})
    def abs(inp=("AnyPin", None, {"constraint": "1", "supportedDataTypes": ["FloatPin", "IntPin"]})):
        """Return the absolute value of a number."""
        # Calls the builtin abs(); the staticmethod name does not shadow it
        # inside the function body.
        return abs(inp)
| 50.590278 | 171 | 0.577351 |
acec30bfe3dd1ac900fc19ca04ab2bf34cc4cf6c | 15,791 | py | Python | src/EC_MS/Datapoints.py | ScottSoren/EC_MS | f7c12f9f2180950ca929d6da9ca2767479a2e106 | [
"MIT"
] | 8 | 2018-06-15T06:01:18.000Z | 2022-01-27T11:46:38.000Z | src/EC_MS/Datapoints.py | ScottSoren/EC_MS | f7c12f9f2180950ca929d6da9ca2767479a2e106 | [
"MIT"
] | 4 | 2020-02-25T18:32:21.000Z | 2020-05-20T15:16:54.000Z | src/EC_MS/Datapoints.py | ScottSoren/EC_MS | f7c12f9f2180950ca929d6da9ca2767479a2e106 | [
"MIT"
] | 4 | 2019-07-02T12:57:41.000Z | 2022-01-13T09:30:27.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 28 16:13:26 2017
To have all the tools used for the Errorbars script in versitile,
easily accessible form.
@author: scott
"""
from matplotlib import pyplot as plt
import numpy as np
# one could argue that the following two scripts should be in this module, but instead:
# get_datapoints is in Integrate_Signals.py
# plot_datapoints is in Plottying.py
# import sys
# sys.exit()
def fill_with(quantitydict, value):
    """
    generates a (multilayer) dictionary with the same keys as an input
    dictionary, but values replaced by value

    Nested dictionaries are recursed into. Mutable fill values (list/dict)
    are copied per key so the entries do not share state.
    """
    filleddict = {}
    for key, val in quantitydict.items():
        if type(val) is dict:
            # Bug fix: recurse with the requested fill value. The original
            # called get_empty(val) here, which filled every nested level
            # with empty lists regardless of `value`.
            filleddict[key] = fill_with(val, value)
        elif type(value) in [list, dict]:
            filleddict[key] = value.copy()  # otherwise they get linked...
        else:
            filleddict[key] = value
    return filleddict
def get_empty(quantitydict):
    """
    generates a (multilayer) dictionary with the same keys as an input
    dictionary, but values replaced by empty lists

    Thin wrapper around fill_with(); each key gets its own fresh list.
    """
    return fill_with(quantitydict, value=[])
def add_datapoint(source, target, index=None, add_key=True):
    """
    Append the values of a (multilayer) source dictionary onto the matching
    lists of a target dictionary, mutating `target` in place.

    If `index` is given, source values are indexed with it first (falling
    back to the whole value on IndexError). Keys missing from `target` are
    created when `add_key` is True; iterable values are concatenated,
    scalars are wrapped in single-element lists, and numpy arrays are
    extended with np.append.
    """
    # print(str(source))
    for key, value in source.items():
        if type(value) is dict:
            # Recurse into nested dicts, creating the target level if needed.
            if key not in target.keys() and add_key:
                target[key] = {}
            add_datapoint(value, target[key], index, add_key=add_key)
            continue
        if index is None:
            v = value
        else:
            # print(f'key={key}, value={value}, index={index}') # debugging
            try:
                v = value[index]
            except IndexError:
                v = value
        if key in target.keys():
            # print('key in target.keys()') # debugging
            # print(f'target={target}') # debugging
            if type(target[key]) is np.ndarray:
                target[key] = np.append(target[key], v)
            elif hasattr(v, "__iter__"):
                target[key] += v
            else:
                target[key] += [v]
            # print('adding ' + str(value[index]) + ' to ' + str(key))
        elif add_key:
            # print('adding key') # debugging
            if hasattr(v, "__iter__"):
                target[key] = v.copy()  # this .copy() is important
            else:
                target[key] = [v]
def datapoints_to_values(
    datapoints, X="all", X_str="V", rnd=2, avoid="blank", verbose=True
):
    """
    Reorganizes the datapoints dictionary, such that
    the value indicated by X_str is the outer organizational level. A list of
    desired X_str to include in values can be input as X. Numerical values
    are considered equal if equal to rnd decimals.
    The original outermost organizational level (i.e., sample) is lost.

    Datasets whose name contains any of the `avoid` substrings are skipped.
    """
    if verbose:
        print("\n\nfunction 'datapoints_to_values\ at your service!\n")
    if type(avoid) is str:
        avoid = [avoid]
    # Template with the same key structure as one dataset, used to
    # initialize each new x entry.
    empty = get_empty(list(datapoints.values())[0])
    values = {}
    for (name, data) in datapoints.items():
        if type(name) == type(avoid) and len([a for a in avoid if a in name]) > 0:
            continue
        if verbose:
            print("adding {} to values based on ".format(name) + X_str)
        try:
            x_vec = data[X_str]
            x_vec[0]  # probe: wrap scalars in a list below
        except IndexError:
            x_vec = [data[X_str]]
        for i, x in enumerate(x_vec):
            if rnd is None:
                x_round = x
            else:
                try:
                    x_round = float(np.round(x, rnd))
                except TypeError:  # if it's not a numerical value, just move on.
                    x_round = x
            # (removed a stray unconditional `print(X)` debug statement here)
            if X == "all":
                if x_round not in values:
                    values[x_round] = get_empty(empty)
            elif x_round not in X:
                print(str(x_round) + " not in potentials")
                continue
            elif x_round not in values:
                # Bug fix: when an explicit X list is given, the entry still
                # has to be initialized before datapoints can be added to it
                # (the original raised KeyError on the line below).
                values[x_round] = get_empty(empty)
            add_datapoint(source=data, target=values[x_round], index=i)
    if verbose:
        print("\nfunction 'points_to_values' finished!\n\n")
    return values
def datapoints_to_datalist(datapoints, avoid=[], verbose=True):
    """Removes the outer layer of the datapoints dictionary, i.e.
    sample. The second organizational level (i.e., molecule) becomes the
    outermost level. Lists and arrays are appended.
    In other words, it just lumps all samples together.

    Samples whose name contains any of the `avoid` substrings are skipped.
    (The default `avoid=[]` is never mutated, only rebound, so the shared
    mutable default is harmless here.)
    """
    if verbose:
        print("\n\nfunction 'datapoints_to_datalists' at your service!\n")
    if type(avoid) is str:
        avoid = [avoid]
    datalists = {}
    for name, point in datapoints.items():
        if len([a for a in avoid if a in name]) > 0:
            print("skipping " + name)
            continue
        if verbose:
            print("working on " + name)
        add_datapoint(point, datalists, add_key=True)  # should be just that simple
    if verbose:
        print("\nfunction 'datapoints_to_datalists' finished!\n\n")
    return datalists
def values_to_stats(values, logmean=False):
    """
    Replace every numeric list/array in a (multilayer) dictionary with the
    two-element list [mean, standard_deviation]. Nested dictionaries are
    processed recursively; entries of any other type are silently dropped.

    When `logmean` is True, statistics are taken on a log scale (geometric
    mean, with the spread mapped back to linear units).
    """
    def _stat_pair(data):
        # [center, spread] for one numeric sequence.
        if logmean:
            log_data = np.log(data)
            center = np.exp(np.mean(log_data))
            spread = np.exp(np.log(center) + np.std(log_data)) - center
        else:
            center = np.mean(data)
            spread = np.std(data)
        return [center, spread]

    result = {}
    for key, entry in values.items():
        if type(entry) is dict:
            result[key] = values_to_stats(entry, logmean=logmean)
        elif type(entry) in (list, np.ndarray):
            result[key] = _stat_pair(entry)
    return result
def get_mlu(stat, logmean=False):  # mlu stands for: mean, [lower, upper]
    """
    Unpack a stats entry into (mean, [lower, upper]) error-bar bounds.

    Scalars, single-element stats, and stats with zero spread return
    (value, None). A length-2 stat is [mean, std]; a length-3 stat is
    [lower, mean, upper]. With `logmean` the std is interpreted on a log
    scale. Any other length raises ValueError.
    """
    # Scalars have no len() -> no error bar.
    try:
        n = len(stat)
    except TypeError:
        return stat, None
    if n < 2:
        return stat, None
    if stat[1] == 0:
        return stat[0], None
    if n == 2:
        mean, std = stat[0], stat[1]
        if logmean:
            log_mean = np.log(mean)
            log_std = np.log((std + mean) / mean)
            lower = np.exp(log_mean - log_std)
            upper = np.exp(log_mean + log_std)
        else:
            lower = mean - std
            upper = mean + std
    elif n == 3:
        lower, mean, upper = stat[0], stat[1], stat[2]
    else:
        print("need stats of length 2 or 3 for errorbars")
        raise ValueError
    return mean, [lower, upper]
def plot_errorbar(
    xstat,
    ystat,
    # ax=plt.gca(),
    # This was generating the blank figure!!!
    # Don't put plt.gca() in a function default!
    ax="current",  # do it the normal way instead :)
    logmean=False,
    marker=".",
    color="k",
    markersize=None,
    xfactor=1,
    yfactor=1,
    specs=None,
    linespecs=None,
    **kwargs,
):
    """Plot one data point with optional x/y error bars on `ax`.

    xstat/ystat are stats entries as understood by get_mlu(). Extra kwargs
    are merged into `specs` and forwarded to the point's plot call;
    `linespecs` is forwarded to the error-bar lines.
    """
    # Bug fix: the original used mutable `{}` defaults and then mutated them
    # (specs.update(kwargs)), so kwargs leaked between calls. Use fresh
    # per-call dicts instead.
    specs = dict(specs) if specs is not None else {}
    linespecs = dict(linespecs) if linespecs is not None else {}
    specs.update(kwargs)  # so that kwargs get fed to plot
    if ax == "current":
        ax = plt.gca()
    elif ax == "new":
        ax = plt.figure().add_subplot(111)
    x, x_lu = get_mlu(xstat, logmean)
    y, y_lu = get_mlu(ystat, logmean)
    if marker is None and "marker" in specs:
        marker = specs.pop("marker")
    elif x_lu is None and y_lu is None:
        # Plain point with no error bars: drop the extra plot specs.
        specs = {}
    if markersize is None:
        if marker == ".":
            markersize = 10
        else:
            markersize = 5
    ax.plot(
        x * xfactor,
        y * yfactor,
        marker=marker,
        markersize=markersize,
        color=color,
        **specs,
    )
    if x_lu is not None:
        # Bug fix: apply xfactor to the error-bar endpoints as well, so the
        # bar stays attached to the (scaled) data point.
        ax.plot(
            [x_lu[0] * xfactor, x_lu[1] * xfactor],
            [y * yfactor, y * yfactor],
            "|-",
            color=color,
            **linespecs,
        )
    if y_lu is not None:
        ax.plot(
            [x * xfactor, x * xfactor],
            [y_lu[0] * yfactor, y_lu[1] * yfactor],
            "_-",
            color=color,
            **linespecs,
        )
def plot_errorbars_y(stats, colors=None, ax="new", marker=None, factor=1, **kwargs):
    """Plot a whole stats dictionary {x: ystat} as error-bar points.

    If `colors` is a dict, each x entry is itself a dict of quantities and
    one point is plotted per (quantity, color) pair. Extra kwargs are
    forwarded to plot_errorbar(). Returns the axis plotted on.
    """
    if ax == "new":
        fig, ax = plt.subplots()
    for x, stat in stats.items():
        if colors is None:
            plot_errorbar(
                ax=ax, xstat=x, ystat=stat, marker=marker, yfactor=factor, **kwargs,
            )
        else:
            for key, color in colors.items():
                ystat = stat[key]
                plot_errorbar(
                    ax=ax,
                    xstat=x,
                    ystat=ystat,
                    color=color,
                    marker=marker,
                    yfactor=factor,
                    **kwargs,
                )
    return ax
def plot_errorbars_y_old(
    stats,
    x="outer",
    ax="new",
    label="",
    logmean=False,
    Xrange=None,
    verbose=True,
    outercall=True,
    color="k",
    colors=None,
    specs=None,
    factor=1,
):
    """Legacy recursive error-bar plotter kept for backwards compatibility.

    Walks a (multilayer) stats dictionary; non-dict leaves are plotted via
    plot_errorbar(), dict levels recurse with the key as x ("outer") or with
    per-key colors/Xrange lookups. Returns the axis plotted on.

    NOTE(review): the recursion below calls plot_errorbars_y (the new
    function), not plot_errorbars_y_old itself — verify this is intended.
    """
    if verbose and outercall:
        print("\n\nfunction 'plot_errorbars_y' at your service!\n")
    if ax == "new":
        fig1 = plt.figure()
        ax = fig1.add_subplot(111)
    # Leaf: a stats entry rather than a nested dict -> plot it directly,
    # restricted to Xrange when one is given.
    if type(stats) is not dict:
        if Xrange is None or Xrange[0] <= x <= Xrange[1]:
            plot_errorbar(
                x, stats, ax=ax, color=colors, logmean=logmean, yfactor=factor
            )
        return ax
    # oh, shit, how do I reconcile the following with my desire to use specs{}
    # instead of just a color for plotting functions? I just won't for now.
    if colors is None:
        colors = color
    if x not in ["outer", "inner"] and type(colors) is not dict:
        colors = fill_with(stats, color)
    # NOTE(review): this assigns to `colors` again instead of `Xrange` —
    # looks like a copy-paste slip that clobbers the colors computed above;
    # confirm before relying on this legacy path.
    if x not in ["outer", "inner"] and type(Xrange) is not dict:
        colors = fill_with(stats, Xrange)
    for key, val in stats.items():
        if verbose:
            print("working on " + label + str(key))
        if x == "outer":
            # Dictionary keys serve as the x values.
            x_val = key
            color_val = colors
            Xrange_val = Xrange
        elif x == "inner":
            print("errorbars: x='inner' not yet implemented.")
            pass
        else:
            x_val = x
            try:
                color_val = colors[key]
            except KeyError:
                if verbose:
                    print("skipping " + key)
                continue
            if Xrange is None:
                Xrange_val = None  # 17H14
            else:
                Xrange_val = Xrange[key]
        plot_errorbars_y(
            val,
            x=x_val,
            ax=ax,
            colors=color_val,
            Xrange=Xrange_val,
            label=label + str(key) + "_",
            outercall=False,
            logmean=logmean,
            factor=factor,
            specs=specs,
        )
    if verbose and outercall:
        print("\nfunction 'plot_errorbars_y' finished!\n\n")
    return ax
def get_from_key(item, key, reduced_key=None, delimiter="."):
    """
    Flexible lookup helper for multilayer dicts: non-dict items pass
    through unchanged, and a failed lookup is retried with the key
    truncated at the first delimiter (e.g. 'Pt.ox' falls back to 'Pt').
    An explicit `reduced_key` overrides the truncation.
    """
    if type(item) is not dict:
        return item
    if key in item:
        return item[key]
    fallback = reduced_key if reduced_key is not None else key.split(delimiter)[0]
    return item[fallback]
def plot_datalist_fit(
    datalist,
    colors,
    X_str="V",
    Xrange="all",
    keys=None,
    txt=None,
    ax="new",
    specs={},
    results={},
    X=None,
    logy=False,
    logx=False,
    label="",
    verbose=True,
    outercall=True,
):
    """
    Some parts of this function, particularly the writing and plotting bit,
    are just for tafel. Otherwise its as general as possible, to an extent
    that may be a bit ridiculous...

    Recursively walks `datalist`, linearly fitting each leaf quantity
    against X over Xrange (optionally in log space), plotting the fit lines
    on `ax` and writing tafel-style fit parameters to `txt`. Returns
    (results, ax), where results maps keys to np.polyfit coefficients.

    NOTE(review): `specs={}` and `results={}` are mutable defaults; results
    is written into, so repeated calls without an explicit `results` share
    state — confirm callers rely on / tolerate this.
    """
    if verbose and outercall:
        print("\n\nfunction 'plot_datalist_fit' at your service!\n")
    if type(datalist) is not dict:
        print("could't find data for " + label)
        return
    if ax == "new":
        ax = plt.figure().add_subplot(111)
    # A string `txt` is treated as a path and opened once, at the outer call.
    if type(txt) is str:
        txt = open(txt, "w")
    if keys is None:
        if type(Xrange) is dict:
            keys = (
                Xrange.keys()
            )  # for multiple vspans for a given quantity, just put a '.' in in the key
        elif type(colors) is dict:
            keys = colors.keys()
        elif type(datalist) is dict:
            keys = datalist.keys()
    if X_str in datalist.keys():
        X = datalist[X_str]
    for key in keys:
        if key == X_str:
            continue
        if verbose:
            print("working on: " + label + key)
        xspan = get_from_key(
            Xrange, key
        )  # so I'm flexible in how deep I define vspan, color, and data.
        color = get_from_key(colors, key)
        data = get_from_key(datalist, key)
        # Nested level (or X not yet known): recurse one layer deeper.
        if type(color) is dict or type(xspan) is dict or X is None:
            results[key] = {}
            plot_datalist_fit(
                data,
                colors=color,
                X_str=X_str,
                Xrange=xspan,
                txt=txt,
                ax=ax,
                specs=specs,
                X=X,
                logx=logx,
                logy=logy,
                label=key + "_",
                results=results[key],
                verbose=verbose,
                outercall=False,
            )
            continue
        y = np.array(data)
        x = np.array(X)
        # Restrict the fit to the requested x window.
        if not xspan == "all":
            try:
                I_keep = [
                    I for (I, x_I) in enumerate(x) if x_I > xspan[0] and x_I < xspan[1]
                ]
                x = x[I_keep]
                y = y[I_keep]
            except:  # NOTE(review): bare except — re-raises after printing context.
                print(xspan)
                print(x)
                raise
        if logy:
            y = np.log(y)
        if logx:
            x = np.log(x)
        # First-degree polynomial fit: y = a*x + b.
        p1 = np.polyfit(x, y, deg=1)
        a = p1[0]  # slope
        b = p1[1]  # intercept
        if logy:
            ts = np.log(10) / a  # tafel slope
        if txt is not None:
            txt.write("---\n" + label + key + " on interval " + str(xspan) + "\n")
            txt.write(
                "ln("
                + label
                + key
                + "/[nmol]) = "
                + str(b)
                + " + "
                + str(a)
                + " * (V vs RHE / [V])\n"
            )
            if logy:
                txt.write("\ttafel slope = " + str(ts * 1e3) + " mV/decade\n")
        if ax is not None:
            # Draw the fit line across the fitted window only.
            x_fit = np.array(xspan)
            y_fit = b + a * x_fit
            if logy:
                y_fit = np.exp(y_fit)
            ax.plot(x_fit, y_fit, color=color, label=label + key, **specs)
        results[key] = p1
    # Only the outermost call closes the shared file handle.
    if outercall and txt is not None:
        txt.close()
    if verbose and outercall:
        print("\nfunction 'plot_datalist_fit' finished!\n\n")
    return results, ax
| 29.405959 | 87 | 0.515737 |
acec31f3544a3ff250d0e7ad89aa2a2cddcbedfb | 717 | py | Python | split.py | nigelab/sklearn-test | baa6a7a01dcd669697d6375c9351f62801c266fc | [
"MIT"
] | null | null | null | split.py | nigelab/sklearn-test | baa6a7a01dcd669697d6375c9351f62801c266fc | [
"MIT"
] | null | null | null | split.py | nigelab/sklearn-test | baa6a7a01dcd669697d6375c9351f62801c266fc | [
"MIT"
] | null | null | null | from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
def demo_load_iris():
    """
    sklearn datasets usage demo: load the iris Bunch, print its contents,
    and split it into train/test sets (80/20, fixed random_state).
    :return: None
    """
    # load iris datasets
    iris = load_iris()
    # load_iris() returns a Bunch (dict-like with attribute access)
    print("\nload iris:\n", iris)
    print("\nload iris desc:\n", iris.DESCR)
    print("\nload iris feature names:\n", iris.feature_names)
    print("\nload iris data:\n", iris['data'], iris.data.shape)
    # datasets split: 20% held out for testing, seeded for reproducibility
    x_train, x_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.2, random_state=22)
    print("train features: \n", x_train, x_train.shape)
    return None


# Run the demo only when executed as a script.
if __name__ == "__main__":
    demo_load_iris()
| 25.607143 | 63 | 0.668061 |
acec3293ab1c2f799a8b27454a151fc4f4496ac3 | 824 | py | Python | tutorial/mysite/urls.py | ogurechik/Django_docs_examples | 6b17a2eabb1f562db3858d7f63be33940d37882b | [
"MIT"
] | null | null | null | tutorial/mysite/urls.py | ogurechik/Django_docs_examples | 6b17a2eabb1f562db3858d7f63be33940d37882b | [
"MIT"
] | null | null | null | tutorial/mysite/urls.py | ogurechik/Django_docs_examples | 6b17a2eabb1f562db3858d7f63be33940d37882b | [
"MIT"
] | null | null | null | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.urls import include
urlpatterns = [
    # Delegate all /polls/ URLs to the polls app's URLconf.
    path('polls/', include('polls.urls')),
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
]
| 32.96 | 77 | 0.706311 |
acec329d5f82b4cd2795011bbbfb5e50fae989cf | 4,172 | py | Python | src/test/green_view_join_v1_test.py | DigitalGeographyLab/hope-graph-builder | cc298c143162e4bd3bf3fa15480e622de09b5f1b | [
"MIT"
] | null | null | null | src/test/green_view_join_v1_test.py | DigitalGeographyLab/hope-graph-builder | cc298c143162e4bd3bf3fa15480e622de09b5f1b | [
"MIT"
] | 2 | 2020-08-05T10:29:39.000Z | 2021-02-09T07:58:40.000Z | src/test/green_view_join_v1_test.py | DigitalGeographyLab/hope-graph-tools | cc298c143162e4bd3bf3fa15480e622de09b5f1b | [
"MIT"
] | null | null | null | import sys
sys.path.append('..')
sys.path.append('../green_view_join_v1')
from typing import Dict, List
import pytest
import pandas as pd
from shapely.geometry import LineString
from geopandas import GeoDataFrame
from igraph import Graph
from common.logger import Logger
import common.igraph as ig_utils
from common.igraph import Edge as E
from green_view_join_v1.green_view_join_v1 import (
get_gsv_gvi_list_by_way_id, load_gsv_gvi_gdf,
get_mean_edge_gsv_gvi, get_mean_gsv_gvi_by_way_id,
update_gvi_attributes_to_graph)
# Shared logger passed into the functions under test.
log = Logger()


@pytest.fixture
def graph() -> Graph:
    """Test graph from disk, with way IDs copied from the igraph edge IDs."""
    g = ig_utils.read_graphml(r'data/test_graph.graphml')
    g.es[E.id_way.value] = list(g.es[E.id_ig.value])
    yield g


@pytest.fixture
def edge_gdf(graph) -> GeoDataFrame:
    """Edge GeoDataFrame (way ID + length) extracted from the test graph."""
    yield ig_utils.get_edge_gdf(graph, attrs=[E.id_way, E.length])


@pytest.fixture
def gsv_gvi_gdf() -> GeoDataFrame:
    """Greenery (GSV GVI) point layer loaded from the test geopackage."""
    yield load_gsv_gvi_gdf(r'data/greenery_points.gpkg')


@pytest.fixture
def gvi_list_by_way_id(edge_gdf, gsv_gvi_gdf) -> Dict[int, List[float]]:
    """GVI point values joined to edges, keyed by way ID."""
    yield get_gsv_gvi_list_by_way_id(log, edge_gdf, gsv_gvi_gdf)


@pytest.fixture
def mean_gsv_gvi_by_way_id(gvi_list_by_way_id, edge_gdf) -> Dict[int, List[float]]:
    """Mean GSV GVI per way ID derived from the joined point lists."""
    yield get_mean_gsv_gvi_by_way_id(log, gvi_list_by_way_id, edge_gdf)


@pytest.fixture
def low_veg_share_by_way_id() -> Dict[int, List[float]]:
    """Low-vegetation share per way ID loaded from a CSV test fixture."""
    df = pd.read_csv(r'data/edge_subset_low_veg_shares.csv')
    way_ids = list(df['id_way'])
    low_veg_shares = list(df['low_veg_share'])
    yield dict(zip(way_ids, low_veg_shares))


@pytest.fixture
def high_veg_share_by_way_id() -> Dict[int, List[float]]:
    """High-vegetation share per way ID loaded from a CSV test fixture."""
    df = pd.read_csv(r'data/edge_subset_high_veg_shares.csv')
    way_ids = list(df['id_way'])
    high_veg_shares = list(df['high_veg_share'])
    yield dict(zip(way_ids, high_veg_shares))
def test_get_gsv_gvi_list_by_edges(gvi_list_by_way_id):
    """The join yields a non-empty list of float GVI values per way ID."""
    assert isinstance(gvi_list_by_way_id, dict)
    # Expected count for the bundled test dataset.
    assert len(gvi_list_by_way_id) == 1808
    for way_id, gvi_list in gvi_list_by_way_id.items():
        assert isinstance(way_id, int)
        assert isinstance(gvi_list, list)
        assert len(gvi_list) > 0
        for gvi in gvi_list:
            assert isinstance(gvi, float)


def test_calculate_mean_edge_gsv_gvi():
    """Mean is returned for short-enough edges, None beyond the length cap."""
    gvi_list = [0.5, 1, 0]
    m_gvi = get_mean_edge_gsv_gvi(10, gvi_list)
    assert m_gvi == 0.5
    m_gvi = get_mean_edge_gsv_gvi(5, gvi_list)
    assert m_gvi == 0.5
    m_gvi = get_mean_edge_gsv_gvi(40, gvi_list)
    assert m_gvi == 0.5
    m_gvi = get_mean_edge_gsv_gvi(70, gvi_list)
    assert m_gvi == 0.5
    # Edge too long for the available sample count -> no mean.
    m_gvi = get_mean_edge_gsv_gvi(80, gvi_list)
    assert m_gvi is None


def test_mean_get_mean_gsv_gvi_by_way_id(mean_gsv_gvi_by_way_id):
    """Per-way means are floats; some ways drop out relative to the raw join."""
    for way_id, mean_gsv_gvi in mean_gsv_gvi_by_way_id.items():
        assert isinstance(way_id, int)
        assert isinstance(mean_gsv_gvi, float)
    assert len(mean_gsv_gvi_by_way_id) == 1718


def test_join_gvi_attributes_to_graph(
    graph,
    mean_gsv_gvi_by_way_id,
    low_veg_share_by_way_id,
    high_veg_share_by_way_id
):
    """All GVI attributes land on the graph with the expected types and ranges."""
    updated = update_gvi_attributes_to_graph(
        graph,
        mean_gsv_gvi_by_way_id,
        low_veg_share_by_way_id,
        high_veg_share_by_way_id
    )
    for e in updated.es:
        attrs = e.attributes()
        # Edges without a real geometry should carry None for these attrs.
        expected_type = float if isinstance(attrs[E.geometry.value], LineString) else type(None)
        assert isinstance(attrs[E.gvi_gsv.value], (float, type(None)))
        assert isinstance(attrs[E.gvi_low_veg_share.value], expected_type)
        assert isinstance(attrs[E.gvi_high_veg_share.value], expected_type)
        assert isinstance(attrs[E.gvi_comb_gsv_veg.value], expected_type)
        assert isinstance(attrs[E.gvi_comb_gsv_high_veg.value], expected_type)
    gvi_comb_gsv_veg = [gvi for gvi in list(updated.es[E.gvi_comb_gsv_veg.value]) if gvi]
    gvi_comb_gsv_high_veg = [gvi for gvi in list(updated.es[E.gvi_comb_gsv_high_veg.value]) if gvi]
    # Expected counts and value bounds for the bundled test dataset.
    assert len(gvi_comb_gsv_veg) == 3456
    assert len(gvi_comb_gsv_high_veg) == 3240
    assert max(gvi_comb_gsv_veg) == 0.9
    assert max(gvi_comb_gsv_high_veg) == 0.85
    assert min(gvi_comb_gsv_veg) == 0.01
    assert min(gvi_comb_gsv_high_veg) == 0.01
| 34.479339 | 99 | 0.736817 |
acec331fc965ced3f73f9740049fe4a0b85ff4b9 | 4,178 | py | Python | data_structures_and_algorithms/MyString.py | raymondhfeng/raymondhfeng.github.io | d39348ca68e8f89a72bcee5f2fba1b3d0c3e42bb | [
"MIT"
] | null | null | null | data_structures_and_algorithms/MyString.py | raymondhfeng/raymondhfeng.github.io | d39348ca68e8f89a72bcee5f2fba1b3d0c3e42bb | [
"MIT"
] | null | null | null | data_structures_and_algorithms/MyString.py | raymondhfeng/raymondhfeng.github.io | d39348ca68e8f89a72bcee5f2fba1b3d0c3e42bb | [
"MIT"
class String:
    """Mutable string wrapper backed by a list of characters.

    Implements classic string exercises: uniqueness checks, in-place
    reversal, space escaping, run-length compression, and substring /
    rotation tests.
    """

    def __init__(self, initial):
        # Store characters in a list so they can be mutated in place.
        self.str = list(initial)

    def unique(self):
        """Return True if all characters are unique (O(n), uses a set)."""
        seen = set()
        for elem in self.str:
            if elem in seen:
                return False
            seen.add(elem)
        return True

    def unique_no_data_structures(self):
        """Return True if all characters are unique, with no extra storage (O(n^2))."""
        for i in range(len(self.str)):
            for j in range(i + 1, len(self.str)):
                if self.str[i] == self.str[j]:
                    return False
        return True

    def reverse(self):
        """Reverse the string in place by swapping symmetric pairs."""
        for i in range(len(self.str) // 2):
            right = len(self.str) - i - 1
            self.str[i], self.str[right] = self.str[right], self.str[i]

    def __str__(self):
        return "".join(self.str)

    def replace_spaces(self):
        """Replace every ' ' with '%20', filling a pre-sized buffer from the back."""
        count = sum(1 for elem in self.str if elem == ' ')
        new_length = len(self.str) + 2 * count
        result = [None] * new_length
        position = new_length - 1
        for i in range(len(self.str)):
            char = self.str[len(self.str) - i - 1]
            if char == ' ':
                result[position - 2:position + 1] = ['%', '2', '0']
                position -= 3
            else:
                result[position] = char
                position -= 1
        self.str = result

    def is_permutation_of(self, other):
        """Return True if `other` (any character sequence) is a permutation of this string."""
        if len(self.str) != len(other):
            return False
        counts = {}
        for c in self.str:
            counts[c] = counts.get(c, 0) + 1
        # Equal lengths, so decrementing below zero or hitting an unseen
        # character proves the multisets differ.
        for c in other:
            if c not in counts:
                return False
            counts[c] -= 1
            if counts[c] < 0:
                return False
        return True

    def compress(self):
        """Run-length encode in place (e.g. 'aabcccccaaa' -> 'a2b1c5a3').

        Leaves the string unchanged when the encoding would not be shorter.
        (Removed a leftover debug print of the intermediate buffer.)
        """
        # First pass: two output slots (char + count) per run of characters.
        slots = 0
        prev = None
        for elem in self.str:
            if elem != prev:
                prev = elem
                slots += 2
        if slots >= len(self.str):
            return
        result = []
        character = self.str[0]
        counter = 0
        for elem in self.str:
            if elem == character:
                counter += 1
            else:
                result.append(character)
                result.append(str(counter))
                character = elem
                counter = 1
        result.append(character)
        result.append(str(counter))
        self.str = result

    def isSubstring(self, other):
        """Return True if this string occurs as a contiguous substring of `other`."""
        this_buf = self.stringBuf()
        other_buf = other.stringBuf()
        if len(this_buf) > len(other_buf):
            return False
        for i in range(len(other_buf) - len(this_buf) + 1):
            if other_buf[i:i + len(this_buf)] == this_buf:
                return True
        return False

    def isRotation(self, other):
        """Return True if `other` is a rotation of this string (e.g. 'mondray' of 'raymond')."""
        # Bug fix: rotations must have equal length. Without this check a
        # proper substring (e.g. 'ray' vs 'raymond') was reported as a
        # rotation, because 'ray' occurs in 'raymondraymond'.
        if len(self.str) != len(other.stringBuf()):
            return False
        doubled = other.strCopy()
        doubled.concatEnd(other.strCopy())
        # (Removed a leftover debug print of the concatenated string.)
        return self.isSubstring(doubled)

    def strCopy(self):
        """Return a new String with the same contents."""
        return String(self.str)

    def stringBuf(self):
        """Return the underlying character list."""
        return self.str

    def concatEnd(self, other):
        """Append the contents of `other` (a String) to this string."""
        self.str += other.stringBuf()
def main():
    """Ad-hoc demo exercising every String method and printing the results."""
    print("Hello world!")
    str1 = String("gygomd")
    print(str1.unique_no_data_structures())
    print(str1.unique())
    str2 = String("raymond")
    print(str2.unique())
    print(str2.unique_no_data_structures())
    str1.reverse()
    print(str1)
    str2.reverse()
    print(str2)
    str3 = String("hi my name is raymond and i like turtles")
    str3.replace_spaces()
    print(str3)
    print(str1.is_permutation_of("dmogyg"))
    print(str1.is_permutation_of("gygomdg"))
    print(str1.is_permutation_of("gyggmd"))
    str4 = String("aabcccccaaa")
    str4.compress()
    print(str4)
    str5 = String("raymond")
    str6 = String("ray")
    str7 = String("mond")
    str8 = String("ymo")
    str9 = String("mondray")
    print(str6.isSubstring(str5))
    print(str7.isSubstring(str5))
    print(str8.isSubstring(str5))
    print(str9.isRotation(str5))
    print(str5.isRotation(str9))
if __name__ == "__main__":
main() | 24.721893 | 121 | 0.662039 |
acec33df1133394a3c5624c7336e084b20c5715f | 101 | py | Python | SkyGate/admin.py | ArturDabrowski/django-skygate | f0c48545a24d3ca600216a8319ec00a74b68e86f | [
"BSD-3-Clause"
] | null | null | null | SkyGate/admin.py | ArturDabrowski/django-skygate | f0c48545a24d3ca600216a8319ec00a74b68e86f | [
"BSD-3-Clause"
] | null | null | null | SkyGate/admin.py | ArturDabrowski/django-skygate | f0c48545a24d3ca600216a8319ec00a74b68e86f | [
"BSD-3-Clause"
] | null | null | null | from django.contrib import admin
from .models import ExampleModel
admin.site.register(ExampleModel)
| 20.2 | 33 | 0.841584 |
acec355c2956d3b27b2256274c488b38ee29fff5 | 575 | py | Python | data/train/python/acec355c2956d3b27b2256274c488b38ee29fff5mqttExample.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | [
"MIT"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/acec355c2956d3b27b2256274c488b38ee29fff5mqttExample.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/acec355c2956d3b27b2256274c488b38ee29fff5mqttExample.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | #!/usr/bin/python
#
# simple script to repeatedly publish an MQTT message
#
# pip install paho-mqtt
import paho.mqtt.client as paho
import os
import time
# Connection settings for the local MQTT broker.
broker = "localhost"
port = 1883
topic = "menu_crous_17"

# Build a unique client id from this process's PID so concurrent
# publishers do not clash on the broker.
mypid = os.getpid()
pub = "pubclient_" + str(mypid)
mqttc = paho.Client(pub, False)  # clean_session=False ("nocleanstart")

# Connect to the broker (60 s keepalive).
mqttc.connect(broker, port, 60)

# Remain connected and publish a timestamped retained message every
# 1.5 s for as long as the network loop reports success (return 0).
while mqttc.loop() == 0:
    msg = "test message " + time.ctime()
    mqttc.publish(topic, msg, 0, True)  # qos=0, retain=True
    # print() with a single argument behaves identically under Python 2
    # and Python 3 (the original `print "..."` statement was py2-only).
    print("message published")
    time.sleep(1.5)
acec35b53b1504fc79baecdd27462471231c8242 | 43,865 | py | Python | pyaedt/modules/Material.py | beliaev-maksim/pyaedt | c549de1d0c80f3598afc5475817a332bb6d6df57 | [
"MIT"
] | null | null | null | pyaedt/modules/Material.py | beliaev-maksim/pyaedt | c549de1d0c80f3598afc5475817a332bb6d6df57 | [
"MIT"
] | null | null | null | pyaedt/modules/Material.py | beliaev-maksim/pyaedt | c549de1d0c80f3598afc5475817a332bb6d6df57 | [
"MIT"
] | null | null | null | """
This module contains these data classes for creating a material library:
* `BasicValue`
* `ClosedFormTM`
* `CommonMaterial`
* `Dataset`
* `MatProperties`
* `MatProperty`
* `Material`
* `SurMatProperties`
* `SufaceMaterial`
"""
from collections import OrderedDict
from pyaedt.generic.general_methods import aedt_exception_handler
from pyaedt.generic.DataHandlers import _dict2arg
class MatProperties(object):
    """Contains a list of constant names for all materials with
    mappings to their internal XML names.

    Internal names are used in scripts, and XML names are used in the XML syntax.
    """

    aedtname = [
        "permittivity",
        "permeability",
        "conductivity",
        "dielectric_loss_tangent",
        "magnetic_loss_tangent",
        "thermal_conductivity",
        "mass_density",
        "specific_heat",
        "thermal_expansion_coefficient",
        "youngs_modulus",
        "poissons_ratio",
        "diffusivity",
        "molecular_mass",
        "viscosity",
        "core_loss_kh",
        "core_loss_kc",
        "core_loss_ke",
    ]
    # NOTE(review): defaultvalue/defaultunit each carry one more entry than
    # aedtname (18 vs 17); the trailing entry is unreachable through the
    # index-based lookups below — confirm against upstream before trimming.
    defaultvalue = [1.0, 1.0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0.8, 0, 0, 0, 0, 0, 0]
    defaultunit = [
        None,
        None,
        "[siemens m^-1]",
        None,
        None,
        "[W m^-1 C^-1]",
        "[Kg m^-3]",
        "[J Kg^-1 C^-1]",
        "[C^-1]",
        "[Pa]",
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
    ]
    diel_order = [3, 0, 1, 4, 5, 6, 7, 8, 9, 10, 11, 1]
    cond_order = [2, 0, 1, 4, 5, 6, 7, 8, 9, 10, 11, 3]

    @classmethod
    def get_defaultunit(cls, aedtname=None):
        """Retrieve the default unit for a full name or a category name.

        Parameters
        ----------
        aedtname : str, optional
            AEDT full name or category name. The default is ``None``.

        Returns
        -------
        str
            Default unit if it exists.

        Raises
        ------
        TypeError
            If ``aedtname`` is not provided.
        """
        if aedtname:
            return cls.defaultunit[cls.aedtname.index(aedtname)]
        else:
            raise TypeError("get_defaultunit: Either the full name or category name must be defined.")

    @classmethod
    def get_defaultvalue(cls, aedtname):
        """Retrieve the default value for a full name or a category name.

        Parameters
        ----------
        aedtname : str
            AEDT full name or category name.

        Returns
        -------
        float
            Default value if it exists.

        Raises
        ------
        TypeError
            If ``aedtname`` is not provided.
        """
        if aedtname:
            return cls.defaultvalue[cls.aedtname.index(aedtname)]
        else:
            # Error message previously named get_defaultunit; fixed to
            # report the function that actually raised.
            raise TypeError("get_defaultvalue: Either the full name or category name must be defined.")
class SurfMatProperties(object):
    """Contains a list of constant names for all surface materials with
    mappings to their internal XML names.

    Internal names are used in scripts, and XML names are used in the XML syntax.
    """

    aedtname = [
        "surface_emissivity",
        "surface_roughness",
        "surface_diffuse_absorptance",
        "surface_incident_absorptance",
    ]
    defaultvalue = [1.0, 0, 0.4, 0.4]
    defaultunit = [None, "[m]", None, None]

    @classmethod
    def get_defaultunit(cls, aedtname=None):
        """Retrieve the default unit for a full name or a category name.

        Parameters
        ----------
        aedtname : str, optional
            AEDT full name or category name. The default is ``None``.

        Returns
        -------
        str
            Default unit if it exists.

        Raises
        ------
        TypeError
            If ``aedtname`` is not provided.
        """
        if aedtname:
            return cls.defaultunit[cls.aedtname.index(aedtname)]
        else:
            # Message phrasing aligned with MatProperties for consistency.
            raise TypeError("get_defaultunit: Either the full name or category name must be defined.")

    @classmethod
    def get_defaultvalue(cls, aedtname=None):
        """Get the default value for a full name or a category name.

        Parameters
        ----------
        aedtname : str, optional
            AEDT full name or category name. The default is ``None``.

        Returns
        -------
        float
            Default value if it exists.

        Raises
        ------
        TypeError
            If ``aedtname`` is not provided.
        """
        if aedtname:
            return cls.defaultvalue[cls.aedtname.index(aedtname)]
        else:
            # Error message previously named get_defaultunit; fixed to
            # report the function that actually raised.
            raise TypeError("get_defaultvalue: Either the full name or category name must be defined.")
class ClosedFormTM(object):
    """Manages closed-form thermal modifiers."""

    # Reference temperature of the closed-form expression.
    Tref = "22cel"
    # First- and second-order temperature coefficients.
    C1 = 0
    C2 = 0
    # Lower and upper temperature limits of validity.
    TL = "-273.15cel"
    TU = "1000cel"
    # When True, AEDT derives the limit values automatically;
    # otherwise TML/TMU below are used.
    autocalculation = True
    TML = 1000
    TMU = 1000
class Dataset(object):
    """Manages datasets."""

    # Dataset points.
    ds = []
    # Units for the x, y, and z axes.
    unitx = ""
    unity = ""
    unitz = ""
    # Dataset type; default is "Absolute".
    type = "Absolute"
    # Names for the x, y, and z axes (z is optional).
    namex = ""
    namey = ""
    namez = None
class BasicValue(object):
    """Holds one component of a material property: its value, an
    optional dataset, and an optional thermal modifier.

    Instances are created and populated by :class:`MatProperty`.
    """

    value = None
    dataset = None
    thermalmodifier = None
class MatProperty(object):
    """Manages simple, anisotropic, tensor, and non-linear properties.

    Parameters
    ----------
    material : :class:`pyaedt.modules.Material.Material`
        Inherited parent object.
    name : str
        Name of the material property.
    val :
        The default is ``None``.
    thermalmodifier
        The default is ``None``.

    Examples
    --------
    >>> from pyaedt import Hfss
    >>> app = Hfss()
    >>> matproperty = app.materials["copper"].conductivity
    """

    def __init__(self, material, name, val=None, thermalmodifier=None):
        self._material = material
        self.logger = self._material.logger
        self._type = "simple"
        self.name = name
        # One BasicValue per component: 1 for simple/nonlinear,
        # 3 for anisotropic, 9 for tensor (see the ``type`` setter).
        self._property_value = [BasicValue()]
        self._unit = None
        if val is not None and isinstance(val, (str, float, int)):
            self.value = val
        elif val is not None and val["property_type"] == "AnisoProperty":
            self.type = "anisotropic"
            self.value = [val["component1"], val["component2"], val["component3"]]
        if not isinstance(thermalmodifier, list):
            thermalmodifier = [thermalmodifier]
        for tm in thermalmodifier:
            if tm:
                if tm["use_free_form"]:
                    self._property_value[tm["Index:"]].thermalmodifier = tm["free_form_value"]
                else:
                    self._property_value[tm["Index:"]].thermalmodifier = ClosedFormTM()
                    self._property_value[tm["Index:"]].thermalmodifier.Tref = tm["Tref"]
                    self._property_value[tm["Index:"]].thermalmodifier.C1 = tm["C1"]
                    self._property_value[tm["Index:"]].thermalmodifier.C2 = tm["C2"]
                    self._property_value[tm["Index:"]].thermalmodifier.TL = tm["TL"]
                    self._property_value[tm["Index:"]].thermalmodifier.TU = tm["TU"]
                    self._property_value[tm["Index:"]].thermalmodifier.autocalculation = tm["auto_calculation"]

    @property
    def type(self):
        """Type of the material property.

        Parameters
        ----------
        type : str
            Type of properties. Options are ``"simple"``,
            ``"anisotropic"``, ``"tensor"``, and ``"nonlinear"``.
        """
        return self._type

    @type.setter
    def type(self, type):
        self._type = type
        # Resize the component list to match the property type.
        if self._type == "simple":
            self._property_value = [self._property_value[0]]
        elif self._type == "anisotropic":
            self._property_value = [self._property_value[0] for i in range(3)]
        elif self._type == "tensor":
            self._property_value = [self._property_value[0] for i in range(9)]
        elif self._type == "nonlinear":
            self._property_value = [self._property_value[0]]

    @property
    def value(self):
        """Value for a material property.

        A scalar for single-component properties, otherwise a list of
        per-component values.
        """
        if len(self._property_value) == 1:
            return self._property_value[0].value
        else:
            return [i.value for i in self._property_value]

    @value.setter
    def value(self, val):
        if isinstance(val, list):
            i = 0
            for el in val:
                # Grow the component list on demand.
                if i >= len(self._property_value):
                    self._property_value.append(BasicValue())
                self._property_value[i].value = el
                i += 1
        else:
            self._property_value[0].value = val

    @property
    def unit(self):
        """Units for a material property value."""
        return self._unit

    @unit.setter
    def unit(self, unit):
        self._unit = unit

    @property
    def data_set(self):
        """Dataset, or list of per-component datasets."""
        if len(self._property_value) == 1:
            return self._property_value[0].dataset
        else:
            return [i.dataset for i in self._property_value]

    @property
    def thermalmodifier(self):
        """Thermal modifier, or list of per-component thermal modifiers."""
        if len(self._property_value) == 1:
            return self._property_value[0].thermalmodifier
        else:
            return [i.thermalmodifier for i in self._property_value]

    @thermalmodifier.setter
    def thermalmodifier(self, thermal_value):
        """Thermal modifier."""
        if isinstance(thermal_value, str):
            self._add_thermal_modifier(thermal_value, 0)
        else:
            for i in thermal_value:
                self._add_thermal_modifier(i, thermal_value.index(i))

    def _add_thermal_modifier(self, formula, index):
        """Store a free-form thermal modifier in the material props and
        push the change to AEDT.

        Parameters
        ----------
        formula : str
            Formula to apply.
        index : int
            Component index the modifier applies to.

        Returns
        -------
        bool
            Result of :meth:`Material.update`.
        """
        tm_new = OrderedDict(
            {
                "Property:": self.name,
                "Index:": index,
                "prop_modifier": "thermal_modifier",
                "use_free_form": True,
                "free_form_value": formula,
            }
        )
        if "ModifierData" not in self._material._props:
            # First modifier on this material: create the whole tree.
            self._material._props["ModifierData"] = OrderedDict(
                {
                    "ThermalModifierData": OrderedDict(
                        {
                            "modifier_data": "thermal_modifier_data",
                            "all_thermal_modifiers": OrderedDict({"one_thermal_modifier": tm_new}),
                        }
                    )
                }
            )
        else:
            # Alias the deep path once; mutating ``all_tms`` mutates _props.
            all_tms = self._material._props["ModifierData"]["ThermalModifierData"]["all_thermal_modifiers"]
            for tmname in all_tms:
                entry = all_tms[tmname]
                if isinstance(entry, list):
                    found = False
                    for tm in entry:
                        if self.name == tm["Property:"] and index == tm["Index:"]:
                            # Overwrite the matching modifier in place and
                            # drop any closed-form leftovers.
                            found = True
                            tm["use_free_form"] = True
                            tm["free_form_value"] = formula
                            for key in ("Tref", "C1", "C2", "TL", "TU"):
                                tm.pop(key, None)
                    if not found:
                        entry.append(tm_new)
                elif self.name == entry["Property:"] and index == entry["Index:"]:
                    entry["use_free_form"] = True
                    entry["free_form_value"] = formula
                    for key in ("Tref", "C1", "C2", "TL", "TU"):
                        entry.pop(key, None)
                else:
                    # Different property/index: promote to a list and append.
                    all_tms[tmname] = [entry, tm_new]
        return self._material.update()

    def add_thermal_modifier_free_form(self, formula, index=0):
        """Add a thermal modifier to a material property using a free-form formula.

        Parameters
        ----------
        formula : str
            Full formula to apply.
        index : int, optional
            Value for the index. The default is ``0``.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.

        Examples
        --------
        >>> from pyaedt import Hfss
        >>> hfss = Hfss(specified_version="2021.2")
        >>> mat1 = hfss.materials.add_material("new_copper2")
        >>> mat1.add_thermal_modifier_free_form("if(Temp > 1000cel, 1, if(Temp < -273.15cel, 1, 1))")
        """
        self._property_value[index].thermalmodifier = formula
        return self._add_thermal_modifier(formula, index)

    def add_thermal_modifier_dataset(self, dataset_name, index=0):
        """Add a thermal modifier to a material property using an existing dataset.

        Parameters
        ----------
        dataset_name : str
            Name of the project dataset.
        index : int, optional
            Value for the index. The default is ``0``.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.

        Examples
        --------
        >>> from pyaedt import Hfss
        >>> hfss = Hfss(specified_version="2021.2")
        >>> mat1 = hfss.materials.add_material("new_copper2")
        >>> mat1.add_thermal_modifier_dataset("$ds1")
        """
        formula = "pwl({}, Temp)".format(dataset_name)
        self._property_value[index].thermalmodifier = formula
        # Previously the result was silently dropped although the
        # docstring documents a bool return; propagate it.
        return self._add_thermal_modifier(formula, index)

    def add_thermal_modifier_closed_form(
        self, tref=22, c1=0.0001, c2=1e-6, tl=-273.15, tu=1000, units="cel", auto_calc=True, tml=1000, tmu=1000, index=0
    ):
        """Add a thermal modifier to a material property using a closed-form formula.

        Parameters
        ----------
        tref : float, optional
            Reference temperature. The default is ``22``.
        c1 : float, optional
            First coefficient value. The default is ``0.0001``.
        c2 : float, optional
            Second coefficient value. The default is ``1e-6``.
        tl : float, optional
            Lower temperature limit. The default is ``-273.15``.
        tu : float, optional
            Upper temperature limit. The default is ``1000``.
        units : str, optional
            Units for the reference temperature. The default
            is ``"cel"``.
        auto_calc : bool, optional
            Whether to calculate the lower and upper
            temperature limits automatically. The default is
            ``True``.
        tml : float, optional
            Lower temperature limit when ``auto_calc=False``.
            The default is ``1000``.
        tmu : float, optional
            Upper temperature limit when ``auto_calc=False``.
            The default is ``1000``.
        index : int, optional
            Value for the index. The default is ``0``.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.

        Examples
        --------
        >>> from pyaedt import Hfss
        >>> hfss = Hfss(specified_version="2021.2")
        >>> mat1 = hfss.materials.add_material("new_copper2")
        >>> mat1.permittivity.add_thermal_modifier_closed_form(c1 = 1e-3)
        """
        if index > len(self._property_value):
            self.logger.error(
                "Wrong index number. Index must be 0 for simple or nonlinear properties,"
                " <=2 for anisotropic materials, <=9 for Tensors"
            )
            return False
        self._property_value[index].thermalmodifier = ClosedFormTM()
        self._property_value[index].thermalmodifier.Tref = str(tref) + units
        self._property_value[index].thermalmodifier.C1 = str(c1)
        self._property_value[index].thermalmodifier.C2 = str(c2)
        self._property_value[index].thermalmodifier.TL = str(tl) + units
        self._property_value[index].thermalmodifier.TU = str(tu) + units
        self._property_value[index].thermalmodifier.autocalculation = auto_calc
        if not auto_calc:
            self._property_value[index].thermalmodifier.TML = tml
            self._property_value[index].thermalmodifier.TMU = tmu
        # TML/TMU keys are present only when the limits are NOT
        # auto-calculated (mirrors the ClosedFormTM contract above).
        if auto_calc:
            tm_new = OrderedDict(
                {
                    "Property:": self.name,
                    "Index:": index,
                    "prop_modifier": "thermal_modifier",
                    "use_free_form": False,
                    "Tref": str(tref) + units,
                    "C1": str(c1),
                    "C2": str(c2),
                    "TL": str(tl) + units,
                    "TU": str(tu) + units,
                    "auto_calculation": True,
                }
            )
        else:
            tm_new = OrderedDict(
                {
                    "Property:": self.name,
                    "Index:": index,
                    "prop_modifier": "thermal_modifier",
                    "use_free_form": False,
                    "Tref": str(tref) + units,
                    "C1": str(c1),
                    "C2": str(c2),
                    "TL": str(tl) + units,
                    "TU": str(tu) + units,
                    "auto_calculation": False,
                    "TML": str(tml),
                    "TMU": str(tmu),
                }
            )
        if "ModifierData" not in self._material._props:
            self._material._props["ModifierData"] = OrderedDict(
                {
                    "ThermalModifierData": OrderedDict(
                        {
                            "modifier_data": "thermal_modifier_data",
                            "all_thermal_modifiers": OrderedDict({"one_thermal_modifier": tm_new}),
                        }
                    )
                }
            )
        else:
            all_tms = self._material._props["ModifierData"]["ThermalModifierData"]["all_thermal_modifiers"]
            for tmname in all_tms:
                # Local renamed from ``tml`` — the original shadowed the
                # ``tml`` parameter, so later ``str(tml)`` stringified a dict.
                entry = all_tms[tmname]
                if isinstance(entry, list):
                    found = False
                    for tm in entry:
                        if self.name == tm["Property:"] and index == tm["Index:"]:
                            found = True
                            tm["use_free_form"] = False
                            tm.pop("free_form_value", None)
                            tm["Tref"] = str(tref) + units
                            tm["C1"] = str(c1)
                            tm["C2"] = str(c2)
                            tm["TL"] = str(tl) + units
                            tm["TU"] = str(tu) + units
                            tm["auto_calculation"] = auto_calc
                            # Condition was inverted: TML/TMU belong to the
                            # manual (non-auto) case, as in tm_new above.
                            if not auto_calc:
                                tm["TML"] = str(tml)
                                tm["TMU"] = str(tmu)
                            else:
                                tm.pop("TML", None)
                                tm.pop("TMU", None)
                    if not found:
                        entry.append(tm_new)
                elif self.name == entry["Property:"] and index == entry["Index:"]:
                    entry["use_free_form"] = False
                    entry.pop("free_form_value", None)
                    entry["Tref"] = str(tref) + units
                    entry["C1"] = str(c1)
                    # Copy-paste fixes: C2 previously took c1 and TU took tl.
                    entry["C2"] = str(c2)
                    entry["TL"] = str(tl) + units
                    entry["TU"] = str(tu) + units
                    entry["auto_calculation"] = auto_calc
                    if not auto_calc:
                        entry["TML"] = str(tml)
                        entry["TMU"] = str(tmu)
                    else:
                        entry.pop("TML", None)
                        entry.pop("TMU", None)
                else:
                    # Previously a local list was built but never written
                    # back into the props; store it so the change persists.
                    all_tms[tmname] = [entry, tm_new]
        return self._material.update()
class CommonMaterial(object):
    """Manages datasets with frequency-dependent materials.

    Base class shared by :class:`Material` and :class:`SurfaceMaterial`.

    Parameters
    ----------
    materials : :class:`pyaedt.modules.MaterialLib.Materials`
    name : str
    props : dict
        The default is ``None``.
    """

    def __init__(self, materials, name, props=None):
        self._materials = materials
        self.odefinition_manager = self._materials.odefinition_manager
        self._omaterial_manager = self._materials.omaterial_manager
        self._oproject = self._materials._oproject
        self.logger = self._materials.logger
        self.name = name
        self.coordinate_system = ""
        if props:
            self._props = props
        else:
            self._props = OrderedDict()
        # Default missing bookkeeping keys and mirror them as attributes.
        if "CoordinateSystemType" in self._props:
            self.coordinate_system = self._props["CoordinateSystemType"]
        else:
            self._props["CoordinateSystemType"] = "Cartesian"
            self.coordinate_system = "Cartesian"
        if "BulkOrSurfaceType" in self._props:
            self.bulkorsurface = self._props["BulkOrSurfaceType"]
        else:
            self._props["BulkOrSurfaceType"] = 1
        # Library metadata keys are moved out of _props onto the instance
        # so they are not re-serialized into the material definition.
        if "ModTime" in self._props:
            self._modtime = self._props["ModTime"]
            del self._props["ModTime"]
        if "LibLocation" in self._props:
            self.lib_location = self._props["LibLocation"]
            del self._props["LibLocation"]
        if "ModSinceLib" in self._props:
            self.mod_since_lib = self._props["ModSinceLib"]
            del self._props["ModSinceLib"]

    @aedt_exception_handler
    def _get_args(self, props=None):
        """Retrieve the arguments for a property.

        Builds the ``["NAME:...", ...]`` argument list AEDT expects from
        the props dictionary via ``_dict2arg``.

        Parameters
        ----------
        props : dict, optional
            Properties to serialize. Defaults to ``self._props``.
        """
        if not props:
            props = self._props
        arg = ["NAME:" + self.name]
        _dict2arg(props, arg)
        return arg

    def _update_props(self, propname, provpavlue, update_aedt=True):
        """Update properties.

        Parameters
        ----------
        propname : str
            Name of the property.
        provpavlue :
            Value of the property (scalar or per-component list).
        update_aedt : bool, optional
            Whether to update the property in AEDT. The default is ``True``.
        """
        # A list value only makes sense for multi-component (anisotropic /
        # tensor) properties; components are stored as component1..N.
        if (
            isinstance(provpavlue, list)
            and self.__dict__["_" + propname].type != "simple"
            and self.__dict__["_" + propname].type != "nonlinear"
        ):
            i = 1
            for val in provpavlue:
                self._props[propname]["component" + str(i)] = str(val)
                i += 1
            if update_aedt:
                return self.update()
        elif isinstance(provpavlue, (str, float, int)):
            self._props[propname] = str(provpavlue)
            if update_aedt:
                return self.update()
        else:
            return False
class Material(CommonMaterial, object):
    """Manages material properties.

    Parameters
    ----------
    materiallib : :class:`pyaedt.modules.MaterialLib.Materials`
        Inherited parent object.
    name : str
        Name of the material.
    props :
        The default is ``None``.

    Examples
    --------
    >>> from pyaedt import Hfss
    >>> app = Hfss()
    >>> material = app.materials["copper"]
    """

    def __init__(self, materiallib, name, props=None):
        CommonMaterial.__init__(self, materiallib, name, props)
        self.thermal_material_type = "Solid"
        if "thermal_material_type" in self._props:
            self.thermal_material_type = self._props["thermal_material_type"]["Choice"]
        if "PhysicsTypes" in self._props:
            self.physics_type = self._props["PhysicsTypes"]["set"]
        else:
            self.physics_type = ["Electromagnetic", "Thermal", "Structural"]
            self._props["PhysicsTypes"] = OrderedDict({"set": ["Electromagnetic", "Thermal", "Structural"]})
        if "AttachedData" in self._props:
            self._material_appearance = []
            self._material_appearance.append(self._props["AttachedData"]["MatAppearanceData"]["Red"])
            self._material_appearance.append(self._props["AttachedData"]["MatAppearanceData"]["Green"])
            self._material_appearance.append(self._props["AttachedData"]["MatAppearanceData"]["Blue"])
        else:
            # Default to mid-gray appearance.
            self._material_appearance = [128, 128, 128]
            self._props["AttachedData"] = OrderedDict(
                {
                    "MatAppearanceData": OrderedDict(
                        {"property_data": "appearance_data", "Red": 128, "Green": 128, "Blue": 128}
                    )
                }
            )
        # Build one MatProperty per known property, pairing it with any
        # thermal modifier(s) defined for it in ModifierData.
        # (loop variable renamed from ``property`` to avoid shadowing the builtin)
        for prop_name in MatProperties.aedtname:
            if prop_name in self._props:
                mods = None
                if "ModifierData" in self._props:
                    modifiers = self._props["ModifierData"]["ThermalModifierData"]["all_thermal_modifiers"]
                    for mod in modifiers:
                        if isinstance(modifiers[mod], list):
                            for one_tm in modifiers[mod]:
                                if one_tm["Property:"] == prop_name:
                                    if mods:
                                        mods = [mods]
                                        mods.append(one_tm)
                                    else:
                                        mods = one_tm
                        else:
                            if modifiers[mod]["Property:"] == prop_name:
                                mods = modifiers[mod]
                self.__dict__["_" + prop_name] = MatProperty(self, prop_name, self._props[prop_name], mods)
            else:
                self.__dict__["_" + prop_name] = MatProperty(
                    self, prop_name, MatProperties.get_defaultvalue(aedtname=prop_name), None
                )

    @property
    def material_appearance(self):
        """Material Appearance specified as an RGB list.

        Returns
        -------
        list
            Color of the material in RGB. Values are in the range ``[0, 255]``.

        Examples
        --------
        Create a new material with color ``[0, 153, 153]`` (darker cyan).

        >>> from pyaedt import Hfss
        >>> hfss = Hfss(specified_version="2021.2")
        >>> mat1 = hfss.materials.add_material("new_material")
        >>> rgbcolor = mat1.material_appearance
        >>> mat1.material_appearance = [0, 153, 153]
        """
        return self._material_appearance

    @material_appearance.setter
    def material_appearance(self, rgb):
        if not isinstance(rgb, (list, tuple)):
            raise TypeError("`material_apperance` must be a list or tuple")
        if len(rgb) != 3:
            raise ValueError("`material_appearance` must be three items (RGB)")
        value_int = []
        for rgb_item in rgb:
            rgb_int = int(rgb_item)
            if rgb_int < 0 or rgb_int > 255:
                raise ValueError("RGB value must be between 0 and 255")
            value_int.append(rgb_int)
        self._material_appearance = value_int
        self._props["AttachedData"] = OrderedDict(
            {
                "MatAppearanceData": OrderedDict(
                    {
                        "property_data": "appearance_data",
                        "Red": value_int[0],
                        "Green": value_int[1],
                        "Blue": value_int[2],
                    }
                )
            }
        )

    @property
    def permittivity(self):
        """Permittivity.

        Returns
        -------
        type
            Permittivity of the material.
        """
        return self._permittivity

    @permittivity.setter
    def permittivity(self, value):
        self._permittivity.value = value
        self._update_props("permittivity", value)

    @property
    def permeability(self):
        """Permeability.

        Returns
        -------
        type
            Permeability of the material.
        """
        return self._permeability

    @permeability.setter
    def permeability(self, value):
        self._permeability.value = value
        self._update_props("permeability", value)

    @property
    def conductivity(self):
        """Conductivity.

        Returns
        -------
        type
            Conductivity of the material.
        """
        return self._conductivity

    @conductivity.setter
    def conductivity(self, value):
        self._conductivity.value = value
        self._update_props("conductivity", value)

    @property
    def dielectric_loss_tangent(self):
        """Dielectric loss tangent.

        Returns
        -------
        type
            Dielectric loss tangent of the material.
        """
        return self._dielectric_loss_tangent

    @dielectric_loss_tangent.setter
    def dielectric_loss_tangent(self, value):
        self._dielectric_loss_tangent.value = value
        self._update_props("dielectric_loss_tangent", value)

    @property
    def magnetic_loss_tangent(self):
        """Magnetic loss tangent.

        Returns
        -------
        type
            Magnetic loss tangent of the material.
        """
        return self._magnetic_loss_tangent

    @magnetic_loss_tangent.setter
    def magnetic_loss_tangent(self, value):
        self._magnetic_loss_tangent.value = value
        self._update_props("magnetic_loss_tangent", value)

    @property
    def thermal_conductivity(self):
        """Thermal conductivity.

        Returns
        -------
        type
            Thermal conductivity of the material.
        """
        return self._thermal_conductivity

    @thermal_conductivity.setter
    def thermal_conductivity(self, value):
        self._thermal_conductivity.value = value
        # Thermal properties imply all three physics types.
        self.physics_type = ["Electromagnetic", "Thermal", "Structural"]
        self._props["PhysicsTypes"] = OrderedDict({"set": ["Electromagnetic", "Thermal", "Structural"]})
        self._update_props("thermal_conductivity", value)

    @property
    def mass_density(self):
        """Mass density.

        Returns
        -------
        type
            Mass density of the material.
        """
        return self._mass_density

    @mass_density.setter
    def mass_density(self, value):
        self._mass_density.value = value
        self._update_props("mass_density", value)

    @property
    def specific_heat(self):
        """Specific heat.

        Returns
        -------
        type
            Specific heat of the material.
        """
        return self._specific_heat

    @specific_heat.setter
    def specific_heat(self, value):
        self._specific_heat.value = value
        self._update_props("specific_heat", value)

    @property
    def thermal_expansion_coefficient(self):
        """Thermal expansion coefficient.

        Returns
        -------
        type
            Thermal expansion coefficient of the material.
        """
        return self._thermal_expansion_coefficient

    @thermal_expansion_coefficient.setter
    def thermal_expansion_coefficient(self, value):
        self._thermal_expansion_coefficient.value = value
        self._update_props("thermal_expansion_coefficient", value)

    @property
    def youngs_modulus(self):
        """Young's modulus.

        Returns
        -------
        type
            Young's modulus of the material.
        """
        return self._youngs_modulus

    @youngs_modulus.setter
    def youngs_modulus(self, value):
        self._youngs_modulus.value = value
        self.physics_type = ["Electromagnetic", "Thermal", "Structural"]
        self._props["PhysicsTypes"] = OrderedDict({"set": ["Electromagnetic", "Thermal", "Structural"]})
        self._update_props("youngs_modulus", value)

    @property
    def poissons_ratio(self):
        """Poisson's ratio.

        Returns
        -------
        type
            Poisson's ratio of the material.
        """
        return self._poissons_ratio

    @poissons_ratio.setter
    def poissons_ratio(self, value):
        self._poissons_ratio.value = value
        self.physics_type = ["Electromagnetic", "Thermal", "Structural"]
        self._props["PhysicsTypes"] = OrderedDict({"set": ["Electromagnetic", "Thermal", "Structural"]})
        self._update_props("poissons_ratio", value)

    @property
    def diffusivity(self):
        """Diffusivity.

        Returns
        -------
        type
            Diffusivity of the material.
        """
        return self._diffusivity

    @diffusivity.setter
    def diffusivity(self, value):
        self._diffusivity.value = value
        self._update_props("diffusivity", value)

    @property
    def molecular_mass(self):
        """Molecular mass.

        Returns
        -------
        type
            Molecular mass of the material.
        """
        return self._molecular_mass

    @molecular_mass.setter
    def molecular_mass(self, value):
        self._molecular_mass.value = value
        self._update_props("molecular_mass", value)

    @property
    def viscosity(self):
        """Viscosity.

        Returns
        -------
        type
            Viscosity of the material.
        """
        return self._viscosity

    @viscosity.setter
    def viscosity(self, value):
        self._viscosity.value = value
        self._update_props("viscosity", value)

    @property
    def core_loss_kh(self):
        """Core loss coefficient kh.

        Returns
        -------
        type
            Core loss kh coefficient of the material (the docstring
            previously mis-described this as "kilohertz").
        """
        return self._core_loss_kh

    @core_loss_kh.setter
    def core_loss_kh(self, value):
        self._core_loss_kh.value = value
        self._update_props("core_loss_kh", value)

    @property
    def core_loss_kc(self):
        """Core loss coefficient kc.

        Returns
        -------
        type
            Core loss kc coefficient of the material (the docstring
            previously mis-described this as "kilocalories").
        """
        return self._core_loss_kc

    @core_loss_kc.setter
    def core_loss_kc(self, value):
        self._core_loss_kc.value = value
        self._update_props("core_loss_kc", value)

    @property
    def core_loss_ke(self):
        """Core loss coefficient ke.

        Returns
        -------
        type
            Core loss ke coefficient of the material (the docstring
            previously mis-described this as "kinetic energy").
        """
        return self._core_loss_ke

    @core_loss_ke.setter
    def core_loss_ke(self, value):
        self._core_loss_ke.value = value
        self._update_props("core_loss_ke", value)

    def is_conductor(self, threshold=100000):
        """Check if the material is a conductor.

        Parameters
        ----------
        threshold : float, optional
            Threshold to define if a material is a conductor. The
            default is ``100000``. If the conductivity is equal to or
            greater than the threshold, the material is
            considered a conductor.

        Returns
        -------
        bool
            ``True`` when the material is a conductor, ``False`` otherwise.
        """
        cond = self.conductivity.value
        if not cond:
            return False
        # A frequency-dependent expression is treated as conductive.
        if "Freq" in str(cond):
            return True
        try:
            if float(cond) >= threshold:
                return True
        except (ValueError, TypeError):
            # Non-numeric, non-frequency-dependent expression: previously a
            # bare ``except:`` which also swallowed KeyboardInterrupt.
            return False
        return False

    @aedt_exception_handler
    def is_dielectric(self, threshold=100000):
        """Check if the material is dielectric.

        Parameters
        ----------
        threshold : float, optional
            Threshold to define if a material is dielectric. The
            default is ``100000``. If the conductivity is equal to or
            greater than the threshold, the material is
            considered dielectric.

        Returns
        -------
        bool
            ``True`` when the material is dielectric, ``False`` otherwise.
        """
        return not self.is_conductor()

    @aedt_exception_handler
    def update(self):
        """Update the material in AEDT.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.
        """
        args = self._get_args()
        if self._does_material_exists(self.name):
            self.odefinition_manager.EditMaterial(self.name, args)
        else:
            self.odefinition_manager.AddMaterial(args)
        return True

    @aedt_exception_handler
    def _does_material_exists(self, material_name):
        """Return True if the material already exists in the project (case-insensitive)."""
        listmatprj = [i.lower() for i in list(self.odefinition_manager.GetProjectMaterialNames())]
        if material_name.lower() in listmatprj:
            return True
        else:
            return False
class SurfaceMaterial(CommonMaterial, object):
    """Manages surface material properties.

    Parameters
    ----------
    materiallib : :class:`pyaedt.modules.MaterialLib.Materials`
        Inherited parent object.
    name : str
        Name of the surface material
    props :
        The default is ``None``.
    """

    def __init__(self, materiallib, name, props=None):
        CommonMaterial.__init__(self, materiallib, name, props)
        self.surface_clarity_type = "Opaque"
        if "surface_clarity_type" in self._props:
            self.surface_clarity_type = self._props["surface_clarity_type"]["Choice"]
        if "PhysicsTypes" in self._props:
            self.physics_type = self._props["PhysicsTypes"]["set"]
        else:
            self.physics_type = ["Thermal"]
        # Build one MatProperty per known surface property, pairing it
        # with any thermal modifier(s) defined for it in ModifierData.
        for property in SurfMatProperties.aedtname:
            if property in self._props:
                mods = None
                if "ModifierData" in self._props:
                    modifiers = self._props["ModifierData"]["ThermalModifierData"]["all_thermal_modifiers"]
                    for mod in modifiers:
                        if isinstance(modifiers[mod], list):
                            for one_tm in modifiers[mod]:
                                if one_tm["Property:"] == property:
                                    if mods:
                                        mods = [mods]
                                        mods.append(one_tm)
                                    else:
                                        mods = one_tm
                        else:
                            if modifiers[mod]["Property:"] == property:
                                mods = modifiers[mod]
                self.__dict__["_" + property] = MatProperty(self, property, self._props[property], mods)
            else:
                self.__dict__["_" + property] = MatProperty(
                    self, property, SurfMatProperties.get_defaultvalue(aedtname=property)
                )
        pass

    @property
    def emissivity(self):
        """Emissivity.

        Returns
        -------
        type
            Emissivity of the surface material.
        """
        return self._surface_emissivity

    @emissivity.setter
    def emissivity(self, value):
        self._surface_emissivity.value = value
        self._update_props("surface_emissivity", value)

    @property
    def surface_diffuse_absorptance(self):
        """Surface diffuse absorptance.

        Returns
        -------
        type
            Surface diffuse absorptance of the surface material.
        """
        return self._surface_diffuse_absorptance

    @surface_diffuse_absorptance.setter
    def surface_diffuse_absorptance(self, value):
        self._surface_diffuse_absorptance.value = value
        self._update_props("surface_diffuse_absorptance", value)

    @property
    def surface_incident_absorptance(self):
        """Surface incident absorptance.

        Returns
        -------
        type
            Surface incident absorptance of the surface material.
        """
        return self._surface_incident_absorptance

    @surface_incident_absorptance.setter
    def surface_incident_absorptance(self, value):
        self._surface_incident_absorptance.value = value
        self._update_props("surface_incident_absorptance", value)

    @property
    def surface_roughness(self):
        """Surface roughness.

        Returns
        -------
        type
            Surface roughness of the surface material.
        """
        return self._surface_roughness

    @surface_roughness.setter
    def surface_roughness(self, value):
        self._surface_roughness.value = value
        self._update_props("surface_roughness", value)

    @aedt_exception_handler
    def update(self):
        """Update the surface material in AEDT.

        Returns
        -------
        bool
            ``True`` when successful, ``False`` when failed.
        """
        args = self._get_args()
        if self._does_material_exists(self.name):
            self.odefinition_manager.EditSurfaceMaterial(self.name, args)
        else:
            self.odefinition_manager.AddSurfaceMaterial(args)
        return True

    @aedt_exception_handler
    def _does_material_exists(self, szMat):
        # DoesSurfaceMaterialExist returns a non-zero value when found.
        a = self.odefinition_manager.DoesSurfaceMaterialExist(szMat)
        if a != 0:
            return True
        return False
| 32.468542 | 120 | 0.536669 |
acec3619074f700dba9f804b672609588109ea26 | 159 | py | Python | core_get/vendor/project_source_file.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
] | null | null | null | core_get/vendor/project_source_file.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
] | null | null | null | core_get/vendor/project_source_file.py | core-get/core-get | 8fb960e4e51d0d46b5e3b2f4832eb4a39e0e60f7 | [
"MIT"
] | null | null | null | from dataclasses import dataclass
from pathlib import PurePath
@dataclass(frozen=True)
class ProjectSourceFile:
    """Immutable value object describing one source file of a project."""
    path: PurePath  # location of the source file (pure path: never touches the filesystem)
    library: str = 'work'  # logical library the file compiles into; presumably the HDL "work" default -- TODO confirm
| 17.666667 | 33 | 0.767296 |
acec36a6d76bc6cc492447918c8de8ed51673ab0 | 48,226 | py | Python | bin/trender.py | lsst-camera-dh/mutils | 80edb76c16bb3f00f22f77cf6aa2b2a1d02d73fe | [
"BSD-3-Clause-LBNL"
] | null | null | null | bin/trender.py | lsst-camera-dh/mutils | 80edb76c16bb3f00f22f77cf6aa2b2a1d02d73fe | [
"BSD-3-Clause-LBNL"
] | null | null | null | bin/trender.py | lsst-camera-dh/mutils | 80edb76c16bb3f00f22f77cf6aa2b2a1d02d73fe | [
"BSD-3-Clause-LBNL"
] | 1 | 2020-07-26T20:29:45.000Z | 2020-07-26T20:29:45.000Z | #!/usr/bin/env python
""" trending data app: gets specified channels for requested time period
"""
import re
import os
import argparse
import textwrap
import logging
import sys
import copy
import math
import datetime as dt
from lxml import etree
from dateutil.tz import gettz
import numpy as np
from astropy import stats
import matplotlib.pyplot as plt
import matplotlib.dates as mdate
from astropy.time import Time
import astropy.units as au
# put parent directory into sys.path
bp = os.path.dirname(os.path.realpath(__file__)).split(os.sep)
modpath = os.sep.join(bp[:-1] + ["lib"])
sys.path.insert(0, modpath)
# local imports
try:
import trendutils as tu
import mutils as mu
import plotutils as pu
from lsst_camera_data import rafts_of_type
except ImportError as e:
logging.error("Import failed: %s", e)
sys.exit(1)
# Require Python 3.7+.  Compare the whole version tuple: the original
# element-wise test (major < 3 or minor < 7) would wrongly reject a
# hypothetical 4.0 release because its minor component is < 7.
if sys.version_info < (3, 7):
    raise Exception("Must be using Python >=3.7")
def parse_args():
    """handle command line

    Build the argparse parser for the trending application (input sources,
    output selectors, plotting layout, time-interval and general options)
    and return the parsed ``argparse.Namespace``.
    """
    style_list = ["default"] + sorted(
        [
            "fast",
            "ggplot",
            "seaborn-poster",
            "seaborn-notebook",
            "seaborn-darkgrid",
            "fivethirtyeight",
        ]
    )
    sites_list = tu.get_sites()
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(
            """
            Fetch and present trending data from camera database
            """
        ),
        epilog=textwrap.dedent(
            """
            This application expects to access the CCS trending database,
            either directly, at lsst-mcm:8080, or via an ssh tunnel which
            must be set up in advance to use localhost:8080.
            Alternately trending data can come from a local file saved
            in an earlier run.  See the "input_file" options.  This allows
            multiple output runs (stats, plots, etc.) using the same trending
            data w/out re-querying the server.
            The "channel_source"(s) are either all files or all patterns (re's).
            Files are best constructed using the sibling application
            "trendingChannels.py" and editing the resuling files to choose
            which channels to activate.  Patterns (regex's) are most useful for
            interactive use.  The "--match" option supports checking which
            channels result from a given set of regex's.
            The interval and start/stop date specifiers could be nearly any
            variant of a full date/time spec as output by the unix
            "date <options>" command.  A suggestd choice is the format
            from "date --iso-8601=seconds".  Formats containing spaces should be
            quoted.  If no timezone is specified, (eg PST, -07:00, etc), the
            local timezone will be used.
            """
        ),
    )
    # Input args
    parser.add_argument(
        "channel_source", nargs="+", help="filename|regex specifying channels"
    )
    parser.add_argument(
        "--input_file", nargs="+", help="XML file with trending data, =>no db query"
    )
    parser.add_argument(
        "--reject", nargs="+", help="filename|regex providing channels to reject"
    )
    #
    # Output options for text based outputs
    #
    ogroup = parser.add_mutually_exclusive_group()  # all to stdout
    ogroup.add_argument(
        "--xml", action="store_true", help="Print formatted xml from trending to stdout"
    )
    ogroup.add_argument(
        "--text", action="store_true", help="Print (timestamp, value, path) colum text"
    )
    ogroup.add_argument(
        "--stats", action="store_true", help="Print statistics for each channel"
    )
    ogroup.add_argument(
        "--match", action="store_true", help="print list of matching channels and exit"
    )
    parser.add_argument(
        "--rstats",
        action="store_true",
        help="Print additional robust stats per channel",
    )
    #
    # Plotting specifications
    #
    parser.add_argument(
        "--plot", action="store_true", help="produce plots for each channel vs. time"
    )
    parser.add_argument("--saveplot", metavar="filename.<pdf|png|..>", help="save as")
    parser.add_argument(
        "--overlay",
        action="store_true",
        help="all channels on a single plot, eg. no subplots",
    )
    parser.add_argument(
        "--overlayunits",
        action="store_true",
        help="channels grouped by units on same (sub)plot",
    )
    parser.add_argument(
        "--overlayregex",
        action="store_true",
        help="channels grouped by regex/units on (sub)plot",
    )
    #
    parser.add_argument(
        "--normalize",
        action="store_true",
        help="normalize chan j as (x-<x>)/std(x) + 5j*std(x)",
    )
    #
    ogroup = parser.add_mutually_exclusive_group()  # all to stdout
    ogroup.add_argument(
        "--overlaytime",
        action="store_true",
        help="time axis is union of all time intervals",
    )
    ogroup.add_argument(
        "--overlaystart",
        action="store_true",
        help="overlay channel data vs time from tstart",
    )
    ogroup.add_argument(
        "--overlaystop",
        action="store_true",
        help="overlay channel data vs time until tstop",
    )
    #
    # Legend placement options (mutually exclusive; default is heuristic)
    pgroup = parser.add_mutually_exclusive_group()  # all to stdout
    pgroup.add_argument(
        "--nolegends",
        dest="placement",
        const="None",
        action="store_const",
        help="Don't place any legends",
    )
    pgroup.add_argument(
        "--insidelegends",
        dest="placement",
        const="inside",
        action="store_const",
        help="Force legends to be inside each plot",
    )
    pgroup.add_argument(
        "--outsidelegends",
        dest="placement",
        const="outside",
        action="store_const",
        help="Force legends to be outside each plot",
    )
    pgroup.set_defaults(placement="heuristic")
    #
    parser.add_argument(
        "--fmt",
        nargs=1,
        metavar="format_str",
        default="-",
        help="Matplotlib format string (eg. 'o-')",
    )
    parser.add_argument(
        "--title",
        nargs="?",
        metavar="Title (or blank)",
        const="auto",
        help="specify Title String or get auto-generated title",
    )
    parser.add_argument("--style", default="ggplot", required=False, choices=style_list)
    parser.add_argument(
        "--layout", default="portrait", help='"landscape"|"portrait"|"nxm"'
    )
    parser.add_argument("--dpi", type=int, help="set screen dpi")
    parser.add_argument("--fsize", help="set figsize (x-width,y-height)")
    #
    # Time interval specifications
    #
    igroup = parser.add_mutually_exclusive_group()
    igroup.add_argument(
        "--interval",
        metavar=("tstart", "tstop"),
        nargs=2,
        action="append",
        help="Pair of date/time specifiers",
    )
    igroup.add_argument(
        "--start", metavar="tstart", help="Date/time specifier(s)", nargs="+"
    )
    igroup.add_argument(
        "--stop", metavar="tstop", help="Date/time specifier(s)", nargs="+"
    )
    parser.add_argument(
        "--duration",
        metavar="seconds",
        default=None,
        help="duration [s]|(*s,*m,*h,*d,*w) start/stop spec",
    )
    parser.add_argument(
        "--timebins",
        nargs="?",
        const=0,
        type=int,
        metavar="nBins (blank for autosize)",
        help="retrieve and plot time avg'd bins (esp for long durations)",
    )
    parser.add_argument(
        "--sharex",
        action="store_true",
        default=True,
        help="use same x-axis limits on all plots",
    )
    #
    # General options
    #
    parser.add_argument(
        "--site",
        required=False,
        choices=sites_list,
        help="Specify trending site",
    )
    parser.add_argument(
        "--debug", action="store_true", help="Print additional debugging info"
    )
    parser.add_argument(
        "--noshow", action="store_true", help="make but don't show the plot"
    )
    parser.add_argument("--itl", action="store_true", help="limit to ITL devices")
    parser.add_argument("--e2v", action="store_true", help="limit to E2V devices")
    parser.add_argument("--science", action="store_true", help="limit to science rafts")
    parser.add_argument("--corner", action="store_true", help="limit to corner rafts")
    parser.add_argument(
        "--mjd",
        nargs="?",
        default=argparse.SUPPRESS,
        const=int(Time.now().mjd),
        type=float,
        metavar="offset (blank for none)",
        help="use MJD time axis",
    )
    parser.add_argument(
        "--forceupdate", action="store_true", help="Force update of cached channel file"
    )
    #
    #
    return parser.parse_args()
def trender():
    """main logic

    Parse the command line, resolve the requested time intervals, fetch the
    trending data (from the CCS rest server or from previously saved XML
    files), then emit the requested outputs: raw xml dump, column text,
    per-channel statistics and/or time-series plots.
    """
    # get command args and options
    optlist = parse_args()
    try:
        getattr(optlist, "mjd")
        use_mjd = True
    except AttributeError:
        use_mjd = False
    mu.init_logging(optlist.debug)
    mu.init_warnings()
    logging.debug("optlist: %s", optlist)
    # get list of time intervals to process
    intervals = tu.get_unique_time_intervals(
        optlist.start, optlist.stop, optlist.interval, optlist.duration
    )
    if intervals:  # interval accounting
        intcnt = len(intervals)
        inttot = int(sum([t[1] - t[0] for t in intervals]) / 1000)
        tmin = intervals[0][0]
        tmax = intervals[-1][1]
    else:
        logging.error("time interval spec failed")
        sys.exit(1)
    # set up the trending source(chached-on-disk, slac, base, summit etc.)
    if not optlist.input_file:
        tsite = tu.get_trending_server(optlist.site)
        if tsite and tsite["server"]:
            data_url = "http://{}:{}/rest/data/dataserver".format(
                tsite["server"], tsite["port"]
            )
        else:
            logging.error("failed to determine trending server")
            sys.exit(1)
        # access the file with the channel list and update if needed
        if optlist.forceupdate:
            channel_file = tu.update_trending_channels_xml(tsite["name"])
        else:
            channel_file = tu.update_trending_channels_xml(
                tsite["name"], tmin / 1000, tmax / 1000
            )
    else:  # get site and data from input file
        logging.debug("using input file %s", optlist.input_file)
        tsite = tu.init_trending_from_input_xml(optlist.input_file)
        if not tsite:
            logging.error("failed to determine trending server")
            sys.exit(1)
        channel_file = None
    # construct the dict of input channels as {id:path} and store regexes as list
    oflds = dict()  # dict to hold channel information
    regexes = []
    oflds, regexes = tu.parse_channel_sources(optlist.channel_source, channel_file)
    if oflds:
        logging.debug("found %d channels", len(oflds))
    else:
        logging.error("no channels found")
        sys.exit(1)
    if regexes:
        logging.debug("found %d regexes with valid channels", len(regexes))
    # remove channels on the reject list (eg bad RTDs etc)
    rflds, rregexes = tu.parse_channel_sources(optlist.reject, channel_file)
    if rflds:
        logging.debug("found %d channels to reject", len(rflds))
        for rid in rflds.keys():
            if rid in oflds.keys():
                removed = oflds.pop(rid)
                logging.debug("removing %s from channels to process", removed)
            else:
                logging.debug("NOT removing %s from channels to process", rflds[rid])
        logging.debug("%d channels remaining", len(oflds))
    # filter on E2V, ITL, science, corner by removing other types
    rafts_to_reject = []
    if optlist.e2v:
        rafts_to_reject.extend(rafts_of_type["ITL"])
    if optlist.itl:
        rafts_to_reject.extend(rafts_of_type["E2V"])
    if optlist.science:
        rafts_to_reject.extend(rafts_of_type["CORNER"])
    if optlist.corner:
        rafts_to_reject.extend(rafts_of_type["SCIENCE"])
    if rafts_to_reject:
        rids = []
        for chid in oflds:  # loop over paths
            logging.debug("id= %5d  path= %s", int(chid), oflds[chid])
            for raft in set(rafts_to_reject):  # loop over rafts of type
                logging.debug("raft to reject = %s", raft)
                if re.search(f"/{raft}/", oflds[chid]):
                    rids.append(chid)
                    logging.debug("adding %s to channels to reject", oflds[chid])
                    break
                else:
                    logging.debug("NOT adding %s to channels to reject", oflds[chid])
        for rid in rids:
            removed = oflds.pop(rid)
            logging.debug("removing %s from channels to process", removed)
        logging.debug("%d channels remaining", len(oflds))
    # now have info needed to query the CCS trending db
    if optlist.match:
        print("#--- Found matching channels:")
        for chid in oflds:
            print("   id: {}  path: {}".format(chid, oflds[chid]))
        sys.exit(0)
    logging.debug("Found matching channels:")
    for chid in oflds:
        logging.debug("id= %5d  path= %s", int(chid), oflds[chid])
    # Get the trending data either from local saved files or via
    # trending db queries to the rest service
    if optlist.input_file:
        # get input from files rather than trending service
        # an issue is that the input file need not have the
        # same set of channels or time intervals as requested on command line.
        # The output time intervals will be restricted to the intersection
        # of the intervals present in the input files.
        responses = []
        parser = etree.XMLParser(remove_blank_text=True)
        for ifile in optlist.input_file:
            logging.debug("using %s for input", ifile)
            logging.debug("test for well-formed xml...")
            try:
                tree = etree.parse(ifile, parser)
            except etree.ParseError as e:
                logging.debug("parsing %s failed: %s", ifile, e)
                sys.exit(1)
            except etree.XMLSyntaxError as e:
                logging.debug("parsing %s failed: %s", ifile, e)
                sys.exit(1)
            else:
                logging.debug("successfully parsed %s", ifile)
            logging.debug("appending to responses...")
            responses.append(
                etree.tostring(
                    tree.getroot(),
                    encoding="UTF-8",
                    xml_declaration=True,
                    pretty_print=False,
                )
            )
            logging.debug("deleting the etree")
            del tree
    else:
        # CCS is pre-binned at 5m, 30m, or will rebin on-the-fly
        # default is raw data, timebins triggers stat data
        # query the rest server and place responses into a list
        # join the ids requested as "id0&id=id1&id=id2..." for query
        idstr = "&id=".join(id for id in oflds)
        responses = []
        timebins = 0
        nbins = 0
        if optlist.timebins == 0:  # autosize it
            for ival in intervals:  # only one interval per query allowed
                logging.debug("timebins=0")
                logging.debug("ival[1]= %d, ival[0]= %d", ival[1], ival[0])
                if int((ival[1] - ival[0]) / 1000 / 60) < 5:  # <5m => raw data
                    timebins = None
                elif int((ival[1] - ival[0]) / 1000 / 3600) < 10:  # <10h => 1m bins
                    timebins = int(((ival[1] - ival[0]) / 1000.0) / 60.0)
                elif int((ival[1] - ival[0]) / 1000 / 3600) < 50:  # <50h => 5m bins
                    timebins = int(((ival[1] - ival[0]) / 1000.0) / 300.0)
                else:  # 30m bins
                    timebins = int(((ival[1] - ival[0]) / 1000.0) / 1800.0)
                logging.debug("timebins= %d", timebins)
                if timebins and nbins < timebins:
                    nbins = timebins
        else:
            nbins = optlist.timebins  # is None or an integer
        for ival in intervals:  # only one interval per query allowed
            res = tu.query_rest_server(ival[0], ival[1], data_url, idstr, nbins)
            responses.append(res)
    # Now have the data from trending service
    # Output to stdout a well formed xml tree aggregating the xml received
    # Main use is to save to local file, and re-use for subsequent queries
    # for statistics, plots etc. with subset of channels and time periods
    # Also useful fo debugging and verification of data
    # need to have server info as attribs to get tz correct
    if optlist.xml:
        xml_dec = b'<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
        os.write(1, xml_dec)
        datas_str = '<datas {}="{}" {}="{}" {}="{}">\n'.format(
            "trending_server",
            tsite["server"],
            "trending_port",
            tsite["port"],
            "trending_tz",
            tsite["tz"],
        )
        os.write(1, str.encode(datas_str))
        for res in responses:
            root = etree.fromstring(res)
            for data in root.iter("data"):
                os.write(
                    1,
                    etree.tostring(
                        data, encoding="UTF-8", xml_declaration=False, pretty_print=True
                    ),
                )
        try:
            os.write(1, b"</datas>")
        except OSError:
            # 'Broken pipe' OSError when stdout is closed
            pass
        sys.exit(0)
    # Translate the xml responses into internal arrays etc.
    # XML Tree structure looks like this:
    # 1: data [id, path]
    # 2: trendingresult [-]
    #     3: channelmetadata [-]
    #         4: channelmetadatavalue [tstart, tstop, name, value]
    #     3: trendingdata [-]
    #         4: datavalue [name, value]
    #         4: axisvalue [name, value, loweredge, upperedge]
    # where [id, path] could appear multiple times and input time intervals are
    # allowed to overlap
    #
    chanspec = dict()  # where keys are chids, element is also a dict
    chanmd = dict()  # key is chid, elements will be dicts holding arrays
    chandata = dict()  # key is chid, element is list of (time, value) tuples
    datacnt = 0
    for res in responses:
        root = etree.fromstring(res)
        for data in root.iter("data"):
            datacnt += 1
            chid = data.attrib.get("id")
            path = data.attrib.get("path")
            # verify this element's (chid, path) matches the input list
            # logging.debug('id=%s  path=%s', chid, path)
            if chid not in oflds:
                continue
            if path is None or oflds[chid] != path:
                logging.warning(
                    "inputpath(id=%s): %s != %s (xmlpath), using %s",
                    chid,
                    oflds[chid],
                    path,
                    oflds[chid],
                )
                path = oflds[chid]
            # check if chid in
            if chid in chanspec:
                if chanspec[chid]["path"] != path:
                    logging.warning("path mismatch for channel_id= %d", chid)
                    logging.warning(
                        "  %s != %s, skipping....", chanspec[chid]["path"], path
                    )
            else:
                chanspec[chid] = dict()
                chanspec[chid]["path"] = path
                chanspec[chid]["units"] = "none"
            # channelmetadata:
            # each element is a name, value and time interval
            # a name can appear multiple times with distinct time intervals
            # convert to a list, per name, of ordered pairs (value,time)
            # that could be plotted using those points
            #
            if chid not in chanmd:  # check if already exists
                chanmd[chid] = dict()
            # metadata:
            # parse all but only using units for now
            for mdval in data.iter("channelmetadatavalue"):
                if mdval.keys():  # empty sequence is false
                    mdname = mdval.attrib.get("name")  # key
                    mdvalue = mdval.attrib.get("value")  # value
                    mdstart = mdval.attrib.get("tstart")
                    mdstop = mdval.attrib.get("tstop")
                if mdname in chanmd[chid]:
                    chanmd[chid][mdname].append((mdstart, mdvalue))
                    chanmd[chid][mdname].append((mdstop, mdvalue))
                else:  # first assignment
                    chanmd[chid][mdname] = [(mdstart, mdvalue), (mdstop, mdvalue)]
            # trendingdata:
            # extract timestamp, value pairs in axisvalue, datavalue tags
            if chid not in chandata:  # first time
                chandata[chid] = []  # empty list
            for tdval in data.iter("trendingdata"):
                dataval = tdval.find("datavalue")
                if dataval is not None:
                    tvalue = dataval.attrib.get("value")
                else:
                    continue
                axisval = tdval.find("axisvalue")
                if axisval is not None:
                    tstamp = axisval.attrib.get("value")
                else:
                    continue
                # if tstamp is in intervals then append
                for ival in intervals:  # slow, but no other way?
                    if ival[0] < int(tstamp) < ival[1]:
                        chandata[chid].append((tstamp, tvalue))
                        break
    # Done translating the xml responses into internal lists etc.
    # Delete all the raw xml responses
    logging.debug("processed %d xml channel responses", len(responses))
    logging.debug("processed %d uniq channel requests", len(chanspec))
    logging.debug("processed %d total channel queries", datacnt)
    del responses
    # chanspec = dict()  # where keys are chids, values are ccs paths
    # chanmd = dict()  # key is chid, elements will be dicts holding lists
    # chandata = dict()  # key is chid, elements are (time, value) pair lists
    # so all responses processed, now have data organized by a set of dicts
    # with the the index on channel id.  Multiple queries for a given channel
    # id are grouped together and there could be duplicate values.
    #
    # To facilitate operating on the data, transform chandat from list[] based
    # (which was easy to append to) to np.array based data.
    # numpy structured dtype for (timestamp-ms, value) samples
    chandt = np.dtype({"names": ["tstamp", "value"], "formats": ["int", "float"]})
    # list of channel ids to prune because they returned no data
    trimids = []
    for chid in chanspec:
        path = chanspec[chid]["path"]
        logging.debug("id=%s  path=%s", chid, path)
        for mdname in chanmd[chid]:
            # pick out and process the md's we want
            if mdname == "units" and chanspec[chid]["units"] == "none":
                chanspec[chid]["units"] = chanmd[chid][mdname][-1][1]
        logging.debug("    units=%s", chanspec[chid]["units"])
        # sort and remove duplicates from chandata[chid] where:
        # chandata[chid] = [(t0, v0), (t1, v1), ...., (tn, vn)]
        # and convert to np array
        tmparr = np.array(chandata[chid], dtype=chandt)
        chandata[chid] = np.unique(tmparr)
        logging.debug(
            "    chandata: %d uniq/sorted values from %d entries",
            np.size(chandata[chid]),
            np.size(tmparr),
        )
        del tmparr
        if np.size(chandata[chid]) == 0:  # append chid to trimid list
            logging.debug("%s has no data", chanspec[chid]["path"])
            # arrange to trim empty data
            trimids.append(chid)
    for chid in trimids:
        del chandata[chid]
        del chanmd[chid]
        del chanspec[chid]
    # print to stdout a text dump of the data, in time order per channel
    #
    if optlist.text:
        # print a header for the text
        #
        print("#")
        print("# {}".format(optlist.title))
        print("#")
        print(
            "# CCS trending dump at {}".format(
                dt.datetime.now(gettz()).isoformat(timespec="seconds")
            )
        )
        print(
            "# Data for {} total seconds from {} intervals".format(inttot, intcnt),
            end="",
        )
        print(
            " over {} (h:m:s) from:".format(
                dt.timedelta(seconds=(tmax / 1000 - tmin / 1000))
            )
        )
        print(
            '# tmin={}: "{}"'.format(
                tmin,
                dt.datetime.fromtimestamp(tmin / 1000, gettz(tsite["tz"])).isoformat(
                    timespec="seconds"
                ),
            )
        )
        print(
            '# tmax={}: "{}"'.format(
                tmax,
                dt.datetime.fromtimestamp(tmax / 1000, gettz(tsite["tz"])).isoformat(
                    timespec="seconds"
                ),
            )
        )
        print(
            "#{:<{wt}s} {:>{wv}s} {:<{wu}s}  {:<{wp}s}  {:<{wd}s}".format(
                " 'time (ms)'",
                "'value'",
                "'unit'",
                "'channel CCS path'",
                "'iso-8601 Date'",
                wt=13,
                wv=12,
                wu=6,
                wp=30,
                wd=30,
            )
        )
        # loop over all channels sorted on units then path
        for chid in sorted(
            chanspec.keys(), key=lambda x: (chanspec[x]["units"], chanspec[x]["path"])
        ):
            path = chanspec[chid]["path"]
            unitstr = chanspec[chid]["units"]
            if np.size(chandata[chid]) == 0:
                continue
            for (tstamp, value) in chandata[chid]:
                try:
                    date = dt.datetime.fromtimestamp(
                        tstamp / 1000.0, gettz(tsite["tz"])
                    ).isoformat(timespec="milliseconds")
                    print(
                        "{:<{wt}d} {:>{wv}g} {:>{wu}s}  ".format(
                            int(tstamp), float(value), unitstr, wt=14, wv="12.7", wu=6
                        ),
                        end="",
                    )
                    print(
                        "{:<{wp}s}  {:<{wd}s}".format(
                            path, date, wt=14, wv="12.7", wu=6, wp=30, wd=30
                        )
                    )
                except IOError:
                    # 'Broken pipe' IOError when stdout is closed
                    pass
    # print some statistics for each channel
    #
    if optlist.stats:
        # print a header for the stats
        #
        print("#")
        print("# {}".format(optlist.title))
        print("#")
        print(
            "# CCS trending stats at {}".format(
                dt.datetime.now(gettz()).isoformat(timespec="seconds")
            )
        )
        print(
            "# Data for {} total seconds from {} intervals".format(inttot, intcnt),
            end="",
        )
        print(
            " over {} (h:m:s) from:".format(
                dt.timedelta(seconds=(tmax / 1000 - tmin / 1000))
            )
        )
        print(
            '# tmin="{}"'.format(
                dt.datetime.fromtimestamp(tmin / 1000, gettz(tsite["tz"])).isoformat(
                    timespec="seconds"
                )
            )
        )
        print(
            '# tmax="{}"'.format(
                dt.datetime.fromtimestamp(tmax / 1000, gettz(tsite["tz"])).isoformat(
                    timespec="seconds"
                )
            )
        )
        print(
            "# {:>4s} {:>8s} {:>8s} {:>8s} {:>8s} {:>8s} {:>11s}".format(
                "cnt", "mean", "median", "stddev", "min", "max", "d/dt 1/m"
            ),
            end="",
        )
        if optlist.rstats:
            print(
                "{:>8s} {:>8s} {:>8s} ".format("rmean", "rmedian", "rstddev"), end=""
            )
        print(" {:<{wt}s} {:>{wu}s}".format("path", "units", wt=40, wu=6))
        # loop over all channels sorted on units then path
        for chid in sorted(
            chanspec.keys(), key=lambda x: (chanspec[x]["units"], chanspec[x]["path"])
        ):
            path = chanspec[chid]["path"]
            unitstr = chanspec[chid]["units"]
            tstamp = chandata[chid]["tstamp"]
            nelem = tstamp.size
            if nelem > 0:
                y = chandata[chid]["value"]
                avg = np.mean(y)
                med = np.median(y)
                std = np.std(y)
                npmin = np.min(y)
                npmax = np.max(y)
                if y.size > 5:
                    # silly but better than taking last value
                    npgrad = np.gradient(y, tstamp)
                    grad = (
                        60
                        * 1000
                        * (
                            npgrad[-4] * 1.0
                            + npgrad[-3] * 1.0
                            + npgrad[-2] * 1.0
                            + npgrad[-1] * 1.0
                        )
                        / 4.0
                    )
                else:
                    grad = math.nan
                if optlist.rstats:
                    rmean, rmedian, rstd = stats.sigma_clipped_stats(y)
            else:
                avg = med = std = npmin = npmax = 0
                grad = rmean = rmedian = rstd = 0
            try:
                print(
                    "{:>6g} {:>8.4g} {:>8.4g} {:>8.4g} ".format(
                        nelem,
                        avg,
                        med,
                        std,
                    ),
                    end="",
                )
                print("{:>8.4g} {:>8.4g} ".format(npmin, npmax), end="")
                print("{:>11.3g} ".format(grad), end="")
                if optlist.rstats:
                    print(
                        "{:>8.4g} {:>8.4g} {:>8.4g} ".format(rmean, rmedian, rstd),
                        end="",
                    )
                print("{:<{wt}s} {:>{wu}s}".format(path, unitstr, wt=40, wu=6))
            except IOError:
                # 'Broken pipe' IOError when stdout is closed
                pass
    # Plotting:
    #
    if optlist.plot:
        # make one or more plots of the time series data
        # default is plot per channel per interval
        # option to combine intervals and channels by units
        # and to overlay all
        # update/override some critical parameters
        plt.style.use(optlist.style)
        pu.update_rcparams()
        # figure out how many distinct plots and windows to make
        # subplots layout and shape are determined
        # nax will store the number of actual plots
        # the nxm array of plots may be larger
        #
        nax = len(chanspec)  # default
        if optlist.overlayunits:  # axis set per unit
            unit_map = dict()
            unit_idx = 0  # counts types of units
            # loop over all channels sorted on units then path
            for chid in sorted(
                chanspec, key=lambda x: (chanspec[x]["units"], chanspec[x]["path"])
            ):
                unit = chanspec[chid]["units"]
                if unit not in unit_map:
                    unit_map[unit] = unit_idx
                    unit_idx += 1
            nax = len(unit_map)
            logging.debug("unit_map=%s", unit_map)
        elif optlist.overlayregex:  # axis set per regex and per unit
            # regexes[] is a list of regex's used to select channels
            regex_map = dict()
            axis_idx = 0  # will be the axis index
            # loop over all channels sorted on units then path
            for chid in sorted(
                chanspec, key=lambda x: (chanspec[x]["units"], chanspec[x]["path"])
            ):
                chid_matched = False
                path = chanspec[chid]["path"]
                unit = chanspec[chid]["units"]
                for regex in regexes:
                    if re.search(regex, path):
                        logging.debug("regex_map[%s] matches %s", regex, path)
                        if regex not in regex_map:
                            regex_map[regex] = dict()
                            regex_map[regex][unit] = axis_idx
                            axis_idx += 1
                        elif unit not in regex_map[regex]:
                            regex_map[regex][unit] = axis_idx
                            axis_idx += 1
                        else:  # both regex and unit accounted for
                            pass
                        chid_matched = True
                        break  # found match
                if not chid_matched:
                    logging.error("no regex matches %s", path)
                    sys.exit(1)
            nax = axis_idx  # so now have an axis count, need to re-assign
            # now re-assign axis ids to match command line regexes order
            regex_map_tmp = copy.deepcopy(regex_map)
            aix = 0
            for regex in regexes:
                for unit in sorted(regex_map[regex].keys()):
                    regex_map_tmp[regex][unit] = aix
                    aix += 1
            regex_map = regex_map_tmp
            logging.debug("regex_map=%s", regex_map)
        elif optlist.overlay:
            nax = 1
        if nax == 0:
            logging.error("no data to plot, check inputs?")
            logging.error("try running with --debug")
            sys.exit(1)
        if (
            not optlist.overlaytime
            and not optlist.overlaystart
            and not optlist.overlaystop
        ):
            nax = nax * len(intervals)  # per interval, per channel
        logging.debug("nax=%d", nax)
        # logging.debug('nrows= %d  ncols=%d', nrows, ncols)
        if not optlist.overlaytime and len(intervals) > 1 and optlist.sharex:
            sharex = False
        else:
            sharex = optlist.sharex
        fig, axes = pu.get_fig_and_axes(
            nax,
            optlist.layout,
            optlist.overlay,
            sharex,
            False,
            optlist.dpi,
            optlist.fsize,
        )
        logging.debug("len(axes)=%d", len(axes))
        logging.debug("axes.shape= %s", axes.shape)
        nrows, ncols = (axes.shape[0], axes.shape[1])
        # loop over data channels and plot them on correct axis
        # chids = list(chanspec.keys())
        chids = sorted(
            chanspec, key=lambda x: (chanspec[x]["units"], chanspec[x]["path"])
        )
        logging.debug("chids=%s", chids)
        unit = None
        for chidx in range(0, len(chids)):  # channels
            chid = chids[chidx]
            unit = chanspec[chid]["units"]
            path = chanspec[chid]["path"]
            tstamp = chandata[chid]["tstamp"]
            mcolor = None
            labeled = False
            for idx in range(0, len(intervals)):
                #
                # choose on which axis to plot
                if optlist.overlayunits:
                    axcnt = unit_map[unit]  # map unit to correct axis
                elif optlist.overlayregex:
                    chid_matched = False
                    for regex in regexes:
                        if re.search(regex, path):
                            logging.debug("regex_map[%s] matches %s", regex, path)
                            axcnt = regex_map[regex][unit]
                            chid_matched = True
                            logging.debug(
                                "using axcnt=%d for regex_map[%s]", axcnt, regex
                            )
                    if not chid_matched:
                        logging.error("no regex match found for %s", path)
                else:
                    axcnt = chidx
                if not (
                    optlist.overlaytime or optlist.overlaystart or optlist.overlaystop
                ):
                    # stride is number of intervals
                    axcnt = axcnt * len(intervals) + idx
                if optlist.overlay:
                    axcnt = 0
                #
                # now set up this axis
                logging.debug("using axcnt=%d", axcnt)
                ax = np.ravel(axes)[axcnt]
                rowid = int(axcnt / ncols)
                colid = int(axcnt % ncols)
                logging.debug("axcnt= %d  idx= %d", axcnt, idx)
                logging.debug("rowid = %d  colid = %d", rowid, colid)
                ax.grid(True)
                ax.set_frame_on(True)
                ax.get_xaxis().set_visible(True)
                ax.get_yaxis().set_visible(True)
                ax.xaxis.set_tick_params(labelsize="x-small")
                ax.yaxis.set_tick_params(labelsize="x-small")
                if optlist.style == "ggplot":
                    ax.plot([], [])  # consumes the first color (red)
                #
                # mask the tstamps outside of the interval
                mask = (intervals[idx][0] < tstamp) & (tstamp < intervals[idx][1])
                mask_start = intervals[idx][0] / 1000.0
                mask_stop = intervals[idx][1] / 1000.0
                x = chandata[chid]["tstamp"][mask] / 1000.0  # time in seconds
                y = chandata[chid]["value"][mask]
                #
                # deal with point/line format
                if optlist.fmt:
                    fmt = optlist.fmt[0]
                else:
                    if optlist.timebins:
                        fmt = "|-"
                    else:
                        fmt = "o-"
                # do the actual plotting
                #
                if not (optlist.overlaystart or optlist.overlaystop):
                    #
                    # convert time axis to matplotlib dates sequence
                    dates = [
                        dt.datetime.fromtimestamp(ts, gettz(tsite["tz"])) for ts in x
                    ]
                    mds = mdate.date2num(dates)
                    if mds.size == 0 and not (
                        optlist.overlay
                        or optlist.overlaytime
                        or optlist.overlayunits
                        or optlist.overlayregex
                    ):
                        #
                        # no data, blank, annotate as empty and skip
                        ax.grid(False)
                        ax.set_frame_on(True)
                        ax.get_xaxis().set_visible(False)
                        ax.get_yaxis().set_visible(False)
                        anno_string = "{}:{} empty".format(chanspec[chid]["path"], idx)
                        ax.annotate(
                            anno_string,
                            xy=(0.03, 0.55),
                            xycoords="axes fraction",
                            horizontalalignment="left",
                            verticalalignment="bottom",
                            fontsize="small",
                        )
                        anno_string = "{} (tstart)".format(
                            dt.datetime.fromtimestamp(
                                intervals[idx][0] / 1000, gettz(tsite["tz"])
                            ).isoformat(timespec="seconds")
                        )
                        ax.annotate(
                            anno_string,
                            xy=(0.03, 0.45),
                            xycoords="axes fraction",
                            horizontalalignment="left",
                            verticalalignment="top",
                            fontsize="small",
                        )
                        continue
                    # # normalization and shift
                    # if optlist.normalize:
                    # make label for legend
                    if not labeled:  # label first valid interval, save color
                        mlabel = "{}".format(chanspec[chid]["path"])
                        if not use_mjd:
                            line = ax.plot_date(
                                mds, y, fmt, label=mlabel, tz=gettz(tsite["tz"])
                            )
                        else:
                            mjd = Time(mds, format="plot_date").mjd - float(optlist.mjd)
                            line = ax.plot(mjd, y, fmt, color=mcolor, label=None)
                        mcolor = line[0].get_color()
                        logging.debug("mcolor= %s", mcolor)
                        labeled = True
                    else:  # no label on later intervals, use saved color
                        if not use_mjd:
                            line = ax.plot_date(
                                mds,
                                y,
                                fmt,
                                color=mcolor,
                                label=None,
                                tz=gettz(tsite["tz"]),
                            )
                        else:
                            mjd = Time(mds, format="plot_date").mjd - float(optlist.mjd)
                            line = ax.plot(mjd, y, fmt, color=mcolor, label=None)
                    # set x,y-axis label format
                    if not ax.get_ylabel():
                        ax.set_ylabel("{}".format(unit), size="small")
                        ax.ticklabel_format(axis="y", style="sci", scilimits=(-3, 5))
                    if not ax.get_xlabel():
                        # xlabel and tick labels on bottom plots
                        # only unless multiple intervals
                        if (
                            len(intervals) > 1 and not optlist.overlaytime
                        ) or nax - axcnt - 1 < ncols:
                            xlabel_str = "{} (tstart)".format(
                                dt.datetime.fromtimestamp(
                                    intervals[idx][0] / 1000, gettz(tsite["tz"])
                                ).isoformat(timespec="seconds")
                            )
                            if optlist.timebins:
                                xlabel_str = "{} [{} bins]".format(
                                    xlabel_str, optlist.timebins
                                )
                            if use_mjd:
                                xlabel_str = "{} MJD".format(xlabel_str)
                                if optlist.mjd:
                                    xlabel_str = "{}-{}".format(xlabel_str, optlist.mjd)
                            logging.debug("ax.set_xlabel(%s)", xlabel_str)
                            ax.set_xlabel(
                                "{}".format(xlabel_str),
                                position=(0.0, 1e6),
                                size="small",
                                horizontalalignment="left",
                            )
                            ax.tick_params(axis="x", labelbottom=True)
                            # rotate the labels
                            if not use_mjd:
                                for xtick in ax.get_xticklabels():
                                    xtick.set_rotation(30)
                                    xtick.set_horizontalalignment("right")
                        else:
                            ax.tick_params(axis="x", labelbottom=False)
                else:  # overlay start or stop
                    # convert x to duration axis units (s+/-)
                    #
                    if optlist.overlaystart:
                        x = x - mask_start
                    elif optlist.overlaystop:
                        x = x - mask_stop
                    else:
                        logging.error("overlaystart/stop problem")
                        sys.exit(1)
                    mlabel = "{}[{}]".format(chanspec[chid]["path"], idx)
                    line = ax.plot(x, y, fmt, label=mlabel)
                    mcolor = line[0].get_color()
                    logging.debug("mcolor= %s", mcolor)
                    if not ax.get_ylabel():
                        ax.set_ylabel("{}".format(unit), size="small")
                        ax.ticklabel_format(axis="y", style="sci", scilimits=(-3, 5))
                    # xlabel for this axis
                    if not ax.get_xlabel():
                        if nax - axcnt - 1 < ncols:
                            if optlist.overlaystart:
                                xstr = "tstart"
                                xid = 0
                            if optlist.overlaystop:
                                xstr = "tstop"
                                xid = 1
                            xlabel_str = "{} ({}[0])".format(
                                dt.datetime.fromtimestamp(
                                    intervals[0][xid] / 1000, gettz(tsite["tz"])
                                ).isoformat(timespec="seconds"),
                                xstr,
                            )
                            if len(intervals) > 1:
                                xlabel_last = "{} ({}[{}])".format(
                                    dt.datetime.fromtimestamp(
                                        intervals[-1][xid] / 1000, gettz(tsite["tz"])
                                    ).isoformat(timespec="seconds"),
                                    xstr,
                                    len(intervals) - 1,
                                )
                                if len(intervals) > 2:
                                    xlabel_last = "...{}".format(xlabel_last)
                                xlabel_str = "{}\n{}".format(xlabel_str, xlabel_last)
                            ax.set_xlabel(
                                "{}".format(xlabel_str),
                                fontsize="small",
                                position=(0.0, 1e6),
                                horizontalalignment="left",
                            )
                            #
                            ax.tick_params(
                                axis="x", labelbottom=True, labelrotation=30.0
                            )
                        else:
                            ax.tick_params(axis="x", labelbottom=False)
        # plot array padded with invisible boxes
        for pcnt in range(nax, nrows * ncols):
            ax = np.ravel(axes)[pcnt]
            ax.grid(False)
            ax.set_frame_on(False)
            ax.get_xaxis().set_visible(False)
            ax.get_yaxis().set_visible(False)
        # make the legends for each plot in the array
        # with legend placement/size adjustments
        for pcnt in range(0, nax):
            logging.debug("pcnt= %d, nax= %d", pcnt, nax)
            ax = np.ravel(axes)[pcnt]
            handles, labels = ax.get_legend_handles_labels()
            if not handles or not labels:
                continue
            # sort the labels for easier reading
            # using https://stackoverflow.com/questions/9764298
            labels, handles = (list(t) for t in zip(*sorted(zip(labels, handles))))
            if labels:
                pu.mk_legend(optlist.placement, nrows, handles, labels, ax)
        if optlist.title:  # set the suptitle
            suptitle = optlist.title
            if optlist.title == "auto":
                suptitle = mu.mkglob(
                    [c["path"] for c in list(chanspec.values())], False
                )
            if suptitle:  # mkglob returns None on no solution
                logging.debug("using suptitle=%s", suptitle)
                fig.set_tight_layout(
                    {"h_pad": 0.02, "w_pad": 1.0, "rect": [0, 0, 1, 0.97]}
                )
                fig.suptitle(suptitle, size="medium")
        else:
            fig.set_tight_layout({"h_pad": 0.02, "w_pad": 1.0, "rect": [0, 0, 1, 1]})
        if optlist.saveplot:
            fig.savefig(f"{optlist.saveplot}", dpi=600)
        if optlist.noshow:
            pass
        else:
            plt.show()
    # end of main()
    sys.exit(0)
if __name__ == "__main__":  # script entry point
    trender()
| 39.271987 | 88 | 0.487476 |
acec36cb00637a376ed01f4d0cfb5a2b9462c47f | 886 | py | Python | quiz/ui/base.py | KelstonClub/quiz | 5f6fca87ca21c376937f50f00e1d3ff2fbe3425a | [
"MIT"
] | null | null | null | quiz/ui/base.py | KelstonClub/quiz | 5f6fca87ca21c376937f50f00e1d3ff2fbe3425a | [
"MIT"
] | null | null | null | quiz/ui/base.py | KelstonClub/quiz | 5f6fca87ca21c376937f50f00e1d3ff2fbe3425a | [
"MIT"
] | null | null | null | class BaseUI:
    def __init__(self):
        # Abstract interface: concrete UI subclasses must supply their
        # own constructor (this base class cannot be instantiated as-is).
        raise NotImplementedError
    def reset(self):
        """Initialise the screen ready to start a fresh quiz run.
        """
        raise NotImplementedError
    def execute_loop(self):
        """Execute whatever main loop the UI needs.
        """
        raise NotImplementedError
    def show_question(self, question):
        """Show the text or image for a single question.
        """
        raise NotImplementedError
    def show_multichoice_answers(self, answers):
        """Show the possibilities for multiple choice answers."""
        raise NotImplementedError
    def detect_multichoice_answers(self):
        """Return which answer(s) a user selected."""
        raise NotImplementedError
    def detect_text_answer(self):
        """Return what text the user entered."""
        raise NotImplementedError
| 26.848485 | 65 | 0.623025 |
acec3740e48d2c94d73ade1c37954faa50171440 | 7,071 | py | Python | english/clustering/DBSCAN/MeasureScore.py | Lyuyangdaisy/DS_package | ca0f220598ee156028646fbefccde08b2ece62ea | [
"MIT"
] | 6 | 2021-03-13T10:33:47.000Z | 2022-01-23T07:22:40.000Z | english/clustering/DBSCAN/MeasureScore.py | Lyuyangdaisy/DS_package | ca0f220598ee156028646fbefccde08b2ece62ea | [
"MIT"
] | null | null | null | english/clustering/DBSCAN/MeasureScore.py | Lyuyangdaisy/DS_package | ca0f220598ee156028646fbefccde08b2ece62ea | [
"MIT"
] | 2 | 2021-05-05T17:47:34.000Z | 2021-10-10T16:13:53.000Z | # -*- coding: utf-8 -*-
"""
Created on 17/08/2020
@author: Jingwei Liu
Version 1.1
"""
from sklearn import metrics
# Return model evaluation value
def get_measure_scores(measure, data, pred_labels, real_labels = None):
    """Compute the clustering evaluation score for the requested criterion.

    Supported criteria:
        'ARC'          : Adjusted Rand Score
        'AMIC'         : Adjusted Mutual Information Score
        'V'            : V Measure Score (harmonic mean of homogeneity
                         and completeness)
        'Homegeneity'  : Homogeneity Score
        'Completeness' : Completeness Score
        'Silouette'    : Silhouette Coefficient
        'CHS'          : Calinski Harabasz Score

    Parameters
    ----------
    measure : string
        Evaluation criterion, one of the keys listed above.
    data : pandas dataframe
        The clustered data (only used by 'Silouette' and 'CHS').
    pred_labels : array-like of shape
        Predicted cluster labels.
    real_labels : array-like of shape, optional
        Ground-truth labels (required by the label-based criteria).
        Defaults to None.

    Raises
    ------
    ValueError
        If ``measure`` is not one of the supported criteria.

    Returns
    -------
    The evaluation score.
    """
    # Criteria that compare predicted labels against the ground truth ...
    supervised_scorers = {
        'ARC': _get_Adjust_rand_score,
        'AMIC': _get_adjusted_mutual_info_score,
        'V': _get_v_measure_score,
        'Homegeneity': _get_homogeneity_score,
        'Completeness': _get_completeness_score,
    }
    # ... and internal criteria that only need the data plus predictions.
    internal_scorers = {
        'Silouette': _get_silhouette_score,
        'CHS': _get_calinski_harabasz_score,
    }
    if measure in supervised_scorers:
        return supervised_scorers[measure](real_labels, pred_labels)
    if measure in internal_scorers:
        return internal_scorers[measure](data, pred_labels)
    raise ValueError("Please select the correct evaluation criteria.")
# Display all evaluation values of the model
def get_marks(data, pred_labels, real_labels):
    """Print every supported evaluation score for a clustering result.

    Parameters
    ----------
    data : pandas dataframe
        The clustered data (used by the internal measures).
    pred_labels : array-like of shape
        Predicted cluster labels.
    real_labels : array-like of shape
        Ground-truth labels (used by the supervised measures).

    Returns
    -------
    None.
    """
    # Bug fix: these lines previously read `print("...:{}",format(x))` --
    # a comma instead of a dot -- so print received two arguments (the
    # literal string containing "{}" and builtin format(x)) and the
    # placeholder was never substituted. Use str.format properly.
    print("Adjusted Rand Score:{}".format(_get_Adjust_rand_score(real_labels, pred_labels)))
    print("Adjusted Mutual Info Score:{}".format(_get_adjusted_mutual_info_score(real_labels, pred_labels)))
    print("V Measure Score:{}".format(_get_v_measure_score(real_labels, pred_labels)))
    print("Homogeneity Score:{}".format(_get_homogeneity_score(real_labels, pred_labels)))
    print("Completeness Score:{}".format(_get_completeness_score(real_labels, pred_labels)))
    print("Silhouette Score:{}".format(_get_silhouette_score(data, pred_labels)))
    print("Calinski Harabasz Score:{}".format(_get_calinski_harabasz_score(data, pred_labels)))
# Find the location index of the best score
def get_best_score_index(score_list, measure):
    """Return the location index of the best evaluation value.

    For every currently supported criterion a higher score is better, so
    the best index is the position of the maximum value in ``score_list``.

    Parameters
    ----------
    score_list : list
        Scores, one per candidate model / parameter setting.
    measure : string
        Evaluation criterion: 'ARC', 'AMIC', 'V', 'Homegeneity',
        'Completeness', 'Silouette' or 'CHS'.

    Raises
    ------
    ValueError
        If ``measure`` is not a supported criterion.

    Returns
    -------
    best_index : int
        Location index of the best score.
    """
    # Bug fix: this function previously only matched the misspelling
    # 'Homegeineity', so the spelling used by get_measure_scores
    # ('Homegeneity') raised ValueError here. Both spellings are accepted
    # to stay backward compatible.
    valid_measures = {'ARC', 'AMIC', 'V', 'Homegeneity', 'Homegeineity',
                      'Completeness', 'Silouette', 'CHS'}
    if measure not in valid_measures:
        raise ValueError("Please select the correct evaluation criteria.")
    # Every supported measure is "larger is better".
    best_index = score_list.index(max(score_list))
    return(best_index)
# Adjusted Rand Score
def _get_Adjust_rand_score(real_labels, pred_labels):
    """Return the Adjusted Rand Score of two labelings.

    Parameters
    ----------
    real_labels : array-like of shape
        Ground-truth labels.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Adjusted Rand index (1.0 = perfect agreement, ~0.0 = random).
    """
    return(metrics.adjusted_rand_score(real_labels, pred_labels))
# Adjust Mutual Information
def _get_adjusted_mutual_info_score(real_labels, pred_labels):
    """Return the Adjusted Mutual Information score of two labelings.

    Parameters
    ----------
    real_labels : array-like of shape
        Ground-truth labels.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Adjusted mutual information (chance-corrected).
    """
    return(metrics.adjusted_mutual_info_score(real_labels, pred_labels))
# V Measure Score
def _get_v_measure_score(real_labels, pred_labels):
    """Return the V-measure: harmonic mean of homogeneity and completeness.

    Parameters
    ----------
    real_labels : array-like of shape
        Ground-truth labels.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        V-measure score.
    """
    return(metrics.v_measure_score(real_labels, pred_labels))
# homogeneity
def _get_homogeneity_score(real_labels, pred_labels):
    """Return the Homogeneity Score.

    Parameters
    ----------
    real_labels : array-like of shape
        Ground-truth labels.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Homogeneity score.
    """
    return(metrics.homogeneity_score(real_labels, pred_labels))
# completeness
def _get_completeness_score(real_labels, pred_labels):
    """Return the Completeness Score.

    Parameters
    ----------
    real_labels : array-like of shape
        Ground-truth labels.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Completeness score.
    """
    return(metrics.completeness_score(real_labels, pred_labels))
# Silhouette
def _get_silhouette_score(data, pred_labels):
    """Return the mean Silhouette Coefficient.

    Note: unlike the label-based helpers above, this is an *internal*
    measure -- the first argument is the clustered data itself, not the
    ground-truth labels.

    Parameters
    ----------
    data : pandas dataframe or array-like
        The clustered samples.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Mean silhouette coefficient over all samples.
    """
    return(metrics.silhouette_score(data, pred_labels))
# Calinski Harabasz Score
def _get_calinski_harabasz_score(data, pred_labels):
    """Return the Calinski-Harabasz score (variance-ratio criterion).

    Note: an *internal* measure -- the first argument is the clustered
    data itself, not the ground-truth labels.

    Parameters
    ----------
    data : pandas dataframe or array-like
        The clustered samples.
    pred_labels : array-like of shape
        Predicted cluster labels.

    Returns
    -------
    float
        Calinski-Harabasz score (higher = better-separated clusters).
    """
    return(metrics.calinski_harabasz_score(data, pred_labels))
| 26.483146 | 108 | 0.650403 |
acec379153e6969477912f1bb091427cc9a3fd3d | 27 | py | Python | vega/algorithms/nas/modnas/optim/model_optim/__init__.py | jie311/vega | 1bba6100ead802697e691403b951e6652a99ccae | [
"MIT"
] | 724 | 2020-06-22T12:05:30.000Z | 2022-03-31T07:10:54.000Z | vega/algorithms/nas/modnas/optim/model_optim/__init__.py | jie311/vega | 1bba6100ead802697e691403b951e6652a99ccae | [
"MIT"
] | 147 | 2020-06-30T13:34:46.000Z | 2022-03-29T11:30:17.000Z | vega/algorithms/nas/modnas/optim/model_optim/__init__.py | jie311/vega | 1bba6100ead802697e691403b951e6652a99ccae | [
"MIT"
] | 160 | 2020-06-29T18:27:58.000Z | 2022-03-23T08:42:21.000Z | from . import sampling, sa
| 13.5 | 26 | 0.740741 |
acec37d3dbaf39acefb55569bb04455427977452 | 1,048 | py | Python | pytorch_toolbelt/losses/soft_ce.py | mohitktanwr/toolkits | f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9 | [
"MIT"
] | 1,281 | 2019-03-17T18:32:39.000Z | 2022-03-31T03:47:22.000Z | pytorch_toolbelt/losses/soft_ce.py | mohitktanwr/toolkits | f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9 | [
"MIT"
] | 28 | 2019-04-05T10:49:25.000Z | 2022-03-11T10:40:28.000Z | pytorch_toolbelt/losses/soft_ce.py | mohitktanwr/toolkits | f3acfca5da05cd7ccdd85e8d343d75fa40fb44d9 | [
"MIT"
] | 99 | 2019-03-18T08:40:18.000Z | 2022-03-26T10:52:57.000Z | from typing import Optional
from torch import nn, Tensor
import torch.nn.functional as F
from .functional import label_smoothed_nll_loss
__all__ = ["SoftCrossEntropyLoss"]
class SoftCrossEntropyLoss(nn.Module):
    """
    Drop-in replacement for nn.CrossEntropyLoss with few additions:
    - Support of label smoothing
    """

    __constants__ = ["reduction", "ignore_index", "smooth_factor"]

    def __init__(self, reduction: str = "mean", smooth_factor: float = 0.0, ignore_index: Optional[int] = -100, dim=1):
        """
        :param reduction: reduction mode, forwarded to label_smoothed_nll_loss.
        :param smooth_factor: label-smoothing epsilon (0.0 disables smoothing).
        :param ignore_index: target value to ignore, forwarded to
            label_smoothed_nll_loss.
        :param dim: dimension that holds the class logits.
        """
        super().__init__()
        self.reduction = reduction
        self.smooth_factor = smooth_factor
        self.ignore_index = ignore_index
        self.dim = dim

    def forward(self, input: Tensor, target: Tensor) -> Tensor:
        # Log-probabilities along the class dimension, then the smoothed
        # negative log-likelihood.
        log_probabilities = F.log_softmax(input, dim=self.dim)
        loss = label_smoothed_nll_loss(
            log_probabilities,
            target,
            epsilon=self.smooth_factor,
            ignore_index=self.ignore_index,
            reduction=self.reduction,
            dim=self.dim,
        )
        return loss
| 30.823529 | 119 | 0.657443 |
acec37e4f22b28562fbc30f857cf2ecd14417bd5 | 1,221 | py | Python | mysite/mysite/urls.py | Inderjeet0007/AirPass-Tracking-System | 4715bfbceff67836d9e4e43a314826d0369aab13 | [
"MIT"
] | null | null | null | mysite/mysite/urls.py | Inderjeet0007/AirPass-Tracking-System | 4715bfbceff67836d9e4e43a314826d0369aab13 | [
"MIT"
] | null | null | null | mysite/mysite/urls.py | Inderjeet0007/AirPass-Tracking-System | 4715bfbceff67836d9e4e43a314826d0369aab13 | [
"MIT"
] | null | null | null | """mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path, re_path
from Airpass.views import contact, user_login, user_logout
# Routing table: favicon assets, the Django admin, login/logout entry
# points, and the AirPass dashboard/reader/beacon sub-routers mounted
# under apts_main/.
urlpatterns = [
    re_path(r'^', include('favicon.urls')),
    path('admin/', admin.site.urls),
    # Login is the site root; logout lives at /logout/.
    path('',user_login, name='user_login'),
    path('logout/',user_logout, name='user_logout'),
    path('apts_main/', include('Airpass.app_urls.dashboard_url')),
    path('apts_main/', include('Airpass.app_urls.reader_url')),
    path('apts_main/', include('Airpass.app_urls.beacons_url')),
    path('apts_main/contact', contact, name = 'contact'),
]
| 39.387097 | 77 | 0.70516 |
acec37fa513a93b2a8f3aac796eb6bca94410345 | 509 | py | Python | Programming-Basics-with-Python-April-2019/07_conditional_statements_more_exercises/01_pipes_in_pool.py | marinakolova/Python-Courses | eb95c782307be561b5026c5adafaa001b04caf4f | [
"MIT"
] | null | null | null | Programming-Basics-with-Python-April-2019/07_conditional_statements_more_exercises/01_pipes_in_pool.py | marinakolova/Python-Courses | eb95c782307be561b5026c5adafaa001b04caf4f | [
"MIT"
] | null | null | null | Programming-Basics-with-Python-April-2019/07_conditional_statements_more_exercises/01_pipes_in_pool.py | marinakolova/Python-Courses | eb95c782307be561b5026c5adafaa001b04caf4f | [
"MIT"
] | null | null | null | volume = int(input())
# Flow rate of each pipe (litres per hour) and elapsed time in hours.
p1 = int(input())
p2 = int(input())
hours = float(input())
# Litres delivered by each pipe over the elapsed time.
water_from_p1 = p1 * hours
water_from_p2 = p2 * hours
total_water = water_from_p1 + water_from_p2
if total_water <= volume:
    # Report fill level and each pipe's share of the delivered water.
    print(f"The pool is {(total_water / volume * 100):.2f}% full. "
          f"Pipe 1: {(water_from_p1 / total_water * 100):.2f}%. "
          f"Pipe 2: {(water_from_p2 / total_water * 100):.2f}%")
else:
    # Pool capacity exceeded: report the excess volume.
    print(f"For {hours:.2f} hours the pool overflows with {(total_water - volume):.2f} liters.")
| 31.8125 | 96 | 0.640472 |
acec3806c9481e9ab5181dca4733aa56301cd6d4 | 3,456 | py | Python | src/try_django/settings.py | aasthakasera/Django | c34c0a2a0dfc732f0d18afa3dfa58d61285f7399 | [
"MIT"
] | null | null | null | src/try_django/settings.py | aasthakasera/Django | c34c0a2a0dfc732f0d18afa3dfa58d61285f7399 | [
"MIT"
] | 4 | 2021-04-08T19:30:43.000Z | 2022-02-10T07:58:16.000Z | src/try_django/settings.py | aasthakasera/Django | c34c0a2a0dfc732f0d18afa3dfa58d61285f7399 | [
"MIT"
] | null | null | null | """
Django settings for try_django project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): a real secret key is committed here; move it to an
# environment variable before deploying.
SECRET_KEY = 'gt1lm8^o5u#5t1wa@fop6tkje1kq7$-&xsc_*6gh+sj4ne5cyn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project apps.
    'blog' ,
    'searches',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'try_django.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'try_django.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Collected static files go to a CDN-test directory beside the project.
LOCAL_STATIC_CON_PATH = os.path.join(os.path.dirname(BASE_DIR), 'static_cdn_test')
STATIC_ROOT = os.path.join(LOCAL_STATIC_CON_PATH, 'static')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'staticfiles')
]
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'media')
MEDIA_URL = '/media/'
| 25.984962 | 91 | 0.7011 |
acec3899715e328484814fde824f9ff5d950d19a | 1,199 | py | Python | setup.py | uraxy/gaaqoo | 360399b56960d178e7d6721dbdfc06dcbc59c5f2 | [
"MIT"
] | 1 | 2016-09-08T16:34:43.000Z | 2016-09-08T16:34:43.000Z | setup.py | uraxy/gaaqoo | 360399b56960d178e7d6721dbdfc06dcbc59c5f2 | [
"MIT"
] | 10 | 2016-09-08T16:23:34.000Z | 2021-09-07T23:47:59.000Z | setup.py | uraxy/gaaqoo | 360399b56960d178e7d6721dbdfc06dcbc59c5f2 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
from gaaqoo import __version__, __description__
# https://setuptools.readthedocs.io/en/latest/setuptools.html#basic-use
setup(
    name='gaaqoo',
    version=__version__,
    packages=find_packages(),
    # scripts = ['say_hello.py'],
    # Runtime dependencies.
    install_requires=[
        'Pillow',
        'PyYAML',
    ],
    # http://doc.pytest.org/en/latest/goodpractices.html#integrating-with-setuptools-python-setup-py-test-pytest-runner
    setup_requires=['pytest-runner'],
    tests_require=['pytest'],
    # metadata for upload to PyPI
    author='uraxy',
    author_email='uraxy123@gmail.com',
    description=__description__,
    license='MIT',
    # keywords=['dummy1', 'dumm2'],
    url='https://github.com/uraxy/gaaqoo',
    # Installs the `gaaqoo` console command.
    entry_points={
        'console_scripts': ['gaaqoo=gaaqoo.command_line:main'],
    },
    classifiers=[
        'Environment :: Console',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.5',
        'License :: OSI Approved :: MIT License',
        'Topic :: Multimedia :: Graphics :: Graphics Conversion',
        'Topic :: Utilities',
    ],
)
| 30.74359 | 119 | 0.64387 |
acec38ae61b4bf6633c1c7122cd5a452b2964ce0 | 674 | py | Python | runs/snort/10KB/src2-tgt1/mysql-par-noids-iter00500.cfg.py | Largio/broeval | 89e831d07f066100afdd1a5b220f9f08f1c10b3d | [
"MIT"
] | null | null | null | runs/snort/10KB/src2-tgt1/mysql-par-noids-iter00500.cfg.py | Largio/broeval | 89e831d07f066100afdd1a5b220f9f08f1c10b3d | [
"MIT"
] | null | null | null | runs/snort/10KB/src2-tgt1/mysql-par-noids-iter00500.cfg.py | Largio/broeval | 89e831d07f066100afdd1a5b220f9f08f1c10b3d | [
"MIT"
] | null | null | null |
# Experiment configuration constants for one benchmark run.
# Write results to this file
OUTFILE = 'runs/snort/10KB/src2-tgt1/mysql-par-noids-iter00500.result.csv'
# Source computers for the request
SOURCE = ['10.0.0.1', '10.0.0.3']
# Target machines for the requests (aka server)
TARGET = ['10.0.0.2']
# IDS Mode. (ATM: noids, min, max, http, ssl, ftp, icmp, mysql)
IDSMODE = 'noids'
# Connection mode (par = parallel, seq = sequential)
MODE = 'par'
# Number of evaluation repetitions to run
EPOCHS = 100
# Number of iterations to be run in each evaluation repetition
ITER = 500
# Size of the file to be downloaded from target (in Bytes * 10^SIZE)
SIZE = 4
# Protocol to be used e.g. HTTP, SSL, FTP, MYSQL
PROTOCOL = 'mysql'
acec38f88599f0dd8dcc08afcc5e9b41a14eadb8 | 429 | py | Python | String_Manipulation/Common_Child.py | NikolayVaklinov10/Interview_Preparation_Kit | 517c4c7e83a7bcc99a4f570dff6959b5229b1a29 | [
"MIT"
] | null | null | null | String_Manipulation/Common_Child.py | NikolayVaklinov10/Interview_Preparation_Kit | 517c4c7e83a7bcc99a4f570dff6959b5229b1a29 | [
"MIT"
] | null | null | null | String_Manipulation/Common_Child.py | NikolayVaklinov10/Interview_Preparation_Kit | 517c4c7e83a7bcc99a4f570dff6959b5229b1a29 | [
"MIT"
] | null | null | null |
def commonChild(s1, s2):
    """Return the length of the longest common subsequence of s1 and s2.

    Classic LCS dynamic programme with two rolling rows:
    O(len(s1) * len(s2)) time, O(len(s2)) memory.
    """
    cols = len(s2)
    previous_row = [0] * (cols + 1)
    current_row = [0] * (cols + 1)
    for ch1 in s1:
        for j, ch2 in enumerate(s2, start=1):
            if ch1 == ch2:
                # Matching characters extend the diagonal subsequence.
                current_row[j] = previous_row[j - 1] + 1
            else:
                # Otherwise carry the best of "skip ch2" / "skip ch1".
                current_row[j] = max(current_row[j - 1], previous_row[j])
        # Roll the rows; current_row is fully overwritten next pass.
        previous_row, current_row = current_row, previous_row
    return previous_row[cols]
| 25.235294 | 38 | 0.361305 |
acec39854af21d879e4a120ef260635559f3393e | 343 | py | Python | project/program/migrations/0004_auto_20181113_2334.py | giannisdaras/tedxntua2019 | 0f9ebdb2946cc8da8c44562313be740db8a394ea | [
"MIT"
] | 1 | 2019-04-26T13:56:39.000Z | 2019-04-26T13:56:39.000Z | project/program/migrations/0004_auto_20181113_2334.py | giannisdaras/tedxntua2019 | 0f9ebdb2946cc8da8c44562313be740db8a394ea | [
"MIT"
] | null | null | null | project/program/migrations/0004_auto_20181113_2334.py | giannisdaras/tedxntua2019 | 0f9ebdb2946cc8da8c44562313be740db8a394ea | [
"MIT"
] | null | null | null | # Generated by Django 2.1.2 on 2018-11-13 23:34
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: sets the `presenter` model's custom
    managers to the empty list (reverting to Django's default manager).
    """
    dependencies = [
        ('program', '0003_auto_20181113_2330'),
    ]
    operations = [
        migrations.AlterModelManagers(
            name='presenter',
            managers=[
            ],
        ),
    ]
| 18.052632 | 47 | 0.571429 |
acec39a54b200539fe686b9b8b192b4ab76a4033 | 2,350 | py | Python | alerta/webhooks/stackdriver.py | asherf/alerta | 9b00e02b764cc3875a0350c0a20509aeff3992db | [
"Apache-2.0"
] | null | null | null | alerta/webhooks/stackdriver.py | asherf/alerta | 9b00e02b764cc3875a0350c0a20509aeff3992db | [
"Apache-2.0"
] | null | null | null | alerta/webhooks/stackdriver.py | asherf/alerta | 9b00e02b764cc3875a0350c0a20509aeff3992db | [
"Apache-2.0"
] | 1 | 2021-03-11T18:19:22.000Z | 2021-03-11T18:19:22.000Z | import json
from datetime import datetime
from typing import Any, Dict
from flask import current_app
from alerta.models.alert import Alert
from . import WebhookBase
JSON = Dict[str, Any]
class StackDriverWebhook(WebhookBase):
    """
    StackDriver Notification webhook
    See https://cloud.google.com/monitoring/support/notification-options#webhooks
    """
    def incoming(self, query_string, payload):
        # Translate one StackDriver incident payload into an alerta Alert.
        incident = payload['incident']
        state = incident['state']
        # 'documentation' is an optional field that you can use to customize
        # your alert sending a json: its parsed content is merged into the
        # incident dict, so it can override fields such as 'severity',
        # 'environment', 'group', 'customer' or 'origin' read below.
        if 'documentation' in incident:
            try:
                content = json.loads(incident['documentation']['content'])
                incident.update(content)
            except Exception as e:
                # Malformed documentation is logged and otherwise ignored.
                current_app.logger.warning("Invalid documentation content: '{}'".format(incident['documentation']))
        service = []
        status = None
        create_time = None  # type: ignore
        severity = incident.get('severity', 'critical')
        if incident['policy_name']:
            service.append(incident['policy_name'])
        # Map the incident lifecycle state onto alerta status/severity.
        if state == 'open':
            create_time = datetime.utcfromtimestamp(incident['started_at'])
        elif state == 'acknowledged':
            status = 'ack'
            # NOTE(review): create_time stays None for acknowledgements --
            # presumably the Alert model tolerates that; confirm.
        elif state == 'closed':
            severity = 'ok'
            create_time = datetime.utcfromtimestamp(incident['ended_at'])
        else:
            # Unknown state: keep the alert but mark it indeterminate.
            severity = 'indeterminate'
        return Alert(
            resource=incident['resource_name'],
            event=incident['condition_name'],
            environment=incident.get('environment', 'Production'),
            severity=severity,
            status=status,
            service=service,
            group=incident.get('group', 'Cloud'),
            text=incident['summary'],
            attributes={
                'incidentId': incident['incident_id'],
                'resourceId': incident['resource_id'],
                'moreInfo': '<a href="%s" target="_blank">Stackdriver Console</a>' % incident['url']
            },
            customer=incident.get('customer'),
            origin=incident.get('origin', 'Stackdriver'),
            event_type='stackdriverAlert',
            create_time=create_time,
            raw_data=payload
        )
| 32.638889 | 115 | 0.591915 |
acec39fd5ccee3ba25c4a3f7b87c7f4c4b2e13df | 186 | py | Python | containers/mobilenet/training/scripts/labels.py | GabrielDeml/Axon | dfaa702e1cd2a7ade4b9aa19f8378deeeaf32b89 | [
"BSD-3-Clause"
] | 18 | 2019-11-18T03:16:46.000Z | 2022-03-12T19:33:35.000Z | containers/mobilenet/training/scripts/labels.py | GabrielDeml/Axon | dfaa702e1cd2a7ade4b9aa19f8378deeeaf32b89 | [
"BSD-3-Clause"
] | 250 | 2019-08-29T04:27:11.000Z | 2022-02-27T07:46:57.000Z | containers/mobilenet/training/scripts/labels.py | GabrielDeml/Axon | dfaa702e1cd2a7ade4b9aa19f8378deeeaf32b89 | [
"BSD-3-Clause"
] | 14 | 2020-01-02T20:36:03.000Z | 2021-02-17T00:47:36.000Z | import os
def get_total(directory):
    """Count occurrences of the substring "id" in <directory>/map.pbtxt.

    For a label-map file this corresponds to the number of `id:` entries,
    assuming "id" does not occur inside other tokens.
    """
    label_map_path = os.path.join(directory, "map.pbtxt")
    with open(label_map_path, 'r') as handle:
        return handle.read().count("id")
| 23.25 | 62 | 0.543011 |
acec3a313921b9c6f1b291ff8be8cfc56d71ee71 | 6,030 | py | Python | src/ros_detection_legs/deep_learning/libpreprocessing/segmentation.py | PouceHeure/detection_legs | 54538132bc5be58e6429fc9e300205d4fe957e6c | [
"Apache-2.0"
] | 7 | 2021-01-10T11:39:42.000Z | 2022-02-22T01:58:53.000Z | src/ros_detection_legs/deep_learning/libpreprocessing/segmentation.py | PouceHeure/detection_legs | 54538132bc5be58e6429fc9e300205d4fe957e6c | [
"Apache-2.0"
] | 2 | 2021-01-20T01:21:41.000Z | 2021-09-04T22:08:51.000Z | src/ros_detection_legs/deep_learning/libpreprocessing/segmentation.py | PouceHeure/detection_legs | 54538132bc5be58e6429fc9e300205d4fe957e6c | [
"Apache-2.0"
] | 2 | 2021-01-10T11:43:36.000Z | 2021-09-04T22:00:32.000Z | import os
import math
import csv
import matplotlib.pyplot as plt
# Directory layout anchors, resolved relative to this file's location.
PATH_FILE_CURRENT = os.path.dirname(os.path.realpath(__file__))
PATH_FOLDER_DATA = os.path.join(PATH_FILE_CURRENT,"../../../../data/")
# Destination for preprocessed artefacts.
PATH_FOLDER_DATA_PROCESSED = os.path.join(PATH_FOLDER_DATA,"processed/")
# Raw 2D-lidar legs dataset, plus its 50 cm capture subset.
PATH_FOLDER_DATASET_LIDAR = os.path.join(PATH_FOLDER_DATA,"dataset_lidar2D_legs/")
PATH_FOLDER_DATASET_LIDAR_50cm = os.path.join(PATH_FOLDER_DATASET_LIDAR,"50cm/")
class PointPolar:
    """A 2D point in polar coordinates (angle ``theta``, radius ``r``).

    ``selected`` is the optional ground-truth flag carried by training
    data.  Every instance gets a unique, monotonically increasing ``id``.
    """

    COUNTER_ID = 0  # class-wide counter used to hand out unique ids

    def __init__(self, theta, r, selected=None):
        self.theta = theta
        self.r = r
        self.selected = selected
        # Assign the next free id and advance the shared counter.
        self.id = PointPolar.COUNTER_ID
        PointPolar.COUNTER_ID += 1

    @staticmethod
    def COMPUTE_DISTANCE(p1, p2):
        """Euclidean distance between two polar points (law of cosines)."""
        squared = p1.r**2 + p2.r**2 - 2*p1.r*p2.r*math.cos(p1.theta-p2.theta)
        return math.sqrt(squared)

    @staticmethod
    def COMPUTE_RADIUS(p1, p2):
        """Absolute difference between the two radii."""
        return abs(p1.r - p2.r)

    def __repr__(self):
        return f"[id: {self.id}] (theta: {self.theta} r: {self.r})"# - selected: {self.selected}"
class Cluster:
    """A group of neighbouring polar points with an optional class label."""

    COUNTER_ID = 0  # class-wide counter used to hand out unique ids

    def __init__(self, label=None):
        self._points = []
        self._label = label
        self.id = Cluster.COUNTER_ID
        Cluster.COUNTER_ID += 1

    def get_points(self):
        return self._points

    def get_label(self):
        return self._label

    def add(self, point):
        # Points with a zero radius carry no echo; keep only r != 0.
        if point.r == 0:
            return
        self._points.append(point)

    def compute_center(self):
        """Return the centroid as a new PointPolar (mean theta, mean r)."""
        count = len(self._points) * 1.
        theta_total = sum(p.theta for p in self._points)
        radius_total = sum(p.r for p in self._points)
        return PointPolar(theta_total / count, radius_total / count)

    def _define_type(self, gamma):
        """Return 1 when at least ``gamma`` of the points are selected."""
        count = len(self._points) * 1.
        selected_total = sum(p.selected for p in self._points)
        return 1 if selected_total / count >= gamma else 0

    def update_information(self, gamma, type="train"):
        # Labels are only derived from ground truth in training mode.
        if type == "train":
            self._label = self._define_type(gamma)

    def to_array(self):
        """Return the points as a plain list of [theta, r] pairs."""
        return [[p.theta, p.r] for p in self._points]
class LidarData:
    """A 2D lidar scan: raw polar points plus the clusters produced by
    :meth:`processing`.

    ``type`` is "train" when input rows carry a third "selected" column
    used as the ground-truth label.
    """
    def __init__(self,type="train"):
        self._type = type
        self._points = []
        self._clusters = []
    def get_points(self):
        """Return the loaded list of PointPolar objects."""
        return self._points
    def get_clusters(self):
        """Return the clusters built by the last call to processing()."""
        return self._clusters
    def load_data_from_csv(self,file_path):
        """Load points from a CSV file of ``theta,r[,selected]`` rows."""
        points = []
        with open(file_path, newline='') as csvfile:
            spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
            for row in spamreader:
                theta = float(row[0])
                r = float(row[1])
                selected = None
                # The third (ground-truth) column only exists in training data.
                if(self._type == "train"):
                    selected = int(row[2])
                points.append(PointPolar(theta,r,selected))
        self._points = points
    def load_data_from_array(self,array):
        """Load points from an in-memory iterable of ``[theta, r, (selected)]`` rows."""
        points = []
        for row in array:
            theta = float(row[0])
            r = float(row[1])
            selected = None
            if(self._type == "train"):
                selected = int(row[2])
            points.append(PointPolar(theta,r,selected))
        self._points = points
    def processing(self,limit_distance=1,limit_cluster_valid=0.8,limit_jump=5,limit_radius=0.5):
        """Group the loaded points into clusters (greedy single pass).

        Starting from each not-yet-consumed point, later points are
        absorbed while they stay within ``limit_distance`` (euclidean)
        and ``limit_radius`` (radial) of the cluster's most recently
        absorbed point; the scan for a cluster stops after ``limit_jump``
        consecutive rejections.  Each finished cluster is labelled via
        update_information with ``limit_cluster_valid`` as the majority
        threshold (training mode only).

        NOTE: ``points = self._points`` aliases the internal list, so
        this method empties self._points as a side effect.
        """
        self._clusters = []
        points = self._points
        while(len(points) != 0):
            p = points[0]
            points.remove(p)
            # Zero-radius points are skipped (see Cluster.add as well).
            if(p.r != 0):
                current_cluster = Cluster()
                self._clusters.append(current_cluster)
                current_cluster.add(p)
                points_jump = 0
                i = 0
                while(i < len(points) and points_jump < limit_jump):
                    p_compare = points[i]
                    distance = PointPolar.COMPUTE_DISTANCE(p,p_compare)
                    radius_delta = PointPolar.COMPUTE_RADIUS(p,p_compare)
                    if(distance < limit_distance and radius_delta < limit_radius):
                        # Absorb the point and make it the new reference.
                        points_jump = 0
                        p = p_compare
                        current_cluster.add(p_compare)
                        points.remove(p_compare)
                    else:
                        i += 1
                        points_jump += 1
                current_cluster.update_information(limit_cluster_valid,self._type)
    def generate_dataset(self,type="train"):
        """Serialise the clusters into (X, y) for training.

        X is a list of clusters, each a list of "theta%r" strings;
        y holds the cluster labels (emitted in training mode only).

        NOTE(review): the ``type`` parameter is unused -- the instance's
        self._type decides whether labels are emitted; confirm intent.
        """
        X = []
        y = []
        for cluster in self._clusters:
            points = []
            for p in cluster._points:
                points.append("%".join(list(map(str,[p.theta,p.r]))))
            X.append(points)
            if(self._type == "train"):
                y.append(cluster.get_label())
        return X,y
    def plot_clusters(self):
        """Scatter-plot every cluster on a polar axis.

        Clusters labelled 1 are drawn opaque, the rest faded.
        """
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='polar')
        for cluster in self._clusters:
            thetas = []
            rs = []
            for p in cluster.get_points():
                thetas.append(p.theta)
                rs.append(p.r)
            alpha=0.1
            if(cluster.get_label() == 1):
                alpha = 1
            ax.scatter(thetas,rs,alpha=alpha)
        plt.show()
    def incrase_positive_data(self, angles):
        """Augment the data: clone every positive (label == 1) cluster once
        per angle in ``angles``, with theta shifted by the angle
        (sign-preserving, wrapped modulo pi).
        """
        import copy
        # Shallow-copy the list so appends below don't extend the iteration.
        current_cluster = copy.copy(self._clusters)
        for cluster in current_cluster:
            if(cluster.get_label() == 1):
                for angle in angles:
                    new_cluster = Cluster(label=1)
                    for point in cluster.get_points():
                        theta = math.copysign(1, point.theta) * ((abs(point.theta) + angle) % math.pi)
                        r = point.r
                        new_cluster.add(PointPolar(theta,r))
                    self._clusters.append(new_cluster)
    def __repr__(self):
        return str(self._points)
| 30.765306 | 102 | 0.536816 |
acec3d3f3e95e5c3be62fd263568861854f0afd9 | 1,305 | py | Python | multistack/api/v1.py | siel-iiith/MultiStack | c2558cd6d121172d24b18ddcbb31c3269b63493e | [
"Apache-2.0"
] | 2 | 2015-08-01T16:08:05.000Z | 2016-12-23T23:05:50.000Z | multistack/api/v1.py | siel-iiith/MultiStack | c2558cd6d121172d24b18ddcbb31c3269b63493e | [
"Apache-2.0"
] | null | null | null | multistack/api/v1.py | siel-iiith/MultiStack | c2558cd6d121172d24b18ddcbb31c3269b63493e | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint, Flask, request, session, url_for, redirect, jsonify
from multistack.services import job
from multistack.services import cluster
import simplejson
import json
import multistack.main
import requests
import os
from bson import objectid
app_v1 = Blueprint('v1', __name__, url_prefix='/v1')
@app_v1.route('/')
def version():
    '''
    GET request of the cluster API.
    NOTE(review): this view returns None, which Flask rejects at request
    time; it presumably should return version information -- confirm.
    '''
@app_v1.route('/jobs', methods=['GET', 'POST'])
def jobs_api():
    '''
    Jobs API: GET lists all jobs, POST creates a job from the JSON body.
    '''
    if request.method == 'GET':
        # job.job_list() returns a dict, expanded into a JSON response.
        return jsonify(**job.job_list())
    elif request.method == 'POST':
        data = request.json
        return job.create(data)
@app_v1.route('/jobs/<job_id>', methods = ['GET','DELETE'])
def job_api(job_id):
    # job.info(job_id) returns a pair: element 0 is truthy on success,
    # element 1 is the payload (or an error response on failure).
    # NOTE(review): it is called up to three times per GET request --
    # consider caching the result in a local variable.
    if request.method == "GET":
        if job.info(job_id)[0]:
            return jsonify(job.info(job_id)[1])
        else:
            return job.info(job_id)[1]
    elif request.method == "DELETE":
        return job.delete(job_id)
@app_v1.route('/jobs/<job_id>/add', methods = ['POST'])
def add(job_id):
    # Forward the JSON request body to job.add for the given job.
    if request.method == "POST":
        return job.add(request.json, job_id)
@app_v1.route('/jobs/<job_id>/rm', methods = ['POST'])
def remove(job_id):
    # Forward the JSON request body to job.remove for the given job.
    if request.method == "POST":
        return job.remove(request.json, job_id)
| 23.727273 | 80 | 0.632184 |
acec3d4bfedf70c9f8b1002a8b7c27e7514e9a0d | 4,225 | py | Python | preprocess/process_conll2012.py | ohmygod481999/deep_srl_pytorch | 97909bf1cdcae9b7a3089ce3d0c33105f038bca0 | [
"Apache-2.0"
] | null | null | null | preprocess/process_conll2012.py | ohmygod481999/deep_srl_pytorch | 97909bf1cdcae9b7a3089ce3d0c33105f038bca0 | [
"Apache-2.0"
] | null | null | null | preprocess/process_conll2012.py | ohmygod481999/deep_srl_pytorch | 97909bf1cdcae9b7a3089ce3d0c33105f038bca0 | [
"Apache-2.0"
] | null | null | null | import codecs
import os
import sys
# Command-line arguments: input tree of *gold_conll files, then the four
# output files written below.
input_data_path=sys.argv[1]
output_file_path=sys.argv[2]
output_props_file=sys.argv[3]
output_propid_file=sys.argv[4]
output_domains_file=sys.argv[5]
# Tag bookkeeping (not used in the visible code).
tag_dict={}
# Output streams: SRL samples, raw prop columns, predicate-id samples,
# and one domain name per emitted sentence.
fout = codecs.open(output_file_path, 'w', 'ascii')
fout_props = codecs.open(output_props_file, 'w', 'ascii')
fout_propid = codecs.open(output_propid_file, 'w', 'ascii')
fd_out = open(output_domains_file, 'w')
#flist_out = open('filelist.out', 'w')
# Counters for sanity reporting.
total_props = 0
total_props2 = 0
total_sents = 0
total_sents2 = 0
# Per-sentence accumulation state shared with print_new_sentence().
prev_words = ''
domain = ''
dpath = []
doc_counts = 0
v_counts = 0
ner_counts = 0
words = []
props = []
tags = []
spans = []
all_props = []
label_dict = {}
def print_new_sentence():
    """Flush the accumulated sentence state to the output files.

    Also outputs sentences without any predicates.  One line per
    predicate is written to `fout` ("<prop index> <words> ||| <tags>"),
    one predicate-identification line to `fout_propid`, and the current
    domain to `fd_out`.  All sentence-level accumulators are reset.
    """
    global total_props
    global total_props2
    global total_sents
    global words
    global props
    global tags
    # Bug fix: this previously declared `global span` (singular), so the
    # `spans = []` reset at the bottom created a dead local instead of
    # clearing the module-level `spans` list like its siblings.
    global spans
    global all_props
    global fout
    global fout_props
    global fout_propid
    global fd_out
    global domain
    total_props += len(props)
    total_sents += 1
    assert len(props) == len(tags)
    propid_labels = ['O' for _ in words]
    for t in range(len(props)):
        assert len(tags[t]) == len(words)
        assert tags[t][props[t]] in {"B-V", "B-I"}
        fout.write(str(props[t]) + " " + " ".join(words).encode('ascii') + " ||| " + " ".join(tags[t]) + "\n")
        propid_labels[props[t]] = 'V'
    fd_out.write(domain + '\n')
    fout_propid.write(" ".join(words).encode('ascii') + " ||| " + " ".join(propid_labels) + "\n")
    total_props2 += len(all_props)
    # Reset per-sentence accumulators for the next sentence.
    words = []
    props = []
    tags = []
    spans = []
    all_props = []
for root, dirs, files in os.walk(input_data_path):
for f in files:
print(f)
if not 'gold_conll' in f:
continue
#print root, dirs, f
dpath = root.split('/')
domain = '_'.join(dpath[dpath.index('annotations')+1:-1])
fin = codecs.open(root + "/" + f, mode='r', encoding='utf8')
#flist_out.write(f + '\n')
doc_counts += 1
for line in fin:
line = line.strip()
if line == '':
joined_words = " ".join(words)
#if joined_words == prev_words:
# print "Skipping dup sentence in: ", root, f
#else:
prev_words = joined_words
print_new_sentence()
fout_props.write('\n')
total_sents2 += 1
words = []
props = []
tags = []
spans = []
all_props = []
continue
if line[0] == "#":
prev_words = ""
if len(words) > 0:
print_new_sentence()
fout_props.write('\n')
total_sents2 += 1
continue
info = line.split()
try:
word = info[3].encode('ascii')
except UnicodeEncodeError:
print root, dirs, f
print info[3]
word = "*UNKNOWN*";
words.append(word)
idx = len(words) - 1
if idx == 0:
tags = [[] for _ in info[11:-1]]
spans = ["" for _ in info[11:-1]]
is_predicate = (info[7] != '-')
is_verbal_predicate = False
lemma = info[6] if info[7] != '-' else '-'
fout_props.write(lemma + '\t' + '\t'.join(info[11:-1]) + '\n')
for t in range(len(tags)):
arg = info[11 + t]
label = arg.strip("()*")
label_dict[arg] = 1
if "(" in arg:
tags[t].append("B-" + label)
spans[t] = label
elif spans[t] != "":
tags[t].append("I-" + spans[t])
else:
tags[t].append("O")
if ")" in arg:
spans[t] = ""
if "(V" in arg:
is_verbal_predicate = True
v_counts += 1
if '(' in info[10]:
ner_counts += 1
if is_verbal_predicate:
props.append(idx)
if is_predicate:
all_props.append(idx)
fin.close()
''' Output last sentence.'''
if len(words) > 0:
print_new_sentence()
fout_props.write('\n')
total_sents2 += 1
fout.close()
fout_props.close()
fout_propid.close()
fd_out.close()
#flist_out.close()
print 'documents', doc_counts
print 'all sentences', total_sents, total_sents2
print 'props', total_props
print 'verbal props:', v_counts
print 'ner counts:', ner_counts
print 'sentences', total_sents
| 23.214286 | 106 | 0.570888 |
acec3d9e4ed8985af594b67f120cfe93c11aa8ba | 1,070 | py | Python | setup.py | cgpipline/strack-python-api | c2c26a491fbb73fe55b5774f0065b8f3dfbf742c | [
"Apache-2.0"
] | null | null | null | setup.py | cgpipline/strack-python-api | c2c26a491fbb73fe55b5774f0065b8f3dfbf742c | [
"Apache-2.0"
] | 1 | 2022-01-07T14:41:05.000Z | 2022-01-07T14:41:05.000Z | setup.py | cgpipline/strack-python-api | c2c26a491fbb73fe55b5774f0065b8f3dfbf742c | [
"Apache-2.0"
] | 2 | 2022-03-20T19:58:33.000Z | 2022-03-20T19:58:49.000Z | # coding=utf8
import os
import re
from setuptools import setup, find_packages
ROOT_PATH = os.path.dirname(os.path.realpath(__file__))
SOURCE_PATH = os.path.join(ROOT_PATH, 'src')
README_PATH = os.path.join(ROOT_PATH, 'README.md')
requires = [
'chardet >= 4.0',
'requests >= 2, <3',
'six >=1.16,<2'
]
# Read version from src.
with open(
os.path.join(SOURCE_PATH, 'strack_api', '_version.py'), 'r', encoding='UTF-8'
) as _version_file:
VERSION = re.match(
r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL
).group(1)
# Call main setup.
setup(
name='strack-api',
version=VERSION,
description='Python API for Strack.',
long_description=open(README_PATH, 'r', encoding='UTF-8').read(),
keywords='strack, python, api',
url='https://github.com/cgpipline/strack',
author='strack',
author_email='weiwei163@foxmail.com',
license='Apache License (2.0)',
packages=find_packages(SOURCE_PATH),
package_dir={
'': 'src'
},
install_requires=requires,
zip_safe=False
)
| 23.777778 | 85 | 0.64486 |
acec3e104bbc9da961fee9ba520628d561672645 | 4,221 | py | Python | keras_frcnn/RoiPoolingConv.py | ravali27/Keras-FasterRCNN | e0847e18a85c988ae8baaba50ea40a2a7c05c855 | [
"MIT"
] | 363 | 2018-09-30T23:44:08.000Z | 2022-03-24T13:23:39.000Z | keras_frcnn/RoiPoolingConv.py | joseildofilho/Keras-FasterRCNN | f10a5b42a62afcc264e3e457ede42ab8a6e6d5a1 | [
"MIT"
] | 77 | 2018-10-01T21:46:21.000Z | 2022-02-10T01:23:06.000Z | keras_frcnn/RoiPoolingConv.py | joseildofilho/Keras-FasterRCNN | f10a5b42a62afcc264e3e457ede42ab8a6e6d5a1 | [
"MIT"
] | 238 | 2018-10-27T15:03:22.000Z | 2022-03-17T18:53:50.000Z | from keras.engine.topology import Layer
import keras.backend as K
if K.backend() == 'tensorflow':
import tensorflow as tf
class RoiPoolingConv(Layer):
'''
ROI pooling layer for 2D inputs.
See Spatial Pyramid Pooling in Deep Convolutional Networks for Visual Recognition,
K. He, X. Zhang, S. Ren, J. Sun
# Arguments
pool_size: int
Size of pooling region to use. pool_size = 7 will result in a 7x7 region.
num_rois: number of regions of interest to be used
# Input shape
list of two 4D tensors [X_img,X_roi] with shape:
X_img:
`(1, channels, rows, cols)` if dim_ordering='th'
or 4D tensor with shape:
`(1, rows, cols, channels)` if dim_ordering='tf'.
X_roi:
`(1,num_rois,4)` list of rois, with ordering (x,y,w,h)
# Output shape
3D tensor with shape:
`(1, num_rois, channels, pool_size, pool_size)`
'''
def __init__(self, pool_size, num_rois, **kwargs):
self.dim_ordering = K.image_dim_ordering()
assert self.dim_ordering in {'tf', 'th'}, 'dim_ordering must be in {tf, th}'
self.pool_size = pool_size
self.num_rois = num_rois
super(RoiPoolingConv, self).__init__(**kwargs)
def build(self, input_shape):
if self.dim_ordering == 'th':
self.nb_channels = input_shape[0][1]
elif self.dim_ordering == 'tf':
self.nb_channels = input_shape[0][3]
def compute_output_shape(self, input_shape):
if self.dim_ordering == 'th':
return None, self.num_rois, self.nb_channels, self.pool_size, self.pool_size
else:
return None, self.num_rois, self.pool_size, self.pool_size, self.nb_channels
def call(self, x, mask=None):
assert(len(x) == 2)
img = x[0]
rois = x[1]
input_shape = K.shape(img)
outputs = []
for roi_idx in range(self.num_rois):
x = rois[0, roi_idx, 0]
y = rois[0, roi_idx, 1]
w = rois[0, roi_idx, 2]
h = rois[0, roi_idx, 3]
row_length = w / float(self.pool_size)
col_length = h / float(self.pool_size)
num_pool_regions = self.pool_size
#NOTE: the RoiPooling implementation differs between theano and tensorflow due to the lack of a resize op
# in theano. The theano implementation is much less efficient and leads to long compile times
if self.dim_ordering == 'th':
for jy in range(num_pool_regions):
for ix in range(num_pool_regions):
x1 = x + ix * row_length
x2 = x1 + row_length
y1 = y + jy * col_length
y2 = y1 + col_length
x1 = K.cast(x1, 'int32')
x2 = K.cast(x2, 'int32')
y1 = K.cast(y1, 'int32')
y2 = K.cast(y2, 'int32')
x2 = x1 + K.maximum(1,x2-x1)
y2 = y1 + K.maximum(1,y2-y1)
new_shape = [input_shape[0], input_shape[1],
y2 - y1, x2 - x1]
x_crop = img[:, :, y1:y2, x1:x2]
xm = K.reshape(x_crop, new_shape)
pooled_val = K.max(xm, axis=(2, 3))
outputs.append(pooled_val)
elif self.dim_ordering == 'tf':
x = K.cast(x, 'int32')
y = K.cast(y, 'int32')
w = K.cast(w, 'int32')
h = K.cast(h, 'int32')
rs = tf.image.resize_images(img[:, y:y+h, x:x+w, :], (self.pool_size, self.pool_size))
outputs.append(rs)
final_output = K.concatenate(outputs, axis=0)
final_output = K.reshape(final_output, (1, self.num_rois, self.pool_size, self.pool_size, self.nb_channels))
if self.dim_ordering == 'th':
final_output = K.permute_dimensions(final_output, (0, 1, 4, 2, 3))
else:
final_output = K.permute_dimensions(final_output, (0, 1, 2, 3, 4))
return final_output
| 35.175 | 117 | 0.536129 |
acec3f8f17219d586b429002f4b7380576a511f4 | 357 | py | Python | python/figurl/core/Sync.py | magland/figurl | f254fb72cc23d95c5d53f8829d70e4621162c7f7 | [
"Apache-2.0"
] | 2 | 2021-12-03T14:29:01.000Z | 2022-01-28T16:07:49.000Z | python/figurl/core/Sync.py | magland/figurl | f254fb72cc23d95c5d53f8829d70e4621162c7f7 | [
"Apache-2.0"
] | 21 | 2021-08-31T19:56:34.000Z | 2021-12-08T17:14:17.000Z | python/figurl/core/Sync.py | magland/figurl | f254fb72cc23d95c5d53f8829d70e4621162c7f7 | [
"Apache-2.0"
] | null | null | null | import random
class Sync:
def __init__(self) -> None:
self._id = _random_string(10)
@property
def object(self):
return {'_syncId': self._id}
def _random_string(num_chars: int) -> str:
chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
return ''.join(random.choice(chars) for _ in range(num_chars)) | 29.75 | 76 | 0.70028 |
acec411a2e5469fcc3512fd4dc9f9276625b75a9 | 46,279 | py | Python | pandas/io/excel.py | danielballan/pandas | 576818f169c0d494e74f787f7486d090e5e6662f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | pandas/io/excel.py | danielballan/pandas | 576818f169c0d494e74f787f7486d090e5e6662f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | pandas/io/excel.py | danielballan/pandas | 576818f169c0d494e74f787f7486d090e5e6662f | [
"PSF-2.0",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | """
Module parse to/from Excel
"""
#----------------------------------------------------------------------
# ExcelFile class
import os
import datetime
import abc
import numpy as np
from pandas.io.parsers import TextParser
from pandas.io.common import _is_url, _urlopen
from pandas.tseries.period import Period
from pandas import json
from pandas.compat import map, zip, reduce, range, lrange, u, add_metaclass
from pandas.core import config
from pandas.core.common import pprint_thing
import pandas.compat as compat
import pandas.compat.openpyxl_compat as openpyxl_compat
import pandas.core.common as com
from warnings import warn
from distutils.version import LooseVersion
__all__ = ["read_excel", "ExcelWriter", "ExcelFile"]
_writer_extensions = ["xlsx", "xls", "xlsm"]
_writers = {}
def register_writer(klass):
"""Adds engine to the excel writer registry. You must use this method to
integrate with ``to_excel``. Also adds config options for any new
``supported_extensions`` defined on the writer."""
if not compat.callable(klass):
raise ValueError("Can only register callables as engines")
engine_name = klass.engine
_writers[engine_name] = klass
for ext in klass.supported_extensions:
if ext.startswith('.'):
ext = ext[1:]
if ext not in _writer_extensions:
config.register_option("io.excel.%s.writer" % ext,
engine_name, validator=str)
_writer_extensions.append(ext)
def get_writer(engine_name):
if engine_name == 'openpyxl':
try:
import openpyxl
# with version-less openpyxl engine
# make sure we make the intelligent choice for the user
if LooseVersion(openpyxl.__version__) < '2.0.0':
return _writers['openpyxl1']
else:
return _writers['openpyxl2']
except ImportError:
# fall through to normal exception handling below
pass
try:
return _writers[engine_name]
except KeyError:
raise ValueError("No Excel writer '%s'" % engine_name)
def read_excel(io, sheetname=0, **kwds):
"""Read an Excel table into a pandas DataFrame
Parameters
----------
io : string, file-like object, or xlrd workbook.
The string could be a URL. Valid URL schemes include http, ftp, s3,
and file. For file URLs, a host is expected. For instance, a local
file could be file://localhost/path/to/workbook.xlsx
sheetname : string or int, default 0
Name of Excel sheet or the page number of the sheet
header : int, default 0
Row to use for the column labels of the parsed DataFrame
skiprows : list-like
Rows to skip at the beginning (0-indexed)
skip_footer : int, default 0
Rows at the end to skip (0-indexed)
converters : dict, default None
Dict of functions for converting values in certain columns. Keys can
either be integers or column labels, values are functions that take one
input argument, the Excel cell content, and return the transformed
content.
index_col : int, default None
Column to use as the row labels of the DataFrame. Pass None if
there is no such column
parse_cols : int or list, default None
* If None then parse all columns,
* If int then indicates last column to be parsed
* If list of ints then indicates list of column numbers to be parsed
* If string then indicates comma separated list of column names and
column ranges (e.g. "A:E" or "A,C,E:F")
na_values : list-like, default None
List of additional strings to recognize as NA/NaN
keep_default_na : bool, default True
If na_values are specified and keep_default_na is False the default NaN
values are overridden, otherwise they're appended to
verbose : boolean, default False
Indicate number of NA values placed in non-numeric columns
engine: string, default None
If io is not a buffer or path, this must be set to identify io.
Acceptable values are None or xlrd
convert_float : boolean, default True
convert integral floats to int (i.e., 1.0 --> 1). If False, all numeric
data will be read in as floats: Excel stores all numbers as floats
internally
has_index_names : boolean, default False
True if the cols defined in index_col have an index name and are
not in the header. Index name will be placed on a separate line below
the header.
Returns
-------
parsed : DataFrame
DataFrame from the passed in Excel file
"""
if 'kind' in kwds:
kwds.pop('kind')
warn("kind keyword is no longer supported in read_excel and may be "
"removed in a future version", FutureWarning)
engine = kwds.pop('engine', None)
return ExcelFile(io, engine=engine).parse(sheetname=sheetname, **kwds)
class ExcelFile(object):
"""
Class for parsing tabular excel sheets into DataFrame objects.
Uses xlrd. See ExcelFile.parse for more documentation
Parameters
----------
io : string, file-like object or xlrd workbook
If a string, expected to be a path to xls or xlsx file
engine: string, default None
If io is not a buffer or path, this must be set to identify io.
Acceptable values are None or xlrd
"""
def __init__(self, io, **kwds):
import xlrd # throw an ImportError if we need to
ver = tuple(map(int, xlrd.__VERSION__.split(".")[:2]))
if ver < (0, 9): # pragma: no cover
raise ImportError("pandas requires xlrd >= 0.9.0 for excel "
"support, current version " + xlrd.__VERSION__)
self.io = io
engine = kwds.pop('engine', None)
if engine is not None and engine != 'xlrd':
raise ValueError("Unknown engine: %s" % engine)
if isinstance(io, compat.string_types):
if _is_url(io):
data = _urlopen(io).read()
self.book = xlrd.open_workbook(file_contents=data)
else:
self.book = xlrd.open_workbook(io)
elif engine == 'xlrd' and isinstance(io, xlrd.Book):
self.book = io
elif not isinstance(io, xlrd.Book) and hasattr(io, "read"):
# N.B. xlrd.Book has a read attribute too
data = io.read()
self.book = xlrd.open_workbook(file_contents=data)
else:
raise ValueError('Must explicitly set engine if not passing in'
' buffer or path for io.')
def parse(self, sheetname=0, header=0, skiprows=None, skip_footer=0,
index_col=None, parse_cols=None, parse_dates=False,
date_parser=None, na_values=None, thousands=None, chunksize=None,
convert_float=True, has_index_names=False, converters=None, **kwds):
"""Read an Excel table into DataFrame
Parameters
----------
sheetname : string or integer
Name of Excel sheet or the page number of the sheet
header : int, default 0
Row to use for the column labels of the parsed DataFrame
skiprows : list-like
Rows to skip at the beginning (0-indexed)
skip_footer : int, default 0
Rows at the end to skip (0-indexed)
converters : dict, default None
Dict of functions for converting values in certain columns. Keys can
either be integers or column labels
index_col : int, default None
Column to use as the row labels of the DataFrame. Pass None if
there is no such column
parse_cols : int or list, default None
* If None then parse all columns
* If int then indicates last column to be parsed
* If list of ints then indicates list of column numbers to be
parsed
* If string then indicates comma separated list of column names and
column ranges (e.g. "A:E" or "A,C,E:F")
parse_dates : boolean, default False
Parse date Excel values,
date_parser : function default None
Date parsing function
na_values : list-like, default None
List of additional strings to recognize as NA/NaN
thousands : str, default None
Thousands separator
chunksize : int, default None
Size of file chunk to read for lazy evaluation.
convert_float : boolean, default True
convert integral floats to int (i.e., 1.0 --> 1). If False, all
numeric data will be read in as floats: Excel stores all numbers as
floats internally.
has_index_names : boolean, default False
True if the cols defined in index_col have an index name and are
not in the header
Returns
-------
parsed : DataFrame
DataFrame parsed from the Excel file
"""
skipfooter = kwds.pop('skipfooter', None)
if skipfooter is not None:
skip_footer = skipfooter
return self._parse_excel(sheetname=sheetname, header=header,
skiprows=skiprows,
index_col=index_col,
has_index_names=has_index_names,
parse_cols=parse_cols,
parse_dates=parse_dates,
date_parser=date_parser, na_values=na_values,
thousands=thousands, chunksize=chunksize,
skip_footer=skip_footer,
convert_float=convert_float,
converters=converters,
**kwds)
def _should_parse(self, i, parse_cols):
def _range2cols(areas):
"""
Convert comma separated list of column names and column ranges to a
list of 0-based column indexes.
>>> _range2cols('A:E')
[0, 1, 2, 3, 4]
>>> _range2cols('A,C,Z:AB')
[0, 2, 25, 26, 27]
"""
def _excel2num(x):
"Convert Excel column name like 'AB' to 0-based column index"
return reduce(lambda s, a: s * 26 + ord(a) - ord('A') + 1,
x.upper().strip(), 0) - 1
cols = []
for rng in areas.split(','):
if ':' in rng:
rng = rng.split(':')
cols += lrange(_excel2num(rng[0]), _excel2num(rng[1]) + 1)
else:
cols.append(_excel2num(rng))
return cols
if isinstance(parse_cols, int):
return i <= parse_cols
elif isinstance(parse_cols, compat.string_types):
return i in _range2cols(parse_cols)
else:
return i in parse_cols
def _parse_excel(self, sheetname=0, header=0, skiprows=None, skip_footer=0,
index_col=None, has_index_names=None, parse_cols=None,
parse_dates=False, date_parser=None, na_values=None,
thousands=None, chunksize=None, convert_float=True,
**kwds):
import xlrd
from xlrd import (xldate, XL_CELL_DATE,
XL_CELL_ERROR, XL_CELL_BOOLEAN,
XL_CELL_NUMBER)
epoch1904 = self.book.datemode
# xlrd >= 0.9.3 can return datetime objects directly.
if LooseVersion(xlrd.__VERSION__) >= LooseVersion("0.9.3"):
xlrd_0_9_3 = True
else:
xlrd_0_9_3 = False
if isinstance(sheetname, compat.string_types):
sheet = self.book.sheet_by_name(sheetname)
else: # assume an integer if not a string
sheet = self.book.sheet_by_index(sheetname)
data = []
should_parse = {}
for i in range(sheet.nrows):
row = []
for j, (value, typ) in enumerate(zip(sheet.row_values(i),
sheet.row_types(i))):
if parse_cols is not None and j not in should_parse:
should_parse[j] = self._should_parse(j, parse_cols)
if parse_cols is None or should_parse[j]:
if typ == XL_CELL_DATE:
if xlrd_0_9_3:
# Use the newer xlrd datetime handling.
value = xldate.xldate_as_datetime(value, epoch1904)
# Excel doesn't distinguish between dates and time,
# so we treat dates on the epoch as times only.
# Also, Excel supports 1900 and 1904 epochs.
year = (value.timetuple())[0:3]
if ((not epoch1904 and year == (1899, 12, 31))
or (epoch1904 and year == (1904, 1, 1))):
value = datetime.time(value.hour,
value.minute,
value.second,
value.microsecond)
else:
# Use the xlrd <= 0.9.2 date handling.
dt = xldate.xldate_as_tuple(value, epoch1904)
if dt[0] < datetime.MINYEAR:
value = datetime.time(*dt[3:])
else:
value = datetime.datetime(*dt)
elif typ == XL_CELL_ERROR:
value = np.nan
elif typ == XL_CELL_BOOLEAN:
value = bool(value)
elif convert_float and typ == XL_CELL_NUMBER:
# GH5394 - Excel 'numbers' are always floats
# it's a minimal perf hit and less suprising
val = int(value)
if val == value:
value = val
row.append(value)
data.append(row)
if header is not None:
data[header] = _trim_excel_header(data[header])
parser = TextParser(data, header=header, index_col=index_col,
has_index_names=has_index_names,
na_values=na_values,
thousands=thousands,
parse_dates=parse_dates,
date_parser=date_parser,
skiprows=skiprows,
skip_footer=skip_footer,
chunksize=chunksize,
**kwds)
return parser.read()
@property
def sheet_names(self):
return self.book.sheet_names()
def close(self):
"""close io if necessary"""
if hasattr(self.io, 'close'):
self.io.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def _trim_excel_header(row):
# trim header row so auto-index inference works
# xlrd uses '' , openpyxl None
while len(row) > 0 and (row[0] == '' or row[0] is None):
row = row[1:]
return row
def _conv_value(val):
# Convert numpy types to Python types for the Excel writers.
if com.is_integer(val):
val = int(val)
elif com.is_float(val):
val = float(val)
elif com.is_bool(val):
val = bool(val)
elif isinstance(val, Period):
val = "%s" % val
return val
@add_metaclass(abc.ABCMeta)
class ExcelWriter(object):
"""
Class for writing DataFrame objects into excel sheets, default is to use
xlwt for xls, openpyxl for xlsx. See DataFrame.to_excel for typical usage.
Parameters
----------
path : string
Path to xls or xlsx file.
engine : string (optional)
Engine to use for writing. If None, defaults to
``io.excel.<extension>.writer``. NOTE: can only be passed as a keyword
argument.
date_format : string, default None
Format string for dates written into Excel files (e.g. 'YYYY-MM-DD')
datetime_format : string, default None
Format string for datetime objects written into Excel files
(e.g. 'YYYY-MM-DD HH:MM:SS')
"""
# Defining an ExcelWriter implementation (see abstract methods for more...)
# - Mandatory
# - ``write_cells(self, cells, sheet_name=None, startrow=0, startcol=0)``
# --> called to write additional DataFrames to disk
# - ``supported_extensions`` (tuple of supported extensions), used to
# check that engine supports the given extension.
# - ``engine`` - string that gives the engine name. Necessary to
# instantiate class directly and bypass ``ExcelWriterMeta`` engine
# lookup.
# - ``save(self)`` --> called to save file to disk
# - Mostly mandatory (i.e. should at least exist)
# - book, cur_sheet, path
# - Optional:
# - ``__init__(self, path, engine=None, **kwargs)`` --> always called
# with path as first argument.
# You also need to register the class with ``register_writer()``.
# Technically, ExcelWriter implementations don't need to subclass
# ExcelWriter.
def __new__(cls, path, engine=None, **kwargs):
# only switch class if generic(ExcelWriter)
if cls == ExcelWriter:
if engine is None:
ext = os.path.splitext(path)[-1][1:]
try:
engine = config.get_option('io.excel.%s.writer' % ext)
except KeyError:
error = ValueError("No engine for filetype: '%s'" % ext)
raise error
cls = get_writer(engine)
return object.__new__(cls)
# declare external properties you can count on
book = None
curr_sheet = None
path = None
@abc.abstractproperty
def supported_extensions(self):
"extensions that writer engine supports"
pass
@abc.abstractproperty
def engine(self):
"name of engine"
pass
@abc.abstractmethod
def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
"""
Write given formated cells into Excel an excel sheet
Parameters
----------
cells : generator
cell of formated data to save to Excel sheet
sheet_name : string, default None
Name of Excel sheet, if None, then use self.cur_sheet
startrow: upper left cell row to dump data frame
startcol: upper left cell column to dump data frame
"""
pass
@abc.abstractmethod
def save(self):
"""
Save workbook to disk.
"""
pass
def __init__(self, path, engine=None,
date_format=None, datetime_format=None, **engine_kwargs):
# validate that this engine can handle the extension
ext = os.path.splitext(path)[-1]
self.check_extension(ext)
self.path = path
self.sheets = {}
self.cur_sheet = None
if date_format is None:
self.date_format = 'YYYY-MM-DD'
else:
self.date_format = date_format
if datetime_format is None:
self.datetime_format = 'YYYY-MM-DD HH:MM:SS'
else:
self.datetime_format = datetime_format
def _get_sheet_name(self, sheet_name):
if sheet_name is None:
sheet_name = self.cur_sheet
if sheet_name is None: # pragma: no cover
raise ValueError('Must pass explicit sheet_name or set '
'cur_sheet property')
return sheet_name
@classmethod
def check_extension(cls, ext):
"""checks that path's extension against the Writer's supported
extensions. If it isn't supported, raises UnsupportedFiletypeError."""
if ext.startswith('.'):
ext = ext[1:]
if not any(ext in extension for extension in cls.supported_extensions):
msg = (u("Invalid extension for engine '%s': '%s'") %
(pprint_thing(cls.engine), pprint_thing(ext)))
raise ValueError(msg)
else:
return True
# Allow use as a contextmanager
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
"""synonym for save, to make it more file-like"""
return self.save()
class _Openpyxl1Writer(ExcelWriter):
engine = 'openpyxl1'
supported_extensions = ('.xlsx', '.xlsm')
openpyxl_majorver = 1
def __init__(self, path, engine=None, **engine_kwargs):
if not openpyxl_compat.is_compat(major_ver=self.openpyxl_majorver):
raise ValueError('Installed openpyxl is not supported at this '
'time. Use {0}.x.y.'
.format(self.openpyxl_majorver))
# Use the openpyxl module as the Excel writer.
from openpyxl.workbook import Workbook
super(_Openpyxl1Writer, self).__init__(path, **engine_kwargs)
# Create workbook object with default optimized_write=True.
self.book = Workbook()
# Openpyxl 1.6.1 adds a dummy sheet. We remove it.
if self.book.worksheets:
self.book.remove_sheet(self.book.worksheets[0])
def save(self):
"""
Save workbook to disk.
"""
return self.book.save(self.path)
def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
# Write the frame cells using openpyxl.
from openpyxl.cell import get_column_letter
sheet_name = self._get_sheet_name(sheet_name)
if sheet_name in self.sheets:
wks = self.sheets[sheet_name]
else:
wks = self.book.create_sheet()
wks.title = sheet_name
self.sheets[sheet_name] = wks
for cell in cells:
colletter = get_column_letter(startcol + cell.col + 1)
xcell = wks.cell("%s%s" % (colletter, startrow + cell.row + 1))
xcell.value = _conv_value(cell.val)
style = None
if cell.style:
style = self._convert_to_style(cell.style)
for field in style.__fields__:
xcell.style.__setattr__(field,
style.__getattribute__(field))
if isinstance(cell.val, datetime.datetime):
xcell.style.number_format.format_code = self.datetime_format
elif isinstance(cell.val, datetime.date):
xcell.style.number_format.format_code = self.date_format
if cell.mergestart is not None and cell.mergeend is not None:
cletterstart = get_column_letter(startcol + cell.col + 1)
cletterend = get_column_letter(startcol + cell.mergeend + 1)
wks.merge_cells('%s%s:%s%s' % (cletterstart,
startrow + cell.row + 1,
cletterend,
startrow + cell.mergestart + 1))
# Excel requires that the format of the first cell in a merged
# range is repeated in the rest of the merged range.
if style:
first_row = startrow + cell.row + 1
last_row = startrow + cell.mergestart + 1
first_col = startcol + cell.col + 1
last_col = startcol + cell.mergeend + 1
for row in range(first_row, last_row + 1):
for col in range(first_col, last_col + 1):
if row == first_row and col == first_col:
# Ignore first cell. It is already handled.
continue
colletter = get_column_letter(col)
xcell = wks.cell("%s%s" % (colletter, row))
for field in style.__fields__:
xcell.style.__setattr__(
field, style.__getattribute__(field))
@classmethod
def _convert_to_style(cls, style_dict):
"""
converts a style_dict to an openpyxl style object
Parameters
----------
style_dict: style dictionary to convert
"""
from openpyxl.style import Style
xls_style = Style()
for key, value in style_dict.items():
for nk, nv in value.items():
if key == "borders":
(xls_style.borders.__getattribute__(nk)
.__setattr__('border_style', nv))
else:
xls_style.__getattribute__(key).__setattr__(nk, nv)
return xls_style
register_writer(_Openpyxl1Writer)
class _OpenpyxlWriter(_Openpyxl1Writer):
engine = 'openpyxl'
register_writer(_OpenpyxlWriter)
class _Openpyxl2Writer(_Openpyxl1Writer):
"""
Note: Support for OpenPyxl v2 is currently EXPERIMENTAL (GH7565).
"""
engine = 'openpyxl2'
openpyxl_majorver = 2
def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
# Write the frame cells using openpyxl.
from openpyxl.cell import get_column_letter
sheet_name = self._get_sheet_name(sheet_name)
if sheet_name in self.sheets:
wks = self.sheets[sheet_name]
else:
wks = self.book.create_sheet()
wks.title = sheet_name
self.sheets[sheet_name] = wks
for cell in cells:
colletter = get_column_letter(startcol + cell.col + 1)
xcell = wks.cell("%s%s" % (colletter, startrow + cell.row + 1))
xcell.value = _conv_value(cell.val)
style_kwargs = {}
# Apply format codes before cell.style to allow override
if isinstance(cell.val, datetime.datetime):
style_kwargs.update(self._convert_to_style_kwargs({
'number_format':{'format_code': self.datetime_format}}))
elif isinstance(cell.val, datetime.date):
style_kwargs.update(self._convert_to_style_kwargs({
'number_format':{'format_code': self.date_format}}))
if cell.style:
style_kwargs.update(self._convert_to_style_kwargs(cell.style))
if style_kwargs:
xcell.style = xcell.style.copy(**style_kwargs)
if cell.mergestart is not None and cell.mergeend is not None:
cletterstart = get_column_letter(startcol + cell.col + 1)
cletterend = get_column_letter(startcol + cell.mergeend + 1)
wks.merge_cells('%s%s:%s%s' % (cletterstart,
startrow + cell.row + 1,
cletterend,
startrow + cell.mergestart + 1))
# Excel requires that the format of the first cell in a merged
# range is repeated in the rest of the merged range.
if style_kwargs:
first_row = startrow + cell.row + 1
last_row = startrow + cell.mergestart + 1
first_col = startcol + cell.col + 1
last_col = startcol + cell.mergeend + 1
for row in range(first_row, last_row + 1):
for col in range(first_col, last_col + 1):
if row == first_row and col == first_col:
# Ignore first cell. It is already handled.
continue
colletter = get_column_letter(col)
xcell = wks.cell("%s%s" % (colletter, row))
xcell.style = xcell.style.copy(**style_kwargs)
@classmethod
def _convert_to_style_kwargs(cls, style_dict):
"""
Convert a style_dict to a set of kwargs suitable for initializing
or updating-on-copy an openpyxl v2 style object
Parameters
----------
style_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'font'
'fill'
'border' ('borders')
'alignment'
'number_format'
'protection'
Returns
-------
style_kwargs : dict
A dict with the same, normalized keys as ``style_dict`` but each
value has been replaced with a native openpyxl style object of the
appropriate class.
"""
_style_key_map = {
'borders': 'border',
}
style_kwargs = {}
for k, v in style_dict.items():
if k in _style_key_map:
k = _style_key_map[k]
_conv_to_x = getattr(cls, '_convert_to_{0}'.format(k),
lambda x: None)
new_v = _conv_to_x(v)
if new_v:
style_kwargs[k] = new_v
return style_kwargs
@classmethod
def _convert_to_color(cls, color_spec):
"""
Convert ``color_spec`` to an openpyxl v2 Color object
Parameters
----------
color_spec : str, dict
A 32-bit ARGB hex string, or a dict with zero or more of the
following keys.
'rgb'
'indexed'
'auto'
'theme'
'tint'
'index'
'type'
Returns
-------
color : openpyxl.styles.Color
"""
from openpyxl.styles import Color
if isinstance(color_spec, str):
return Color(color_spec)
else:
return Color(**color_spec)
@classmethod
def _convert_to_font(cls, font_dict):
"""
Convert ``font_dict`` to an openpyxl v2 Font object
Parameters
----------
font_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'name'
'size' ('sz')
'bold' ('b')
'italic' ('i')
'underline' ('u')
'strikethrough' ('strike')
'color'
'vertAlign' ('vertalign')
'charset'
'scheme'
'family'
'outline'
'shadow'
'condense'
Returns
-------
font : openpyxl.styles.Font
"""
from openpyxl.styles import Font
_font_key_map = {
'sz': 'size',
'b': 'bold',
'i': 'italic',
'u': 'underline',
'strike': 'strikethrough',
'vertalign': 'vertAlign',
}
font_kwargs = {}
for k, v in font_dict.items():
if k in _font_key_map:
k = _font_key_map[k]
if k == 'color':
v = cls._convert_to_color(v)
font_kwargs[k] = v
return Font(**font_kwargs)
@classmethod
def _convert_to_stop(cls, stop_seq):
"""
Convert ``stop_seq`` to a list of openpyxl v2 Color objects,
suitable for initializing the ``GradientFill`` ``stop`` parameter.
Parameters
----------
stop_seq : iterable
An iterable that yields objects suitable for consumption by
``_convert_to_color``.
Returns
-------
stop : list of openpyxl.styles.Color
"""
return map(cls._convert_to_color, stop_seq)
    @classmethod
    def _convert_to_fill(cls, fill_dict):
        """
        Convert ``fill_dict`` to an openpyxl v2 Fill object

        Parameters
        ----------
        fill_dict : dict
            A dict with one or more of the following keys (or their synonyms),
            'fill_type' ('patternType', 'patterntype')
            'start_color' ('fgColor', 'fgcolor')
            'end_color' ('bgColor', 'bgcolor')
            or one or more of the following keys (or their synonyms).
            'type' ('fill_type')
            'degree'
            'left'
            'right'
            'top'
            'bottom'
            'stop'

        Returns
        -------
        fill : openpyxl.styles.Fill
        """
        from openpyxl.styles import PatternFill, GradientFill
        # Key synonyms for a pattern fill vs. a gradient fill.  The same
        # input key can be meaningful to both, so each key is translated into
        # both candidate kwarg dicts and the decision of which fill type to
        # build is deferred to the constructor calls at the bottom.
        _pattern_fill_key_map = {
            'patternType': 'fill_type',
            'patterntype': 'fill_type',
            'fgColor': 'start_color',
            'fgcolor': 'start_color',
            'bgColor': 'end_color',
            'bgcolor': 'end_color',
        }
        _gradient_fill_key_map = {
            'fill_type': 'type',
        }
        pfill_kwargs = {}
        gfill_kwargs = {}
        for k, v in fill_dict.items():
            pk = gk = None
            if k in _pattern_fill_key_map:
                pk = _pattern_fill_key_map[k]
            if k in _gradient_fill_key_map:
                gk = _gradient_fill_key_map[k]
            if pk in ['start_color', 'end_color']:
                # Pattern-fill colors go through the Color converter.
                v = cls._convert_to_color(v)
            # NOTE(review): ``gk`` can only be 'type' (from the map above) or
            # None, so this 'stop' branch appears unreachable and 'stop'
            # values pass through unconverted — verify original intent.
            if gk == 'stop':
                v = cls._convert_to_stop(v)
            if pk:
                pfill_kwargs[pk] = v
            elif gk:
                gfill_kwargs[gk] = v
            else:
                # Unmapped keys might belong to either fill type; pass them
                # through to both candidate kwarg sets.
                pfill_kwargs[k] = v
                gfill_kwargs[k] = v
        # Try a pattern fill first; if its constructor rejects the kwargs
        # (TypeError), assume the spec describes a gradient fill instead.
        try:
            return PatternFill(**pfill_kwargs)
        except TypeError:
            return GradientFill(**gfill_kwargs)
@classmethod
def _convert_to_side(cls, side_spec):
"""
Convert ``side_spec`` to an openpyxl v2 Side object
Parameters
----------
side_spec : str, dict
A string specifying the border style, or a dict with zero or more
of the following keys (or their synonyms).
'style' ('border_style')
'color'
Returns
-------
side : openpyxl.styles.Side
"""
from openpyxl.styles import Side
_side_key_map = {
'border_style': 'style',
}
if isinstance(side_spec, str):
return Side(style=side_spec)
side_kwargs = {}
for k, v in side_spec.items():
if k in _side_key_map:
k = _side_key_map[k]
if k == 'color':
v = cls._convert_to_color(v)
side_kwargs[k] = v
return Side(**side_kwargs)
@classmethod
def _convert_to_border(cls, border_dict):
"""
Convert ``border_dict`` to an openpyxl v2 Border object
Parameters
----------
border_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'left'
'right'
'top'
'bottom'
'diagonal'
'diagonal_direction'
'vertical'
'horizontal'
'diagonalUp' ('diagonalup')
'diagonalDown' ('diagonaldown')
'outline'
Returns
-------
border : openpyxl.styles.Border
"""
from openpyxl.styles import Border
_border_key_map = {
'diagonalup': 'diagonalUp',
'diagonaldown': 'diagonalDown',
}
border_kwargs = {}
for k, v in border_dict.items():
if k in _border_key_map:
k = _border_key_map[k]
if k == 'color':
v = cls._convert_to_color(v)
if k in ['left', 'right', 'top', 'bottom', 'diagonal']:
v = cls._convert_to_side(v)
border_kwargs[k] = v
return Border(**border_kwargs)
@classmethod
def _convert_to_alignment(cls, alignment_dict):
"""
Convert ``alignment_dict`` to an openpyxl v2 Alignment object
Parameters
----------
alignment_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'horizontal'
'vertical'
'text_rotation'
'wrap_text'
'shrink_to_fit'
'indent'
Returns
-------
alignment : openpyxl.styles.Alignment
"""
from openpyxl.styles import Alignment
return Alignment(**alignment_dict)
@classmethod
def _convert_to_number_format(cls, number_format_dict):
"""
Convert ``number_format_dict`` to an openpyxl v2.1.0 number format
initializer.
Parameters
----------
number_format_dict : dict
A dict with zero or more of the following keys.
'format_code' : str
Returns
-------
number_format : str
"""
try:
# >= 2.0.0 < 2.1.0
from openpyxl.styles import NumberFormat
return NumberFormat(**number_format_dict)
except:
# >= 2.1.0
return number_format_dict['format_code']
@classmethod
def _convert_to_protection(cls, protection_dict):
"""
Convert ``protection_dict`` to an openpyxl v2 Protection object.
Parameters
----------
protection_dict : dict
A dict with zero or more of the following keys.
'locked'
'hidden'
Returns
-------
"""
from openpyxl.styles import Protection
return Protection(**protection_dict)
# Make the openpyxl v2 writer selectable through ExcelWriter's engine lookup.
register_writer(_Openpyxl2Writer)
class _XlwtWriter(ExcelWriter):
    """ExcelWriter implementation backed by the ``xlwt`` package (.xls files)."""
    engine = 'xlwt'
    supported_extensions = ('.xls',)
    def __init__(self, path, engine=None, encoding=None, **engine_kwargs):
        """Open an xlwt Workbook at ``path``; ``encoding`` defaults to 'ascii'."""
        # Use the xlwt module as the Excel writer.
        # NOTE(review): ``engine`` is accepted for interface parity with the
        # other writers but is unused here — confirm against the base class.
        import xlwt
        super(_XlwtWriter, self).__init__(path, **engine_kwargs)
        if encoding is None:
            encoding = 'ascii'
        self.book = xlwt.Workbook(encoding=encoding)
        # Pre-built cell styles for datetime/date values, using the formats
        # configured on the base ExcelWriter.
        self.fm_datetime = xlwt.easyxf(num_format_str=self.datetime_format)
        self.fm_date = xlwt.easyxf(num_format_str=self.date_format)
    def save(self):
        """
        Save workbook to disk.
        """
        return self.book.save(self.path)
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
        """Write ``cells`` to ``sheet_name`` offset by (startrow, startcol)."""
        # Write the frame cells using xlwt.
        sheet_name = self._get_sheet_name(sheet_name)
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_sheet(sheet_name)
            self.sheets[sheet_name] = wks
        # Cache converted styles keyed by style-json + number format so each
        # distinct style is only converted once per call.
        style_dict = {}
        for cell in cells:
            val = _conv_value(cell.val)
            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = self.datetime_format
            elif isinstance(cell.val, datetime.date):
                num_format_str = self.date_format
            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str
            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self._convert_to_style(cell.style, num_format_str)
                style_dict[stylekey] = style
            # Merged cells carry mergestart/mergeend bounds; plain cells don't.
            if cell.mergestart is not None and cell.mergeend is not None:
                wks.write_merge(startrow + cell.row,
                                startrow + cell.mergestart,
                                startcol + cell.col,
                                startcol + cell.mergeend,
                                val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          val, style)
    @classmethod
    def _style_to_xlwt(cls, item, firstlevel=True, field_sep=',',
                       line_sep=';'):
        """helper which recursively generate an xlwt easy style string
        for example:
            hstyle = {"font": {"bold": True},
            "border": {"top": "thin",
                    "right": "thin",
                    "bottom": "thin",
                    "left": "thin"},
            "align": {"horiz": "center"}}
            will be converted to
            font: bold on; \
                    border: top thin, right thin, bottom thin, left thin; \
                    align: horiz center;
        """
        if hasattr(item, 'items'):
            if firstlevel:
                # Top level: "key: value" items joined by line_sep.
                it = ["%s: %s" % (key, cls._style_to_xlwt(value, False))
                      for key, value in item.items()]
                out = "%s " % (line_sep).join(it)
                return out
            else:
                # Nested level: "key value" items joined by field_sep.
                it = ["%s %s" % (key, cls._style_to_xlwt(value, False))
                      for key, value in item.items()]
                out = "%s " % (field_sep).join(it)
                return out
        else:
            # Leaf value: xlwt's easyxf grammar spells booleans "on"/"off".
            item = "%s" % item
            item = item.replace("True", "on")
            item = item.replace("False", "off")
            return item
    @classmethod
    def _convert_to_style(cls, style_dict, num_format_str=None):
        """
        converts a style_dict to an xlwt style object

        Parameters
        ----------
        style_dict: style dictionary to convert
        num_format_str: optional number format string
        """
        import xlwt
        if style_dict:
            xlwt_stylestr = cls._style_to_xlwt(style_dict)
            style = xlwt.easyxf(xlwt_stylestr, field_sep=',', line_sep=';')
        else:
            style = xlwt.XFStyle()
        if num_format_str is not None:
            style.num_format_str = num_format_str
        return style
# Make the xlwt writer selectable through ExcelWriter's engine lookup.
register_writer(_XlwtWriter)
class _XlsxWriter(ExcelWriter):
    """ExcelWriter implementation backed by the ``xlsxwriter`` package (.xlsx files)."""
    engine = 'xlsxwriter'
    supported_extensions = ('.xlsx',)
    def __init__(self, path, engine=None,
                 date_format=None, datetime_format=None, **engine_kwargs):
        """Open an xlsxwriter Workbook at ``path`` with the given formats."""
        # Use the xlsxwriter module as the Excel writer.
        import xlsxwriter
        super(_XlsxWriter, self).__init__(path, engine=engine,
                                          date_format=date_format,
                                          datetime_format=datetime_format,
                                          **engine_kwargs)
        self.book = xlsxwriter.Workbook(path, **engine_kwargs)
    def save(self):
        """
        Save workbook to disk.
        """
        # xlsxwriter actually writes the file on close().
        return self.book.close()
    def write_cells(self, cells, sheet_name=None, startrow=0, startcol=0):
        """Write ``cells`` to ``sheet_name`` offset by (startrow, startcol)."""
        # Write the frame cells using xlsxwriter.
        sheet_name = self._get_sheet_name(sheet_name)
        if sheet_name in self.sheets:
            wks = self.sheets[sheet_name]
        else:
            wks = self.book.add_worksheet(sheet_name)
            self.sheets[sheet_name] = wks
        # Cache converted formats keyed by style-json + number format so each
        # distinct style is only converted once per call.
        style_dict = {}
        for cell in cells:
            num_format_str = None
            if isinstance(cell.val, datetime.datetime):
                num_format_str = self.datetime_format
            elif isinstance(cell.val, datetime.date):
                num_format_str = self.date_format
            stylekey = json.dumps(cell.style)
            if num_format_str:
                stylekey += num_format_str
            if stylekey in style_dict:
                style = style_dict[stylekey]
            else:
                style = self._convert_to_style(cell.style, num_format_str)
                style_dict[stylekey] = style
            # Merged cells carry mergestart/mergeend bounds; plain cells don't.
            if cell.mergestart is not None and cell.mergeend is not None:
                wks.merge_range(startrow + cell.row,
                                startcol + cell.col,
                                startrow + cell.mergestart,
                                startcol + cell.mergeend,
                                cell.val, style)
            else:
                wks.write(startrow + cell.row,
                          startcol + cell.col,
                          cell.val, style)
    def _convert_to_style(self, style_dict, num_format_str=None):
        """
        converts a style_dict to an xlsxwriter format object

        Parameters
        ----------
        style_dict: style dictionary to convert
        num_format_str: optional number format string
        """
        # If there is no formatting we don't create a format object.
        if num_format_str is None and style_dict is None:
            return None
        # Create a XlsxWriter format object.
        xl_format = self.book.add_format()
        if num_format_str is not None:
            xl_format.set_num_format(num_format_str)
        if style_dict is None:
            return xl_format
        # Map the cell font to XlsxWriter font properties.
        if style_dict.get('font'):
            font = style_dict['font']
            if font.get('bold'):
                xl_format.set_bold()
        # Map the alignment to XlsxWriter alignment properties.
        alignment = style_dict.get('alignment')
        if alignment:
            if (alignment.get('horizontal')
                    and alignment['horizontal'] == 'center'):
                xl_format.set_align('center')
            if (alignment.get('vertical')
                    and alignment['vertical'] == 'top'):
                xl_format.set_align('top')
        # Map the cell borders to XlsxWriter border properties.
        if style_dict.get('borders'):
            xl_format.set_border()
        return xl_format
# Make the xlsxwriter writer selectable through ExcelWriter's engine lookup.
register_writer(_XlsxWriter)
| 35.246763 | 82 | 0.54193 |
acec416e432737cba93aca853de29e1923a1f003 | 10,815 | py | Python | IP9258.indigoPlugin/Contents/Server Plugin/plugin.py | smudger4/IP9258 | 6d8b83527f11d6502310ec24e720be9c4f61ded9 | [
"MIT"
] | null | null | null | IP9258.indigoPlugin/Contents/Server Plugin/plugin.py | smudger4/IP9258 | 6d8b83527f11d6502310ec24e720be9c4f61ded9 | [
"MIT"
] | 1 | 2016-08-28T21:30:38.000Z | 2018-04-26T19:40:33.000Z | IP9258.indigoPlugin/Contents/Server Plugin/plugin.py | smudger4/IP9258 | 6d8b83527f11d6502310ec24e720be9c4f61ded9 | [
"MIT"
] | 1 | 2016-09-06T03:32:50.000Z | 2016-09-06T03:32:50.000Z | #! /usr/bin/env python
# -*- coding: utf-8 -*-
####################
# Control an Aviosys IP9528 IP PDU from Indigo
# V1.1 4 November 2012
# Copyright (c) 2012, Nick Smith
# MIT licence - refer to licence.txt
#
# Based on example code from Indigo SDK v1.02
# Copyright (c) 2012, Perceptive Automation, LLC.
# http://www.perceptiveautomation.com
# V1.1: added code to keep connection alive to improve response time
# V1.0: initial version
# import os
# import sys
import urllib2
import socket
import string
# Note the "indigo" module is automatically imported and made available inside
# our global name space by the host process.
################################################################################
class Plugin(indigo.PluginBase):
########################################
def __init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs):
indigo.PluginBase.__init__(self, pluginId, pluginDisplayName, pluginVersion, pluginPrefs)
self.debug = pluginPrefs.get("showDebugInfo", False)
self.interval = int(pluginPrefs.get("interval", False))
def __del__(self):
indigo.PluginBase.__del__(self)
########################################
def startup(self):
self.debugLog(u"startup called")
# setup a socket timeout
timeout = 5
socket.setdefaulttimeout(timeout)
########################################
def shutdown(self):
self.debugLog(u"shutdown called")
########################################
def runConcurrentThread(self):
self.debugLog("Starting concurrent thread")
try:
while True:
prId = "com.nickandmeryl.ip9258"
self.debugLog("Found prId " + prId)
for device in indigo.devices.iter(prId):
self.debugLog("Found device " + device.name)
self.readAndUpdateState(device)
self.debugLog("Sleeping for " + str(self.interval * 60) + " minutes")
self.sleep(self.interval * 60)
except self.StopThread:
self.debugLog("runConcurrentThread stopping: ")
pass
########################################
def validateDeviceConfigUi(self, valuesDict, typeId, devId):
# validate supplied values
outletNum = int(valuesDict["outlet"])
if outletNum < 1 or outletNum > 4:
self.errorLog(u"Error: Outlet \"%s\" must be between 1 & 4" % str(outletNum))
errorDict = indigo.Dict()
errorDict["outlet"] = "The value of this field must be between 1 & 4"
return (False, valuesDict, errorDict)
else:
return True
########################################
def getDeviceStateList(self, dev):
typeId = dev.deviceTypeId
statesList = self.devicesTypeDict[typeId][u'States']
if dev.pluginProps['model'] == 'IP9255Pro':
stateDict = {'Disabled': False, 'Key': 'temp', 'StateLabel': 'temp', 'TriggerLabel': 'temp', 'Type': 100}
statesList.append(stateDict)
stateDict = {'Disabled': False, 'Key': 'current', 'StateLabel': 'current', 'TriggerLabel': 'current', 'Type': 100}
statesList.append(stateDict)
return statesList
########################################
# Relay / Dimmer Action callback
######################
def actionControlDimmerRelay(self, action, dev):
###### TURN ON ######
if action.deviceAction == indigo.kDeviceAction.TurnOn:
# Command hardware module (dev) to turn ON here:
if self.setPDUState(dev, "on") == 0:
sendSuccess = True
else:
sendSuccess = False
if sendSuccess:
# If success then log that the command was successfully sent.
indigo.server.log(u"Turned \"%s\" %s" % (dev.name, "on"))
# And then tell the Indigo Server to update the state.
dev.updateStateOnServer("onOffState", True)
else:
# Else log failure but do NOT update state on Indigo Server.
indigo.server.log(u"send \"%s\" %s failed" % (dev.name, "on"), isError=True)
###### TURN OFF ######
elif action.deviceAction == indigo.kDeviceAction.TurnOff:
# Command hardware module (dev) to turn OFF here:
if self.setPDUState(dev, "off") == 0:
sendSuccess = True
else:
sendSuccess = False
if sendSuccess:
# If success then log that the command was successfully sent.
indigo.server.log(u"Turned \"%s\" %s" % (dev.name, "off"))
# And then tell the Indigo Server to update the state:
dev.updateStateOnServer("onOffState", False)
else:
# Else log failure but do NOT update state on Indigo Server.
indigo.server.log(u"send \"%s\" %s failed" % (dev.name, "off"), isError=True)
###### TOGGLE ######
elif action.deviceAction == indigo.kDeviceAction.Toggle:
# Command hardware module (dev) to toggle here:
sendSuccess = False
newOnState = not dev.onState
if newOnState == True:
# currently off, so turn on
if self.setPDUState(dev, "on") == 0:
sendSuccess = True
else:
# currently on, so turn off
if self.setPDUState(dev, "off") == 0:
sendSuccess = True
if sendSuccess:
# If success then log that the command was successfully sent.
indigo.server.log(u"sent \"%s\" %s" % (dev.name, "toggle"))
# And then tell the Indigo Server to update the state:
dev.updateStateOnServer("onOffState", newOnState)
else:
# Else log failure but do NOT update state on Indigo Server.
indigo.server.log(u"send \"%s\" %s failed" % (dev.name, "toggle"), isError=True)
###### STATUS REQUEST ######
elif action.deviceAction == indigo.kDeviceAction.RequestStatus:
# Query hardware module (dev) for its current states here:
self.readAndUpdateState(dev)
########################################
def setPDUState(self, dev, state):
# send command to PDU to change state of an outlet
# state argument is either "on" or "off"
# validate inputs
state_num = 2
if string.lower(state) == "on":
state_num = 1
elif string.lower(state) == "off":
state_num = 0
else:
self.errorLog(u"Error: State must be on or off")
return(state_num)
userName = dev.pluginProps["userName"]
password = dev.pluginProps["password"]
pduIpAddr = dev.pluginProps["ipAddr"]
outlet = dev.pluginProps["outlet"]
self.debugLog("Username: " + userName)
self.debugLog("Password: " + password)
self.debugLog("IP address: " + pduIpAddr)
self.debugLog("Outlet: " + outlet)
# build the request string to send to the PDU
base_url_cmd = "http://" + pduIpAddr + "/set.cmd?user=" + userName + "+pass=" + password + "+"
url_cmd = base_url_cmd + "cmd=setpower+p6" + outlet + "=" + str(state_num)
self.debugLog(u"Sending to PDU: " + url_cmd)
try:
# send the command to the PDU & clean up afterwards
response = urllib2.urlopen(url_cmd)
response.read()
response.close()
except socket.timeout:
self.errorLog(u"Timed out when talking to PDU")
return("2")
except urllib2.URLError, e:
if hasattr(e, 'reason'):
self.errorLog(u"Error: We failed to reach a server.")
self.errorLog("Reason: " + str(e.reason))
return(2)
elif hasattr(e, 'code'):
self.errorLog(u"Error: The server couldn\'t fulfill the request.")
self.errorLog("Error code: " + str(e.code))
return(2)
else:
# everything worked
self.debugLog(u"Sent to PDU")
return(0)
########################################
def getPDUState(self, dev):
# request state of PDU
# returns state of all outlets in a single string
userName = dev.pluginProps["userName"]
password = dev.pluginProps["password"]
pduIpAddr = dev.pluginProps["ipAddr"]
outlet = dev.pluginProps["outlet"]
self.debugLog("Username: " + userName)
self.debugLog("Password: " + password)
self.debugLog("IP address: " + pduIpAddr)
self.debugLog("Outlet: " + outlet)
# build the request string to send to the PDU
base_url_cmd = "http://" + pduIpAddr + "/set.cmd?user=" + userName + "+pass=" + password + "+"
url_cmd = base_url_cmd + "cmd=getpower"
self.debugLog(u"Created URL command " + url_cmd )
resultCode = 0
try:
# send the command to the PDU & clean up afterwards
response = urllib2.urlopen(url_cmd)
resultString = response.read()
if dev.pluginProps['model'] == 'IP9255Pro':
url_cmd = base_url_cmd + "cmd=gettemperature"
response = urllib2.urlopen(url_cmd)
resultString += response.read()
url_cmd = base_url_cmd + "cmd=getcurrent"
response = urllib2.urlopen(url_cmd)
resultString += response.read()
response.close()
self.debugLog(u"Received response " + str(resultString))
except socket.timeout:
self.errorLog(u"Timed out when talking to PDU")
return("2")
except urllib2.URLError, e:
if hasattr(e, 'reason'):
self.errorLog(u"Error: We failed to reach a server.")
self.errorLog("Reason: " + str(e.reason))
return("2")
elif hasattr(e, 'code'):
self.errorLog(u"Error: The server couldn\'t fulfill the request.")
self.errorLog("Error code: " + str(e.code))
return("2")
else:
# everything worked
self.debugLog(u"Sent to PDU: " + url_cmd)
self.debugLog(u"Received from PDU: " + resultString)
# check if this outlet status is in string returned from PDU
# if so, grab the 5th char which will be either 1 (on) or 0 (off)
# f we have a 9255Pro, also get the temp and current (mispelled as Cruuent in the API)
outletString = "p6" + outlet + "="
index = resultString.find(outletString)
if index != -1:
resultCode = resultString[index+4]
if dev.pluginProps['model'] == 'IP9255Pro':
index = resultString.find('Temperature:')
resultCode += "+" + resultString[index+13:index+15]
index = resultString.find('Cruuent:')
resultCode += "+" + resultString[index+8:index+11]
self.debugLog(u"Received code " + str(resultCode))
else:
self.errorLog(u"Error: received unexpected response \"" + resultString + "\"")
resultCode = "2"
return(resultCode)
########################################
def readAndUpdateState(self, dev):
# request state from the PDU & update state variable accordingly
logChanges = dev.pluginProps['logChanges']
resultCode = self.getPDUState(dev)
if resultCode == '2':
self.errorLog(u"Error: PDU %s in unknown state: %s" % (dev.name, resultCode))
return(False)
if dev.pluginProps['model'] == 'IP9255Pro':
(resultCode,temp,current) = resultCode.split("+")
self.debugLog(u"Received codes: %s, %s, %s " % (resultCode, temp, current))
dev.updateStateOnServer("temp", temp)
dev.updateStateOnServer("current", current)
if int(resultCode) == 0:
dev.updateStateOnServer("onOffState", False)
if logChanges:
indigo.server.log(u"Device %s is off" % dev.name)
return(True)
elif int(resultCode) == 1:
dev.updateStateOnServer("onOffState", True)
if logChanges:
indigo.server.log(u"Device %s is on" % dev.name)
return(True)
else:
self.errorLog(u"Error: PDU %s in unknown state: %s" % (dev.name, resultCode))
return(False)
| 33.37963 | 117 | 0.639575 |
acec41d6d60a2226a8cc884b344c85867a7adc40 | 475 | py | Python | Exercicios/Exercicio024.py | RicardoMart922/estudo_Python | cb595c2a5e5aee568b6afa71b3ed9dd9cb7eef72 | [
"MIT"
] | null | null | null | Exercicios/Exercicio024.py | RicardoMart922/estudo_Python | cb595c2a5e5aee568b6afa71b3ed9dd9cb7eef72 | [
"MIT"
] | null | null | null | Exercicios/Exercicio024.py | RicardoMart922/estudo_Python | cb595c2a5e5aee568b6afa71b3ed9dd9cb7eef72 | [
"MIT"
] | null | null | null | # Faça um programa que leia um número de 0 a 9999 e mostre na tela cada um dos dígitos separados.
numero = int(input('Digite um número entre 0 e 9999: '))
unidade = numero // 1 % 10
dezena = numero // 10 % 10
centena = numero // 100 % 10
milhar = numero // 1000 % 10
print('\033[4;31mUnidade\033[m: {}'.format(unidade))
print('\033[4;32mDezena\033[m: {}'.format(dezena))
print('\033[4;33mCentena\033[m: {}'.format(centena))
print('\033[4;34mMilhar\033[m: {}'.format(milhar))
| 43.181818 | 97 | 0.677895 |
acec43b3b2e47064ae023b4a594df9e01ad3f64b | 3,000 | py | Python | setup.py | elliottl/httpie | 4c56d894ba9e2bb1c097a3a6067006843ac2944d | [
"BSD-3-Clause"
] | 24,849 | 2015-01-01T04:46:24.000Z | 2021-05-29T14:40:37.000Z | setup.py | rodrigoricky/httpie | 4c56d894ba9e2bb1c097a3a6067006843ac2944d | [
"BSD-3-Clause"
] | 529 | 2015-01-02T17:50:51.000Z | 2020-08-11T16:45:34.000Z | setup.py | rodrigoricky/httpie | 4c56d894ba9e2bb1c097a3a6067006843ac2944d | [
"BSD-3-Clause"
] | 2,057 | 2015-01-01T08:03:41.000Z | 2021-05-31T11:26:12.000Z | # This is purely the result of trial and error.
import sys
from setuptools import setup, find_packages
import httpie
# Note: keep requirements here to ease distributions packaging
tests_require = [
    'pytest',
    'pytest-httpbin>=0.0.6',
    'responses',
]
# Developer tooling: the test deps plus linters, packaging and docs helpers.
dev_require = [
    *tests_require,
    'flake8',
    'flake8-comprehensions',
    'flake8-deprecated',
    'flake8-mutable',
    'flake8-tuple',
    'pyopenssl',
    'pytest-cov',
    'pyyaml',
    'twine',
    'wheel',
    'Jinja2'
]
# Runtime dependencies required on every platform.
install_requires = [
    'charset_normalizer>=2.0.0',
    'defusedxml>=0.6.0',
    'requests[socks]>=2.22.0',
    'Pygments>=2.5.2',
    'requests-toolbelt>=0.9.1',
    'multidict>=4.7.0',
    'setuptools',
    'importlib-metadata>=1.4.0; python_version < "3.8"',
]
# Extra runtime dependencies needed only on Windows (terminal colors).
install_requires_win_only = [
    'colorama>=0.2.4',
]
# Conditional dependencies:
# sdist
if 'bdist_wheel' not in sys.argv:
    # Building an sdist / installing from source: resolve the platform
    # condition now and bake the Windows-only deps into install_requires.
    if 'win32' in str(sys.platform).lower():
        # Terminal colors for Windows
        install_requires.extend(install_requires_win_only)
# bdist_wheel
extras_require = {
    'dev': dev_require,
    'test': tests_require,
    # https://wheel.readthedocs.io/en/latest/#defining-conditional-dependencies
    ':sys_platform == "win32"': install_requires_win_only,
}
def long_description():
    """Return the README contents for use as the PyPI long description."""
    with open('README.md', encoding='utf-8') as readme:
        return readme.read()
# Package metadata and entry points; version, description and license are
# read from the httpie package itself so there is a single source of truth.
setup(
    name='httpie',
    version=httpie.__version__,
    description=httpie.__doc__.strip(),
    long_description=long_description(),
    long_description_content_type='text/markdown',
    url='https://httpie.io/',
    download_url=f'https://github.com/httpie/httpie/archive/{httpie.__version__}.tar.gz',
    author=httpie.__author__,
    author_email='jakub@roztocil.co',
    license=httpie.__licence__,
    packages=find_packages(include=['httpie', 'httpie.*']),
    entry_points={
        'console_scripts': [
            'http = httpie.__main__:main',
            'https = httpie.__main__:main',
            'httpie = httpie.manager.__main__:main',
        ],
    },
    python_requires='>=3.6',
    extras_require=extras_require,
    install_requires=install_requires,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3 :: Only',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: BSD License',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Software Development',
        'Topic :: System :: Networking',
        'Topic :: Terminals',
        'Topic :: Text Processing',
        'Topic :: Utilities'
    ],
    project_urls={
        'GitHub': 'https://github.com/httpie/httpie',
        'Twitter': 'https://twitter.com/httpie',
        'Discord': 'https://httpie.io/discord',
        'Documentation': 'https://httpie.io/docs',
        'Online Demo': 'https://httpie.io/run',
    },
)
| 26.548673 | 89 | 0.626 |
acec43e33f9dd177c0ad01687c1bcee6d8115ca1 | 1,445 | py | Python | src/ion/process/test/manage_system.py | scionrep/scioncc_new | 086be085b69711ee24c4c86ed42f2109ca0db027 | [
"BSD-2-Clause"
] | 2 | 2015-10-05T20:36:35.000Z | 2018-11-21T11:45:24.000Z | src/ion/process/test/manage_system.py | scionrep/scioncc_new | 086be085b69711ee24c4c86ed42f2109ca0db027 | [
"BSD-2-Clause"
] | 21 | 2015-03-18T14:39:32.000Z | 2016-07-01T17:16:29.000Z | src/ion/process/test/manage_system.py | scionrep/scioncc_new | 086be085b69711ee24c4c86ed42f2109ca0db027 | [
"BSD-2-Clause"
] | 12 | 2015-03-18T10:53:49.000Z | 2018-06-21T11:19:57.000Z | #!/usr/bin/env python
""" call system_management_service actions. use as a proof of concept or the world's worst UI.
invoke with commands like this:
bin/pycc -x ion.process.test.manage_system.ChangeLogLevel logger=ion.process.bootstrap.ion_logger level=DEBUG
bin/pycc -x ion.process.test.manage_system.ReportStats
bin/pycc -x ion.process.test.manage_system.ClearStats
"""
__author__ = 'Michael Meisinger, Ian Katz, Thomas Lennan'
from pyon.core.bootstrap import get_service_registry
from pyon.public import ImmediateProcess
from interface.objects import AllContainers, ReportStatistics, ClearStatistics
class ChangeLogLevel(ImmediateProcess):
def on_start(self):
logger = self.CFG.get("logger")
level = self.CFG.get("level")
recursive = self.CFG.get("recursive", False)
svc = get_service_registry().services['system_management'].client(process=self)
svc.set_log_level(logger=logger, level=level, recursive=recursive)
class ReportStats(ImmediateProcess):
def on_start(self):
svc = get_service_registry().services['system_management'].client(process=self)
svc.perform_action(predicate=AllContainers(), action=ReportStatistics())
class ClearStats(ImmediateProcess):
def on_start(self):
svc = get_service_registry().services['system_management'].client(process=self)
svc.perform_action(predicate=AllContainers(), action=ClearStatistics())
| 40.138889 | 113 | 0.752941 |
acec442c0356e7574ddfe0df462d2be285ff0c37 | 8,018 | py | Python | pyleecan/GUI/Dialog/DMachineSetup/SMHoleMag/PHoleM57/PHoleM57.py | ajpina/pyleecan | f8d1fce7d108cf443f5767e35d59ff15905fb49f | [
"Apache-2.0"
] | 2 | 2020-08-28T14:54:55.000Z | 2021-03-13T19:34:45.000Z | pyleecan/GUI/Dialog/DMachineSetup/SMHoleMag/PHoleM57/PHoleM57.py | ajpina/pyleecan | f8d1fce7d108cf443f5767e35d59ff15905fb49f | [
"Apache-2.0"
] | null | null | null | pyleecan/GUI/Dialog/DMachineSetup/SMHoleMag/PHoleM57/PHoleM57.py | ajpina/pyleecan | f8d1fce7d108cf443f5767e35d59ff15905fb49f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from numpy import pi
from PySide2.QtCore import Signal
from PySide2.QtGui import QPixmap
from PySide2.QtWidgets import QWidget
from ......Classes.HoleM57 import HoleM57
from ......GUI import gui_option
from ......GUI.Dialog.DMachineSetup.SMHoleMag.PHoleM57.Gen_PHoleM57 import Gen_PHoleM57
from ......Methods.Slot.Slot.check import SlotCheckError
from ......GUI.Dialog.DMatLib.MatLib import MatLib
class PHoleM57(Gen_PHoleM57, QWidget):
"""Page to set the Hole Type 57"""
# Signal to DMachineSetup to know that the save popup is needed
saveNeeded = Signal()
# Information for WHoleMag
hole_name = "Slot Type 57"
hole_type = HoleM57
def __init__(self, hole=None, matlib=MatLib()):
"""Initialize the widget according to hole
Parameters
----------
self : PHoleM57
A PHoleM57 widget
hole : HoleM57
current hole to edit
matlib : list
List of available Material
"""
# Build the interface according to the .ui file
QWidget.__init__(self)
self.setupUi(self)
self.matlib = matlib
self.hole = hole
# Set FloatEdit unit
self.lf_W0.unit = "rad"
self.lf_W1.unit = "m"
self.lf_W2.unit = "m"
self.lf_W3.unit = "m"
self.lf_W4.unit = "m"
self.lf_H1.unit = "m"
self.lf_H2.unit = "m"
# Set default materials
self.w_mat_0.setText("mat_void:")
self.w_mat_0.def_mat = "Air"
self.w_mat_1.setText("magnet_0:")
self.w_mat_1.def_mat = "Magnet1"
self.w_mat_2.setText("magnet_1:")
self.w_mat_2.def_mat = "Magnet1"
if hole.magnet_0 is None: # SyRM
self.img_slot.setPixmap(
QPixmap(":/images/images/MachineSetup/WSlot/Slot_57_no_mag.PNG")
)
self.w_mat_0.update(self.hole, "mat_void", self.matlib)
self.w_mat_1.hide()
self.w_mat_2.hide()
else:
# Set current material
self.w_mat_0.update(self.hole, "mat_void", self.matlib)
self.w_mat_1.update(self.hole.magnet_0, "mat_type", self.matlib)
self.w_mat_2.update(self.hole.magnet_1, "mat_type", self.matlib)
# Set unit name (m ou mm)
self.u = gui_option.unit
wid_list = [
self.unit_W1,
self.unit_W2,
self.unit_W3,
self.unit_W4,
self.unit_H1,
self.unit_H2,
]
for wid in wid_list:
wid.setText(self.u.get_m_name())
# Fill the fields with the machine values (if they're filled)
self.lf_W0.setValue(self.hole.W0)
self.lf_W1.setValue(self.hole.W1)
self.lf_W2.setValue(self.hole.W2)
self.lf_W3.setValue(self.hole.W3)
self.lf_W4.setValue(self.hole.W4)
self.lf_H1.setValue(self.hole.H1)
self.lf_H2.setValue(self.hole.H2)
# Display the main output of the hole (surface, height...)
self.comp_output()
# Connect the signal
self.lf_W0.editingFinished.connect(self.set_W0)
self.lf_W1.editingFinished.connect(self.set_W1)
self.lf_W2.editingFinished.connect(self.set_W2)
self.lf_W3.editingFinished.connect(self.set_W3)
self.lf_W4.editingFinished.connect(self.set_W4)
self.lf_H1.editingFinished.connect(self.set_H1)
self.lf_H2.editingFinished.connect(self.set_H2)
self.w_mat_0.saveNeeded.connect(self.emit_save)
self.w_mat_1.saveNeeded.connect(self.emit_save)
self.w_mat_2.saveNeeded.connect(self.emit_save)
def set_W0(self):
"""Signal to update the value of W0 according to the line edit
Parameters
----------
self : PHoleM57
A PHoleM57 widget
"""
self.hole.W0 = self.lf_W0.value()
self.comp_output()
# Notify the machine GUI that the machine has changed
self.saveNeeded.emit()
def set_W1(self):
"""Signal to update the value of W1 according to the line edit
Parameters
----------
self : PHoleM57
A PHoleM57 widget
"""
self.hole.W1 = self.lf_W1.value()
self.comp_output()
# Notify the machine GUI that the machine has changed
self.saveNeeded.emit()
def set_W2(self):
"""Signal to update the value of W2 according to the line edit
Parameters
----------
self : PHoleM57
A PHoleM57 widget
"""
self.hole.W2 = self.lf_W2.value()
self.comp_output()
# Notify the machine GUI that the machine has changed
self.saveNeeded.emit()
    def set_W3(self):
        """Signal to update the value of W3 according to the line edit

        Connected to ``lf_W3.editingFinished``; stores the edited value,
        refreshes the computed output and notifies the machine GUI.

        Parameters
        ----------
        self : PHoleM57
            A PHoleM57 widget
        """
        self.hole.W3 = self.lf_W3.value()
        self.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()
    def set_W4(self):
        """Signal to update the value of W4 according to the line edit

        Connected to ``lf_W4.editingFinished``; stores the edited value,
        refreshes the computed output and notifies the machine GUI.

        Parameters
        ----------
        self : PHoleM57
            A PHoleM57 widget
        """
        self.hole.W4 = self.lf_W4.value()
        self.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()
    def set_H1(self):
        """Signal to update the value of H1 according to the line edit

        Connected to ``lf_H1.editingFinished``; stores the edited value,
        refreshes the computed output and notifies the machine GUI.

        Parameters
        ----------
        self : PHoleM57
            A PHoleM57 widget
        """
        self.hole.H1 = self.lf_H1.value()
        self.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()
    def set_H2(self):
        """Signal to update the value of H2 according to the line edit

        Connected to ``lf_H2.editingFinished``; stores the edited value,
        refreshes the computed output and notifies the machine GUI.

        Parameters
        ----------
        self : PHoleM57
            A PHoleM57 widget
        """
        self.hole.H2 = self.lf_H2.value()
        self.comp_output()
        # Notify the machine GUI that the machine has changed
        self.saveNeeded.emit()
def comp_output(self):
"""Compute and display the hole output
Parameters
----------
self : PHoleM57
A PHoleM57 widget
"""
is_set = False
if self.check() is None:
try:
# We compute the output only if the hole is correctly set
# Compute all the needed output as string
s_surf = format(self.u.get_m2(self.hole.comp_surface()), ".4g")
m_surf = format(self.u.get_m2(self.hole.comp_surface_magnets()), ".4g")
# Update the GUI to display the Output
self.out_slot_surface.setText(
"Slot suface (2 part): " + s_surf + " " + self.u.get_m2_name()
)
self.out_magnet_surface.setText(
"Magnet surface: " + m_surf + " " + self.u.get_m2_name()
)
is_set = True
except:
pass
if not is_set:
# We can't compute the output => We erase the previous version
# (that way the user know that something is wrong)
self.out_slot_surface.setText("Slot suface (2 part): ?")
self.out_magnet_surface.setText("Magnet surface: ?")
    def check(self):
        """Check that the current machine has all the needed fields set

        Parameters
        ----------
        self : PHoleM57
            A PHoleM57 widget

        Returns
        -------
        error : str
            Error message (returns None implicitly if no error)
        """
        # Constraints and None
        try:
            self.hole.check()
        except SlotCheckError as error:
            # Return the message so comp_output() can detect the invalid state
            return str(error)
    def emit_save(self):
        """Send a saveNeeded signal to the DMachineSetup

        Relay slot: the material widgets (w_mat_0/1/2) connect their own
        saveNeeded signals here so the change propagates to the parent GUI.
        """
        self.saveNeeded.emit()
| 31.198444 | 87 | 0.572587 |
acec44bc603edbd432fb68499cf2253f2bf86bd4 | 18,496 | py | Python | flextensor/baselines/conv2d_baseline.py | jcf94/FlexTensor | 72501488089e7a5674243ca8f0ebb311c743d8a1 | [
"MIT"
] | 4 | 2021-09-28T07:52:48.000Z | 2021-12-31T01:56:12.000Z | flextensor/baselines/conv2d_baseline.py | KnowingNothing/FlexTensor-Micro | 6de200326d4999c56af3f3343282149bc5758c15 | [
"MIT"
] | null | null | null | flextensor/baselines/conv2d_baseline.py | KnowingNothing/FlexTensor-Micro | 6de200326d4999c56af3f3343282149bc5758c15 | [
"MIT"
] | 3 | 2020-04-14T06:11:29.000Z | 2020-04-20T12:30:22.000Z | import os
import logging
import sys
import time
import argparse
import timeit
import torch
import tvm
import topi
import numpy as np
import tvm.contrib.graph_runtime as runtime
from tvm import relay
from tvm.relay import testing
from tvm.relay.testing.init import create_workload
from tvm.relay.testing import layers
from tvm import autotvm
from tvm.autotvm.tuner import XGBTuner, GATuner, RandomTuner, GridSearchTuner
from topi.testing import conv2d_nchw_python
from collections import namedtuple
from topi.util import get_const_tuple
from flextensor.configs.conv2d_config import *
# Use cuDNN kernels for the PyTorch GPU baselines
torch.backends.cudnn.enabled = True
# Map network name (CLI "-s/--shapes" value) -> list of conv2d layer shapes
shape_dict = {
    "yolo": yolo_shapes,
    "google": google_shapes,
    "squeeze": squeeze_shapes,
    "res": res_shapes,
    "vgg-16": vgg_16_shapes,
    "vgg-19": vgg_19_shapes,
    "test": test_conv_shapes,
    "yolo_b8": yolo_shapes_b8,
    "mobile_v2": mobilev2_shapes,
}
def pytorch_cpu(batch_size, height, width, channel, kernel_size, output_channel, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
    """Benchmark torch.nn.functional.conv2d on CPU via timeit.

    Builds the setup/statement source for ``timeit`` (tensor creation and a
    warm-up call happen in setup, only the conv call is timed) and returns
    the average time per call in milliseconds.  ``dev`` is accepted for
    signature compatibility with the CUDA baseline but unused on CPU.
    """
    setup_src = (
        "import torch\n"
        "conv = torch.nn.functional.conv2d\n"
        f"A = torch.rand([{batch_size}, {channel}, {height}, {width}], dtype=torch.float32)\n"
        f"W = torch.rand([{output_channel}, {channel // groups}, {kernel_size}, {kernel_size}], dtype=torch.float32)\n"
        f"conv(A, W, stride={stride}, padding={padding}, dilation={dilation}, groups={groups})\n"
    )
    stmt_src = f"ans = conv(A, W, stride={stride}, padding={padding}, dilation={dilation}, groups={groups})"
    elapsed = timeit.timeit(setup=setup_src, stmt=stmt_src, number=number)
    # Convert average seconds per run to milliseconds
    return elapsed / number * 1e3
def pytorch_cuda(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
    """Benchmark torch conv2d on GPU ``dev`` using CUDA events.

    Returns the average elapsed time per call in milliseconds.
    NOTE(review): the local weight tensor reuses the name ``W`` and shadows
    the width parameter; width is only read before this point, so behavior
    is unaffected, but the shadowing is easy to trip over when editing.
    """
    A = torch.rand([N, C, H, W], dtype=torch.float32).cuda("cuda:" + str(dev))
    W = torch.rand([K, C//groups, kernel_size, kernel_size], dtype=torch.float32).cuda("cuda:" + str(dev))
    # warm-up
    torch.nn.functional.conv2d(A, W, stride=stride, padding=padding, dilation=dilation, groups=groups)
    torch.cuda.synchronize()
    sum_time = 0.0
    for i in range(number):
        # CUDA events measure device-side elapsed time (in ms)
        start = torch.cuda.Event(enable_timing=True)
        end = torch.cuda.Event(enable_timing=True)
        start.record()
        ans = torch.nn.functional.conv2d(A, W, stride=stride, padding=padding, dilation=dilation, groups=groups)
        end.record()
        # Waits for everything to finish running
        torch.cuda.synchronize()
        sum_time += start.elapsed_time(end)
    return sum_time / number
def tvm_generic_cuda(timeout=4, trials=100):
    """Return a baseline callable running tvm_generic with target="cuda"."""
    def _inner(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
        return tvm_generic(N, H, W, C, kernel_size, K, stride=stride, padding=padding, dilation=dilation, groups=groups,
                           number=number, dev=dev, timeout=timeout, target="cuda", trials=trials)
    return _inner
def tvm_generic_llvm(timeout=4, trials=100):
    """Return a baseline callable running tvm_generic with target="llvm"."""
    def _inner(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
        return tvm_generic(N, H, W, C, kernel_size, K, stride=stride, padding=padding, dilation=dilation, groups=groups,
                           number=number, dev=dev, timeout=timeout, target="llvm", trials=trials)
    return _inner
def tvm_generic(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1,
                number=100, dev=0, timeout=4, target="llvm", trials=100):
    """Benchmark one conv2d layer via the TVM/Relay generic pipeline.

    Builds a single-conv Relay workload, expects a pre-existing AutoTVM
    tuning log (the tuning call itself is commented out below), applies the
    history best and returns the measured times in milliseconds as a numpy
    array of length 1 (repeat=1).
    """
    data_shape = (N, C, H, W)
    data = relay.var("data", shape=data_shape, dtype="float32")
    kernel_size = (kernel_size, kernel_size)
    stride = (stride, stride)
    padding = (padding, padding)
    body = layers.conv2d(data=data, channels=K, kernel_size=kernel_size, strides=stride, padding=padding, name="conv2d")
    op = relay.Function(relay.ir_pass.free_vars(body), body)
    # sym is unused afterwards; create_workload is called for `params`
    sym, params = create_workload(op)
    tasks = autotvm.task.extract_from_program(op, target=target, params=params, ops=(relay.op.nn.conv2d,))
    tuning_option = {
        "log_filename": "tvm_baseline_{}.log".format((N, C, H, W, K, kernel_size, stride, padding, dilation, groups)),
        "tuner": "xgb",
        "early_stopping": 30,
        "measure_option": autotvm.measure_option(
            builder=autotvm.LocalBuilder(timeout=timeout),
            runner=autotvm.LocalRunner(number=number, repeat=1, timeout=timeout, min_repeat_ms=150),
            # runner=autotvm.RPCRunner(
            #     '1080ti',  # change the device key to your key
            #     '0.0.0.0', 9190,
            #     number=20, repeat=3, timeout=4, min_repeat_ms=150)
        ),
    }
    log_filename = tuning_option["log_filename"]
    tuner = tuning_option["tuner"]
    early_stopping = tuning_option["early_stopping"]
    measure_option = tuning_option["measure_option"]
    # only support one task
    assert len(tasks) == 1
    for i, task in enumerate(tasks):
        prefix = "[Task %2d/%2d] " % (i + 1, len(tasks))
        # create tuner
        if tuner == 'xgb' or tuner == 'xgb-rank':
            tuner_obj = XGBTuner(task, loss_type='rank')
        elif tuner == 'ga':
            tuner_obj = GATuner(task, pop_size=100)
        elif tuner == 'random':
            tuner_obj = RandomTuner(task)
        elif tuner == 'gridsearch':
            tuner_obj = GridSearchTuner(task)
        else:
            raise ValueError("Invalid tuner: " + tuner)
        # do tuning
        n_trial = trials
        length = len(task.config_space)
        print("config space length=", length)
        # NOTE: actual tuning is disabled; only pre-tuned logs are consumed
        # tuner_obj.tune(n_trial=min(n_trial, length),
        #                early_stopping=early_stopping,
        #                measure_option=measure_option,
        #                callbacks=[
        #                    autotvm.callback.progress_bar(n_trial, prefix=prefix),
        #                    autotvm.callback.log_to_file(log_filename)])
    if not os.path.exists(log_filename):
        raise RuntimeError("the log file {} doesn't exists".format(log_filename))
    with autotvm.apply_history_best(log_filename):
        with relay.build_config(opt_level=3):
            graph, lib, params = relay.build_module.build(op, target=target, params=params)
        ctx = tvm.context(str(target), 0)
        data_tvm = tvm.nd.array((np.random.uniform(size=data_shape)).astype("float32"))
        module = runtime.create(graph, lib, ctx)
        module.set_input("data", data_tvm)
        module.set_input(**params)
        # evaluate
        ftimer = module.module.time_evaluator("run", ctx, number=number, repeat=1)
        # convert seconds to milliseconds
        prof_res = np.array(ftimer().results) * 1e3
        return prof_res
def schedule_direct_cuda(cfg, s, conv):
    """schedule optimized for batch size = 1

    Defines the AutoTVM search space (tile splits + unroll knobs) for a
    direct conv2d, then applies a standard GPU tiling: shared-memory cache
    stages for data/weight, block/vthread/thread bindings and cooperative
    fetching.  Mutates the schedule ``s`` in place.
    """
    ##### space definition begin #####
    n, f, y, x = s[conv].op.axis
    rc, ry, rx = s[conv].op.reduce_axis
    cfg.define_split("tile_f", f, num_outputs=4)
    cfg.define_split("tile_y", y, num_outputs=4)
    cfg.define_split("tile_x", x, num_outputs=4)
    cfg.define_split("tile_rc", rc, num_outputs=2)
    cfg.define_split("tile_ry", ry, num_outputs=2)
    cfg.define_split("tile_rx", rx, num_outputs=2)
    cfg.define_knob("auto_unroll_max_step", [0, 512, 1500])
    target = tvm.target.current_target()
    if target.target_name in ['nvptx', 'rocm']:
        cfg.define_knob("unroll_explicit", [1])
    else:
        cfg.define_knob("unroll_explicit", [0, 1])
    # fallback support
    if cfg.is_fallback:
        ref_log = autotvm.tophub.load_reference_log(
            target.target_name, target.model, 'conv2d', 'direct')
        cfg.fallback_with_reference_log(ref_log)
    ##### space definition end #####
    pad_data, kernel = s[conv].op.input_tensors
    s[pad_data].compute_inline()
    if isinstance(kernel.op, tvm.tensor.ComputeOp) and 'dilate' in kernel.op.tag:
        s[kernel].compute_inline()
    if conv.op in s.outputs:
        output = conv
        OL = s.cache_write(conv, 'local')
    else:
        output = s.outputs[0].output(0)
        s[conv].set_scope('local')
        OL = conv
    # create cache stage
    AA = s.cache_read(pad_data, 'shared', [OL])
    WW = s.cache_read(kernel, 'shared', [OL])
    # tile and bind spatial axes
    n, f, y, x = s[output].op.axis
    kernel_scope, n = s[output].split(n, nparts=1)
    bf, vf, tf, fi = cfg["tile_f"].apply(s, output, f)
    by, vy, ty, yi = cfg["tile_y"].apply(s, output, y)
    bx, vx, tx, xi = cfg["tile_x"].apply(s, output, x)
    bf = s[output].fuse(n, bf)
    s[output].bind(bf, tvm.thread_axis("blockIdx.z"))
    s[output].bind(by, tvm.thread_axis("blockIdx.y"))
    s[output].bind(bx, tvm.thread_axis("blockIdx.x"))
    s[output].bind(vf, tvm.thread_axis("vthread"))
    s[output].bind(vy, tvm.thread_axis("vthread"))
    s[output].bind(vx, tvm.thread_axis("vthread"))
    s[output].bind(tf, tvm.thread_axis("threadIdx.z"))
    s[output].bind(ty, tvm.thread_axis("threadIdx.y"))
    s[output].bind(tx, tvm.thread_axis("threadIdx.x"))
    s[output].reorder(bf, by, bx, vf, vy, vx, tf, ty, tx, fi, yi, xi)
    s[OL].compute_at(s[output], tx)
    # tile reduction axes
    n, f, y, x = s[OL].op.axis
    rc, ry, rx = s[OL].op.reduce_axis
    rco, rci = cfg['tile_rc'].apply(s, OL, rc)
    # NOTE(review): 'tile_rx' is applied to ry and 'tile_ry' to rx — this
    # mirrors upstream TVM's direct conv2d schedule; confirm before "fixing",
    # since existing tuned logs depend on these config key names.
    ryo, ryi = cfg['tile_rx'].apply(s, OL, ry)
    rxo, rxi = cfg['tile_ry'].apply(s, OL, rx)
    s[OL].reorder(rco, ryo, rxo, rci, ryi, rxi, n, f, y, x)
    s[AA].compute_at(s[OL], rxo)
    s[WW].compute_at(s[OL], rxo)
    # cooperative fetching
    for load in [AA, WW]:
        n, f, y, x = s[load].op.axis
        fused = s[load].fuse(n, f, y, x)
        tz, fused = s[load].split(fused, nparts=cfg["tile_f"].size[2])
        ty, fused = s[load].split(fused, nparts=cfg["tile_y"].size[2])
        tx, fused = s[load].split(fused, nparts=cfg["tile_x"].size[2])
        s[load].bind(tz, tvm.thread_axis("threadIdx.z"))
        s[load].bind(ty, tvm.thread_axis("threadIdx.y"))
        s[load].bind(tx, tvm.thread_axis("threadIdx.x"))
    # unroll
    s[output].pragma(kernel_scope, 'auto_unroll_max_step', cfg['auto_unroll_max_step'].val)
    s[output].pragma(kernel_scope, 'unroll_explicit', cfg['unroll_explicit'].val)
    N, CO, OH, OW = get_const_tuple(output.shape)
    # NOTE(review): for an NCHW (CO, CI, KH, KW) kernel this unpack mislabels
    # the individual dims, but the product CI*KH*KW below is unchanged, so
    # the flop count stays correct.
    _, KH, KW, CI = get_const_tuple(kernel.shape)
    cfg.add_flop(2 * N * OH * OW * CO * CI * KH * KW)
@autotvm.template
def conv2d_nchw(N, H, W, CO, CI, KH, KW, stride, padding, dilation):
    """AutoTVM template: NCHW conv2d compute + CUDA direct schedule."""
    # assert N == 1, "Only consider batch_size = 1 in this template"
    data = tvm.placeholder((N, CI, H, W), name='data')
    kernel = tvm.placeholder((CO, CI, KH, KW), name='kernel')
    conv = topi.nn.conv2d_nchw(data, kernel, stride, padding, dilation=dilation, out_dtype='float32')
    s = tvm.create_schedule([conv.op])
    cfg = autotvm.get_config()
    ##### space definition begin #####
    schedule_direct_cuda(cfg, s, conv)
    return s, [data, kernel, conv]
# Stream AutoTVM tuning progress/diagnostics to stdout
logging.getLogger('autotvm').setLevel(logging.DEBUG)
logging.getLogger('autotvm').addHandler(logging.StreamHandler(sys.stdout))
def run(name, N, H, W, CO, CI, KH, KW, stride, pad, dilation, trials=100, timeout=4, number=10, target="llvm", dev=0, tune=True):
    """AutoTVM-tune (optionally) and benchmark a single conv2d layer.

    Creates the `conv2d_nchw` task, tunes it with XGBTuner when ``tune``
    is set (logging to a per-layer file), applies the history best and
    measures the compiled kernel.  Returns the mean run time in ms.
    """
    strides, padding = (stride, stride), (pad, pad)
    task = autotvm.task.create(conv2d_nchw,
                               args=(N, H, W, CO, CI, KH, KW, strides, padding, dilation),
                               target=target)
    print("config_space length:", len(task.config_space))
    logfile = "conv2d_" + name + "_{}".format((N, CI, H, W, CO, KH, KW, stride, pad, dilation)) + ".log"
    # Measure each candidate `number` times for at least 150 ms to reduce
    # variance; compilation/run time out after `timeout` seconds.
    measure_option = autotvm.measure_option(
        builder=autotvm.LocalBuilder(),
        runner=autotvm.LocalRunner(number=number, repeat=1, min_repeat_ms=150, timeout=timeout)
    )
    # During tuning many invalid configs are tried, so error reports are
    # expected as long as non-zero GFLOPS eventually show up.
    tuner = autotvm.tuner.XGBTuner(task)
    beg = time.time()
    print("Tune: ", tune)
    if tune:
        tuner.tune(n_trial=trials,
                   measure_option=measure_option,
                   callbacks=[autotvm.callback.log_to_file(logfile)])
    end = time.time()
    # Inspect the best config from the log file.
    dispatch_context = autotvm.apply_history_best(logfile)
    best_config = dispatch_context.query(task.target, task.workload)
    print("Optimize use ", end - beg, "s")
    print("\nBest config:")
    print(best_config)
    # Apply history best from log file and compile.
    with autotvm.apply_history_best(logfile):
        with tvm.target.create(target):
            s, arg_bufs = conv2d_nchw(N, H, W, CO, CI, KH, KW, strides, padding, dilation)
            # BUGFIX: the CUDA source dump used to be built unconditionally
            # with "cuda", which breaks the llvm path (and built the kernel
            # twice).  Only dump the device source when targeting CUDA.
            if target == "cuda":
                print(tvm.build(s, arg_bufs, "cuda").imported_modules[0].get_source())
            func = tvm.build(s, arg_bufs)
    # Random inputs; correctness check against conv2d_nchw_python is
    # intentionally skipped here (see original commented-out code).
    a_np = np.random.uniform(size=(N, CI, H, W)).astype(np.float32)
    w_np = np.random.uniform(size=(CO, CI, KH, KW)).astype(np.float32)
    ctx = tvm.context(str(target), dev)
    a_tvm = tvm.nd.array(a_np, ctx=ctx)
    w_tvm = tvm.nd.array(w_np, ctx=ctx)
    c_tvm = tvm.nd.empty((N, CO,
                          (H + 2 * pad - dilation * (KH - 1) - 1) // stride + 1,
                          (W + 2 * pad - dilation * (KW - 1) - 1) // stride + 1), ctx=ctx)
    # Evaluate running time; time_evaluator amortizes kernel-launch overhead.
    evaluator = func.time_evaluator(func.entry_name, ctx, number=number)
    cost = evaluator(a_tvm, w_tvm, c_tvm).mean * 1e3
    return cost
def tvm_opt_cuda(name, trials=100, timeout=4, tune=True):
    """Return a baseline callable running `run` (AutoTVM) with target="cuda"."""
    def _inner(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
        return run(name, N, H, W, K, C, kernel_size, kernel_size, stride, padding, dilation,
                   trials=trials, timeout=timeout, number=number, target="cuda", dev=dev, tune=tune)
    return _inner
def tvm_opt_llvm(name, trials=100, timeout=4, tune=True):
    """Return a baseline callable running `run` (AutoTVM) with target="llvm"."""
    def _inner(N, H, W, C, kernel_size, K, stride=1, padding=0, dilation=1, groups=1, number=100, dev=0):
        return run(name, N, H, W, K, C, kernel_size, kernel_size, stride, padding, dilation,
                   trials=trials, timeout=timeout, number=number, target="llvm", dev=dev, tune=tune)
    return _inner
if __name__ == "__main__":
    # CLI: pick a network's conv shapes, a slice of its layers, a baseline
    # implementation (pytorch / tvm_generic / tvm_opt) and a target device.
    parser = argparse.ArgumentParser()
    parser.add_argument("-s", "--shapes", help="Use which shapes [yolo, google, res, squeeze, vgg-16, vgg-19]", type=str, default="yolo")
    parser.add_argument("-f", "--from_", help="From which shape", type=int, default=0)
    parser.add_argument("-t", "--to", help="To which shape", type=int, default=-1)
    parser.add_argument("-n", "--number", help="number test run", type=int, default=10)
    parser.add_argument("--target", help="target device type", type=str, default="llvm")
    parser.add_argument("--device", help="target device number", type=int, default=0)
    parser.add_argument("--type", help="type of baseline", type=str, default="pytorch")
    parser.add_argument("--timeout", help="timeout of baseline", type=int, default=4)
    parser.add_argument("--trials", type=int, default=100)
    parser.add_argument("--tune", action="store_true")
    parser.add_argument("--batch", type=int, default=1, help="Specify the batch size of the shape")
    args = parser.parse_args()
    shapes = shape_dict[args.shapes]
    # --to < 0 means "until the last layer"
    if args.to < 0:
        end = len(shapes)
    else:
        end = args.to
    shapes = shapes[args.from_:end]
    print("Changing batch size to ", args.batch)
    # Override the batch dimension (index 0) of every selected shape
    for i in range(len(shapes)):
        shapes[i] = list(shapes[i])
        shapes[i][0] = args.batch
        shapes[i] = tuple(shapes[i])
    # Select the baseline callable matching --type / --target
    if args.type == "pytorch":
        if args.target == "cuda":
            baseline = pytorch_cuda
        elif args.target == "llvm":
            baseline = pytorch_cpu
        else:
            raise RuntimeError("Only support target 'llvm' and 'cuda', but got %s"%args.target)
    elif args.type == "tvm_generic":
        if args.target == "cuda":
            baseline = tvm_generic_cuda(timeout=args.timeout, trials=args.trials)
        elif args.target == "llvm":
            baseline = tvm_generic_llvm(timeout=args.timeout, trials=args.trials)
        else:
            raise RuntimeError("Only support target 'llvm' and 'cuda', but got %s"%args.target)
    elif args.type == "tvm_opt":
        if args.target == "cuda":
            baseline = tvm_opt_cuda(args.shapes, timeout=args.timeout, trials=args.trials, tune=args.tune)
        elif args.target == "llvm":
            baseline = tvm_opt_llvm(args.shapes, timeout=args.timeout, trials=args.trials, tune=args.tune)
        else:
            raise RuntimeError("Only support target 'llvm' and 'cuda', but got %s"%args.target)
    else:
        raise RuntimeError("Only implement pytorch baseline now, no '%s' baseline"%args.type)
    print("%s baselines for %s convolution 2d for target %s (%d):" % (args.type, args.shapes, args.target, args.device))
    for i, shape in enumerate(shapes):
        count = i + args.from_
        print("layer", count, shape)
        # shape layout: (batch, C_in, H, W, C_out, _, k_h, k_w, _, stride, padding, dilation, groups)
        batch, in_channel, height, width, out_channel, _, k_h, k_w, _, stride, padding, dilation, groups = shape
        batch = args.batch
        cost = baseline(batch, height, width, in_channel, k_h, out_channel, stride=stride, padding=padding, dilation=dilation, groups=groups, number=args.number, dev=args.device)
        print("Use %f(ms)" % cost)
        print()
    print("Done!")
| 43.622642 | 188 | 0.629974 |
acec4663ca482cd7b54dc800aa0459483af62296 | 134 | py | Python | 6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/production/penPosition.py | vishalbelsare/SLAPP3 | da187b771831aaaabaee16a26ad341db2e968104 | [
"CC0-1.0"
] | 8 | 2017-10-18T05:19:17.000Z | 2020-03-24T21:23:52.000Z | 6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/production/penPosition.py | vishalbelsare/SLAPP3 | da187b771831aaaabaee16a26ad341db2e968104 | [
"CC0-1.0"
] | null | null | null | 6 objectSwarmObserverAgents_AESOP_turtleLib_NetworkX/production/penPosition.py | vishalbelsare/SLAPP3 | da187b771831aaaabaee16a26ad341db2e968104 | [
"CC0-1.0"
] | 4 | 2017-10-25T09:07:49.000Z | 2019-08-18T09:17:58.000Z | # penPosition.py related to the projects with or without the use of a pen
# into the display
def setPen(address):
address.p = 0
| 19.142857 | 73 | 0.723881 |
acec468e9f4b787c063d933d1439c7df906c2704 | 194 | py | Python | uploads/core/forms.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 138 | 2016-08-24T09:51:52.000Z | 2022-02-02T14:43:48.000Z | uploads/core/forms.py | bopopescu/searchparty | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | [
"Apache-2.0"
] | 8 | 2017-06-27T06:45:32.000Z | 2021-07-04T15:35:07.000Z | uploads/core/forms.py | jasonravi/ankit_projetc | 95f20ac92f262b51118fe329cfe1d4c83711d273 | [
"MIT"
] | 163 | 2016-09-27T07:07:28.000Z | 2022-02-19T16:22:24.000Z | from django import forms
from uploads.core.models import Document
class DocumentForm(forms.ModelForm):
    """ModelForm bound to Document, exposing description and the uploaded file."""
    class Meta:
        model = Document
        fields = ('description', 'document', )
| 19.4 | 46 | 0.690722 |
acec469384fc5e56541dd23388cf7404abc6db07 | 186 | py | Python | walkthrough/donut.py | lightstep/python-opentelemetry-walkthrough | a9ad6789baf56b2f881fcdbb11b36380d36b8099 | [
"MIT"
] | 7 | 2019-12-17T15:26:42.000Z | 2020-10-21T20:38:04.000Z | walkthrough/donut.py | kavehhh/python-example | f00523fb4e2bfd5532f5e7ca7c762c0f24b30c2d | [
"MIT"
] | 8 | 2019-10-07T17:05:45.000Z | 2020-10-26T22:33:55.000Z | walkthrough/donut.py | kavehhh/python-example | f00523fb4e2bfd5532f5e7ca7c762c0f24b30c2d | [
"MIT"
] | 2 | 2019-10-18T19:13:29.000Z | 2020-04-15T06:51:33.000Z | class Donut(object):
def __init__(self, flavor, order_id, status):
self.flavor = flavor
self.order_id = order_id
self.status = status
__all__ = ['Donut']
| 16.909091 | 49 | 0.61828 |
acec4766947a2ead132472b6d6b1084e05d56208 | 1,404 | py | Python | tf_solution/r03_create_csv_for_tensorflow.py | MinliangLin/Keras-RetinaNet-for-Open-Images-Challenge-2018 | e9a1e0e463fd9792d839cba3d950d75ec6641b75 | [
"MIT"
] | 287 | 2018-08-31T20:13:07.000Z | 2022-01-27T23:50:59.000Z | tf_solution/r03_create_csv_for_tensorflow.py | MinliangLin/Keras-RetinaNet-for-Open-Images-Challenge-2018 | e9a1e0e463fd9792d839cba3d950d75ec6641b75 | [
"MIT"
] | 26 | 2019-02-01T07:55:18.000Z | 2022-02-10T00:00:46.000Z | tf_solution/r03_create_csv_for_tensorflow.py | MinliangLin/Keras-RetinaNet-for-Open-Images-Challenge-2018 | e9a1e0e463fd9792d839cba3d950d75ec6641b75 | [
"MIT"
] | 88 | 2018-09-01T17:51:24.000Z | 2021-08-10T03:10:13.000Z | # coding: utf-8
__author__ = 'ZFTurbo: https://kaggle.com/zfturbo'
if __name__ == '__main__':
    import os
    # GPU index to expose; must be set before any CUDA-using import
    gpu_use = 0
    print('GPU use: {}'.format(gpu_use))
    os.environ["KERAS_BACKEND"] = "tensorflow"
    os.environ["CUDA_VISIBLE_DEVICES"] = "{}".format(gpu_use)
from a00_utils_and_constants import *
from .r02_validation_with_tesnorflow import get_class_name_mappings
def create_csv_for_tf_predictions(input_dir, out_file):
    """Write a Kaggle submission CSV from cached TF detection pickles.

    Each ``*.pklz`` file in ``input_dir`` holds ``(scale, output_dict)``;
    the image id is the file name without the ".pklz" extension.  One CSV
    row per image: ``ImageId,PredictionString`` with space-separated
    ``label score xmin ymin xmax ymax`` repeated per detection.
    """
    tc1, _ = get_class_name_mappings()  # only the id -> label map is needed
    files = glob.glob(input_dir + '*.pklz')
    # BUGFIX: use a context manager so the CSV is flushed and closed even on
    # errors (the handle was previously never closed).
    with open(out_file, 'w') as out:
        out.write('ImageId,PredictionString\n')
        for f in files:
            image_id = os.path.basename(f)[:-5]  # strip ".pklz"; avoid shadowing builtin `id`
            scale, output_dict = load_from_file(f)
            num_detections = output_dict['num_detections']
            classes = output_dict['detection_classes']
            boxes = output_dict['detection_boxes']
            scores = output_dict['detection_scores']
            out.write(image_id + ',')
            for i in range(num_detections):
                label = tc1[classes[i]]
                # boxes are indexed [ymin, xmin, ymax, xmax]
                xmin = boxes[i][1]
                ymin = boxes[i][0]
                xmax = boxes[i][3]
                ymax = boxes[i][2]
                out.write('{} {} {} {} {} {} '.format(label, scores[i], xmin, ymin, xmax, ymax))
            out.write('\n')
if __name__ == '__main__':
    # Paths come from the star-import of a00_utils_and_constants
    create_csv_for_tf_predictions(OUTPUT_PATH + 'cache_tensorflow/', SUBM_PATH + 'tf_pretrained_model_kaggle_test.csv')
| 31.909091 | 119 | 0.633191 |
acec487d7d987109cec34c21f7eb91247573b649 | 609 | py | Python | py_abac/policy/conditions/collection/any_in.py | sarthakgupta072/py-abac | 7afcec37adc361a8c177c98044481aa5d2696303 | [
"Apache-2.0"
] | 47 | 2019-10-17T03:23:01.000Z | 2022-02-26T03:27:03.000Z | py_abac/policy/conditions/collection/any_in.py | sarthakgupta072/py-abac | 7afcec37adc361a8c177c98044481aa5d2696303 | [
"Apache-2.0"
] | 17 | 2020-02-24T02:17:34.000Z | 2022-02-15T20:11:19.000Z | py_abac/policy/conditions/collection/any_in.py | sarthakgupta072/py-abac | 7afcec37adc361a8c177c98044481aa5d2696303 | [
"Apache-2.0"
] | 10 | 2019-12-24T04:13:21.000Z | 2021-08-01T08:21:41.000Z | """
Any of the values in collection conditions
"""
from marshmallow import post_load
from .base import CollectionCondition, CollectionConditionSchema
class AnyIn(CollectionCondition):
    """
    Condition for any value of `what` in `values`
    """

    def _is_satisfied(self, what) -> bool:
        # Satisfied when `what` shares at least one element with `values`
        return not set(what).isdisjoint(self.values)
class AnyInSchema(CollectionConditionSchema):
    """
    JSON schema for any in collection condition
    """
    @post_load
    def post_load(self, data, **_):  # pylint: disable=missing-docstring,no-self-use
        # Deserialize the validated payload into an AnyIn condition instance
        return AnyIn(**data)
| 22.555556 | 84 | 0.681445 |
acec48cd8051aff4336840d61297f1a190bc1bae | 35,681 | py | Python | python/taichi/lang/__init__.py | mfkiwl/taichi | 899707f46a1b7f28efd927122104d59f02f7f5ec | [
"MIT"
] | null | null | null | python/taichi/lang/__init__.py | mfkiwl/taichi | 899707f46a1b7f28efd927122104d59f02f7f5ec | [
"MIT"
] | null | null | null | python/taichi/lang/__init__.py | mfkiwl/taichi | 899707f46a1b7f28efd927122104d59f02f7f5ec | [
"MIT"
] | null | null | null | import functools
import os
from copy import deepcopy as _deepcopy
from taichi.core.util import ti_core as _ti_core
from taichi.lang import impl
from taichi.lang.exception import InvalidOperationError
from taichi.lang.impl import *
from taichi.lang.kernel_arguments import ext_arr, template
from taichi.lang.kernel_impl import (KernelArgError, KernelDefError,
data_oriented, func, kernel, pyfunc)
from taichi.lang.matrix import Matrix, Vector
from taichi.lang.ndrange import GroupedNDRange, ndrange
from taichi.lang.ops import *
from taichi.lang.quant_impl import quant
from taichi.lang.runtime_ops import async_flush, sync
from taichi.lang.transformer import TaichiSyntaxError
from taichi.lang.type_factory_impl import type_factory
from taichi.lang.util import (has_pytorch, is_taichi_class, python_scope,
taichi_scope, to_numpy_type, to_pytorch_type,
to_taichi_type)
from taichi.misc.util import deprecated
from taichi.snode.fields_builder import FieldsBuilder
import taichi as ti
# TODO(#2223): Remove
core = _ti_core
runtime = impl.get_runtime()
# Convenience index symbols for field/struct-for declarations
i = indices(0)
j = indices(1)
k = indices(2)
l = indices(3)
ij = indices(0, 1)
ji = indices(1, 0)
jk = indices(1, 2)
kj = indices(2, 1)
ik = indices(0, 2)
ki = indices(2, 0)
ijk = indices(0, 1, 2)
ijkl = indices(0, 1, 2, 3)
# Deprecated free-function aliases of Matrix methods
outer_product = deprecated('ti.outer_product(a, b)',
                           'a.outer_product(b)')(Matrix.outer_product)
cross = deprecated('ti.cross(a, b)', 'a.cross(b)')(Matrix.cross)
dot = deprecated('ti.dot(a, b)', 'a.dot(b)')(Matrix.dot)
normalized = deprecated('ti.normalized(a)',
                        'a.normalized()')(Matrix.normalized)
cfg = default_cfg()
# Backend arch constants re-exported from the core module
x86_64 = _ti_core.x64
x64 = _ti_core.x64
arm64 = _ti_core.arm64
cuda = _ti_core.cuda
metal = _ti_core.metal
opengl = _ti_core.opengl
cc = _ti_core.cc
wasm = _ti_core.wasm
vulkan = _ti_core.vulkan
gpu = [cuda, metal, opengl, vulkan]
cpu = _ti_core.host_arch()
# Thin forwarders to the current program's timeline recorder
timeline_clear = lambda: impl.get_runtime().prog.timeline_clear()
timeline_save = lambda fn: impl.get_runtime().prog.timeline_save(fn)
# Legacy API
type_factory_ = _ti_core.get_type_factory_instance()
@deprecated('kernel_profiler_print()', 'print_kernel_profile_info()')
def kernel_profiler_print():
    """Deprecated alias of :func:`print_kernel_profile_info`."""
    return print_kernel_profile_info()
def print_kernel_profile_info():
    """Print the elapsed time(min,max,avg) of Taichi kernels on devices.
    To enable this profiler, set `kernel_profiler=True` in `ti.init`.

    Example::

        >>> import taichi as ti

        >>> ti.init(ti.cpu, kernel_profiler=True)
        >>> var = ti.field(ti.f32, shape=1)

        >>> @ti.kernel
        >>> def compute():
        >>>     var[0] = 1.0

        >>> compute()
        >>> ti.print_kernel_profile_info() #[1]

    Note:
        [1] Currently the result of `KernelProfiler` could be incorrect on OpenGL
        backend due to its lack of support for `ti.sync()`.
    """
    impl.get_runtime().prog.print_kernel_profile_info()
def query_kernel_profile_info(name):
    """Query kernel elapsed time(min,avg,max) on devices using the kernel name.
    To enable this profiler, set `kernel_profiler=True` in `ti.init`.

    Args:
        name (str): kernel name.

    Returns:
        struct KernelProfilerQueryResult with member variables(counter, min, max, avg)

    Example::

        >>> import taichi as ti

        >>> ti.init(ti.cpu, kernel_profiler=True)
        >>> n = 1024*1024
        >>> var = ti.field(ti.f32, shape=n)

        >>> @ti.kernel
        >>> def fill():
        >>>     for i in range(n):
        >>>         var[i] = 0.1

        >>> fill()
        >>> ti.clear_kernel_profile_info() #[1]
        >>> for i in range(100):
        >>>     fill()
        >>> query_result = ti.query_kernel_profile_info(fill.__name__) #[2]
        >>> print("kernel executed times =",query_result.counter)
        >>> print("kernel elapsed time(min_in_ms) =",query_result.min)
        >>> print("kernel elapsed time(max_in_ms) =",query_result.max)
        >>> print("kernel elapsed time(avg_in_ms) =",query_result.avg)

    Note:
        [1] To get the correct result, query_kernel_profile_info() must be used in conjunction with
        clear_kernel_profile_info().
        [2] Currently the result of `KernelProfiler` could be incorrect on OpenGL
        backend due to its lack of support for `ti.sync()`.
    """
    return impl.get_runtime().prog.query_kernel_profile_info(name)
@deprecated('kernel_profiler_clear()', 'clear_kernel_profile_info()')
def kernel_profiler_clear():
    """Deprecated alias of :func:`clear_kernel_profile_info`."""
    return clear_kernel_profile_info()
def clear_kernel_profile_info():
    """
    Clear all KernelProfiler records.
    """
    impl.get_runtime().prog.clear_kernel_profile_info()
def kernel_profiler_total_time():
    """
    Get elapsed time of all kernels recorded in KernelProfiler.

    Returns:
        time (double): total time in seconds
    """
    return impl.get_runtime().prog.kernel_profiler_total_time()
@deprecated('memory_profiler_print()', 'print_memory_profile_info()')
def memory_profiler_print():
    """Deprecated alias of :func:`print_memory_profile_info`."""
    return print_memory_profile_info()
def print_memory_profile_info():
    """Memory profiling tool for LLVM backends with full sparse support.
    This profiler is automatically on.
    """
    # Materialization is required before the program can report memory usage
    impl.get_runtime().materialize()
    impl.get_runtime().prog.print_memory_profiler_info()
# Re-export the core Extension enum
extension = _ti_core.Extension


def is_extension_supported(arch, ext):
    """Checks whether an extension is supported on an arch.

    Args:
        arch (taichi_core.Arch): Specified arch.
        ext (taichi_core.Extension): Specified extension.

    Returns:
        bool: Whether `ext` is supported on `arch`.
    """
    return _ti_core.is_extension_supported(arch, ext)
def reset():
    """Reset the Taichi runtime and rebind the module-level `runtime` alias."""
    impl.reset()
    global runtime
    runtime = impl.get_runtime()
class _EnvironmentConfigurator:
def __init__(self, kwargs, cfg):
self.cfg = cfg
self.kwargs = kwargs
self.keys = []
def add(self, key, cast=None):
cast = cast or self.bool_int
self.keys.append(key)
# TI_ASYNC= : no effect
# TI_ASYNC=0 : False
# TI_ASYNC=1 : True
name = 'TI_' + key.upper()
value = os.environ.get(name, '')
if len(value):
self[key] = cast(value)
if key in self.kwargs:
_ti_core.warn(
f'ti.init argument "{key}" overridden by environment variable {name}={value}'
)
del self.kwargs[key] # mark as recognized
elif key in self.kwargs:
self[key] = self.kwargs[key]
del self.kwargs[key] # mark as recognized
def __getitem__(self, key):
return getattr(self.cfg, key)
def __setitem__(self, key, value):
setattr(self.cfg, key, value)
@staticmethod
def bool_int(x):
return bool(int(x))
class _SpecialConfig:
# like CompileConfig in C++, this is the configurations that belong to other submodules
def __init__(self):
self.print_preprocessed = False
self.log_level = 'info'
self.gdb_trigger = False
self.excepthook = False
self.experimental_real_function = False
def init(arch=None,
         default_fp=None,
         default_ip=None,
         _test_mode=False,
         **kwargs):
    """Initialize (or re-initialize) the Taichi runtime.

    Args:
        arch: backend arch (or list of candidate archs) to use; falls back
            to CPU when unsupported. Overridable via the TI_ARCH env var.
        default_fp: default floating-point type (ti.f32 / ti.f64).
            Overridable via TI_DEFAULT_FP (values '32' / '64').
        default_ip: default integer type (ti.i32 / ti.i64).
            Overridable via TI_DEFAULT_IP (values '32' / '64').
        _test_mode (bool): internal; when True, skip runtime side effects and
            return the parsed _SpecialConfig instead of creating a program.
        **kwargs: any attribute of ti.cfg or _SpecialConfig.

    Raises:
        ValueError: on an invalid TI_DEFAULT_FP / TI_DEFAULT_IP value.
        KeyError: on unrecognized keyword arguments.
    """
    # Make a deepcopy in case these args reference to items from ti.cfg, which are
    # actually references. If no copy is made and the args are indeed references,
    # ti.reset() could override the args to their default values.
    default_fp = _deepcopy(default_fp)
    default_ip = _deepcopy(default_ip)
    kwargs = _deepcopy(kwargs)
    ti.reset()
    spec_cfg = _SpecialConfig()
    env_comp = _EnvironmentConfigurator(kwargs, ti.cfg)
    env_spec = _EnvironmentConfigurator(kwargs, spec_cfg)
    # configure default_fp/ip:
    # TODO: move these stuff to _SpecialConfig too:
    env_default_fp = os.environ.get("TI_DEFAULT_FP")
    if env_default_fp:
        if default_fp is not None:
            _ti_core.warn(
                f'ti.init argument "default_fp" overridden by environment variable TI_DEFAULT_FP={env_default_fp}'
            )
        if env_default_fp == '32':
            default_fp = ti.f32
        elif env_default_fp == '64':
            default_fp = ti.f64
        elif env_default_fp is not None:
            raise ValueError(
                f'Invalid TI_DEFAULT_FP={env_default_fp}, should be 32 or 64')
    env_default_ip = os.environ.get("TI_DEFAULT_IP")
    if env_default_ip:
        if default_ip is not None:
            _ti_core.warn(
                f'ti.init argument "default_ip" overridden by environment variable TI_DEFAULT_IP={env_default_ip}'
            )
        if env_default_ip == '32':
            default_ip = ti.i32
        elif env_default_ip == '64':
            default_ip = ti.i64
        elif env_default_ip is not None:
            raise ValueError(
                f'Invalid TI_DEFAULT_IP={env_default_ip}, should be 32 or 64')
    if default_fp is not None:
        impl.get_runtime().set_default_fp(default_fp)
    if default_ip is not None:
        impl.get_runtime().set_default_ip(default_ip)
    # submodule configurations (spec_cfg):
    env_spec.add('print_preprocessed')
    env_spec.add('log_level', str)
    env_spec.add('gdb_trigger')
    env_spec.add('excepthook')
    env_spec.add('experimental_real_function')
    # compiler configurations (ti.cfg):
    # every public attribute of ti.cfg becomes a recognized init kwarg / env var
    for key in dir(ti.cfg):
        if key in ['arch', 'default_fp', 'default_ip']:
            continue
        cast = type(getattr(ti.cfg, key))
        if cast is bool:
            # bools are parsed with the '0'/'1' convention, not bool('0')
            cast = None
        env_comp.add(key, cast)
    # anything left in kwargs was not consumed by the configurators above
    unexpected_keys = kwargs.keys()
    if len(unexpected_keys):
        raise KeyError(
            f'Unrecognized keyword argument(s) for ti.init: {", ".join(unexpected_keys)}'
        )
    # dispatch configurations that are not in ti.cfg:
    if not _test_mode:
        ti.set_gdb_trigger(spec_cfg.gdb_trigger)
        impl.get_runtime().print_preprocessed = spec_cfg.print_preprocessed
        impl.get_runtime().experimental_real_function = \
            spec_cfg.experimental_real_function
        ti.set_logging_level(spec_cfg.log_level.lower())
        if spec_cfg.excepthook:
            # TODO(#1405): add a way to restore old excepthook
            ti.enable_excepthook()
    # select arch (backend):
    env_arch = os.environ.get('TI_ARCH')
    if env_arch is not None:
        # TI_ARCH wins over the `arch` argument
        ti.info(f'Following TI_ARCH setting up for arch={env_arch}')
        arch = _ti_core.arch_from_name(env_arch)
    ti.cfg.arch = adaptive_arch_select(arch)
    print(f'[Taichi] Starting on arch={_ti_core.arch_name(ti.cfg.arch)}')
    if _test_mode:
        # test mode: stop before any program/runtime side effects
        return spec_cfg
    # create a new program:
    impl.get_runtime().create_program()
    ti.trace('Materializing runtime...')
    impl.get_runtime().prog.materialize_runtime()
    impl._root_fb = FieldsBuilder()
def no_activate(*args):
    # Forward each field's underlying SNode pointer to the C++ side
    # (presumably to suppress sparse activation on access — see the core's
    # no_activate binding).
    for field in args:
        _ti_core.no_activate(field.snode.ptr)
def block_local(*args):
    """Flag every member of the given fields with the block_local SNode
    access flag.

    Raises:
        InvalidOperationError: when dynamic_index is enabled in the current
            config, which is incompatible with this flag.
    """
    if ti.current_cfg().dynamic_index:
        raise InvalidOperationError(
            'dynamic_index is not allowed when block_local is turned on.')
    for field in args:
        for member in field.get_field_members():
            _ti_core.insert_snode_access_flag(
                _ti_core.SNodeAccessFlag.block_local, member.ptr)
@deprecated('ti.cache_shared', 'ti.block_local')
def cache_shared(*args):
    # Deprecated alias for block_local(); kept for backward compatibility.
    block_local(*args)
def cache_read_only(*args):
    # Flag every member of the given fields with the read_only SNode access
    # flag (mirrors block_local above, without the dynamic_index check).
    for field in args:
        for member in field.get_field_members():
            _ti_core.insert_snode_access_flag(
                _ti_core.SNodeAccessFlag.read_only, member.ptr)
def assume_in_range(val, base, low, high):
    # Thin wrapper over the core's expr_assume_in_range intrinsic.
    # `val` and `base` are wrapped into Expr; `low`/`high` are passed as-is.
    val_ptr = Expr(val).ptr
    base_ptr = Expr(base).ptr
    return _ti_core.expr_assume_in_range(val_ptr, base_ptr, low, high)
def loop_unique(val, covers=None):
    # Normalize `covers` into a list of raw pointers before handing it to
    # the core's expr_loop_unique intrinsic.
    if covers is None:
        covers = []
    elif not isinstance(covers, (list, tuple)):
        covers = [covers]
    cover_ptrs = [
        item.snode.ptr if isinstance(item, Expr) else item.ptr
        for item in covers
    ]
    return _ti_core.expr_loop_unique(Expr(val).ptr, cover_ptrs)
# Thin re-exports of the core's loop-scheduling primitives.
parallelize = _ti_core.parallelize


def serialize():
    """Serialize the following loop (equivalent to ``parallelize(1)``).

    Previously defined as a lambda; a ``def`` keeps identical behavior while
    giving the callable a proper name and docstring (PEP 8 E731).
    """
    return parallelize(1)


vectorize = _ti_core.vectorize
bit_vectorize = _ti_core.bit_vectorize
block_dim = _ti_core.block_dim
# Deprecated free-function aliases for the corresponding Matrix methods.
inversed = deprecated('ti.inversed(a)', 'a.inverse()')(Matrix.inversed)
transposed = deprecated('ti.transposed(a)', 'a.transpose()')(Matrix.transposed)
def polar_decompose(A, dt=None):
    """Perform polar decomposition (A=UP) for arbitrary size matrix.

    Thin wrapper for :func:`taichi.lang.linalg.polar_decompose`; see
    https://en.wikipedia.org/wiki/Polar_decomposition for the math.

    Args:
        A (ti.Matrix(n, n)): input nxn matrix `A`.
        dt (DataType): element data type, typically ti.f32 or ti.f64;
            defaults to the runtime's default floating-point type.

    Returns:
        Decomposed nxn matrices `U` and `P`.
    """
    from .linalg import polar_decompose as _polar_decompose
    if dt is None:
        dt = impl.get_runtime().default_fp
    return _polar_decompose(A, dt)
def svd(A, dt=None):
    """Perform singular value decomposition (A=USV^T) for arbitrary size matrix.

    Thin wrapper for :func:`taichi.lang.linalg.svd`; see
    https://en.wikipedia.org/wiki/Singular_value_decomposition for the math.

    Args:
        A (ti.Matrix(n, n)): input nxn matrix `A`.
        dt (DataType): element data type, typically ti.f32 or ti.f64;
            defaults to the runtime's default floating-point type.

    Returns:
        Decomposed nxn matrices `U`, `S` and `V`.
    """
    from .linalg import svd as _svd
    if dt is None:
        dt = impl.get_runtime().default_fp
    return _svd(A, dt)
def eig(A, dt=None):
    """Compute the eigenvalues and right eigenvectors of a real matrix.

    Mathematical concept refers to
    https://en.wikipedia.org/wiki/Eigendecomposition_of_a_matrix.
    Only 2x2 matrices are supported, via :func:`taichi.lang.linalg.eig2x2`.

    Args:
        A (ti.Matrix(n, n)): 2D Matrix for which the eigenvalues and right
            eigenvectors will be computed.
        dt (DataType): datatype of the results; defaults to the runtime's
            default floating-point type.

    Returns:
        eigenvalues (ti.Matrix(n, 2)): eigenvalues in complex form, one per
            row (real part, imaginary part).
        eigenvectors (ti.Matrix(n*2, n)): eigenvectors in complex form, one
            per column; each entry is a (real, imaginary) pair of numbers.
    """
    from taichi.lang import linalg
    if dt is None:
        dt = impl.get_runtime().default_fp
    if A.n != 2:
        raise Exception("Eigen solver only supports 2D matrices.")
    return linalg.eig2x2(A, dt)
def sym_eig(A, dt=None):
    """Compute the eigenvalues and right eigenvectors of a real symmetric matrix.

    Mathematical concept refers to
    https://en.wikipedia.org/wiki/Eigendecomposition_of_a_matrix.
    Only 2x2 matrices are supported, via :func:`taichi.lang.linalg.sym_eig2x2`.

    Args:
        A (ti.Matrix(n, n)): symmetric matrix for which the eigenvalues and
            right eigenvectors will be computed.
        dt (DataType): datatype of the results; defaults to the runtime's
            default floating-point type.

    Returns:
        eigenvalues (ti.Vector(n)): one eigenvalue per entry.
        eigenvectors (ti.Matrix(n, n)): one eigenvector per column.
    """
    assert all(A == A.transpose()), "A needs to be symmetric"
    from taichi.lang import linalg
    if dt is None:
        dt = impl.get_runtime().default_fp
    if A.n != 2:
        raise Exception("Symmetric eigen solver only supports 2D matrices.")
    return linalg.sym_eig2x2(A, dt)
def randn(dt=None):
    """Generates a random number from standard normal distribution.

    Thin wrapper for :func:`taichi.lang.random.randn`.

    Args:
        dt (DataType): datatype of the generated number; defaults to the
            runtime's default floating-point type.

    Returns:
        The generated random number.
    """
    from .random import randn as _randn
    if dt is None:
        dt = impl.get_runtime().default_fp
    return _randn(dt)
# Deprecated free-function aliases for the corresponding Matrix methods.
determinant = deprecated('ti.determinant(a)',
                         'a.determinant()')(Matrix.determinant)
tr = deprecated('ti.tr(a)', 'a.trace()')(Matrix.trace)
def Tape(loss, clear_gradients=True):
    """Return a context manager of :class:`~taichi.lang.tape.TapeImpl`. The
    context manager records all calls to functions decorated by
    :func:`~taichi.lang.kernel_impl.kernel` or
    :func:`~taichi.lang.complex_kernel` made inside the `with` statement, and
    computes the partial gradients of the given loss variable by calling the
    recorded functions' gradients in reverse order when the `with` statement
    ends.

    See also :func:`~taichi.lang.kernel_impl.kernel` and
    :func:`~taichi.lang.complex_kernel` for gradient functions.

    Args:
        loss(:class:`~taichi.lang.expr.Expr`): The loss field, whose shape should be ().
        clear_gradients(Bool): Before the `with` body starts, clear all gradients or not.

    Returns:
        :class:`~taichi.lang.tape.TapeImpl`: The context manager.

    Example::
        >>> @ti.kernel
        >>> def sum(a: ti.float32):
        >>>     for I in ti.grouped(x):
        >>>         y[None] += x[I] ** a
        >>>
        >>> with ti.Tape(loss = y):
        >>>     sum(2)"""
    # Materialize first so loss.shape and loss.snode are available below.
    impl.get_runtime().materialize()
    if len(loss.shape) != 0:
        raise RuntimeError(
            'The loss of `Tape` must be a 0-D field, i.e. scalar')
    if not loss.snode.ptr.has_grad():
        raise RuntimeError(
            'Gradients of loss are not allocated, please use ti.field(..., needs_grad=True)'
            ' for all fields that are required by autodiff.')
    if clear_gradients:
        clear_all_gradients()
    # clear_loss presumably zeroes the loss field before the taped forward
    # pass — see taichi.lang.meta.clear_loss.
    from taichi.lang.meta import clear_loss
    clear_loss(loss)
    return runtime.get_tape(loss)
def clear_all_gradients():
    """Set all fields' gradients to 0."""
    impl.get_runtime().materialize()

    def visit(node):
        # Walk the SNode tree; collect the gradient ("non-primal") places
        # directly under `node` and recurse into non-place children.
        grad_places = []
        for child_idx in range(node.ptr.get_num_ch()):
            child = node.ptr.get_ch(child_idx)
            if child.is_place():
                if not child.is_primal():
                    grad_places.append(child.get_expr())
            else:
                visit(SNode(child))
        if grad_places:
            from taichi.lang.meta import clear_gradients
            clear_gradients(tuple(grad_places))

    for root_fb in FieldsBuilder.finalized_roots():
        visit(root_fb)
def benchmark(func, repeat=300, args=()):
    """Benchmark `func(*args)` and record statistics via ti.stat_write.

    Runs the function once to compile (recording compilation_time and the
    codegen statistics), three times to warm up caches, then `repeat` timed
    iterations (recording wall_clk_t and exec_t per iteration).

    Args:
        func (Callable): the function to benchmark.
        repeat (int): number of timed iterations.
        args (tuple): positional arguments forwarded to `func`.
    """
    import time

    def run_benchmark():
        # The first call compiles the kernels; measure that separately.
        compile_time = time.time()
        func(*args)  # compile the kernel first
        ti.sync()
        compile_time = time.time() - compile_time
        ti.stat_write('compilation_time', compile_time)
        # Parse the "key: value" lines emitted by the codegen stat dump.
        codegen_stat = _ti_core.stat()
        for line in codegen_stat.split('\n'):
            try:
                a, b = line.strip().split(':')
            except ValueError:
                # Was a bare `except:`; only a malformed (non "key: value")
                # line should be skipped — everything else must propagate.
                continue
            a = a.strip()
            b = int(float(b))
            if a == 'codegen_kernel_statements':
                ti.stat_write('compiled_inst', b)
            if a == 'codegen_offloaded_tasks':
                ti.stat_write('compiled_tasks', b)
            elif a == 'launched_tasks':
                ti.stat_write('launched_tasks', b)
        # Use 3 initial iterations to warm up
        # instruction/data caches. Discussion:
        # https://github.com/taichi-dev/taichi/pull/1002#discussion_r426312136
        for _ in range(3):
            func(*args)
            ti.sync()
        ti.clear_kernel_profile_info()
        t = time.time()
        for _ in range(repeat):
            func(*args)
            ti.sync()
        elapsed = time.time() - t
        avg = elapsed / repeat
        ti.stat_write('wall_clk_t', avg)
        device_time = ti.kernel_profiler_total_time()
        avg_device_time = device_time / repeat
        ti.stat_write('exec_t', avg_device_time)

    run_benchmark()
def benchmark_plot(fn=None,
                   cases=None,
                   columns=None,
                   column_titles=None,
                   archs=None,
                   title=None,
                   bars='sync_vs_async',
                   bar_width=0.4,
                   bar_distance=0,
                   left_margin=0,
                   size=(12, 8)):
    """Plot benchmark results from a benchmark.yml file as grouped bar charts.

    Produces a grid of subplots (one row per case, one column per metric),
    each comparing two bar series chosen by `bars`:
    'sync_vs_async' (sync vs async of the same run), 'sync_regression' or
    'async_regression' (baseline run vs current run).

    Args:
        fn: path to benchmark.yml; defaults to <repo>/benchmarks/output/benchmark.yml.
        cases: benchmark case names to plot; defaults to all cases in the file.
        columns: metric keys to plot; defaults to all metrics of the first case.
        column_titles: subplot titles; defaults to `columns`.
        archs: restrict plotting to these archs; defaults to all recorded archs.
        title: figure title.
        bars: which two series to compare (see above).
        bar_width, bar_distance, left_margin, size: layout tweaks.
    """
    import matplotlib.pyplot as plt
    import yaml
    if fn is None:
        fn = os.path.join(_ti_core.get_repo_dir(), 'benchmarks', 'output',
                          'benchmark.yml')
    with open(fn, 'r') as f:
        data = yaml.load(f, Loader=yaml.SafeLoader)
    if bars != 'sync_vs_async':  # regression modes compare against a baseline file
        baseline_dir = os.path.join(_ti_core.get_repo_dir(), 'benchmarks',
                                    'baseline')
        baseline_file = f'{baseline_dir}/benchmark.yml'
        with open(baseline_file, 'r') as f:
            baseline_data = yaml.load(f, Loader=yaml.SafeLoader)
    if cases is None:
        cases = list(data.keys())
    assert len(cases) >= 1
    if len(cases) == 1:
        # plt.subplots with a single row would not return a 2D array of axes
        cases = [cases[0], cases[0]]
        ti.warning(
            'Function benchmark_plot does not support plotting with only one case for now. Duplicating the item to move on.'
        )
    if columns is None:
        columns = list(data[cases[0]].keys())
    if column_titles is None:
        column_titles = columns
    # Currently every column is normalized; kept as a hook for per-column control.
    normalize_to_lowest = lambda x: True
    figure, subfigures = plt.subplots(len(cases), len(columns))
    if title is None:
        title = 'Taichi Performance Benchmarks (Higher means more)'
    figure.suptitle(title, fontweight="bold")
    for col_id in range(len(columns)):
        subfigures[0][col_id].set_title(column_titles[col_id])
    for case_id in range(len(cases)):
        case = cases[case_id]
        # Row label: annotate the case name to the left of the first subplot.
        subfigures[case_id][0].annotate(
            case,
            xy=(0, 0.5),
            xytext=(-subfigures[case_id][0].yaxis.labelpad - 5, 0),
            xycoords=subfigures[case_id][0].yaxis.label,
            textcoords='offset points',
            size='large',
            ha='right',
            va='center')
        for col_id in range(len(columns)):
            col = columns[col_id]
            if archs is None:
                current_archs = data[case][col].keys()
            else:
                current_archs = [
                    x for x in archs if x in data[case][col].keys()
                ]
            # Pick the two bar series (left/right) according to `bars`.
            if bars == 'sync_vs_async':
                y_left = [
                    data[case][col][arch]['sync'] for arch in current_archs
                ]
                label_left = 'sync'
                y_right = [
                    data[case][col][arch]['async'] for arch in current_archs
                ]
                label_right = 'async'
            elif bars == 'sync_regression':
                y_left = [
                    baseline_data[case][col][arch]['sync']
                    for arch in current_archs
                ]
                label_left = 'before'
                y_right = [
                    data[case][col][arch]['sync'] for arch in current_archs
                ]
                label_right = 'after'
            elif bars == 'async_regression':
                y_left = [
                    baseline_data[case][col][arch]['async']
                    for arch in current_archs
                ]
                label_left = 'before'
                y_right = [
                    data[case][col][arch]['async'] for arch in current_archs
                ]
                label_right = 'after'
            else:
                raise RuntimeError('Unknown bars type')
            # Normalize each pair to its max so bars are comparable per arch.
            if normalize_to_lowest(col):
                for i in range(len(current_archs)):
                    maximum = max(y_left[i], y_right[i])
                    y_left[i] = y_left[i] / maximum if y_left[i] != 0 else 1
                    y_right[i] = y_right[i] / maximum if y_right[i] != 0 else 1
            ax = subfigures[case_id][col_id]
            bar_left = ax.bar(x=[
                i - bar_width / 2 - bar_distance / 2
                for i in range(len(current_archs))
            ],
                              height=y_left,
                              width=bar_width,
                              label=label_left,
                              color=(0.47, 0.69, 0.89, 1.0))
            bar_right = ax.bar(x=[
                i + bar_width / 2 + bar_distance / 2
                for i in range(len(current_archs))
            ],
                               height=y_right,
                               width=bar_width,
                               label=label_right,
                               color=(0.68, 0.26, 0.31, 1.0))
            ax.set_xticks(range(len(current_archs)))
            ax.set_xticklabels(current_archs)
    figure.legend((bar_left, bar_right), (label_left, label_right),
                  loc='lower center')
    figure.subplots_adjust(left=left_margin)
    fig = plt.gcf()
    fig.set_size_inches(size)
    plt.show()
def stat_write(key, value):
    """Record one benchmark statistic into benchmark.yml.

    Stores `value` under data[case][key][arch][sync|async], where the case
    name is taken from the TI_CURRENT_BENCHMARK environment variable (no-op
    when unset). The output directory defaults to '.' and can be overridden
    with TI_BENCHMARK_OUTPUT_DIR.
    """
    import yaml
    case_name = os.environ.get('TI_CURRENT_BENCHMARK')
    if case_name is None:
        return
    if case_name.startswith('benchmark_'):
        case_name = case_name[len('benchmark_'):]
    arch_name = _ti_core.arch_name(ti.cfg.arch)
    async_mode = 'async' if ti.cfg.async_mode else 'sync'
    output_dir = os.environ.get('TI_BENCHMARK_OUTPUT_DIR', '.')
    filename = f'{output_dir}/benchmark.yml'
    try:
        with open(filename, 'r') as f:
            data = yaml.load(f, Loader=yaml.SafeLoader)
    except FileNotFoundError:
        data = {}
    # yaml.load returns None for an empty file; normalize so the nested
    # setdefault chain below cannot crash with AttributeError.
    if data is None:
        data = {}
    data.setdefault(case_name, {})
    data[case_name].setdefault(key, {})
    data[case_name][key].setdefault(arch_name, {})
    data[case_name][key][arch_name][async_mode] = value
    with open(filename, 'w') as f:
        yaml.dump(data, f, Dumper=yaml.SafeDumper)
def is_arch_supported(arch):
    """Checks whether an arch is supported on the machine.

    Args:
        arch (taichi_core.Arch): Specified arch.

    Returns:
        bool: Whether `arch` is supported on the machine.
    """
    # Every value is a zero-arg callable returning the support flag.
    arch_table = {
        cuda: _ti_core.with_cuda,
        metal: _ti_core.with_metal,
        opengl: _ti_core.with_opengl,
        cc: _ti_core.with_cc,
        # Bug fix: this was `lambda: _ti_core.with_vulkan`, which returned the
        # function object itself (always truthy) instead of calling it, so
        # vulkan was always reported as supported. The lambda is kept so the
        # attribute lookup stays deferred into the try/except below.
        vulkan: lambda: _ti_core.with_vulkan(),
        wasm: lambda: True,
        cpu: lambda: True,
    }
    with_arch = arch_table.get(arch, lambda: False)
    try:
        return with_arch()
    except Exception as e:
        arch = _ti_core.arch_name(arch)
        _ti_core.warn(
            f"{e.__class__.__name__}: '{e}' occurred when detecting "
            f"{arch}, consider adding `export TI_WITH_{arch.upper()}=0` "
            f" to environment variables to suppress this warning message.")
        return False
def supported_archs():
    """Gets all supported archs on the machine.

    Honors TI_WANTED_ARCHS: a comma-separated list of arch names to keep,
    or, when prefixed with '^', to exclude.

    Returns:
        List[taichi_core.Arch]: All supported archs on the machine.
    """
    candidates = [cpu, cuda, metal, opengl, cc]
    spec = os.environ.get('TI_WANTED_ARCHS', '')
    exclude_mode = spec.startswith('^')
    if exclude_mode:
        spec = spec[1:]
    # ''.split(',') yields [''], so drop empty entries explicitly.
    wanted = [name for name in spec.split(',') if name]
    if wanted:
        candidates = [
            a for a in candidates
            if exclude_mode == (_ti_core.arch_name(a) not in wanted)
        ]
    return [a for a in candidates if is_arch_supported(a)]
def adaptive_arch_select(arch):
    # Pick the first supported arch from `arch` (a single arch or a list);
    # fall back to CPU when nothing is supported or nothing was requested.
    if arch is None:
        return cpu
    candidates = arch if isinstance(arch, (list, tuple)) else [arch]
    for candidate in candidates:
        if is_arch_supported(candidate):
            return candidate
    ti.warn(f'Arch={candidates} is not supported, falling back to CPU')
    return cpu
class _ArchCheckers(object):
    """Conjunction of predicates deciding whether a test may run on an arch."""
    def __init__(self):
        self._checkers = []

    def register(self, c):
        self._checkers.append(c)

    def __call__(self, arch):
        assert isinstance(arch, _ti_core.Arch)
        # The arch is accepted only if every registered checker approves.
        return all(checker(arch) for checker in self._checkers)
_tests_arch_checkers_argname = '_tests_arch_checkers'
def _get_or_make_arch_checkers(kwargs):
    # Fetch the _ArchCheckers smuggled through the test kwargs, creating and
    # stashing a fresh one on first use.
    checkers = kwargs.get(_tests_arch_checkers_argname)
    if checkers is None:
        checkers = _ArchCheckers()
        kwargs[_tests_arch_checkers_argname] = checkers
    return checkers
# test with all archs
def all_archs_with(**kwargs):
    """Decorator factory: run the decorated test once per supported arch,
    calling ti.init(arch=..., **kwargs) before each run."""
    kwargs = _deepcopy(kwargs)

    def decorator(test):
        # @pytest.mark.parametrize only understands regular function args,
        # not *args/**kwargs. @functools.wraps preserves |test|'s signature
        # so @ti.all_archs can be stacked after the parametrization
        # decorator. Full discussion:
        # https://github.com/pytest-dev/pytest/issues/6810
        @functools.wraps(test)
        def wrapped(*test_args, **test_kwargs):
            checkers = test_kwargs.pop(_tests_arch_checkers_argname,
                                       _ArchCheckers())
            # Filter away archs that don't support 64-bit data.
            default_fp = kwargs.get('default_fp', ti.f32)
            default_ip = kwargs.get('default_ip', ti.i32)
            if default_fp == ti.f64 or default_ip == ti.i64:
                checkers.register(lambda arch: is_extension_supported(
                    arch, extension.data64))
            for arch in ti.supported_archs():
                if not checkers(arch):
                    print('Skipped test on arch={}'.format(arch))
                    continue
                print('Running test on arch={}'.format(arch))
                ti.init(arch=arch, **kwargs)
                test(*test_args, **test_kwargs)

        return wrapped

    return decorator
# test with all archs
def all_archs(test):
    # Shorthand for all_archs_with() with no init kwargs.
    decorator = all_archs_with()
    return decorator(test)
# Exclude the given archs when running the tests
#
# Example usage:
#
# @ti.archs_excluding(ti.cuda, ti.metal)
# def test_xx():
# ...
#
# @ti.archs_excluding(ti.cuda, default_fp=ti.f64)
# def test_yy():
# ...
def archs_excluding(*excluded_archs, **kwargs):
    # |kwargs| is forwarded to all_archs_with(**kwargs)
    assert all(isinstance(a, _ti_core.Arch) for a in excluded_archs)
    excluded = set(excluded_archs)

    def decorator(test):
        @functools.wraps(test)
        def wrapped(*test_args, **test_kwargs):
            # Register a checker rejecting the excluded archs, then defer to
            # the all-archs runner.
            _get_or_make_arch_checkers(test_kwargs).register(
                lambda arch: arch not in excluded)
            return all_archs_with(**kwargs)(test)(*test_args, **test_kwargs)

        return wrapped

    return decorator
# Specifies the extension features the archs are required to support in order
# to run the test.
#
# Example usage:
#
# @ti.require(ti.extension.data64)
# @ti.all_archs_with(default_fp=ti.f64)
# def test_xx():
# ...
def require(*exts):
    # Because this decorator injects an arch checker, its usage must be
    # followed with all_archs_with(), either directly or indirectly.
    assert all(isinstance(e, _ti_core.Extension) for e in exts)

    def decorator(test):
        @functools.wraps(test)
        def wrapped(*test_args, **test_kwargs):
            def all_exts_supported(arch):
                return all(is_extension_supported(arch, e) for e in exts)

            _get_or_make_arch_checkers(test_kwargs).register(
                all_exts_supported)
            test(*test_args, **test_kwargs)

        return wrapped

    return decorator
def archs_support_sparse(test, **kwargs):
    # Run on all archs that support the sparse extension.
    return require(extension.sparse)(all_archs_with(**kwargs)(test))
def torch_test(func):
    if not ti.has_pytorch():
        # No PyTorch available: replace the test with a no-op.
        return lambda: None
    # OpenGL somehow crashes torch tests without a reason, unfortunately
    return ti.test(exclude=[opengl])(func)
# test with host arch only
def host_arch_only(func):
    """Decorator: run the wrapped test on the host arch only."""
    @functools.wraps(func)
    def test(*args, **kwargs):
        ti.init(arch=_ti_core.host_arch())
        func(*args, **kwargs)

    return test
def archs_with(archs, **init_kwargs):
    """
    Run the test on the given archs with the given init args.

    Args:
        archs: a list of Taichi archs
        init_kwargs: kwargs passed to ti.init()
    """
    # (fixed internal typo: was **init_kwags, inconsistent with the docstring)
    def decorator(test):
        @functools.wraps(test)
        def wrapped(*test_args, **test_kwargs):
            for arch in archs:
                # Re-init before each arch so every run starts clean.
                ti.init(arch=arch, **init_kwargs)
                test(*test_args, **test_kwargs)

        return wrapped

    return decorator
def must_throw(ex):
    """Test decorator: assert that the wrapped test raises `ex`.

    The test runs under host_arch_only. It passes only when `ex` is raised;
    any other exception, or a clean finish, fails the test.

    Args:
        ex: the exception class the test is expected to raise.

    Returns:
        Callable: the decorator.
    """
    def decorator(func):
        def func__(*args, **kwargs):
            finishes = False
            try:
                host_arch_only(func)(*args, **kwargs)
                finishes = True
            except ex:
                # throws. test passed
                pass
            except Exception as err_actual:
                # `raise AssertionError` instead of `assert False` so the
                # check survives `python -O` (which strips asserts).
                raise AssertionError(
                    'Exception {} instead of {} thrown'.format(
                        str(type(err_actual)), str(ex)))
            if finishes:
                raise AssertionError(
                    'Test successfully finished instead of throwing {}'.
                    format(str(ex)))

        return func__

    return decorator
def complex_kernel(func):
    """Decorator marking a plain Python function as a "complex kernel".

    A complex kernel may be given a custom gradient via the decorator
    produced by :func:`~taichi.lang.complex_kernel_grad`, and its calls are
    recorded by ti.Tape() so the custom gradient can be replayed in reverse
    order. The function itself is *not* compiled into a Taichi kernel; call
    ordinary Taichi kernels inside it to enable automatic parallel
    computing.

    Args:
        func (Callable): The Python function to be decorated.

    Returns:
        Callable: The decorated function, carrying a ``grad`` attribute
        (``None`` until complex_kernel_grad assigns it).

    Example::
        >>> @ti.kernel
        >>> def multiply(a: ti.float32):
        >>>     for I in ti.grouped(x):
        >>>         y[I] = x[I] * a
        >>>
        >>> @ti.kernel
        >>> def multiply_grad(a: ti.float32):
        >>>     for I in ti.grouped(x):
        >>>         x.grad[I] = y.grad[I] / a
        >>>
        >>> @ti.complex_kernel
        >>> def foo(a):
        >>>     multiply(a)
        >>>
        >>> @ti.complex_kernel_grad(foo)
        >>> def foo_grad(a):
        >>>     multiply_grad(a)"""
    def decorated(*args, **kwargs):
        runtime = impl.get_runtime()
        runtime.inside_complex_kernel = True
        # Record this call (and its args) on the active tape, if any, so the
        # custom gradient can be invoked during backprop.
        if runtime.target_tape:
            runtime.target_tape.insert(decorated, args)
        try:
            func(*args, **kwargs)
        finally:
            impl.get_runtime().inside_complex_kernel = False

    decorated.grad = None
    return decorated
def complex_kernel_grad(primal):
    """Generate the gradient decorator for a function decorated by
    :func:`~taichi.lang.complex_kernel`.

    The decorated function is installed as ``primal.grad`` so that
    ti.Tape() can find it during backprop. See
    :func:`~taichi.lang.complex_kernel` for details and examples.

    Args:
        primal (Callable): The primal function for the decorator.

    Returns:
        Callable: The decorator."""
    def decorator(grad_func):
        def wrapped_grad(*args, **kwargs):
            grad_func(*args, **kwargs)

        primal.grad = wrapped_grad
        return wrapped_grad

    return decorator
__all__ = [s for s in dir() if not s.startswith('_')]
| 32.319746 | 233 | 0.613912 |
acec48d93bcaee05d46bc7016365a0e2d5f89364 | 30,435 | py | Python | pysnmp/CMM4-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 11 | 2021-02-02T16:27:16.000Z | 2021-08-31T06:22:49.000Z | pysnmp/CMM4-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 75 | 2021-02-24T17:30:31.000Z | 2021-12-08T00:01:18.000Z | pysnmp/CMM4-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
] | 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CMM4-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CMM4-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:09:18 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
Gauge32, NotificationType, Bits, ObjectIdentity, iso, Integer32, MibIdentifier, Counter64, ModuleIdentity, IpAddress, Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "NotificationType", "Bits", "ObjectIdentity", "iso", "Integer32", "MibIdentifier", "Counter64", "ModuleIdentity", "IpAddress", "Counter32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Unsigned32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
whispBox, whispCMM4, whispModules = mibBuilder.importSymbols("WHISP-GLOBAL-REG-MIB", "whispBox", "whispCMM4", "whispModules")
EventString, WhispLUID, WhispMACAddress = mibBuilder.importSymbols("WHISP-TCV2-MIB", "EventString", "WhispLUID", "WhispMACAddress")
# --- Module identity ----------------------------------------------------
# Auto-generated by pysmi from the CMM4-MIB ASN.1 source; edit the MIB,
# not this file.
cmm4MibModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 161, 19, 1, 1, 15))
if mibBuilder.loadTexts: cmm4MibModule.setLastUpdated('200603290000Z')
if mibBuilder.loadTexts: cmm4MibModule.setOrganization('Cambium Networks')
# --- OID subtree roots for the CMM4 (under 1.3.6.1.4.1.161.19.3.6) ------
cmm4Groups = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1))
cmm4Config = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2))
cmm4Status = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3))
cmm4Gps = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4))
cmm4EventLog = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 5))
cmm4Controls = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 6))
cmm4Snmp = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7))
cmm4Event = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 8))
cmm4GPSEvent = MibIdentifier((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 8, 1))
# --- Conformance groups (object groupings under cmm4Groups) -------------
# NOTE: the getattr() guards only call setStatus() on pysnmp builds newer
# than 4.4.0, where ObjectGroup supports it.
cmm4PortCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 1)).setObjects(("CMM4-MIB", "portCfgIndex"), ("CMM4-MIB", "cmm4PortText"), ("CMM4-MIB", "cmm4PortDevType"), ("CMM4-MIB", "cmm4PortPowerCfg"), ("CMM4-MIB", "cmm4PortResetCfg"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4PortCfgGroup = cmm4PortCfgGroup.setStatus('current')
cmm4ConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 2)).setObjects(("CMM4-MIB", "gpsTimingPulse"), ("CMM4-MIB", "lan1Ip"), ("CMM4-MIB", "lan1SubnetMask"), ("CMM4-MIB", "defaultGateway"), ("CMM4-MIB", "cmm4WebAutoUpdate"), ("CMM4-MIB", "cmm4ExtEthPowerReset"), ("CMM4-MIB", "cmm4IpAccessFilter"), ("CMM4-MIB", "cmm4IpAccess1"), ("CMM4-MIB", "cmm4IpAccess2"), ("CMM4-MIB", "cmm4IpAccess3"), ("CMM4-MIB", "cmm4MgmtPortSpeed"), ("CMM4-MIB", "cmm4NTPServerIp"), ("CMM4-MIB", "sessionTimeout"), ("CMM4-MIB", "vlanEnable"), ("CMM4-MIB", "managementVID"), ("CMM4-MIB", "siteInfoViewable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4ConfigGroup = cmm4ConfigGroup.setStatus('current')
cmm4PortStatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 3)).setObjects(("CMM4-MIB", "portStatusIndex"), ("CMM4-MIB", "cmm4PortPowerStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4PortStatusGroup = cmm4PortStatusGroup.setStatus('current')
cmm4StatusGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 4)).setObjects(("CMM4-MIB", "deviceType"), ("CMM4-MIB", "cmm4pldVersion"), ("CMM4-MIB", "cmm4SoftwareVersion"), ("CMM4-MIB", "cmm4SystemTime"), ("CMM4-MIB", "cmm4UpTime"), ("CMM4-MIB", "satellitesVisible"), ("CMM4-MIB", "satellitesTracked"), ("CMM4-MIB", "latitude"), ("CMM4-MIB", "longitude"), ("CMM4-MIB", "height"), ("CMM4-MIB", "trackingMode"), ("CMM4-MIB", "syncStatus"), ("CMM4-MIB", "cmm4MacAddress"), ("CMM4-MIB", "cmm4ExtEthPwrStat"), ("CMM4-MIB", "cmm4FPGAVersion"), ("CMM4-MIB", "cmm4FPGAPlatform"), ("CMM4-MIB", "defaultStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4StatusGroup = cmm4StatusGroup.setStatus('current')
cmm4GPSGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 5)).setObjects(("CMM4-MIB", "gpsTrackingMode"), ("CMM4-MIB", "gpsTime"), ("CMM4-MIB", "gpsDate"), ("CMM4-MIB", "gpsSatellitesVisible"), ("CMM4-MIB", "gpsSatellitesTracked"), ("CMM4-MIB", "gpsHeight"), ("CMM4-MIB", "gpsAntennaConnection"), ("CMM4-MIB", "gpsLatitude"), ("CMM4-MIB", "gpsLongitude"), ("CMM4-MIB", "gpsInvalidMsg"), ("CMM4-MIB", "gpsRestartCount"), ("CMM4-MIB", "gpsReceiverInfo"), ("CMM4-MIB", "gpsSyncStatus"), ("CMM4-MIB", "gpsSyncMasterSlave"), ("CMM4-MIB", "gpsLog"), ("CMM4-MIB", "gpsReInitCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4GPSGroup = cmm4GPSGroup.setStatus('current')
cmm4ControlsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 6)).setObjects(("CMM4-MIB", "cmm4Reboot"), ("CMM4-MIB", "cmm4ClearEventLog"), ("CMM4-MIB", "cmm4RebootIfRequired"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4ControlsGroup = cmm4ControlsGroup.setStatus('current')
cmm4SNMPGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 7)).setObjects(("CMM4-MIB", "cmm4SnmpComString"), ("CMM4-MIB", "cmm4SnmpAccessSubnet"), ("CMM4-MIB", "cmm4SnmpTrapIp1"), ("CMM4-MIB", "cmm4SnmpTrapIp2"), ("CMM4-MIB", "cmm4SnmpTrapIp3"), ("CMM4-MIB", "cmm4SnmpTrapIp4"), ("CMM4-MIB", "cmm4SnmpTrapIp5"), ("CMM4-MIB", "cmm4SnmpTrapIp6"), ("CMM4-MIB", "cmm4SnmpTrapIp7"), ("CMM4-MIB", "cmm4SnmpTrapIp8"), ("CMM4-MIB", "cmm4SnmpTrapIp9"), ("CMM4-MIB", "cmm4SnmpTrapIp10"), ("CMM4-MIB", "cmm4SnmpReadOnly"), ("CMM4-MIB", "cmm4SnmpGPSSyncTrapEnable"), ("CMM4-MIB", "cmm4SnmpAccessSubnet2"), ("CMM4-MIB", "cmm4SnmpAccessSubnet3"), ("CMM4-MIB", "cmm4SnmpAccessSubnet4"), ("CMM4-MIB", "cmm4SnmpAccessSubnet5"), ("CMM4-MIB", "cmm4SnmpAccessSubnet6"), ("CMM4-MIB", "cmm4SnmpAccessSubnet7"), ("CMM4-MIB", "cmm4SnmpAccessSubnet8"), ("CMM4-MIB", "cmm4SnmpAccessSubnet9"), ("CMM4-MIB", "cmm4SnmpAccessSubnet10"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4SNMPGroup = cmm4SNMPGroup.setStatus('current')
cmm4UserTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 1, 8)).setObjects(("CMM4-MIB", "entryIndex"), ("CMM4-MIB", "userLoginName"), ("CMM4-MIB", "userPswd"), ("CMM4-MIB", "accessLevel"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    cmm4UserTableGroup = cmm4UserTableGroup.setStatus('current')
# --- Configuration objects (cmm4Config subtree, .2) ---------------------
# Read-write scalars for network, GPS timing and access settings.
gpsTimingPulse = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("master", 1), ("slave", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: gpsTimingPulse.setStatus('current')
lan1Ip = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1Ip.setStatus('current')
lan1SubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: lan1SubnetMask.setStatus('current')
defaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: defaultGateway.setStatus('current')
cmm4WebAutoUpdate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 5), Integer32()).setUnits('Seconds').setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4WebAutoUpdate.setStatus('current')
cmm4ExtEthPowerReset = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4ExtEthPowerReset.setStatus('current')
cmm4IpAccessFilter = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4IpAccessFilter.setStatus('current')
cmm4IpAccess1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4IpAccess1.setStatus('current')
cmm4IpAccess2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4IpAccess2.setStatus('current')
cmm4IpAccess3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 11), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4IpAccess3.setStatus('current')
cmm4MgmtPortSpeed = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("autoNegotiate", 1), ("force10Half", 2), ("force10Full", 3), ("force100Half", 4), ("force100Full", 5)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4MgmtPortSpeed.setStatus('current')
cmm4NTPServerIp = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 13), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4NTPServerIp.setStatus('current')
sessionTimeout = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 14), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: sessionTimeout.setStatus('current')
vlanEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: vlanEnable.setStatus('current')
managementVID = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4094))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: managementVID.setStatus('current')
siteInfoViewable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("enable", 1), ("disable", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: siteInfoViewable.setStatus('current')
# Port configuration table (.2.7): one row per port, indexed 1..8.
cmm4PortCfgTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7), )
if mibBuilder.loadTexts: cmm4PortCfgTable.setStatus('current')
cmm4PortCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1), ).setIndexNames((0, "CMM4-MIB", "portCfgIndex"))
if mibBuilder.loadTexts: cmm4PortCfgEntry.setStatus('current')
portCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portCfgIndex.setStatus('current')
cmm4PortText = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4PortText.setStatus('current')
cmm4PortDevType = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("none", 0), ("canopy", 1), ("canopy56V", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4PortDevType.setStatus('current')
cmm4PortPowerCfg = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("on", 1), ("off", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4PortPowerCfg.setStatus('current')
cmm4PortResetCfg = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 2, 7, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("resetPort", 1), ("resetComplete", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4PortResetCfg.setStatus('current')
# --- Status objects (cmm4Status subtree, .3) ----------------------------
# Read-only scalars describing the device and its GPS/power state.
deviceType = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: deviceType.setStatus('current')
cmm4pldVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4pldVersion.setStatus('current')
cmm4SoftwareVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4SoftwareVersion.setStatus('current')
cmm4SystemTime = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4SystemTime.setStatus('current')
cmm4UpTime = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4UpTime.setStatus('current')
satellitesVisible = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: satellitesVisible.setStatus('current')
satellitesTracked = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: satellitesTracked.setStatus('current')
latitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: latitude.setStatus('current')
longitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: longitude.setStatus('current')
height = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 11), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: height.setStatus('current')
trackingMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: trackingMode.setStatus('current')
syncStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 13), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: syncStatus.setStatus('current')
cmm4MacAddress = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 14), WhispMACAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4MacAddress.setStatus('current')
cmm4ExtEthPwrStat = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("off", 0), ("on", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4ExtEthPwrStat.setStatus('current')
cmm4FPGAVersion = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 16), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4FPGAVersion.setStatus('current')
cmm4FPGAPlatform = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 17), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4FPGAPlatform.setStatus('current')
defaultStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("none", 0), ("defaultPlugInserted", 1), ("defaultSwitchActive", 2), ("defaultPlugInsertedAndDefaultSwitchActive", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: defaultStatus.setStatus('current')
# Port status table (.3.1): per-port power state, incl. PoE fault (-1).
cmm4PortStatusTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 1), )
if mibBuilder.loadTexts: cmm4PortStatusTable.setStatus('current')
cmm4PortStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 1, 1), ).setIndexNames((0, "CMM4-MIB", "portStatusIndex"))
if mibBuilder.loadTexts: cmm4PortStatusEntry.setStatus('current')
portStatusIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portStatusIndex.setStatus('current')
cmm4PortPowerStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0, -1))).clone(namedValues=NamedValues(("on", 1), ("off", 0), ("powerOverEthernetFault", -1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cmm4PortPowerStatus.setStatus('current')
# --- GPS receiver status (cmm4Gps subtree, .4) --------------------------
gpsTrackingMode = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsTrackingMode.setStatus('current')
gpsTime = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsTime.setStatus('current')
gpsDate = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsDate.setStatus('current')
gpsSatellitesVisible = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsSatellitesVisible.setStatus('current')
gpsSatellitesTracked = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsSatellitesTracked.setStatus('current')
gpsHeight = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsHeight.setStatus('current')
gpsAntennaConnection = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsAntennaConnection.setStatus('current')
gpsLatitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 8), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsLatitude.setStatus('current')
gpsLongitude = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsLongitude.setStatus('current')
gpsInvalidMsg = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 10), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsInvalidMsg.setStatus('current')
gpsRestartCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsRestartCount.setStatus('current')
gpsReceiverInfo = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 12), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsReceiverInfo.setStatus('current')
gpsSyncStatus = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("syncOK", 1), ("noSync", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsSyncStatus.setStatus('current')
gpsSyncMasterSlave = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("cmmIsGPSMaster", 1), ("cmmIsGPSSlave", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsSyncMasterSlave.setStatus('current')
gpsLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 15), EventString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsLog.setStatus('current')
gpsReInitCount = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 4, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: gpsReInitCount.setStatus('current')
# --- Event logs (cmm4EventLog subtree, .5) ------------------------------
eventLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 5, 1), EventString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: eventLog.setStatus('current')
ntpLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 5, 2), EventString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpLog.setStatus('current')
# --- Controls (cmm4Controls subtree, .6): write-to-act toggles ----------
cmm4Reboot = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("reboot", 1), ("finishedReboot", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4Reboot.setStatus('current')
cmm4ClearEventLog = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 6, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("clear", 1), ("notClear", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4ClearEventLog.setStatus('current')
cmm4RebootIfRequired = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 6, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("rebootifrquired", 1), ("rebootcomplete", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4RebootIfRequired.setStatus('current')
# --- SNMP agent configuration (cmm4Snmp subtree, .7) --------------------
# Community string, up to 10 accessing subnets and 10 trap destinations.
cmm4SnmpComString = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 1), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpComString.setStatus('current')
cmm4SnmpAccessSubnet = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet.setStatus('current')
cmm4SnmpTrapIp1 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp1.setStatus('current')
cmm4SnmpTrapIp2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp2.setStatus('current')
cmm4SnmpTrapIp3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp3.setStatus('current')
cmm4SnmpTrapIp4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp4.setStatus('current')
cmm4SnmpTrapIp5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp5.setStatus('current')
cmm4SnmpTrapIp6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp6.setStatus('current')
cmm4SnmpTrapIp7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp7.setStatus('current')
cmm4SnmpTrapIp8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp8.setStatus('current')
cmm4SnmpTrapIp9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 11), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp9.setStatus('current')
cmm4SnmpTrapIp10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 12), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpTrapIp10.setStatus('current')
cmm4SnmpReadOnly = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 0))).clone(namedValues=NamedValues(("readOnlyPermissions", 1), ("readWritePermissions", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpReadOnly.setStatus('current')
cmm4SnmpGPSSyncTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("gpsSyncTrapDisabled", 0), ("gpsSyncTrapEnabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpGPSSyncTrapEnable.setStatus('current')
cmm4SnmpAccessSubnet2 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 15), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet2.setStatus('current')
cmm4SnmpAccessSubnet3 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 16), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet3.setStatus('current')
cmm4SnmpAccessSubnet4 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 17), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet4.setStatus('current')
cmm4SnmpAccessSubnet5 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 18), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet5.setStatus('current')
cmm4SnmpAccessSubnet6 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 19), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet6.setStatus('current')
cmm4SnmpAccessSubnet7 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 20), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet7.setStatus('current')
cmm4SnmpAccessSubnet8 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 21), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet8.setStatus('current')
cmm4SnmpAccessSubnet9 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 22), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet9.setStatus('current')
cmm4SnmpAccessSubnet10 = MibScalar((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 7, 23), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cmm4SnmpAccessSubnet10.setStatus('current')
# --- GPS sync notifications (cmm4GPSEvent subtree, .8.1) ----------------
cmm4GPSInSync = NotificationType((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 8, 1, 1)).setObjects(("CMM4-MIB", "gpsSyncStatus"), ("CMM4-MIB", "cmm4MacAddress"))
if mibBuilder.loadTexts: cmm4GPSInSync.setStatus('current')
cmm4GPSNoSync = NotificationType((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 8, 1, 2)).setObjects(("CMM4-MIB", "gpsSyncStatus"), ("CMM4-MIB", "cmm4MacAddress"))
if mibBuilder.loadTexts: cmm4GPSNoSync.setStatus('current')
# --- User account table (.9): up to 5 entries, read-only ----------------
cmm4UserTable = MibTable((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9), )
if mibBuilder.loadTexts: cmm4UserTable.setStatus('current')
cmm4UserEntry = MibTableRow((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9, 1), ).setIndexNames((0, "CMM4-MIB", "entryIndex"))
if mibBuilder.loadTexts: cmm4UserEntry.setStatus('current')
entryIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entryIndex.setStatus('current')
userLoginName = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userLoginName.setStatus('current')
userPswd = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: userPswd.setStatus('current')
accessLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 161, 19, 3, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("noAdmin", 0), ("guest", 1), ("installer", 2), ("administrator", 3), ("technician", 4), ("engineering", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: accessLevel.setStatus('current')
# Register every defined symbol with the MIB builder so other modules and
# the SNMP engine can resolve them by name.
mibBuilder.exportSymbols("CMM4-MIB", cmm4SystemTime=cmm4SystemTime, cmm4SnmpAccessSubnet10=cmm4SnmpAccessSubnet10, cmm4IpAccess1=cmm4IpAccess1, gpsSatellitesVisible=gpsSatellitesVisible, gpsTimingPulse=gpsTimingPulse, cmm4ClearEventLog=cmm4ClearEventLog, lan1Ip=lan1Ip, cmm4PortText=cmm4PortText, cmm4ExtEthPowerReset=cmm4ExtEthPowerReset, cmm4PortCfgEntry=cmm4PortCfgEntry, cmm4PortCfgTable=cmm4PortCfgTable, cmm4NTPServerIp=cmm4NTPServerIp, cmm4SnmpTrapIp4=cmm4SnmpTrapIp4, cmm4SnmpAccessSubnet7=cmm4SnmpAccessSubnet7, cmm4PortPowerCfg=cmm4PortPowerCfg, cmm4StatusGroup=cmm4StatusGroup, cmm4RebootIfRequired=cmm4RebootIfRequired, cmm4IpAccess2=cmm4IpAccess2, gpsRestartCount=gpsRestartCount, cmm4IpAccessFilter=cmm4IpAccessFilter, gpsInvalidMsg=gpsInvalidMsg, PYSNMP_MODULE_ID=cmm4MibModule, cmm4PortCfgGroup=cmm4PortCfgGroup, cmm4GPSNoSync=cmm4GPSNoSync, cmm4SnmpAccessSubnet=cmm4SnmpAccessSubnet, gpsHeight=gpsHeight, cmm4SnmpComString=cmm4SnmpComString, cmm4Controls=cmm4Controls, cmm4ConfigGroup=cmm4ConfigGroup, cmm4SnmpAccessSubnet3=cmm4SnmpAccessSubnet3, trackingMode=trackingMode, cmm4PortStatusGroup=cmm4PortStatusGroup, cmm4FPGAVersion=cmm4FPGAVersion, cmm4MacAddress=cmm4MacAddress, cmm4SnmpTrapIp8=cmm4SnmpTrapIp8, cmm4IpAccess3=cmm4IpAccess3, defaultStatus=defaultStatus, deviceType=deviceType, cmm4GPSInSync=cmm4GPSInSync, sessionTimeout=sessionTimeout, gpsLongitude=gpsLongitude, cmm4Snmp=cmm4Snmp, height=height, cmm4GPSGroup=cmm4GPSGroup, cmm4SnmpTrapIp10=cmm4SnmpTrapIp10, cmm4SnmpAccessSubnet5=cmm4SnmpAccessSubnet5, portStatusIndex=portStatusIndex, userPswd=userPswd, siteInfoViewable=siteInfoViewable, gpsLatitude=gpsLatitude, cmm4UserEntry=cmm4UserEntry, cmm4pldVersion=cmm4pldVersion, cmm4PortDevType=cmm4PortDevType, cmm4PortResetCfg=cmm4PortResetCfg, satellitesTracked=satellitesTracked, syncStatus=syncStatus, cmm4SnmpTrapIp2=cmm4SnmpTrapIp2, gpsSatellitesTracked=gpsSatellitesTracked, cmm4Gps=cmm4Gps, cmm4UserTableGroup=cmm4UserTableGroup, cmm4MibModule=cmm4MibModule, 
cmm4SnmpAccessSubnet8=cmm4SnmpAccessSubnet8, longitude=longitude, managementVID=managementVID, gpsDate=gpsDate, entryIndex=entryIndex, cmm4Status=cmm4Status, cmm4SnmpReadOnly=cmm4SnmpReadOnly, gpsReInitCount=gpsReInitCount, cmm4SoftwareVersion=cmm4SoftwareVersion, cmm4MgmtPortSpeed=cmm4MgmtPortSpeed, cmm4PortStatusEntry=cmm4PortStatusEntry, gpsAntennaConnection=gpsAntennaConnection, cmm4SnmpTrapIp7=cmm4SnmpTrapIp7, gpsSyncStatus=gpsSyncStatus, cmm4SnmpTrapIp9=cmm4SnmpTrapIp9, cmm4SnmpAccessSubnet4=cmm4SnmpAccessSubnet4, cmm4SnmpGPSSyncTrapEnable=cmm4SnmpGPSSyncTrapEnable, satellitesVisible=satellitesVisible, portCfgIndex=portCfgIndex, cmm4SnmpTrapIp6=cmm4SnmpTrapIp6, defaultGateway=defaultGateway, cmm4Groups=cmm4Groups, cmm4SnmpTrapIp1=cmm4SnmpTrapIp1, eventLog=eventLog, latitude=latitude, vlanEnable=vlanEnable, cmm4UserTable=cmm4UserTable, gpsReceiverInfo=gpsReceiverInfo, cmm4SNMPGroup=cmm4SNMPGroup, cmm4ExtEthPwrStat=cmm4ExtEthPwrStat, cmm4EventLog=cmm4EventLog, cmm4FPGAPlatform=cmm4FPGAPlatform, gpsLog=gpsLog, cmm4GPSEvent=cmm4GPSEvent, cmm4SnmpTrapIp5=cmm4SnmpTrapIp5, cmm4Event=cmm4Event, accessLevel=accessLevel, userLoginName=userLoginName, cmm4SnmpAccessSubnet9=cmm4SnmpAccessSubnet9, cmm4Config=cmm4Config, cmm4SnmpAccessSubnet2=cmm4SnmpAccessSubnet2, cmm4UpTime=cmm4UpTime, cmm4PortStatusTable=cmm4PortStatusTable, ntpLog=ntpLog, cmm4WebAutoUpdate=cmm4WebAutoUpdate, gpsSyncMasterSlave=gpsSyncMasterSlave, cmm4PortPowerStatus=cmm4PortPowerStatus, gpsTime=gpsTime, cmm4SnmpTrapIp3=cmm4SnmpTrapIp3, gpsTrackingMode=gpsTrackingMode, lan1SubnetMask=lan1SubnetMask, cmm4SnmpAccessSubnet6=cmm4SnmpAccessSubnet6, cmm4Reboot=cmm4Reboot, cmm4ControlsGroup=cmm4ControlsGroup)
acec492f6243bacaf88da671fdf1b6b77197bc4f | 2,284 | py | Python | src/oscar/apps/dashboard/offers/apps.py | mohamedkhaledegy/django-oscar | a4d07e028454ce49a7753dd3b2a3bb898a238883 | [
"BSD-3-Clause"
] | 3 | 2020-01-15T03:12:14.000Z | 2021-11-15T12:23:23.000Z | src/oscar/apps/dashboard/offers/apps.py | mohamedkhaledegy/django-oscar | a4d07e028454ce49a7753dd3b2a3bb898a238883 | [
"BSD-3-Clause"
] | 5 | 2021-05-28T19:53:02.000Z | 2022-03-12T00:51:43.000Z | src/oscar/apps/dashboard/offers/apps.py | mohamedkhaledegy/django-oscar | a4d07e028454ce49a7753dd3b2a3bb898a238883 | [
"BSD-3-Clause"
] | 2 | 2019-10-07T13:11:33.000Z | 2020-01-03T11:53:48.000Z | from django.urls import path
from django.utils.translation import gettext_lazy as _
from oscar.core.application import OscarDashboardConfig
from oscar.core.loading import get_class
class OffersDashboardConfig(OscarDashboardConfig):
    """Django app config for the offers dashboard.

    Resolves the offer CRUD view classes through Oscar's dynamic class loader
    and exposes URL patterns for listing, creating, updating, deleting and
    inspecting conditional offers.
    """
    label = 'offers_dashboard'
    name = 'oscar.apps.dashboard.offers'
    verbose_name = _('Offers dashboard')

    default_permissions = ['is_staff', ]

    def ready(self):
        # Load views via get_class so a project can override any of them by
        # shipping its own `dashboard.offers.views` module.
        views_module = 'dashboard.offers.views'
        self.list_view = get_class(views_module, 'OfferListView')
        self.metadata_view = get_class(views_module, 'OfferMetaDataView')
        self.condition_view = get_class(views_module, 'OfferConditionView')
        self.benefit_view = get_class(views_module, 'OfferBenefitView')
        self.restrictions_view = get_class(views_module, 'OfferRestrictionsView')
        self.delete_view = get_class(views_module, 'OfferDeleteView')
        self.detail_view = get_class(views_module, 'OfferDetailView')

    def get_urls(self):
        # The four wizard steps are shared between the creation flow
        # (`new/...`) and the update flow (`<pk>/...`); the update variants
        # are distinguished only by as_view(update=True).
        creation_urls = [
            path('new/name-and-description/', self.metadata_view.as_view(), name='offer-metadata'),
            path('new/condition/', self.condition_view.as_view(), name='offer-condition'),
            path('new/incentive/', self.benefit_view.as_view(), name='offer-benefit'),
            path('new/restrictions/', self.restrictions_view.as_view(), name='offer-restrictions'),
        ]
        update_urls = [
            path('<int:pk>/name-and-description/', self.metadata_view.as_view(update=True), name='offer-metadata'),
            path('<int:pk>/condition/', self.condition_view.as_view(update=True), name='offer-condition'),
            path('<int:pk>/incentive/', self.benefit_view.as_view(update=True), name='offer-benefit'),
            path('<int:pk>/restrictions/', self.restrictions_view.as_view(update=True), name='offer-restrictions'),
        ]
        urls = (
            [path('', self.list_view.as_view(), name='offer-list')]
            + creation_urls
            + update_urls
            # Delete
            + [path('<int:pk>/delete/', self.delete_view.as_view(), name='offer-delete')]
            # Stats
            + [path('<int:pk>/', self.detail_view.as_view(), name='offer-detail')]
        )
        return self.post_process_urls(urls)
| 51.909091 | 115 | 0.658932 |
acec4a935d6032bec88a8b1919c1f327a56cba02 | 6,694 | py | Python | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/user_authn/views/tests/test_events.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 3 | 2021-12-15T04:58:18.000Z | 2022-02-06T12:15:37.000Z | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/user_authn/views/tests/test_events.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | null | null | null | Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/openedx/core/djangoapps/user_authn/views/tests/test_events.py | osoco/better-ways-of-thinking-about-software | 83e70d23c873509e22362a09a10d3510e10f6992 | [
"MIT"
] | 1 | 2019-01-02T14:38:50.000Z | 2019-01-02T14:38:50.000Z | """
Test classes for the events sent in the registration process.
Classes:
RegistrationEventTest: Test event sent after registering a user through the
user API.
LoginSessionEventTest: Test event sent after creating the user's login session
user through the user API.
"""
from unittest.mock import Mock
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django.urls import reverse
from openedx_events.learning.data import UserData, UserPersonalData
from openedx_events.learning.signals import SESSION_LOGIN_COMPLETED, STUDENT_REGISTRATION_COMPLETED
from openedx_events.tests.utils import OpenEdxEventsTestMixin
from common.djangoapps.student.tests.factories import UserFactory, UserProfileFactory
from openedx.core.djangoapps.user_api.tests.test_views import UserAPITestCase
from openedx.core.djangolib.testing.utils import skip_unless_lms
@skip_unless_lms
class RegistrationEventTest(UserAPITestCase, OpenEdxEventsTestMixin):
    """
    Tests for the Open edX Events associated with the registration process through
    the registration view.

    This class guarantees that the following events are sent after registering
    a user, with the exact Data Attributes as the event definition stated:

    - STUDENT_REGISTRATION_COMPLETED: after the user's registration has been
    completed.
    """

    ENABLED_OPENEDX_EVENTS = ["org.openedx.learning.student.registration.completed.v1"]

    @classmethod
    def setUpClass(cls):
        """
        Set up class method for the Test class.

        So the Open edX Events Isolation starts, the setUpClass must be explicitly
        called with the method that executes the isolation. We do this to avoid
        MRO resolution conflicts with other sibling classes while ensuring the
        isolation process begins.
        """
        super().setUpClass()
        cls.start_events_isolation()

    def setUp(self):  # pylint: disable=arguments-differ
        super().setUp()
        self.url = reverse("user_api_registration")
        # Minimal payload accepted by the registration endpoint.
        self.user_info = {
            "email": "user@example.com",
            "name": "Test User",
            "username": "test",
            "password": "password",
            "honor_code": "true",
        }
        # Flag flipped by _event_receiver_side_effect so the test can assert
        # that the signal receiver actually ran.
        self.receiver_called = False

    def _event_receiver_side_effect(self, **kwargs):  # pylint: disable=unused-argument
        """
        Used show that the Open edX Event was called by the Django signal handler.
        """
        self.receiver_called = True

    def test_send_registration_event(self):
        """
        Test whether the student registration event is sent during the user's
        registration process.

        Expected result:
            - STUDENT_REGISTRATION_COMPLETED is sent and received by the mocked receiver.
            - The arguments that the receiver gets are the arguments sent by the event
            except the metadata generated on the fly.
        """
        event_receiver = Mock(side_effect=self._event_receiver_side_effect)
        STUDENT_REGISTRATION_COMPLETED.connect(event_receiver)

        # Registering via the API should fire the event synchronously.
        self.client.post(self.url, self.user_info)

        user = User.objects.get(username=self.user_info.get("username"))
        self.assertTrue(self.receiver_called)
        # Only a subset is checked: event metadata is generated on the fly.
        self.assertDictContainsSubset(
            {
                "signal": STUDENT_REGISTRATION_COMPLETED,
                "sender": None,
                "user": UserData(
                    pii=UserPersonalData(
                        username=user.username,
                        email=user.email,
                        name=user.profile.name,
                    ),
                    id=user.id,
                    is_active=user.is_active,
                ),
            },
            event_receiver.call_args.kwargs
        )
@skip_unless_lms
class LoginSessionEventTest(UserAPITestCase, OpenEdxEventsTestMixin):
    """
    Tests for the Open edX Events associated with the login process through the
    login_user view.

    This class guarantees that the following events are sent after the user's
    session creation, with the exact Data Attributes as the event definition
    stated:

    - SESSION_LOGIN_COMPLETED: after login has been completed.
    """

    ENABLED_OPENEDX_EVENTS = ["org.openedx.learning.auth.session.login.completed.v1"]

    @classmethod
    def setUpClass(cls):
        """
        Set up class method for the Test class.

        This method starts manually events isolation. Explanation here:
        openedx/core/djangoapps/user_authn/views/tests/test_events.py#L44
        """
        super().setUpClass()
        cls.start_events_isolation()

    def setUp(self):  # pylint: disable=arguments-differ
        super().setUp()
        self.url = reverse("user_api_login_session", kwargs={"api_version": "v1"})
        self.user = UserFactory.create(
            username="test",
            email="test@example.com",
            password="password",
        )
        self.user_profile = UserProfileFactory.create(user=self.user, name="Test Example")
        # BUG FIX: this flag was previously initialized to True, which made the
        # assertTrue(self.receiver_called) check below pass vacuously even if
        # the receiver never ran. It must start False (as in
        # RegistrationEventTest) and only be flipped by the receiver's side
        # effect.
        self.receiver_called = False

    def _event_receiver_side_effect(self, **kwargs):  # pylint: disable=unused-argument
        """
        Used show that the Open edX Event was called by the Django signal handler.
        """
        self.receiver_called = True

    def test_send_login_event(self):
        """
        Test whether the student login event is sent after the user's
        login process.

        Expected result:
            - SESSION_LOGIN_COMPLETED is sent and received by the mocked receiver.
            - The arguments that the receiver gets are the arguments sent by the event
            except the metadata generated on the fly.
        """
        event_receiver = Mock(side_effect=self._event_receiver_side_effect)
        SESSION_LOGIN_COMPLETED.connect(event_receiver)
        data = {
            "email": "test@example.com",
            "password": "password",
        }

        # Logging in via the API should fire the event synchronously.
        self.client.post(self.url, data)

        user = User.objects.get(username=self.user.username)
        self.assertTrue(self.receiver_called)
        # Only a subset is checked: event metadata is generated on the fly.
        self.assertDictContainsSubset(
            {
                "signal": SESSION_LOGIN_COMPLETED,
                "sender": None,
                "user": UserData(
                    pii=UserPersonalData(
                        username=user.username,
                        email=user.email,
                        name=user.profile.name,
                    ),
                    id=user.id,
                    is_active=user.is_active,
                ),
            },
            event_receiver.call_args.kwargs
        )
| 36.380435 | 99 | 0.641769 |
acec4c3f1beb20aad9eb97743c0cd34bdb5ac51f | 16,858 | py | Python | PA2/code/main.py | badarsh2/EE6132-Deep-Learning-For-Imaging-Assignments | f2485bb2f0c17ebddd4acd176a8c6aa8ace6439a | [
"MIT"
] | 1 | 2019-04-20T09:36:36.000Z | 2019-04-20T09:36:36.000Z | PA2/code/main.py | badarsh2/EE6132-Deep-Learning-For-Imaging-Assignments | f2485bb2f0c17ebddd4acd176a8c6aa8ace6439a | [
"MIT"
] | null | null | null | PA2/code/main.py | badarsh2/EE6132-Deep-Learning-For-Imaging-Assignments | f2485bb2f0c17ebddd4acd176a8c6aa8ace6439a | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Driver program to train a CNN on MNIST dataset.
"""
from math import log10
import keras
import matplotlib.pyplot as plt
import numpy as np
from keras import backend as K
from keras.datasets import mnist
from keras.layers import Input
from keras.models import load_model, Model
from keras.utils import to_categorical
from scipy.ndimage.filters import gaussian_filter
from custom_callbacks import LossHistory
from custom_models import baseline_model, two_conv_layer_model, two_conv_one_dense_layer_model
from utils import preprocess_image_data, get_iter_batch, plot_learning_curve, generate_image_outputs, \
generate_noisy_outputs
# Initializing essential constants
batch_size = 128             # SGD mini-batch size used for all models
num_classes = 10             # MNIST digit classes (0-9)
epochs = 1
img_rows, img_cols = 28, 28  # MNIST image dimensions
num_iter = 101               # optimization iterations used in questions 2 and 3

# Initializing essential global variables (populated by load_data())
input_shape = None
X_train, y_train_labels, y_train, X_test, y_test_labels, y_test = None, None, None, None, None, None
def normalize_tensor(x):
    """Scale a tensor by its RMS magnitude, with a small epsilon for stability.

    Params:
        x: Tensorflow tensor
    Returns:
        tensor: the input divided by its L2 (RMS) norm
    """
    magnitude = K.sqrt(K.mean(K.square(x)))
    return x / (magnitude + 1e-5)
def load_data():
    """ Helper function to load and initialize data

    Loads MNIST via keras, prepares the image arrays through
    preprocess_image_data (backend-specific layout handling lives in utils),
    one-hot encodes the labels, and stores everything in the module-level
    globals consumed by the question_* routines.
    """
    global input_shape, X_train, y_train_labels, y_train, X_test, y_test_labels, y_test
    (X_train, y_train_labels), (X_test, y_test_labels) = mnist.load_data()
    X_train, X_test, input_shape = preprocess_image_data(X_train, X_test, img_rows, img_cols, K)

    # convert class vectors to binary class matrices
    y_train = to_categorical(y_train_labels, num_classes)
    y_test = to_categorical(y_test_labels, num_classes)

    print('X_train shape:', X_train.shape)
    print(X_train.shape[0], 'train samples')
    print(X_test.shape[0], 'test samples')
def question_1():
    """Train and compare the three CNN architectures on MNIST.

    For each architecture (baseline, 2-conv, 2-conv + 1 dense) this:
      * trains with a LossHistory callback and saves the model to disk
        (model3.h5 is reused by question_2/question_3),
      * plots train/test loss and test accuracy evolution,
      * writes prediction visualizations for a random sample of digits.

    Relies on the module-level data globals populated by load_data().
    """
    global input_shape, X_train, y_train_labels, y_train, X_test, y_test_labels, y_test
    print("------------------------------------------------------------------------")
    print("Baseline Model")
    print("------------------------------------------------------------------------")
    model1 = baseline_model(input_shape, num_classes)
    loss_callback_1 = LossHistory((X_test, y_test))
    model1.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(X_test, y_test),
               callbacks=[loss_callback_1])
    model1.save('model1.h5')
    plot_learning_curve([loss_callback_1.train_indices, loss_callback_1.test_indices],
                        [loss_callback_1.train_losses, loss_callback_1.test_losses],
                        colors=['g-', 'm-'], labels=['Train loss', 'Test loss'],
                        title="Loss evolution for Baseline Model",
                        path="../outputs/q1/plots/train_test_loss_baseline.png",
                        axlabels=["Iterations", "Loss"])
    plot_learning_curve([loss_callback_1.test_indices],
                        [loss_callback_1.test_acc],
                        colors=['c-'], labels=['Test Accuracy'],
                        title="Accuracy evolution for Baseline Model",
                        path="../outputs/q1/plots/test_acc_baseline.png",
                        axlabels=["Iterations", "Accuracy"])

    print("------------------------------------------------------------------------")
    print("2 conv layer model")
    print("------------------------------------------------------------------------")
    model2 = two_conv_layer_model(input_shape, num_classes)
    loss_callback_2 = LossHistory((X_test, y_test))
    model2.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(X_test, y_test),
               callbacks=[loss_callback_2])
    model2.save('model2.h5')
    plot_learning_curve([loss_callback_2.train_indices, loss_callback_2.test_indices],
                        [loss_callback_2.train_losses, loss_callback_2.test_losses],
                        colors=['g-', 'm-'], labels=['Train loss', 'Test loss'],
                        title="Loss evolution for 2 conv layered Model",
                        path="../outputs/q1/plots/train_test_loss_2_conv.png",
                        axlabels=["Iterations", "Loss"])
    # BUG FIX: this accuracy plot previously read loss_callback_1 (the
    # baseline's history), so the 2-conv accuracy curve was a duplicate of
    # the baseline curve. It must use loss_callback_2, matching how model3
    # uses loss_callback_3 below.
    plot_learning_curve([loss_callback_2.test_indices],
                        [loss_callback_2.test_acc],
                        colors=['c-'], labels=['Test Accuracy'],
                        title="Accuracy evolution for 2 conv layered Model",
                        path="../outputs/q1/plots/test_acc_2_conv.png",
                        axlabels=["Iterations", "Accuracy"])

    print("------------------------------------------------------------------------")
    print("2 conv layer + 1 hidden dense layer model")
    print("------------------------------------------------------------------------")
    model3 = two_conv_one_dense_layer_model(input_shape, num_classes)
    loss_callback_3 = LossHistory((X_test, y_test))
    model3.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1, validation_data=(X_test, y_test),
               callbacks=[loss_callback_3])
    model3.save('model3.h5')
    plot_learning_curve([loss_callback_3.train_indices, loss_callback_3.test_indices],
                        [loss_callback_3.train_losses, loss_callback_3.test_losses],
                        colors=['g-', 'm-'], labels=['Train loss', 'Test loss'],
                        title="Loss evolution for 2 Conv + 1 Dense layer config",
                        path="../outputs/q1/plots/train_test_loss_2_conv_1_dense.png",
                        axlabels=["Iterations", "Loss"])
    plot_learning_curve([loss_callback_3.test_indices],
                        [loss_callback_3.test_acc],
                        colors=['c-'], labels=['Test Accuracy'],
                        title="Accuracy evolution for 2 conv + 1 dense config",
                        path="../outputs/q1/plots/test_acc_2_conv_1_dense.png",
                        axlabels=["Iterations", "Accuracy"])

    # Visualize predictions of each model on a random sample of 20 digits.
    # NOTE(review): ids are drawn from X_test's index range but then used to
    # index X_train -- valid (the train set is larger) but X_test may have
    # been intended; behavior kept as-is.
    ids = np.random.choice(X_test.shape[0], 20)
    X_samples = X_train[ids]
    pred_samples_1 = model1.predict(X_samples)
    generate_image_outputs(X_samples, np.argmax(pred_samples_1, axis=1), path="../outputs/q1/predictions/baseline")
    pred_samples_2 = model2.predict(X_samples)
    generate_image_outputs(X_samples, np.argmax(pred_samples_2, axis=1), path="../outputs/q1/predictions/2_conv")
    pred_samples_3 = model3.predict(X_samples)
    generate_image_outputs(X_samples, np.argmax(pred_samples_3, axis=1),
                           path="../outputs/q1/predictions/2_conv_1_dense")
def question_2():
    """Learn a per-class adversarial noise image against the trained model3.

    For every target label 0-9, a single 28x28 noise tensor is optimized by
    manual gradient descent so that (image + noise) is classified as the
    target label. The learned noise, loss/accuracy evolution plots, and
    sample fooled predictions are written under ../outputs/q2/.
    """
    global input_shape, X_train, y_train_labels, y_train, X_test, y_test_labels, y_test
    # Reuse the best model from question 1 and freeze its weights; only the
    # additive noise input is optimized here.
    model3 = load_model('model3.h5')
    model3.trainable = False
    learning_rate = 0.01
    validation_interval = 10
    # Iterating over each of the 10 classes for generating adversarial examples
    for _label in range(0, num_classes):
        print("------------------------------------------------------------------------")
        print("Adversarial examples for label " + str(_label))
        print("------------------------------------------------------------------------")
        # y_eval is a dummy matrix useful for evaluating categorical crossentropy loss
        y_eval = to_categorical(np.full((batch_size, 1), _label, dtype=int), num_classes=num_classes)
        # y_fool is the duplicate label meant to fool the network and generate adversarial examples
        y_fool = to_categorical(np.full((y_train_labels.shape[0], 1), _label, dtype=int), num_classes=num_classes)
        batch = get_iter_batch(X_test, y_fool, batch_size, num_iter)
        # initializing a 28 x 28 matrix for noise
        noise = np.zeros((1, 28, 28, 1))
        # new functional model to add noise and predict output using existing trained model
        input1 = Input(shape=(img_rows, img_cols, 1))
        input2 = Input(shape=(img_rows, img_cols, 1))
        sum_inp = keras.layers.add([input1, input2])
        op = model3(sum_inp)
        noise_model = Model(inputs=[input1, input2], outputs=op)
        # calculating gradient
        # NOTE(review): Keras' categorical_crossentropy signature is
        # (target, output); here the arguments appear swapped (model output
        # first) -- confirm this matches the intended loss direction.
        a_loss = K.categorical_crossentropy(noise_model.output, y_eval)
        # Gradient of the loss w.r.t. the noise input only (input[1]).
        grad = K.gradients(a_loss, noise_model.input[1])[0]
        grad = K.mean(normalize_tensor(grad), axis=0)
        # custom keras backend function that takes in two inputs and yields noise output,
        # loss and gradient
        custom_iterate = K.function([input1, input2], [noise_model.output, a_loss, grad])
        train_indices, train_loss, test_indices, test_loss, test_acc = [], [], [], [], []
        ctr = 0
        # Batch wise manual gradient descent for learning adversarial noise
        for _batch in batch:
            X_actual, y_actual = _batch
            output, loss, grads = custom_iterate([X_actual, noise])
            # Validating at specific intervals
            if (ctr % validation_interval == 0):
                # Broadcast the learned noise over the whole test set and
                # measure how often the (image + noise) pair is classified
                # as the target label.
                noise_test = np.zeros(X_test.shape) + noise[0]
                preds_test = noise_model.predict([X_test, noise_test])
                _test_acc = float(np.where(np.argmax(preds_test, axis=1) == _label)[0].shape[0]) / float(
                    preds_test.shape[0])
                _test_loss = np.mean(loss)
                test_indices.append(ctr)
                test_loss.append(_test_loss)
                test_acc.append(_test_acc)
            train_indices.append(ctr)
            train_loss.append(np.mean(loss))
            # Gradient update
            noise = noise - learning_rate * np.array(grads)
            line = ("Iteration " + str(ctr + 1).rjust(int(log10(num_iter) + 1))
                    + "/" + str(num_iter)
                    + " complete. Train Loss: %0.10f " % np.mean(loss))
            print(line)
            ctr = ctr + 1
        # Final fooling accuracy over the full test set.
        noise_test = np.zeros(X_test.shape) + noise[0]
        preds = noise_model.predict([X_test, noise_test])
        print(
            "Accuracy: " + str(float(np.where(np.argmax(preds, axis=1) == _label)[0].shape[0]) / float(preds.shape[0])))
        # Visualizing each of the generated noises
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.imshow(noise.reshape(28, 28), interpolation='nearest', cmap="gray")
        plt.savefig("../outputs/q2/visualizations/sample_" + str(_label) + ".png")
        plt.close()
        # Plotting loss and accuracy evolution
        plot_learning_curve([train_indices, test_indices],
                            [train_loss, test_loss],
                            colors=['c-', 'm-'], labels=['Train loss', 'Test loss'],
                            title="Loss evolution for adversarial noise training",
                            path="../outputs/q2/plots/train_test_loss_adversarial_noise_" + str(_label) + ".png",
                            axlabels=["Iterations", "Loss"])
        plot_learning_curve([test_indices],
                            [test_acc],
                            colors=['r-'], labels=['Test Accuracy'],
                            title="Accuracy evolution for adversarial noise training",
                            path="../outputs/q2/plots/test_acc_adversarial_noise_" + str(_label) + ".png",
                            axlabels=["Iterations", "Accuracy"])
        # Predicting for a random set of 9 adversarial images
        ids = np.random.choice(X_test.shape[0], 9)
        X_samples = X_test[ids]
        noise_sample = np.zeros(X_samples.shape) + noise[0]
        pred_samples = noise_model.predict([X_samples, noise_sample])
        actual_samples = model3.predict(X_samples)
        generate_noisy_outputs(X_samples + noise_sample, np.argmax(actual_samples, axis=1),
                               np.argmax(pred_samples, axis=1), path="../outputs/q2/predictions/" + str(_label))
def question_3():
    """Synthesize inputs that maximally activate model3's outputs and filters.

    Part 1: for each class 0-9, gradient-ascend a random image to maximize
    the corresponding pre-softmax logit. Part 2: do the same for the central
    neuron of each of 15 filters in the second max-pooling layer. Results
    are written under ../outputs/q3/.
    """
    global input_shape, X_train, y_train_labels, y_train, X_test, y_test_labels, y_test
    model = load_model('model3.h5')
    model.trainable = False
    # Custom model that inputs 28 x 28 matrices and outputs logits (without softmax)
    visualize_model = Model(inputs=model.input, outputs=model.get_layer("logits").output)
    for _label in range(0, num_classes):
        print("------------------------------------------------------------------------")
        print("Synthetic image visualization for label " + str(_label))
        print("------------------------------------------------------------------------")
        y_temp = [_label]
        y_temp = to_categorical(y_temp, num_classes)
        # Setting cost to be the respective output neurons
        cost = visualize_model.output[:, _label]
        # Gradient calculation for the cost
        grad = K.mean(K.gradients(cost, visualize_model.input)[0], axis=0)
        # Custom keras backend function that inputs the images and returns the cost and gradient
        custom_iterate = K.function([model.input], [visualize_model.output[:, _label], grad])
        # Initializing a gaussian distribution centred around 128
        X_init = np.random.normal(loc=128., scale=50., size=(1, 28, 28, 1))
        X_init /= 255.
        costs = []
        iter_indices = []
        # Batch wise gradient ascent for learning X_init
        for i in range(num_iter):
            cost, grads = custom_iterate([X_init])
            # Blur radius grows with the iteration count to regularize the image.
            sigma = (i + 1) * 4 / (num_iter + 0.5)
            step_size = 1.0 / np.std(grads)
            costs.append(cost[0])
            iter_indices.append(i)
            # Smoothening using a Gaussian filter
            grads = gaussian_filter(grads, sigma)
            # Gradient update (with a small weight decay on the image)
            X_init = (1 - 0.0001) * X_init + step_size * np.array(grads)
            line = ("Iteration " + str(i + 1).rjust(int(log10(num_iter) + 1))
                    + "/" + str(num_iter)
                    + " complete. Cost: %0.10f " % cost[0])
            print(line)
        # Visualizing the input image
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.imshow(X_init.reshape(28, 28), interpolation='nearest', cmap="gray")
        plt.savefig("../outputs/q3/visualizations/max_output_" + str(_label) + ".png")
        plt.close()
        plot_learning_curve([iter_indices],
                            [costs],
                            colors=['b-'], labels=['Cost'],
                            title="Cost evolution over optimization iterations",
                            path="../outputs/q3/plots/cost_output_" + str(_label) + ".png",
                            axlabels=["Iterations", "Cost"])
    # Custom model that inputs 28 x 28 image matrices and outputs 2nd maxpooling layer
    visualize_model = Model(inputs=model.input, outputs=model.get_layer("maxpooling2").output)
    for _id in range(15):
        print("------------------------------------------------------------------------")
        print("Synthetic image visualization for central neuron of filter " + str(_id))
        print("------------------------------------------------------------------------")
        # Setting cost as the central neuron of maxpooling layer
        # Since row size and column size (7, 7) is odd, we do row/2 and column/2
        # NOTE(review): `/ 2` is true division under Python 3, producing a
        # non-integer index -- this looks like Python 2 era code; confirm the
        # target interpreter (Python 3 would need `// 2`).
        cost = visualize_model.output[:, visualize_model.output.get_shape()[1] / 2,
               visualize_model.output.get_shape()[2] / 2, _id]
        grad = K.mean(K.gradients(cost, visualize_model.input)[0], axis=0)
        custom_iterate = K.function([model.input], [cost, grad])
        X_init = np.random.normal(loc=128., scale=50., size=(1, 28, 28, 1))
        X_init /= 255.
        # Batch wise gradient ascent for learning X_init
        for i in range(num_iter):
            cost, grads = custom_iterate([X_init])
            sigma = (i + 1) * 4 / (num_iter + 0.5)
            step_size = 1.0 / np.std(grads)
            grads = gaussian_filter(grads, sigma)
            # Gradient update
            X_init = (1 - 0.0001) * X_init + step_size * np.array(grads)
            line = ("Iteration " + str(i + 1).rjust(int(log10(num_iter) + 1))
                    + "/" + str(num_iter)
                    + " complete. Cost: %0.10f " % cost[0])
            print(line)
        # Plotting X_init for each of the filter optimizations
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.imshow(X_init.reshape(28, 28), interpolation='nearest', cmap="gray")
        plt.text(0.5, 0.05, 'Filter: ' + str(_id), fontsize=28,
                 horizontalalignment='center', verticalalignment='center',
                 transform=ax.transAxes, color='white')
        plt.savefig("../outputs/q3/visualizations/max_filter_" + str(_id) + ".png")
        plt.close()
if __name__ == "__main__":
    # Run the full assignment pipeline in order: data prep first, then each
    # question (question_2 and question_3 reload model3.h5, which is saved
    # to disk by question_1).
    load_data()
    question_1()
    question_2()
    question_3()
| 47.487324 | 116 | 0.58103 |
acec4cce98007ad3bcdf582de17e825b46005ff1 | 2,423 | py | Python | data/train/python/acec4cce98007ad3bcdf582de17e825b46005ff1testparser.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | [
"MIT"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/acec4cce98007ad3bcdf582de17e825b46005ff1testparser.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/acec4cce98007ad3bcdf582de17e825b46005ff1testparser.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | from ..parser import parse_cst, parse_star_ast, parse_nmrstar_ast
from ..unparse import maybeerror
from ..starast import Data, Save, Loop
import unittest as u
# Shorthand constructors for expected parse results: `good` wraps a value in
# a successful MaybeError, `bad` wraps an error payload.
good = maybeerror.MaybeError.pure
bad = maybeerror.MaybeError.error
class TestParser(u.TestCase):
    # Happy-path parsing: well-formed STAR text should yield `good(...)` ASTs.

    def testParseGood(self):
        # Minimal document: one data block holding one empty save frame
        # (a trailing comment is tolerated).
        self.assertEqual(parse_star_ast("data_hi save_me # oop \n save_ "),
                         good(Data('hi', {'me': Save({}, [])})))

    def testParseGoodComplex(self):
        # NOTE(review): the original leading whitespace inside this literal
        # (part of the string value) was lost upstream; content kept
        # byte-for-byte as found.
        inp = """
data_start
save_st1
_a 1
_b 2
save_
save_st2
_c 3
loop_
_d _e
w x y z m n
stop_
save_
"""
        # Two save frames: st1 with scalar keys only, st2 with a scalar plus
        # a loop over columns d/e and three rows.
        ast = Data('start',
                   {'st1': Save({'a': '1', 'b': '2'}, []),
                    'st2': Save({'c': '3'}, [Loop(['d', 'e'], [['w', 'x'], ['y', 'z'], ['m', 'n']])])})
        self.assertEqual(parse_star_ast(inp), good(ast))
class TestParserErrors(u.TestCase):
    # Each test feeds deliberately malformed NMR-STAR text and checks that
    # the error names the phase where parsing failed plus position details.

    def test_cst_problem(self):
        # Save frame opened but never closed -> fails during CST construction
        # ('save close' expected, EOF found).
        self.assertEqual(parse_nmrstar_ast("data_hi save_me # oop \n "),
                         bad({'phase': 'CST construction',
                              'message': [('data', (1,1)), ('save', (1,9)), ('save close', 'EOF')]}))

    def test_star_ast_problem(self):
        # Duplicate key `_a` inside one save frame -> AST-construction error
        # reporting both occurrence positions.
        self.assertEqual(parse_nmrstar_ast("data_hi save_me _a 1 _a 2 save_ "),
                         bad({'phase': 'AST construction',
                              'message': {'message': 'duplicate key', 'nodetype': 'save',
                                          'key': 'a', 'first': (1,17), 'second': (1,22)}}))

    def test_nmrstar_ast_problem(self):
        # Save frame lacks the mandatory Sf_framecode tag.
        self.assertEqual(parse_nmrstar_ast("data_hi save_me _A.a 1 _A.Sf_category 2 save_"),
                         bad({'phase': 'NMRSTAR AST construction',
                              'message': {'message': 'missing key "Sf_framecode"', 'nodetype': 'save'}}))

    def test_unconsumed_input(self):
        # Leftover tokens after the data block are rejected.
        self.assertEqual(parse_nmrstar_ast("data_hi _what?"),
                         bad({'phase': 'CST construction',
                              'message': [('unparsed tokens remaining', (1,9))]}))

    def test_junk(self):
        # Input that is not NMR-STAR at all fails at the very first token.
        self.assertEqual(parse_nmrstar_ast("what is this junk? this isn't nmr-star"),
                         bad({'phase': 'CST construction',
                              'message': [('data block', (1,1))]}))
| 37.276923 | 105 | 0.504333 |
acec4d7608a70d39b5fad79900e53c8da0ab8160 | 7,250 | py | Python | src/providers/commoncrawl/MuseumVictoria.py | 9LKQ7ZLC82/cccatalog | d2709afeef8645429baa455f590f6999b76db48d | [
"MIT"
] | 1 | 2019-05-11T13:25:28.000Z | 2019-05-11T13:25:28.000Z | src/providers/commoncrawl/MuseumVictoria.py | 9LKQ7ZLC82/cccatalog | d2709afeef8645429baa455f590f6999b76db48d | [
"MIT"
] | null | null | null | src/providers/commoncrawl/MuseumVictoria.py | 9LKQ7ZLC82/cccatalog | d2709afeef8645429baa455f590f6999b76db48d | [
"MIT"
] | null | null | null | """
Content Provider: Museums Victoria - Collections of palaeontology, zoology, indigenous cultures etc.
ETL Process: Identify images and their respective meta data that are available under a
Creative Commons license.
Output: TSV file containing images of artworks and their respective meta-data.
"""
from Provider import *
logging.basicConfig(format='%(asctime)s - %(name)s: [%(levelname)s] - Museums Victoria =======> %(message)s', level=logging.INFO)
class MuseumVictoria(Provider):
    """Common Crawl provider for Museums Victoria collection pages.

    NOTE(review): the original indentation of this file was lost upstream;
    block structure reconstructed from syntax -- verify against the repo.
    """

    def __init__(self, _name, _domain, _cc_index):
        Provider.__init__(self, _name, _domain, _cc_index)

    def filterData(self, _data, _condition=None):
        #Images can be located in four main content paths: /species, /items, /articles, and /specimens.
        allowed = list(map(lambda x: '{}{}'.format(self.domain, x), ['/species/', '/items/', '/specimens/', '/articles/']))
        # Rows are tab-separated; the first field is the page URL.
        data = list(filter(lambda x: x.split('\t')[0].startswith(tuple(allowed)), _data))
        self.data = data
        return self.data

    def getMetaData(self, _html, _url):
        """
        Parameters
        ------------------
        _html: string
            The HTML page that was extracted from Common Crawls WARC file.

        _url: string
            The url for the webpage.

        Returns
        ------------------
        A tab separated string which contains the meta data that was extracted from the HTML.
        """

        soup = BeautifulSoup(_html, 'html.parser')
        otherMetaData = {}
        license = None
        version = None
        imageURL = None
        formatted = []
        extracted = []
        self.clearFields()

        #validate license for the image
        licenseInfo = soup.find('span', attrs={'class': 'licence'})
        if licenseInfo and licenseInfo.findChild('a'):
            ccURL = urlparse(licenseInfo.findChild('a').attrs['href'])
            license, version = self.getLicense(ccURL.netloc, ccURL.path, _url)
        # Pages without a detected CC license are skipped entirely.
        if not license:
            logging.warning('License not detected in url: {}'.format(_url))
            return None
        self.license = license
        self.licenseVersion = version

        #get the image
        imgProperty = soup.find('meta', {'property': 'og:image'})
        if imgProperty:
            imageURL = self.validateContent('', imgProperty, 'content')
            imgWidth = self.validateContent('', soup.find('meta', {'property': 'og:image:width'}), 'content')
            imgHeight = self.validateContent('', soup.find('meta', {'property': 'og:image:height'}), 'content')
            self.url = imageURL
            # Thumbnail URL is derived by filename convention from the medium image.
            self.thumbnail = [imageURL.replace('-medium', '-thumbnail') if '-medium.' in imageURL else ''][0]
            self.width = imgWidth
            self.height = imgHeight
        else:
            logging.warning('Image not detected in url: {}'.format(_url))
            return None

        self.title = self.validateContent('', soup.find('meta', {'property': 'og:title'}), 'content')

        #owner/credits
        creatorInfo = soup.find('div', {'class':'creators'})
        if creatorInfo:
            creator = creatorInfo.text.strip()
            if 'Photographer' in creator:
                self.creator = creator.replace('Photographer:', '').strip()
            elif 'Artist' in creator:
                self.creator = creator.replace('Artist:', '').strip()

        foreignID = self.getForeignID(_url)
        if foreignID:
            self.foreignIdentifier = foreignID.strip()
        else:
            logging.warning('Identifier not detected in: {}'.format(_url))
            return None

        # Dead code kept as-is (commented out via a string literal).
        '''thumbnails = soup.find_all('div', {'class': 'thumbnail'})
if thumbnails:
thumbnails = ['{}{}'.format(self.domain, x.img['src']) for x in thumbnails]
allImages = [x.replace('-thumbnail', '-medium') for x in thumbnails]
otherMetaData['thumbnails'] = ','.join(thumbnails)
otherMetaData['additional_images'] = ','.join(allImages)'''

        #summary
        summary = soup.find('div', {'class': 'summary'})
        if summary:
            description = summary.findChild('p')
            if description:
                description = description.text.strip()
                otherMetaData['description'] = description

        #more information/details
        moreInfo = soup.find('div', {'class': 'detail'})
        if moreInfo:
            details = moreInfo.findChildren('li')
            for item in details:
                # Each <li> is a label (<h3>) / value (<p>...) pair; label is
                # snake_cased to act as a metadata key.
                lbl = item.find('h3').text.strip()
                lbl = re.sub('(\s)', '_', lbl).lower()
                val = ','.join(re.sub(r'\s+', ' ', x.text).strip() for x in item.find_all('p'))
                otherMetaData[lbl] = val

        self.provider = self.name
        self.source = 'commoncrawl'
        self.foreignLandingURL = _url

        #tags
        if otherMetaData:
            if 'keywords' in otherMetaData:
                otherMetaData['tags'] = otherMetaData['keywords']
                del otherMetaData['keywords']
            #if 'artist' in otherMetaData:
                #self.creator = otherMetaData['artist'].split('-')[0].strip()
                #del otherMetaData['artist']
            self.metaData = otherMetaData

        #get the additional images
        thumbnails = soup.find_all('div', {'class': 'thumbnail'})
        if thumbnails and len(thumbnails) > 1:
            # Emit one output row per gallery thumbnail, rewriting the image
            # fields for each iteration before formatting.
            for item in thumbnails:
                img = item.findChild('img')
                self.url = ''
                self.thumbnail = ''
                self.foreignIdentifier = ''
                if 'image_alt_text' in otherMetaData:
                    del otherMetaData['image_alt_text']
                if 'src' in img.attrs:
                    self.thumbnail = '{}{}'.format(self.domain.strip('%'), self.validateContent('', img, 'src'))
                    self.url = self.thumbnail.replace('-thumbnail', '-medium')
                    self.foreignIdentifier = self.url
                    if 'alt' in img.attrs:
                        otherMetaData['image_alt_text'] = self.validateContent('', img, 'alt')
                else:
                    logging.warning('Image not detected in url: {}'.format(_url))
                    continue
                self.metaData = otherMetaData
                extracted.extend(self.formatOutput)
            return extracted
        else:
            # Single-image page: format the fields gathered above.
            formatted = list(self.formatOutput)
            return formatted
| 37.179487 | 129 | 0.503034 |
acec4f00ceb529edd80247cea68fb08fd556d33f | 1,197 | py | Python | examples/statistics/boxplot_vs_violin_demo.py | argriffing/matplotlib | 5555f5463fb5f995a59f7651c0034a5d6a4c7e84 | [
"MIT",
"BSD-3-Clause"
] | 1 | 2019-04-15T09:40:53.000Z | 2019-04-15T09:40:53.000Z | examples/statistics/boxplot_vs_violin_demo.py | argriffing/matplotlib | 5555f5463fb5f995a59f7651c0034a5d6a4c7e84 | [
"MIT",
"BSD-3-Clause"
] | 2 | 2021-05-10T17:57:41.000Z | 2021-07-26T16:23:09.000Z | examples/statistics/boxplot_vs_violin_demo.py | kdavies4/matplotlib | 330aefbd031ee227213afe655c5158320015d45b | [
"MIT",
"BSD-3-Clause"
] | 1 | 2015-12-21T07:24:54.000Z | 2015-12-21T07:24:54.000Z | # Box plot - violin plot comparison
#
# Note that although violin plots are closely related to Tukey's (1977) box plots,
# they add useful information such as the distribution of the sample data (density trace).
#
# By default, box plots show data points outside 1.5 x the inter-quartile range as outliers
# above or below the whiskers whereas violin plots show the whole range of the data.
#
# Violin plots require matplotlib >= 1.4.
import matplotlib.pyplot as plt
import numpy as np
# Two side-by-side axes: violin plot on the left, box plot on the right.
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(12, 5))
# generate some random test data: one sample of 100 normal draws per
# standard deviation in 6..9 (mean 0)
all_data = [np.random.normal(0, std, 100) for std in range(6, 10)]
# plot violin plot (medians shown, means suppressed)
axes[0].violinplot(all_data,
                   showmeans=False,
                   showmedians=True)
axes[0].set_title('violin plot')
# plot box plot
axes[1].boxplot(all_data)
axes[1].set_title('box plot')
# adding horizontal grid lines and shared axis decoration to both plots
for ax in axes:
    ax.yaxis.grid(True)
    ax.set_xticks([y+1 for y in range(len(all_data))])
    ax.set_xlabel('xlabel')
    ax.set_ylabel('ylabel')
# add x-tick labels (boxplot/violinplot position groups at 1..N)
plt.setp(axes, xticks=[y+1 for y in range(len(all_data))],
         xticklabels=['x1', 'x2', 'x3', 'x4'])
plt.show()
| 29.925 | 91 | 0.6934 |
acec4f0cedd230f705fb36ef5c87d932c8634dc8 | 350 | py | Python | atomic_reactor/tasks/clone.py | qixiang/atomic-reactor | 050325f6be43f6b9399bf5472b87190ada8305bd | [
"BSD-3-Clause"
] | 113 | 2015-07-23T21:37:07.000Z | 2019-05-28T18:58:26.000Z | atomic_reactor/tasks/clone.py | qixiang/atomic-reactor | 050325f6be43f6b9399bf5472b87190ada8305bd | [
"BSD-3-Clause"
] | 921 | 2015-07-13T14:25:48.000Z | 2019-05-31T14:57:39.000Z | atomic_reactor/tasks/clone.py | qixiang/atomic-reactor | 050325f6be43f6b9399bf5472b87190ada8305bd | [
"BSD-3-Clause"
] | 42 | 2015-07-17T12:48:25.000Z | 2019-03-29T07:48:57.000Z | """
Copyright (c) 2021 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from atomic_reactor.tasks import common
class CloneTask(common.Task[common.TaskParams]):
    """Clone task: fetches the build source described by the task params."""
    def execute(self):
        # Delegate to the source object, which knows how to fetch itself.
        self._params.source.get()
| 20.588235 | 61 | 0.72 |
acec4f268d58db2e7b25aff1a2e07605f700301d | 407 | py | Python | Python/Learn Python The Hard Way/ex16.py | Vayne-Lover/Effective | 05f0a08bec8eb112fdb4e7a489d0e33bc81522ff | [
"MIT"
] | null | null | null | Python/Learn Python The Hard Way/ex16.py | Vayne-Lover/Effective | 05f0a08bec8eb112fdb4e7a489d0e33bc81522ff | [
"MIT"
] | null | null | null | Python/Learn Python The Hard Way/ex16.py | Vayne-Lover/Effective | 05f0a08bec8eb112fdb4e7a489d0e33bc81522ff | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from sys import argv

# Usage: python ex16.py <target-file>
# Prompts for three lines of text, overwrites the target file with them,
# then echoes the file's new contents back to the user.
script, target = argv

prompt = "Please input the words."

# Collect the three lines up front so the file is written in one pass.
lines = [input(prompt) for _ in range(3)]

# "w" mode already truncates, so no explicit truncate() call is needed
# (the original opened without a context manager and truncated redundantly).
with open(target, "w") as out:
    for line in lines:
        out.write(line)
        out.write("\n")
print("Finished!")

# Re-open for reading and show what was written.
with open(target) as src:
    print(src.read())
acec502ed63a4c7a0dc332974bfe941850e01e47 | 187 | py | Python | python/stepfunctions/app.py | marclyo/aws-cdk-examples | f041f07ebd4c94897e16d37ff813a38eb32645a1 | [
"Apache-2.0"
] | 2,941 | 2019-02-08T15:29:36.000Z | 2022-03-31T23:57:42.000Z | python/stepfunctions/app.py | marclyo/aws-cdk-examples | f041f07ebd4c94897e16d37ff813a38eb32645a1 | [
"Apache-2.0"
] | 558 | 2019-02-14T23:32:02.000Z | 2022-03-30T00:35:11.000Z | python/stepfunctions/app.py | marclyo/aws-cdk-examples | f041f07ebd4c94897e16d37ff813a38eb32645a1 | [
"Apache-2.0"
] | 1,409 | 2019-02-12T19:13:04.000Z | 2022-03-31T18:46:21.000Z | #!/usr/bin/env python3
from aws_cdk import core
from stepfunctions.stepfunctions_stack import JobPollerStack
# CDK entry point: create the app, register the single job-poller stack,
# and synthesize the CloudFormation template.
app = core.App()
JobPollerStack(app, "aws-stepfunctions-integ")
app.synth()
| 20.777778 | 60 | 0.796791 |
acec503ac7df22f934cc0f082d9499be86ffa6c7 | 363 | py | Python | tests/test_cli.py | cassiobotaro/flask_joke | b66b154c12e880819e88c56b4abf772461666431 | [
"MIT"
] | 1 | 2018-12-05T23:39:13.000Z | 2018-12-05T23:39:13.000Z | tests/test_cli.py | cassiobotaro/flask_joke | b66b154c12e880819e88c56b4abf772461666431 | [
"MIT"
] | null | null | null | tests/test_cli.py | cassiobotaro/flask_joke | b66b154c12e880819e88c56b4abf772461666431 | [
"MIT"
] | null | null | null | from unittest.mock import patch
from flask_joke.cli import main
@patch('builtins.print')
def test_cli(mocked_print):
    """Running the CLI entry point prints the joke exactly once."""
    main()
    mocked_print.assert_called_once()
    # Expected joke text, including its HTML markup, split across
    # implicitly-concatenated string literals.
    s = '<p>Wenn ist das Nunstück git und Slotermeyer? Ja! ... '\
        '<strong>Beiherhund</strong> '\
        'das Oder die Flipperwaldt gersput.</p>'
    mocked_print.assert_called_with(s)
| 25.928571 | 65 | 0.69146 |
acec504530ee1ebab46df1128106d32d14f7e0fa | 602 | py | Python | scripts/power_point.py | jorgediazmontoya/api-convert | e4297725b1acf49b821a937aa625722b9a5ca3bb | [
"MIT"
] | null | null | null | scripts/power_point.py | jorgediazmontoya/api-convert | e4297725b1acf49b821a937aa625722b9a5ca3bb | [
"MIT"
] | null | null | null | scripts/power_point.py | jorgediazmontoya/api-convert | e4297725b1acf49b821a937aa625722b9a5ca3bb | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
from win32com import client
import argparse

# PowerPoint SaveAs format code for PDF export (ppSaveAsPDF).
pttxFormatPDF = 32

parser = argparse.ArgumentParser()
parser.add_argument("-pf", "--pathfile", help="Ruta del archivo a procesar")
args = parser.parse_args()

if args.pathfile:
    # Use the value argparse actually parsed.  The original read
    # sys.argv[2] directly, which only worked for the exact invocation
    # shape `script.py -pf FILE` and broke for `--pathfile FILE`.
    in_file = os.path.abspath(args.pathfile)
    # Output path: same location/name, extension stripped (PowerPoint
    # appends .pdf itself for this format code).
    out_file = os.path.splitext(in_file)[0]
    powerpoint = client.Dispatch("PowerPoint.Application")
    pdf = powerpoint.Presentations.Open(in_file, WithWindow=False)
    pdf.SaveAs(out_file, pttxFormatPDF)
    pdf.Close()
    powerpoint.Quit()
acec51ad795354729013fe584686e7f4b1d8a98f | 6,060 | py | Python | tools/bin/stretch_robot_system_check.py | hello-robot/stretch_body | 9d1e77cedf125ca3ccc3cf57a03a42ad3d3d5a2c | [
"RSA-MD"
] | 19 | 2020-07-14T07:00:45.000Z | 2022-01-26T18:28:59.000Z | tools/bin/stretch_robot_system_check.py | hello-robot/stretch_body | 9d1e77cedf125ca3ccc3cf57a03a42ad3d3d5a2c | [
"RSA-MD"
] | 45 | 2020-05-20T03:05:56.000Z | 2022-01-06T22:35:39.000Z | tools/bin/stretch_robot_system_check.py | hello-robot/stretch_body | 9d1e77cedf125ca3ccc3cf57a03a42ad3d3d5a2c | [
"RSA-MD"
] | 13 | 2020-07-14T23:13:42.000Z | 2022-03-05T02:00:57.000Z | #!/usr/bin/env python
from __future__ import print_function
import time
import stretch_body.robot as robot
import os, fnmatch
import subprocess
from colorama import Fore, Back, Style
import argparse
import stretch_body.hello_utils as hu
from stretch_body.dynamixel_XL430 import *
hu.print_stretch_re_use()
parser=argparse.ArgumentParser(description='Check that all robot hardware is present and reporting sane values')
args=parser.parse_args()
# #####################################################
def val_in_range(val_name, val, vmin, vmax):
    """Print a colored [Pass]/[Fail] line for a value checked against [vmin, vmax]."""
    if vmin <= val <= vmax:
        print(Fore.GREEN + '[Pass] ' + val_name + ' = ' + str(val))
    else:
        print(Fore.RED + '[Fail] ' + val_name + ' = ' + str(val) + ' out of range ' + str(vmin) + ' to ' + str(vmax))
def val_is_not(val_name, val, vnot):
    """Print a colored line: pass when `val` is not the sentinel `vnot` (identity check)."""
    if val is vnot:
        print(Fore.RED + '[Fail] ' + val_name + ' = ' + str(val))
    else:
        print(Fore.GREEN + '[Pass] ' + val_name + ' = ' + str(val))
#Turn off logging so get a clean output
import logging
#logging.disable(logging.CRITICAL)
r=robot.Robot()
r.startup()
# #####################################################
print(Style.RESET_ALL)
print('---- Checking Devices ----')
robot_devices={'hello-wacc':0, 'hello-motor-left-wheel':0,'hello-pimu':0, 'hello-lrf':0,'hello-dynamixel-head':0,'hello-dynamixel-wrist':0,'hello-motor-arm':0,'hello-motor-right-wheel':0,
'hello-motor-lift':0,'hello-respeaker':0}
listOfFiles = os.listdir('/dev')
pattern = "hello*"
for entry in listOfFiles:
if fnmatch.fnmatch(entry, pattern):
robot_devices[entry]=1
for k in robot_devices.keys():
if robot_devices[k]:
print(Fore.GREEN +'[Pass] : '+k)
else:
print(Fore.RED +'[Fail] : '+ k)
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-pimu']:
print('---- Checking Pimu ----')
p=r.pimu
val_in_range('Voltage',p.status['voltage'], vmin=p.config['low_voltage_alert'], vmax=14.5)
val_in_range('Current',p.status['current'], vmin=0.5, vmax=p.config['high_current_alert'])
val_in_range('Temperature',p.status['temp'], vmin=10, vmax=40)
val_in_range('Cliff-0',p.status['cliff_range'][0], vmin=p.config['cliff_thresh'], vmax=20)
val_in_range('Cliff-1',p.status['cliff_range'][1], vmin=p.config['cliff_thresh'], vmax=20)
val_in_range('Cliff-2',p.status['cliff_range'][2], vmin=p.config['cliff_thresh'], vmax=20)
val_in_range('Cliff-3',p.status['cliff_range'][3], vmin=p.config['cliff_thresh'], vmax=20)
val_in_range('IMU AZ',p.status['imu']['az'], vmin=-10.1, vmax=-9.5)
val_in_range('IMU Pitch', hu.rad_to_deg(p.status['imu']['pitch']), vmin=-12, vmax=12)
val_in_range('IMU Roll', hu.rad_to_deg(p.status['imu']['roll']), vmin=-12, vmax=12)
print(Style.RESET_ALL)
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-dynamixel-wrist']:
print('---- Checking EndOfArm ----')
w = r.end_of_arm
try:
for mk in w.motors.keys():
if w.motors[mk].do_ping():
print(Fore.GREEN +'[Pass] Ping of: '+mk)
if w.motors[mk].params['req_calibration']:
if w.motors[mk].motor.is_calibrated():
print(Fore.GREEN + '[Pass] Calibrated: ' + mk)
else:
print(Fore.RED + '[Fail] Not Calibrated: ' + mk)
else:
print(Fore.RED + '[Fail] Ping of: ' + mk)
print(Style.RESET_ALL)
except(IOError, DynamixelCommError):
print(Fore.RED + '[Fail] Startup of EndOfArm')
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-dynamixel-head']:
print('---- Checking Head ----')
h = r.head
try:
for mk in h.motors.keys():
if h.motors[mk].do_ping():
print(Fore.GREEN +'[Pass] Ping of: '+mk)
else:
print(Fore.RED + '[Fail] Ping of: ' + mk)
print(Style.RESET_ALL)
except(IOError, DynamixelCommError):
print(Fore.RED + '[Fail] Startup of EndOfArm')
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-wacc']:
print('---- Checking Wacc ----')
w=r.wacc
val_in_range('AX',w.status['ax'], vmin=8.0, vmax=11.0)
print(Style.RESET_ALL)
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-motor-left-wheel']:
print('---- Checking hello-motor-left-wheel ----')
m = r.base.left_wheel
val_is_not('Position',m.status['pos'], vnot=0)
print(Style.RESET_ALL)
m.stop()
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-motor-right-wheel']:
print('---- Checking hello-motor-right-wheel ----')
m = r.base.right_wheel
val_is_not('Position',m.status['pos'], vnot=0)
print(Style.RESET_ALL)
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-motor-arm']:
print('---- Checking hello-motor-arm ----')
m = r.arm.motor
val_is_not('Position',m.status['pos'], vnot=0)
val_is_not('Position Calibrated', m.status['pos_calibrated'], vnot=False)
print(Style.RESET_ALL)
# #####################################################
print(Style.RESET_ALL)
if robot_devices['hello-motor-lift']:
print('---- Checking hello-motor-lift ----')
m = r.lift.motor
val_is_not('Position',m.status['pos'], vnot=0)
val_is_not('Position Calibrated', m.status['pos_calibrated'], vnot=False)
print(Style.RESET_ALL)
# #####################################################
print(Style.RESET_ALL)
print ('---- Checking for Intel D435i ----')
cmd = "lsusb -d 8086:0b3a"
returned_value = subprocess.call(cmd,shell=True) # returns the exit code in unix
if returned_value==0:
print(Fore.GREEN + '[Pass] : Device found ')
else:
print(Fore.RED + '[Fail] : No device found')
r.stop() | 38.846154 | 187 | 0.572277 |
acec52810bf3992325946e4f6b1c494bb168d27c | 848 | py | Python | abc/abc151/abc151d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | 1 | 2019-08-21T00:49:34.000Z | 2019-08-21T00:49:34.000Z | abc/abc151/abc151d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | abc/abc151/abc151d-2.py | c-yan/atcoder | 940e49d576e6a2d734288fadaf368e486480a948 | [
"MIT"
] | null | null | null | # ワーシャルフロイド
from scipy.sparse.csgraph import csgraph_from_dense, floyd_warshall
from math import isinf

H, W = map(int, input().split())
S = [input() for _ in range(H)]
# Dense adjacency matrix over all H*W cells; open cells are connected to
# their open 4-neighbours with edge weight 1 (0 means "no edge").
g = [[0] * (H * W) for _ in range(H * W)]
for y in range(H):
    for x in range(W):
        if S[y][x] == '#':
            continue
        if y - 1 >= 0 and S[y - 1][x] != '#':
            g[y * W + x][(y - 1) * W + x] = 1
        if y + 1 < H and S[y + 1][x] != '#':
            g[y * W + x][(y + 1) * W + x] = 1
        if x - 1 >= 0 and S[y][x - 1] != '#':
            g[y * W + x][y * W + x - 1] = 1
        if x + 1 < W and S[y][x + 1] != '#':
            g[y * W + x][y * W + x + 1] = 1
# All-pairs shortest paths; unreachable pairs come back as inf.
g = floyd_warshall(csgraph_from_dense(g))
# Answer: the longest shortest path between any pair of reachable cells.
result = 0
for i in range(H * W):
    for j in range(H * W):
        # Skip self-distances (0) and unreachable pairs (inf).  The
        # original let inf through, which made int(result) overflow on
        # any grid containing walls.
        if g[i][j] == 0 or isinf(g[i][j]):
            continue
        result = max(result, g[i][j])
print(int(result))
| 29.241379 | 67 | 0.419811 |
acec534571154049b24c7b53fb1fb186b842e25b | 626 | py | Python | fperms/__init__.py | Formulka/django-fperms | 88b8fa3dd87075a56d8bfeb2b9993c578c22694e | [
"MIT"
] | 3 | 2019-03-29T09:50:45.000Z | 2021-05-01T21:11:33.000Z | fperms/__init__.py | Formulka/django-perms | 88b8fa3dd87075a56d8bfeb2b9993c578c22694e | [
"MIT"
] | 2 | 2018-04-12T00:54:05.000Z | 2018-04-12T16:32:42.000Z | fperms/__init__.py | Formulka/django-perms | 88b8fa3dd87075a56d8bfeb2b9993c578c22694e | [
"MIT"
] | 1 | 2018-07-13T14:42:07.000Z | 2018-07-13T14:42:07.000Z | __version__ = '0.4.2'
from django.apps import apps as django_apps
from fperms.conf import settings
from fperms.exceptions import ImproperlyConfigured
def get_perm_model():
    """
    Returns the Perm model that is active in this project.

    The model is looked up from the ``PERM_MODEL`` setting
    ("app_label.model_name"); configuration problems are re-raised as
    ``ImproperlyConfigured``.
    """
    try:
        # require_ready=False allows lookup before the app registry is fully
        # populated (e.g. during app loading).
        return django_apps.get_model(settings.PERM_MODEL, require_ready=False)
    except ValueError:
        # Raised by get_model when the string is not "app_label.model_name".
        raise ImproperlyConfigured("PERM_MODEL must be of the form 'app_label.model_name'")
    except LookupError:
        # Raised when the app or model named by the setting is not installed.
        raise ImproperlyConfigured(
            "PERM_MODEL refers to model '{}' that has not been installed".format(settings.PERM_MODEL)
        )
| 29.809524 | 101 | 0.715655 |
acec535a0cee117536f037029068f00907d14331 | 5,626 | py | Python | pwndbg/arguments.py | jmc1283/pwndbg | df165f0788948b9b12e6a80fa91d647c13e6eb0a | [
"MIT"
] | 21 | 2018-01-01T13:28:56.000Z | 2019-11-06T15:30:56.000Z | pwndbg/arguments.py | jmc1283/pwndbg | df165f0788948b9b12e6a80fa91d647c13e6eb0a | [
"MIT"
] | null | null | null | pwndbg/arguments.py | jmc1283/pwndbg | df165f0788948b9b12e6a80fa91d647c13e6eb0a | [
"MIT"
] | 5 | 2018-01-02T01:30:41.000Z | 2020-01-04T05:55:57.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Allows describing functions, specifically enumerating arguments which
may be passed in a combination of registers and stack values.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import gdb
from capstone import CS_GRP_CALL
from capstone import CS_GRP_INT
import pwndbg.abi
import pwndbg.arch
import pwndbg.chain
import pwndbg.color.nearpc as N
import pwndbg.constants
import pwndbg.disasm
import pwndbg.funcparser
import pwndbg.functions
import pwndbg.ida
import pwndbg.memory
import pwndbg.regs
import pwndbg.symbol
import pwndbg.typeinfo
ida_replacements = {
'__int64': 'signed long long int',
'__int32': 'signed int',
'__int16': 'signed short',
'__int8': 'signed char',
'__uint64': 'unsigned long long int',
'__uint32': 'unsigned int',
'__uint16': 'unsigned short',
'__uint8': 'unsigned char',
'_BOOL_1': 'unsigned char',
'_BOOL_2': 'unsigned short',
'_BOOL_4': 'unsigned int',
'_BYTE': 'unsigned char',
'_WORD': 'unsigned short',
'_DWORD': 'unsigned int',
'_QWORD': 'unsigned long long',
'__pure': '',
'__hidden': '',
'__return_ptr': '',
'__struct_ptr': '',
'__array_ptr': '',
'__fastcall': '',
'__cdecl': '',
'__thiscall': '',
'__userpurge': '',
}
def get_syscall_name(instruction):
    """
    Return the 'SYS_<name>' string for a syscall instruction, or None.

    Only instructions in the CS_GRP_INT group are considered; any failure
    to resolve the ABI, the syscall register, or the constant name yields
    None.
    """
    if CS_GRP_INT not in instruction.groups:
        return None
    try:
        abi = pwndbg.abi.ABI.syscall()
        syscall = getattr(pwndbg.regs, abi.syscall_register)
        name = pwndbg.constants.syscall(syscall)
        return 'SYS_' + name
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt and
        # SystemExit; Exception keeps the intended "best effort" behavior
        # (including TypeError when `name` resolves to None).
        return None
def get(instruction):
    """
    Returns a list of (Argument, value) pairs for the call at `instruction`,
    if $pc is a 'call'/'bl' instruction or a syscall ('int') instruction.
    Returns an empty list otherwise, or whenever the target/ABI cannot be
    resolved (the original docstring claimed None, but every early exit
    below returns []).
    """
    # Fallback arg count when neither GDB, the local DB, nor IDA knows better.
    n_args_default = 4
    # Only annotate the instruction the CPU is actually stopped at.
    if instruction.address != pwndbg.regs.pc:
        return []
    try:
        abi = pwndbg.abi.ABI.default()
    except KeyError:
        return []
    if CS_GRP_CALL in instruction.groups:
        # Not sure of any OS which allows multiple operands on
        # a call instruction.
        assert len(instruction.operands) == 1
        target = instruction.operands[0].int
        if not target:
            return []
        name = pwndbg.symbol.get(target)
        if not name:
            return []
    elif CS_GRP_INT in instruction.groups:
        # Get the syscall number and name
        abi = pwndbg.abi.ABI.syscall()
        target = None
        syscall = getattr(pwndbg.regs, abi.syscall_register)
        name = pwndbg.constants.syscall(syscall)
    else:
        return []
    result = []
    name = name or ''
    sym = gdb.lookup_symbol(name)
    # Strip compiler/linker decorations so the name matches our function DB.
    name = name.replace('isoc99_', '') # __isoc99_sscanf
    name = name.replace('@plt', '') # getpwiod@plt
    # If we have particular `XXX_chk` function in our database, we use it.
    # Otherwise, we show args for its unchecked version.
    # We also lstrip `_` in here, as e.g. `__printf_chk` needs the underscores.
    if name not in pwndbg.functions.functions:
        name = name.replace('_chk', '')
        name = name.strip().lstrip('_') # _malloc
    func = pwndbg.functions.functions.get(name, None)
    # Try to extract the data from GDB.
    # Note that this is currently broken, pending acceptance of
    # my patch: https://sourceware.org/ml/gdb-patches/2015-06/msg00268.html
    if sym and sym[0]:
        try:
            n_args_default = len(sym[0].type.fields())
        except TypeError:
            pass
    # Try to grab the data out of IDA
    if not func and target:
        typename = pwndbg.ida.GetType(target)
        if typename:
            typename += ';'
            # GetType() does not include the name.
            typename = typename.replace('(', ' function_name(', 1)
            # Rewrite IDA-specific type keywords into standard C types.
            for k, v in ida_replacements.items():
                typename = typename.replace(k, v)
            func = pwndbg.funcparser.ExtractFuncDeclFromSource(typename + ';')
    if func:
        args = func.args
    else:
        # No prototype available anywhere: synthesize generic int arguments.
        args = [pwndbg.functions.Argument('int', 0, argname(i, abi)) for i in range(n_args_default)]
    for i, arg in enumerate(args):
        result.append((arg, argument(i, abi)))
    return result
def argname(n, abi=None):
    """Return the display name of the n-th argument slot: the ABI register
    name when one exists, otherwise a synthetic 'arg[n]' label."""
    abi = abi or pwndbg.abi.ABI.default()
    regs = abi.register_arguments
    return regs[n] if n < len(regs) else 'arg[%i]' % n
def argument(n, abi=None):
    """
    Returns the nth argument, as if $pc were a 'call' or 'bl' type
    instruction.
    Works only for ABIs that use registers for arguments.
    """
    abi = abi or pwndbg.abi.ABI.default()
    regs = abi.register_arguments
    # First len(regs) arguments live in registers; read directly.
    if n < len(regs):
        return getattr(pwndbg.regs, regs[n])
    # Remaining arguments spill to the stack: slot (n - len(regs)) above $sp,
    # one pointer-sized word each.
    n -= len(regs)
    sp = pwndbg.regs.sp + (n * pwndbg.arch.ptrsize)
    return int(pwndbg.memory.poi(pwndbg.typeinfo.ppvoid, sp))
def arguments(abi=None):
    """
    Yield (arg_name, arg_value) pairs for every register-passed argument
    of the given ABI (defaults to the current platform ABI).
    """
    abi = abi or pwndbg.abi.ABI.default()
    for index, _reg in enumerate(abi.register_arguments):
        yield argname(index, abi), argument(index, abi)
def format_args(instruction):
    """Return one formatted 'name: value' string per argument of the call at
    `instruction`, suitable for display next to the disassembly."""
    result = []
    for arg, value in get(instruction):
        # Treat everything except char arguments as a pointer chain to follow.
        code = arg.type != 'char'
        pretty = pwndbg.chain.format(value, code=code)
        result.append('%-10s %s' % (N.argument(arg.name) + ':', pretty))
    return result
| 26.663507 | 100 | 0.627444 |
acec54024974cdc459f0ad3c439e5d2bade20889 | 1,104 | py | Python | languageserver/transport/stdio.py | cybojenix/languageserver-python | 446c882864e18bed2b0f4f8c0524d9c3fec4b5f9 | [
"MIT"
] | null | null | null | languageserver/transport/stdio.py | cybojenix/languageserver-python | 446c882864e18bed2b0f4f8c0524d9c3fec4b5f9 | [
"MIT"
] | null | null | null | languageserver/transport/stdio.py | cybojenix/languageserver-python | 446c882864e18bed2b0f4f8c0524d9c3fec4b5f9 | [
"MIT"
] | null | null | null | import asyncio
import sys
from typing import AsyncGenerator
class PipeTransport:
    """Bidirectional transport over a pair of asyncio streams (stdin/stdout)."""
    reader: asyncio.StreamReader
    writer: asyncio.StreamWriter
    def __init__(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        self.reader = reader
        self.writer = writer
    @classmethod
    async def open(cls) -> "PipeTransport":
        """Create a transport wired to this process's stdin/stdout pipes."""
        loop = asyncio.get_event_loop()
        reader = asyncio.StreamReader()
        reader_protocol = asyncio.StreamReaderProtocol(reader)
        reader_transport, _ = await loop.connect_read_pipe(
            lambda: reader_protocol, sys.stdin
        )
        write_transport, write_protocol = await loop.connect_write_pipe(
            asyncio.Protocol, sys.stdout
        )
        writer = asyncio.StreamWriter(write_transport, write_protocol, None, loop)
        return cls(reader, writer)
    async def listen(self) -> AsyncGenerator[bytes, None]:
        """Yield chunks of incoming data until the stream reaches EOF.

        Bug fix: the previous implementation awaited ``reader.read()`` with
        no size — which blocks until EOF — inside ``while True`` and then
        spun forever yielding empty ``b''`` chunks.  Read bounded chunks and
        stop cleanly when the stream is exhausted.
        """
        while True:
            data = await self.reader.read(65536)
            if not data:  # b'' signals EOF on a StreamReader
                return
            yield data
    def write(self, data: bytes) -> None:
        # NOTE(review): no backpressure handling (writer.drain) — assumed OK
        # for small protocol messages; confirm for large payloads.
        self.writer.write(data)
| 26.926829 | 82 | 0.651268 |
acec546c23f374f90c75c102574040537d940572 | 1,324 | py | Python | ooobuild/dyn/util/x_job_manager.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/util/x_job_manager.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | ooobuild/dyn/util/x_job_manager.py | Amourspirit/ooo_uno_tmpl | 64e0c86fd68f24794acc22d63d8d32ae05dd12b8 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Interface Class
# this is a auto generated file generated by Cheetah
# Libre Office Version: 7.3
# Namespace: com.sun.star.util
from typing import TYPE_CHECKING
from ooo.oenv.env_const import UNO_ENVIRONMENT, UNO_RUNTIME
_DYNAMIC = False
if (not TYPE_CHECKING) and UNO_RUNTIME and UNO_ENVIRONMENT:
_DYNAMIC = True
if not TYPE_CHECKING and _DYNAMIC:
from com.sun.star.util import XJobManager as XJobManager
setattr(XJobManager, '__ooo_ns__', 'com.sun.star.util')
setattr(XJobManager, '__ooo_full_ns__', 'com.sun.star.util.XJobManager')
setattr(XJobManager, '__ooo_type_name__', 'interface')
else:
from ...lo.util.x_job_manager import XJobManager as XJobManager
__all__ = ['XJobManager']
| 35.783784 | 76 | 0.762085 |
acec54d28e24f188df5846e3fd442b88dcc800ae | 6,212 | py | Python | pdm/core.py | leibowitz/pdm | e6827a0bb5278c247fc4068ca373c9d9863ac98d | [
"MIT"
] | null | null | null | pdm/core.py | leibowitz/pdm | e6827a0bb5278c247fc4068ca373c9d9863ac98d | [
"MIT"
] | null | null | null | pdm/core.py | leibowitz/pdm | e6827a0bb5278c247fc4068ca373c9d9863ac98d | [
"MIT"
] | null | null | null | from __future__ import annotations
import argparse
import importlib
import os
import pkgutil
import sys
from typing import Any, List, Optional, Type, cast
import click
from pip._vendor import pkg_resources
from resolvelib import Resolver
from pdm import termui
from pdm.cli.actions import migrate_pyproject, print_pep582_command
from pdm.cli.commands.base import BaseCommand
from pdm.cli.options import ignore_python_option, pep582_option, verbose_option
from pdm.cli.utils import PdmFormatter, PdmParser
from pdm.exceptions import PdmUsageError
from pdm.installers import Synchronizer
from pdm.models.repositories import PyPIRepository
from pdm.project import Project
from pdm.project.config import Config, ConfigItem
COMMANDS_MODULE_PATH: str = importlib.import_module(
"pdm.cli.commands"
).__path__ # type: ignore
class Core:
"""A high level object that manages all classes and configurations"""
def __init__(self) -> None:
if sys.version_info >= (3, 8):
import importlib.metadata as importlib_metadata
else:
import importlib_metadata
self.version = importlib_metadata.version(__name__.split(".")[0])
self.project_class = Project
self.repository_class = PyPIRepository
self.resolver_class = Resolver
self.synchronizer_class = Synchronizer
self.ui = termui.UI()
self.parser: Optional[PdmParser] = None
self.subparsers: Optional[argparse._SubParsersAction] = None
def init_parser(self) -> None:
self.parser = PdmParser(
prog="pdm",
description="PDM - Python Development Master",
formatter_class=PdmFormatter,
)
self.parser.is_root = True # type: ignore
self.parser.add_argument(
"-V",
"--version",
action="version",
version="{}, version {}".format(
click.style("pdm", bold=True), self.version
),
help="show the version and exit",
)
verbose_option.add_to_parser(self.parser)
ignore_python_option.add_to_parser(self.parser)
pep582_option.add_to_parser(self.parser)
self.subparsers = self.parser.add_subparsers()
for _, name, _ in pkgutil.iter_modules(COMMANDS_MODULE_PATH):
module = importlib.import_module(f"pdm.cli.commands.{name}", __name__)
try:
klass = module.Command # type: ignore
except AttributeError:
continue
self.register_command(klass, klass.name or name)
def __call__(self, *args: Any, **kwargs: Any) -> None:
return self.main(*args, **kwargs)
def ensure_project(
self, options: argparse.Namespace, obj: Optional[Project]
) -> None:
if obj is not None:
options.project = obj
if getattr(options, "project", None) is None:
global_project = getattr(options, "global_project", None)
default_root = (
None
if global_project or getattr(options, "search_parent", True)
else "."
)
project = self.create_project(
getattr(options, "project_path", None) or default_root, # type: ignore
is_global=global_project,
)
options.project = project
migrate_pyproject(options.project)
def create_project(
self, root_path: Optional[os.PathLike] = None, is_global: bool = False
) -> Project:
return self.project_class(self, root_path, is_global)
def main(
self,
args: List[str] = None,
prog_name: str = None,
obj: Optional[Project] = None,
**extra: Any,
) -> None:
"""The main entry function"""
from pdm.models.pip_shims import global_tempdir_manager
self.init_parser()
self.load_plugins()
assert self.parser
assert self.subparsers
options = self.parser.parse_args(args or None)
self.ui.set_verbosity(options.verbose)
if options.ignore_python:
os.environ["PDM_IGNORE_SAVED_PYTHON"] = "1"
if options.pep582:
print_pep582_command(self.ui, options.pep582)
sys.exit(0)
self.ensure_project(options, obj)
try:
f = options.handler
except AttributeError:
self.parser.print_help()
sys.exit(1)
else:
try:
with global_tempdir_manager():
f(options.project, options)
except Exception:
etype, err, traceback = sys.exc_info()
should_show_tb = not isinstance(err, PdmUsageError)
if self.ui.verbosity > termui.NORMAL and should_show_tb:
raise cast(Exception, err).with_traceback(traceback)
self.ui.echo(
f"{termui.red('[' + etype.__name__ + ']')}: {err}", # type: ignore
err=True,
)
if should_show_tb:
self.ui.echo("Add '-v' to see the detailed traceback", fg="yellow")
sys.exit(1)
def register_command(
self, command: Type[BaseCommand], name: Optional[str] = None
) -> None:
"""Register a subcommand to the subparsers,
with an optional name of the subcommand.
"""
assert self.subparsers
command.register_to(self.subparsers, name)
@staticmethod
def add_config(name: str, config_item: ConfigItem) -> None:
"""Add a config item to the configuration class"""
Config.add_config(name, config_item)
def load_plugins(self) -> None:
"""Import and load plugins under `pdm.plugin` namespace
A plugin is a callable that accepts the core object as the only argument.
:Example:
def my_plugin(core: pdm.core.Core) -> None:
...
"""
for plugin in pkg_resources.iter_entry_points("pdm"): # type: ignore
plugin.load()(self)
def main(args: Optional[List[str]] = None) -> None:
    """The CLI entry function.

    Builds a fresh Core (parser, plugins, project state) per invocation and
    delegates argument handling to it; `args` defaults to sys.argv parsing.
    """
    return Core().main(args)
| 33.578378 | 87 | 0.611397 |
acec54f3c7879388482c9d762a11da0ced7a8ca7 | 2,034 | py | Python | applaud/endpoints/beta_app_clip_invocation_localizations.py | codinn/applaud | ed168ca67465b5c0acf4ab4f4e285a2ab348c96d | [
"MIT"
] | 3 | 2022-01-22T15:34:13.000Z | 2022-03-22T06:46:48.000Z | applaud/endpoints/beta_app_clip_invocation_localizations.py | codinn/applaud | ed168ca67465b5c0acf4ab4f4e285a2ab348c96d | [
"MIT"
] | 1 | 2022-03-13T17:13:01.000Z | 2022-03-16T05:04:51.000Z | applaud/endpoints/beta_app_clip_invocation_localizations.py | codinn/applaud | ed168ca67465b5c0acf4ab4f4e285a2ab348c96d | [
"MIT"
] | 1 | 2022-03-16T02:17:30.000Z | 2022-03-16T02:17:30.000Z | from __future__ import annotations
from .base import Endpoint, IDEndpoint, SortOrder, endpoint
from ..fields import *
from typing import Union
from ..schemas.models import *
from ..schemas.responses import *
from ..schemas.requests import *
from ..schemas.enums import *
class BetaAppClipInvocationLocalizationsEndpoint(Endpoint):
    # Collection endpoint: supports POST (create) only.
    path = '/v1/betaAppClipInvocationLocalizations'

    def create(self, request: BetaAppClipInvocationLocalizationCreateRequest) -> BetaAppClipInvocationLocalizationResponse:
        '''Create the resource.

        :param request: BetaAppClipInvocationLocalization representation
        :type request: BetaAppClipInvocationLocalizationCreateRequest

        :returns: Single BetaAppClipInvocationLocalization
        :rtype: BetaAppClipInvocationLocalizationResponse
        :raises: :py:class:`applaud.schemas.responses.ErrorResponse`: if a request or a HTTP error occurred.
        '''
        # POST the serialized request and deserialize the API's JSON reply.
        json = super()._perform_post(request)
        return BetaAppClipInvocationLocalizationResponse.parse_obj(json)
class BetaAppClipInvocationLocalizationEndpoint(IDEndpoint):
    # Single-resource endpoint: supports PATCH (update) and DELETE.
    path = '/v1/betaAppClipInvocationLocalizations/{id}'

    def update(self, request: BetaAppClipInvocationLocalizationUpdateRequest) -> BetaAppClipInvocationLocalizationResponse:
        '''Modify the resource.

        :param request: BetaAppClipInvocationLocalization representation
        :type request: BetaAppClipInvocationLocalizationUpdateRequest

        :returns: Single BetaAppClipInvocationLocalization
        :rtype: BetaAppClipInvocationLocalizationResponse
        :raises: :py:class:`applaud.schemas.responses.ErrorResponse`: if a request or a HTTP error occurred.
        '''
        # PATCH the serialized request and deserialize the API's JSON reply.
        json = super()._perform_patch(request)
        return BetaAppClipInvocationLocalizationResponse.parse_obj(json)

    def delete(self):
        '''Delete the resource.

        :raises: :py:class:`applaud.schemas.responses.ErrorResponse`: if a request or a HTTP error occurred.
        '''
        super()._perform_delete()
| 41.510204 | 123 | 0.755162 |
acec569e05d2ae879fdd12d236a86d9e69509e91 | 238 | py | Python | 19/00/3.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | [
"CC0-1.0"
] | null | null | null | 19/00/3.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | [
"CC0-1.0"
] | 46 | 2017-06-30T22:19:07.000Z | 2017-07-31T22:51:31.000Z | 19/00/3.py | pylangstudy/201707 | c1cc72667f1e0b6e8eef4ee85067d7fa4ca500b6 | [
"CC0-1.0"
] | null | null | null | code = '''
def f():
class MyClass: pass
c = MyClass()
print(c.__class__.__name__)
#c = MyClass() # NameError: name 'MyClass' is not defined
#print(c.__class__.__name__)
'''
ret = exec(code, globals(), locals())
print(ret)
f()
| 19.833333 | 57 | 0.634454 |
acec576661526a7cde35aa19be8c25e3cb67d26e | 8,436 | py | Python | test/checksums.py | CarysT/xar | f476c05dec373fcdcd0e884d5a0201501555edb9 | [
"BSD-2-Clause"
] | null | null | null | test/checksums.py | CarysT/xar | f476c05dec373fcdcd0e884d5a0201501555edb9 | [
"BSD-2-Clause"
] | null | null | null | test/checksums.py | CarysT/xar | f476c05dec373fcdcd0e884d5a0201501555edb9 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
import hashlib
import os
import os.path
import re
import shutil
import struct
import subprocess
import util
#
# Utility Functions
#
def _get_numeric_value_from_header(archive_name, key):
    """
    Dumps the header of the specified xar archive and returns the integer
    value reported for `key` (e.g. "size"), in bytes.  Raises AssertionError
    if a header line is not in "key: value" form, or if `key` is absent.
    """
    # universal_newlines=True makes check_output return text on Python 3;
    # the original got bytes there, so the str regex below raised TypeError.
    header = subprocess.check_output(["xar", "--dump-header", "-f", archive_name],
                                     universal_newlines=True)
    for line in header.splitlines():
        # Raw string avoids the invalid-escape warning for \s.
        matchdata = re.match(r"^(.+):\s+(.+)$", line)  # magic: 0x78617221 (OK)
        assert matchdata, "unexpected output from `xar --dump-header`:\n{h}".format(h=header)
        if matchdata.groups()[0] == key:
            return int(matchdata.groups()[1])
    raise AssertionError("no \"{k}\" found for archive \"{n}\":\n{h}".format(k=key, n=archive_name, h=header))
def _get_header_size(archive_name):
    # Size of the xar header in bytes, as reported by the archive itself.
    return _get_numeric_value_from_header(archive_name, "size")
def _get_toc_size(archive_name):
return _get_numeric_value_from_header(archive_name, "Compressed TOC length")
def _clobber_bytes_at(clobber_range, path):
with open(path, "r+") as f:
f.seek(clobber_range[0])
with open("/dev/random", "r") as r:
random_bytes = r.read(len(clobber_range))
f.write(random_bytes)
def _verify_extraction_failed(filename):
    """
    Assert that `xar -x` exits non-zero for *filename*.

    Used after deliberately corrupting an archive: successful extraction
    would mean xar failed to notice the corruption.  The scratch
    "extracted" directory is always removed afterwards.
    """
    with util.directory_created("extracted") as directory:
        try:
            # Silence xar's own output; only the exit status matters here.
            with open("/dev/null", "w") as n:
                returncode = subprocess.call(["xar", "-x", "-C", directory, "-f", filename], stdout=n, stderr=n)
                assert returncode != 0, "xar reported success extracting an archive with a broken TOC"
        finally:
            if os.path.exists(directory):
                shutil.rmtree(directory)
def _hex_digest_string(raw_digest):
unpack_string = "B" * len(raw_digest)
format_string = "%02x" * len(raw_digest)
return format_string % struct.unpack(unpack_string, raw_digest)
def _verify_header_checksum(filename, algorithm):
    """
    Recompute the TOC checksum of *filename* with *algorithm* (a hashlib
    algorithm name such as "sha1") and assert it matches the digest stored
    in the archive immediately after the compressed TOC.
    """
    header_size = _get_header_size(filename)
    toc_length = _get_toc_size(filename)
    # Binary mode: we hash and compare raw digest bytes, not text.  The
    # original text-mode open made the byte comparison unreliable/broken
    # under Python 3.
    with open(filename, "rb") as f:
        f.seek(header_size)
        h = hashlib.new(algorithm, f.read(toc_length))
        computed_digest = h.digest()
        # We intentionally ignore the TOC-specified offset. We should build a variant of this that uses the TOC
        # offset so we check both.
        stored_digest = f.read(len(computed_digest))
        assert computed_digest == stored_digest, "Digests don't match: expected value {d1} != stored value {d2}".format(d1=_hex_digest_string(computed_digest), d2=_hex_digest_string(stored_digest))
#
# Test Cases
#
# TOC checksum validity: archive /bin with each supported --toc-cksum
# algorithm and verify the stored TOC digest recomputes correctly.
def default_toc_checksum_validity(filename):
    # No explicit --toc-cksum: xar's default is sha1 (see
    # default_checksum_algorithm below).
    with util.archive_created(filename, "/bin") as path:
        _verify_header_checksum(path, "sha1")
def sha1_toc_checksum_validity(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha1") as path:
        _verify_header_checksum(path, "sha1")
def sha256_toc_checksum_validity(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha256") as path:
        _verify_header_checksum(path, "sha256")
def sha512_toc_checksum_validity(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha512") as path:
        _verify_header_checksum(path, "sha512")
# Broken-TOC cases: corrupt 100 bytes near the start of the TOC and assert
# that extraction fails for every TOC checksum algorithm.
# NOTE(review): these pass `filename` (not the context manager's `path`)
# to _verify_extraction_failed -- presumably the two are the same string;
# confirm against util.archive_created.
def broken_toc_default_checksum(filename):
    with util.archive_created(filename, "/bin") as path:
        # Mess up the archive
        toc_start = _get_header_size(path)
        _clobber_bytes_at(list(range(toc_start + 4, toc_start + 4 + 100)), path) # Why did the original test specify 4? No idea.
        # Try to extract it
        _verify_extraction_failed(filename)
def broken_toc_sha1_checksum(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha1") as path:
        # Mess up the archive
        toc_start = _get_header_size(path)
        _clobber_bytes_at(list(range(toc_start + 4, toc_start + 4 + 100)), path) # Why did the original test specify 4? No idea.
        # Try to extract it
        _verify_extraction_failed(filename)
def broken_toc_sha256_checksum(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha256") as path:
        # Mess up the archive
        toc_start = _get_header_size(path)
        _clobber_bytes_at(list(range(toc_start + 4, toc_start + 4 + 100)), path) # Why did the original test specify 4? No idea.
        # Try to extract it
        _verify_extraction_failed(filename)
def broken_toc_sha512_checksum(filename):
    with util.archive_created(filename, "/bin", "--toc-cksum", "sha512") as path:
        # Mess up the archive
        toc_start = _get_header_size(path)
        _clobber_bytes_at(list(range(toc_start + 4, toc_start + 4 + 100)), path) # Why did the original test specify 4? No idea.
        # Try to extract it
        _verify_extraction_failed(filename)
def broken_heap_default_checksum(filename):
    """Corrupt 100 bytes of the heap (past TOC + stored digest) and assert
    that extraction fails."""
    with util.archive_created(filename, "/bin") as path:
        # Mess up the archive
        toc_start = _get_header_size(path)
        toc_size = _get_toc_size(path)
        # Why 32? That's the size of the default sha256 checksum, which is stored before the heap.
        # NOTE(review): default_checksum_algorithm below asserts the default
        # TOC algorithm is SHA1 (20-byte digest); confirm which digest is
        # actually stored before the heap -- the 32-byte skip may land
        # inside file data either way, which still corrupts the heap.
        _clobber_bytes_at(list(range(toc_start + toc_size + 32, toc_start + toc_size + 32 + 100)), path)
        # Try to extract it
        _verify_extraction_failed(filename)
def default_checksum_algorithm(filename):
    """Assert that a freshly-created archive reports SHA1 as its checksum
    algorithm in `xar --dump-header` output."""
    with util.archive_created(filename, "/bin") as path:
        header = subprocess.check_output(["xar", "--dump-header", "-f", path])
        found = False
        for line in header.splitlines():
            # Expected line shape: "Checksum algorithm: 1 (SHA1)".
            matchdata = re.match("^Checksum algorithm:\s+(\d+)\s+\\((\w+)\\)$", line)
            if not matchdata:
                continue
            found = True
            algorithm = matchdata.groups()[1]
            assert algorithm == "SHA1", "unexpected checksum algorithm default: received {a}, expected SHA1".format(a=algorithm)
        assert found, "unexpected output from `xar --dump-header`:\n{h}".format(h=header)
# Apparently, xar doesn't currently fail when given an invalid checksum algorithm. Something to fix later.
#
# def invalid_checksum_algorithm(filename):
# try:
# with util.archive_created(filename, "/bin", "--toc-cksum", "invalid-algorithm") as path:
# raise AssertionError("xar succeeded when it should have failed")
# except subprocess.CalledProcessError:
# pass
# It does fail for md5 explicitly, however
# md5 is explicitly rejected by xar for both TOC and file checksums; these
# cases assert archive creation fails (non-zero exit) when it is requested.
def md5_toc_checksum_failure(filename):
    try:
        with open("/dev/null", "a") as devnull:
            with util.archive_created(filename, "/bin", "--toc-cksum", "md5", stderr=devnull) as path:
                raise AssertionError("xar succeeded when it should have failed")
    except subprocess.CalledProcessError:
        # Expected: xar refused the md5 TOC checksum.
        pass
def md5_file_checksum_failure(filename):
    try:
        with open("/dev/null", "a") as devnull:
            with util.archive_created(filename, "/bin", "--file-cksum", "md5", stderr=devnull) as path:
                raise AssertionError("xar succeeded when it should have failed")
    except subprocess.CalledProcessError:
        # Expected: xar refused the md5 file checksum.
        pass
def _verify_checksum_algorithm(filename, algorithm):
    """
    Create an archive of /bin with the given --file-cksum *algorithm*
    (None means "use the default", i.e. sha1) and assert the dumped TOC
    contains a per-file checksum element in that style.
    """
    additional_args = []
    if algorithm:
        additional_args = ["--file-cksum", algorithm]
    else:
        algorithm = "sha1"
    with util.archive_created(filename, "/bin", *additional_args) as path:
        toc = subprocess.check_output(["xar", "--dump-toc=-", "-f", path])
        # for/else: raise only if no TOC line matched.  (A stray unused
        # `found` flag was removed; the loop's else-clause does the work.)
        for line in toc.splitlines():
            if '<unarchived-checksum style="{a}">'.format(a=algorithm) in line or '<archived-checksum style="{a}">'.format(a=algorithm) in line:
                break
        else:
            raise AssertionError("unexpected output from `xar --dump-toc=-`:\n{t}".format(t=toc))
# Per-file checksum algorithm cases, all delegating to
# _verify_checksum_algorithm (None = default, expected sha1).
def default_file_checksum_algorithm(filename):
    _verify_checksum_algorithm(filename, None)
def sha1_file_checksum_algorithm(filename):
    _verify_checksum_algorithm(filename, "sha1")
def sha256_file_checksum_algorithm(filename):
    _verify_checksum_algorithm(filename, "sha256")
def sha512_file_checksum_algorithm(filename):
    _verify_checksum_algorithm(filename, "sha512")
TEST_CASES = (default_toc_checksum_validity, sha1_toc_checksum_validity, sha256_toc_checksum_validity, sha512_toc_checksum_validity,
broken_toc_default_checksum, broken_toc_sha1_checksum, broken_toc_sha256_checksum, broken_toc_sha512_checksum,
broken_heap_default_checksum, default_checksum_algorithm,
default_file_checksum_algorithm, sha1_file_checksum_algorithm, sha256_file_checksum_algorithm, sha512_file_checksum_algorithm,
md5_toc_checksum_failure, md5_file_checksum_failure,)
if __name__ == "__main__":
for case in TEST_CASES:
try:
case("{f}.xar".format(f=case.__name__))
print("PASSED: {f}".format(f=case.__name__))
except (AssertionError, IOError, subprocess.CalledProcessError):
import sys, os
print("FAILED: {f}".format(f=case.__name__))
sys.excepthook(*sys.exc_info())
print("")
| 37.327434 | 191 | 0.739569 |
acec590e0c50994ab2f9e26cf15dcb587ca8d7e4 | 1,881 | py | Python | workers/servicekeyworker/servicekeyworker.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | workers/servicekeyworker/servicekeyworker.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | workers/servicekeyworker/servicekeyworker.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | import logging
from datetime import datetime, timedelta
from prometheus_client import Counter
from app import app, instance_keys
from workers.servicekeyworker.models_pre_oci import pre_oci_model as model
from workers.worker import Worker
logger = logging.getLogger(__name__)
instance_key_renewal_self = Counter(
"quay_instance_key_renewal_self_total",
"number of times a Quay instance renews its own key",
labelnames=["success"],
)
class ServiceKeyWorker(Worker):
    """Periodic worker that renews this instance's service key before it
    can expire and be garbage collected.

    Each renewal outcome is counted in the
    `quay_instance_key_renewal_self_total` Prometheus metric with a
    success/failure label.
    """
    def __init__(self):
        super(ServiceKeyWorker, self).__init__()
        # Refresh interval comes from config (minutes), converted to the
        # seconds the scheduler expects; default is hourly.
        self.add_operation(
            self._refresh_service_key, app.config.get("INSTANCE_SERVICE_KEY_REFRESH", 60) * 60
        )
    def _refresh_service_key(self):
        """ Refreshes the instance's active service key so it doesn't get garbage collected. """
        expiration_time = timedelta(minutes=instance_keys.service_key_expiration)
        new_expiration = datetime.utcnow() + expiration_time
        logger.debug(
            "Starting automatic refresh of service key %s to new expiration %s",
            instance_keys.local_key_id,
            new_expiration,
        )
        try:
            model.set_key_expiration(instance_keys.local_key_id, new_expiration)
        except Exception as ex:
            # Record the failed renewal before re-raising so the scheduler
            # (and alerting on the metric) can see it.
            logger.exception(
                "Failure for automatic refresh of service key %s with new expiration %s",
                instance_keys.local_key_id,
                new_expiration,
            )
            instance_key_renewal_self.labels(False).inc()
            raise ex
        logger.debug(
            "Finished automatic refresh of service key %s with new expiration %s",
            instance_keys.local_key_id,
            new_expiration,
        )
        instance_key_renewal_self.labels(True).inc()
if __name__ == "__main__":
worker = ServiceKeyWorker()
worker.start()
| 31.35 | 96 | 0.671451 |
acec5aa1d29886a59ebf54af1a56208aee6737d9 | 132,871 | py | Python | userbot/modules/memesmix.py | MADEWGN/botgabut | d92094598fd7cdfd40f41e31fa44035212ed2cd8 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 4 | 2020-12-14T14:21:21.000Z | 2021-02-06T14:29:26.000Z | userbot/modules/memesmix.py | MADEWGN/botgabut | d92094598fd7cdfd40f41e31fa44035212ed2cd8 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 2 | 2021-01-04T13:13:05.000Z | 2021-03-23T02:06:32.000Z | userbot/modules/memesmix.py | MADEWGN/botgabut | d92094598fd7cdfd40f41e31fa44035212ed2cd8 | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | 20 | 2020-12-19T19:21:42.000Z | 2021-10-05T15:07:42.000Z | # all plugins are imported from bothub,x-tra-telegram by @heyworld
# Don't edit or you gay
# credits: spechide,ravana69,mkaraniya & me
import asyncio
import random
from collections import deque
from platform import uname

from telethon import events
from telethon.errors.rpcerrorlist import MessageIdInvalidError

from userbot import ALIVE_NAME, CMD_HELP, bot
from userbot.events import register
# ================= CONSTANT =================
# Display name used in animation texts (e.g. the .hack ransom message).
# Falls back to the host's node name when ALIVE_NAME is unset.
# NOTE(review): `uname` is not imported anywhere in this module, so the
# fallback branch raises NameError -- it needs `from platform import uname`.
DEFAULTUSER = str(ALIVE_NAME) if ALIVE_NAME else uname().node
# ============================================
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 3
animation_ttl = range(0, 103)
input_str = event.pattern_match.group(1)
if input_str == "eye":
await event.edit(input_str)
animation_chars = [
"👁👁\n 👄 =====> Hey, How are you?",
"👁👁\n 👅 =====> Everything okay?",
"👁👁\n 💋 =====> Why are you staring at this?",
"👁👁\n 👄 =====> You idiot",
"👁👁\n 👅 =====> Go away",
"👁👁\n 💋 =====> Stop laughing",
"👁👁\n 👄 =====> It's not funny",
"👁👁\n 👅 =====> I guess ur still looking",
"👁👁\n 💋 =====> Ok retard 😑",
"👁👁\n 👄 =====> I go away then"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 103])
@register(outgoing=True, pattern="^.earth(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
deq = deque(list("🌏🌍🌎🌎🌍🌏🌍🌎"))
for _ in range(48):
await asyncio.sleep(0.1)
await event.edit("".join(deq))
deq.rotate(1)
@bot.on(events.NewMessage(pattern=r"\.bombs", outgoing=True))
async def _(event):
if event.fwd_from:
return
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("💣💣💣💣 \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n💣💣💣💣 \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n💣💣💣💣 \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n💣💣💣💣 \n▪️▪️▪️▪️ \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n💣💣💣💣 \n")
await asyncio.sleep(1)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n💥💥💥💥 \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n💥💥💥💥 \n💥💥💥💥 \n")
await asyncio.sleep(0.5)
await event.edit("▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n▪️▪️▪️▪️ \n😵😵😵😵 \n")
await asyncio.sleep(0.5)
await event.edit("RIP PLOX...")
await asyncio.sleep(2)
await event.delete()
"""Available Commands: .gift"""
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.5
animation_ttl = range(0, 17)
input_str = event.pattern_match.group(1)
if input_str == "gift":
await event.edit(input_str)
animation_chars = [
"⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬜⬜⬜⬜\n👇⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬜⬜⬜\n⬜👇⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬛⬜⬜\n⬜⬜👇⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬛⬛⬜\n⬜⬜⬜👇⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬛⬛⬜\n⬜⬜⬜⬛⬜\n⬜⬜⬜👇⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬛⬛⬜\n⬜⬜⬜⬛⬜\n⬜⬜⬜⬛⬜\n⬜⬜⬜👇⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜",
"⬛⬛⬛⬛⬜\n⬜⬜⬜⬛⬜\n⬜⬜⬜👇⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜\n⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬜\n⬜⬜⬜👇⬜\n⬜⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜",
"⬛⬛⬛⬜⬜\n⬜⬜👇⬜⬜\n⬜⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜",
"⬛⬛⬜⬜⬜\n⬜👇⬜⬜⬜\n⬜[🎁](https://github.com/sahyam2019/OpenUserBot)⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜",
"⬛⬜⬜⬜⬜\n👇⬜⬜⬜⬜\n[🎁](https://github.com/sahyam2019/OpenUserBot)⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜",
"⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜",
"⬜⬜⬜⬜\n⬜⬜⬜⬜\n⬜⬜⬜⬜\n⬜⬜⬜⬜",
"⬜⬜⬜\n⬜⬜⬜\n⬜⬜⬜",
"⬜⬜\n⬜⬜",
"Click your Gift [🎁](http://giphygifs.s3.amazonaws.com/media/5Y2bU7FqLOuzK/giphy.mp4)"]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 17])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.3
animation_ttl = range(0, 12)
input_str = event.pattern_match.group(1)
if input_str == "police":
await event.edit(input_str)
animation_chars = [
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴",
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴",
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴",
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴",
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴\n🔵🔵🔵⬜⬜⬜🔴🔴🔴",
"🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵\n🔴🔴🔴⬜⬜⬜🔵🔵🔵",
"OUB **Police is Chasing you now**"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 12])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_ttl = range(0, 103)
input_str = event.pattern_match.group(1)
if input_str == "kill":
await event.edit(input_str)
animation_chars = [
"Fiiiiire",
"( ・ิω・ิ)︻デ═一-->",
"---->____________",
"------>__________",
"-------->_________",
"---------->_______",
"------------>_____",
"-------------->____",
"------------------>",
"------>;(^。^)ノ",
"( ̄ー ̄) DED",
"**Target killed successfully (°̥̥̥̥̥̥̥̥•̀.̫•́°̥̥̥̥̥̥̥)**",
]
for i in animation_ttl:
await event.edit(animation_chars[i % 103])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.5
animation_ttl = range(0, 11)
input_str = event.pattern_match.group(1)
if input_str == "Macos":
await event.edit(input_str)
animation_chars = [
"`Connecting To Hackintosh...`",
"`Initiating Hackintosh Login.`",
"`Loading Hackintosh... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Hackintosh... 84%\n█████████████████████▒▒▒▒ `",
"`Loading Hackintosh... 100%\n█████████████████████████ `",
"`Welcome...\n\nStock OS: Symbian OS\nCurrent OS: Hackintosh`\n\n**My PC Specs:**\n\n **CPU:** __2.9GHz Intel Core i9-8950HK (hexa-core, 12MB cache, up to 4.8GHz)__\n\n**Graphics:** __Nvidia GeForce GTX 1080 OC (8GB GDDR5X)__\n\n**RAM:** __32GB DDR4 (2,666MHz)__\n\n**Screen:** __17.3-inch, QHD (2,560 x 1,440) 120Hz G-Sync__\n\n**Storage:** __512GB PCIe SSD, 1TB HDD (7,200 rpm)__\n\n**Ports:** __2 x USB 3.0, 1 x USB-C 3.0, 1 x USB-C (Thunderbolt 3), HDMI, mini DisplayPort, Ethernet, headphone jack, microphone jack__\n\n**Connectivity:** __Killer 1550 802.11ac Wi-Fi, Bluetooth 5.0__\n\n**Camera:** __Alienware FHD camera, Tobii IR Eye-tracking with Windows Hello__\n\n**Size:** __16.7 x 13.1 x 1.18 inches (42.4 x 33.2 x 2.99cm; W x D x H)__"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 11])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.5
animation_ttl = range(0, 11)
input_str = event.pattern_match.group(1)
if input_str == "Windows":
await event.edit(input_str)
animation_chars = [
"`Connecting To Windows 10...`",
"`Initiating Windows 10 Login.`",
"`Loading Windows 10... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Windows 10... 84%\n█████████████████████▒▒▒▒ `",
"`Loading Windows 10... 100%\n█████████████████████████ `",
"`Welcome...\n\nStock OS: Symbian OS\nCurrent OS: Windows 10`\n\n**My PC Specs:**\n\n **CPU:** __2.9GHz Intel Core i9-8950HK (hexa-core, 12MB cache, up to 4.8GHz)__\n\n**Graphics:** __Nvidia GeForce GTX 1080 OC (8GB GDDR5X)__\n\n**RAM:** __32GB DDR4 (2,666MHz)__\n\n**Screen:** __17.3-inch, QHD (2,560 x 1,440) 120Hz G-Sync__\n\n**Storage:** __512GB PCIe SSD, 1TB HDD (7,200 rpm)__\n\n**Ports:** __2 x USB 3.0, 1 x USB-C 3.0, 1 x USB-C (Thunderbolt 3), HDMI, mini DisplayPort, Ethernet, headphone jack, microphone jack__\n\n**Connectivity:** __Killer 1550 802.11ac Wi-Fi, Bluetooth 5.0__\n\n**Camera:** __Alienware FHD camera, Tobii IR Eye-tracking with Windows Hello__\n\n**Size:** __16.7 x 13.1 x 1.18 inches (42.4 x 33.2 x 2.99cm; W x D x H)__"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 11])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.5
animation_ttl = range(0, 11)
input_str = event.pattern_match.group(1)
if input_str == "Linux":
await event.edit(input_str)
animation_chars = [
"`Connecting To Linux...`",
"`Initiating Linux Login.`",
"`Loading Linux... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Linux... 84%\n█████████████████████▒▒▒▒ `",
"`Loading Linux... 100%\n█████████████████████████ `",
"`Welcome...\n\nStock OS: Symbian OS\nCurrent OS: Linux`\n\n**My PC Specs:**\n\n **CPU:** __2.9GHz Intel Core i9-8950HK (hexa-core, 12MB cache, up to 4.8GHz)__\n\n**Graphics:** __Nvidia GeForce GTX 1080 OC (8GB GDDR5X)__\n\n**RAM:** __32GB DDR4 (2,666MHz)__\n\n**Screen:** __17.3-inch, QHD (2,560 x 1,440) 120Hz G-Sync__\n\n**Storage:** __512GB PCIe SSD, 1TB HDD (7,200 rpm)__\n\n**Ports:** __2 x USB 3.0, 1 x USB-C 3.0, 1 x USB-C (Thunderbolt 3), HDMI, mini DisplayPort, Ethernet, headphone jack, microphone jack__\n\n**Connectivity:** __Killer 1550 802.11ac Wi-Fi, Bluetooth 5.0__\n\n**Camera:** __Alienware FHD camera, Tobii IR Eye-tracking with Windows Hello__\n\n**Size:** __16.7 x 13.1 x 1.18 inches (42.4 x 33.2 x 2.99cm; W x D x H)__"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 11])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.5
animation_ttl = range(0, 11)
input_str = event.pattern_match.group(1)
if input_str == "Stock":
await event.edit(input_str)
animation_chars = [
"`Connecting To Symbian OS...`",
"`Initiating Symbian OS Login.`",
"`Loading Symbian OS... 0%\n█████████████████████████ `",
"`Loading Symbian OS... 4%\n█████████████████████▒▒▒▒ `",
"`Loading Symbian OS... 8%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Symbian OS... 20%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Symbian OS... 36%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Symbian OS... 52%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Symbian OS... 84%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Loading Symbian OS... 100%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Welcome...\n\nStock OS: Symbian OS\nCurrent OS: Symbian OS`\n\n**My PC Specs:**\n\n **CPU:** __2.9GHz Intel Core i9-8950HK (hexa-core, 12MB cache, up to 4.8GHz)__\n\n**Graphics:** __Nvidia GeForce GTX 1080 OC (8GB GDDR5X)__\n\n**RAM:** __32GB DDR4 (2,666MHz)__\n\n**Screen:** __17.3-inch, QHD (2,560 x 1,440) 120Hz G-Sync__\n\n**Storage:** __512GB PCIe SSD, 1TB HDD (7,200 rpm)__\n\n**Ports:** __2 x USB 3.0, 1 x USB-C 3.0, 1 x USB-C (Thunderbolt 3), HDMI, mini DisplayPort, Ethernet, headphone jack, microphone jack__\n\n**Connectivity:** __Killer 1550 802.11ac Wi-Fi, Bluetooth 5.0__\n\n**Camera:** __Alienware FHD camera, Tobii IR Eye-tracking with Windows Hello__\n\n**Size:** __16.7 x 13.1 x 1.18 inches (42.4 x 33.2 x 2.99cm; W x D x H)__"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 11])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.1
animation_ttl = range(0, 7)
input_str = event.pattern_match.group(1)
if input_str == "os":
await event.edit(input_str)
animation_chars = [
"`Scanning OS...`",
"`Scanning OS......`",
"__Current Loaded OS: CrDroid OS__\n\n**To Boot Other OS, Use The Following Trigger:**\n☑️ `.Macos`\n☑️ `.Windows`\n☑️ `.Linux`\n☑️ `.Stock`",
"__Current Loaded OS: CrDroid OS__\n\n**To Boot Other OS, Use The Following Trigger:**\n✅ `.Macos`\n☑️ `.Windows`\n☑️ `.Linux`\n☑️ `.Stock`",
"__Current Loaded OS: CrDroid OS__\n\n**To Boot Other OS, Use The Following Trigger:**\n✅ `.Macos`\n✅ `.Windows`\n☑️ `.Linux`\n☑️ `.Stock`",
"__Current Loaded OS: CrDroid OS__\n\n**To Boot Other OS, Use The Following Trigger:**\n✅ `.Macos`\n✅ `.Windows`\n✅ `.Linux`\n☑️ `.Stock`",
"__Current Loaded OS: CrDroid OS__\n\n**To Boot Other OS, Use The Following Trigger:**\n✅ `.Macos`\n✅ `.Windows`\n✅ `.Linux`\n✅ `.Stock`\n\n by @heyworld and others"]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 7])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 1
animation_ttl = range(0, 24)
input_str = event.pattern_match.group(1)
if input_str == "isro":
await event.edit(input_str)
animation_chars = [
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n🚀⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛🚀⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛🚀⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🚀⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛🚀⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛🚀\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"🛸⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n🛸⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛🛸⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛🛸⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛🛸⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛⬛",
"⬛⬛⬛🛸⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸⬛🚶♂️\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🛸🚶♂️⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n👽⬛⬛🛸🚶♂️⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛👽⬛🛸🚶♂️⬛\n⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛\n⬛⬛👽🛸🚶♂️⬛\n⬜⬜⬜⬜⬜⬜",
"__Signal Lost....__"
]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 24])
@register(outgoing=True, pattern="^.gangstar(?: |$)(.*)")
async def _(event):
if not event.text[0].isalpha() and event.text[0] not in (
"/", "#", "@", "!"):
await event.edit("EVERyBOdy")
await asyncio.sleep(0.3)
await event.edit("wAs")
await asyncio.sleep(0.2)
await event.edit("GanGeSTar")
await asyncio.sleep(0.5)
await event.edit("UNtIL ")
await asyncio.sleep(0.2)
await event.edit("I")
await asyncio.sleep(0.3)
await event.edit("ArRivEd")
await asyncio.sleep(0.3)
await event.edit("😎😎😎")
await asyncio.sleep(0.3)
await event.edit("EVERyBOdy wAs GanGeSTar UNtIL I ArRivEd 😎😎😎")
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 2
animation_ttl = range(0, 11)
input_str = event.pattern_match.group(1)
if input_str == "hack":
await event.edit(input_str)
animation_chars = [
"`Connecting To Hacked Private Server...`",
"`Target Selected.`",
"`Hacking... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
"`Hacking... 84%\n█████████████████████▒▒▒▒ `",
"`Hacking... 100%\n█████████HACKED███████████ `",
f"`Targeted Account Hacked...\n\nPay 9999$ To @{DEFAULTUSER} or gib a pizza party 🍕 To Remove This Hack`"]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 11])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 0.3
animation_ttl = range(0, 15)
input_str = event.pattern_match.group(1)
if input_str == "hypno":
await event.edit(input_str)
animation_chars = [
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬛⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬛⬛⬛⬜⬜\n⬜⬜⬛⬜⬛⬜⬜\n⬜⬜⬛⬛⬛⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬛⬛⬛⬛",
"⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛",
"⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛⬜",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬛⬛⬛⬛",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬜⬛⬜⬛⬜⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬛⬛⬛⬛",
"⬜⬜⬜⬜⬜⬜⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬜⬛⬜⬛⬜\n⬜⬛⬜⬜⬜⬛⬜\n⬜⬛⬛⬛⬛⬛⬜\n⬜⬜⬜⬜⬜⬜⬜",
"⬛⬛⬛⬛⬛\n⬛⬜⬜⬜⬛\n⬛⬜⬛⬜⬛\n⬛⬜⬜⬜⬛\n⬛⬛⬛⬛⬛",
"⬜⬜⬜\n⬜⬛⬜\n⬜⬜⬜",
"[👉🔴👈](t.me/heyworld)"]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 15])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
if event.fwd_from:
return
animation_interval = 2
animation_ttl = range(0, 15)
input_str = event.pattern_match.group(1)
if input_str == "whatsapp":
await event.edit(input_str)
animation_chars = [
"Looking for WhatsApp databases in targeted person...",
" User online: True\nTelegram access: True\nRead Storage: True ",
"Hacking... 0%\n[░░░░░░░░░░░░░░░░░░░░]\n`Looking for WhatsApp...`\nETA: 0m, 20s",
"Hacking... 11.07%\n[██░░░░░░░░░░░░░░░░░░]\n`Looking for WhatsApp...`\nETA: 0m, 18s",
"Hacking... 20.63%\n[███░░░░░░░░░░░░░░░░░]\n`Found folder C:/WhatsApp`\nETA: 0m, 16s",
"Hacking... 34.42%\n[█████░░░░░░░░░░░░░░░]\n`Found folder C:/WhatsApp`\nETA: 0m, 14s",
"Hacking... 42.17%\n[███████░░░░░░░░░░░░░]\n`Searching for databases`\nETA: 0m, 12s",
"Hacking... 55.30%\n[█████████░░░░░░░░░░░]\n`Found msgstore.db.crypt12`\nETA: 0m, 10s",
"Hacking... 64.86%\n[███████████░░░░░░░░░]\n`Found msgstore.db.crypt12`\nETA: 0m, 08s",
"Hacking... 74.02%\n[█████████████░░░░░░░]\n`Trying to Decrypt...`\nETA: 0m, 06s",
"Hacking... 86.21%\n[███████████████░░░░░]\n`Trying to Decrypt...`\nETA: 0m, 04s",
"Hacking... 93.50%\n[█████████████████░░░]\n`Decryption successful!`\nETA: 0m, 02s",
"Hacking... 100%\n[████████████████████]\n`Scanning file...`\nETA: 0m, 00s",
"Hacking complete!\nUploading file...",
"Targeted Account Hacked...!\n\n ✅ File has been successfully uploaded to my server.\nWhatsApp Database:\n`./DOWNLOADS/msgstore.db.crypt12`"]
for i in animation_ttl:
await asyncio.sleep(animation_interval)
await event.edit(animation_chars[i % 15])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.solar` — animate the sun and moon orbiting Earth in a 5x5 grid.

    The frame list in the original source spelled out the same 8-frame orbit
    cycle 18 times over; one cycle plus modular indexing reproduces the exact
    same sequence of message edits.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    # NOTE(review): this bound makes the animation effectively endless
    # (~1,700 years at 0.1 s/frame); preserved as-is from the original.
    animation_ttl = range(0, 549755813888)
    input_str = event.pattern_match.group(1)
    if input_str == "solar":
        await event.edit(input_str)
        animation_chars = [
            "`◼️◼️◼️◼️◼️\n◼️◼️◼️◼️☀\n◼️◼️🌎◼️◼️\n🌕◼️◼️◼️◼️\n◼️◼️◼️◼️◼️`",
            "`◼️◼️◼️◼️◼️\n🌕◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️☀\n◼️◼️◼️◼️◼️`",
            "`◼️🌕◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️☀◼️`",
            "`◼️◼️◼️🌕◼️\n◼️◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️◼️\n◼️☀◼️◼️◼️`",
            "`◼️◼️◼️◼️◼️\n◼️◼️◼️◼️🌕\n◼️◼️🌎◼️◼️\n☀◼️◼️◼️◼️\n◼️◼️◼️◼️◼️`",
            "`◼️◼️◼️◼️◼️\n☀◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️🌕\n◼️◼️◼️◼️◼️`",
            "`◼️☀◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️🌕◼️`",
            "`◼️◼️◼️☀◼️\n◼️◼️◼️◼️◼️\n◼️◼️🌎◼️◼️\n◼️◼️◼️◼️◼️\n◼️🌕◼️◼️◼️`",
        ]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            # BUG FIX: the modulus was 549755813888 (i.e. identity on i), so the
            # index ran past the end of the list and raised IndexError once i
            # reached its length. Wrap on the actual frame count instead.
            await event.edit(animation_chars[i % len(animation_chars)])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.quickheal` — fake antivirus scan that always ends "No Virus Found"."""
    if event.fwd_from:
        return
    delay = 5
    command = event.pattern_match.group(1)
    if command != "quickheal":
        return
    await event.edit(command)
    frames = [
        "`Downloading File..`",
        "`File Downloaded....`",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 84%\n█████████████████████▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 100%\n█████████████████████████ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nTask: 01 of 01 Files Scanned...\n\nReault: No Virus Found...`",
    ]
    # Same sequence as the original range(11)/mod-11 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.sqh` — the `.quickheal` scan animation at fast-forward speed."""
    if event.fwd_from:
        return
    delay = 0.1
    command = event.pattern_match.group(1)
    if command != "sqh":
        return
    await event.edit(command)
    frames = [
        "`Downloading File..`",
        "`File Downloaded....`",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 84%\n█████████████████████▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 100%\n█████████████████████████ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nTask: 01 of 01 Files Scanned...\n\nReault: No Virus Found...`",
    ]
    # Same sequence as the original range(11)/mod-11 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.vquickheal` — fake antivirus scan that ends with a "Virus Found" scare."""
    if event.fwd_from:
        return
    delay = 5
    command = event.pattern_match.group(1)
    if command != "vquickheal":
        return
    await event.edit(command)
    frames = [
        "`Downloading File..`",
        "`File Downloaded....`",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 0%\n▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 4%\n█▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 8%\n██▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 20%\n█████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 36%\n█████████▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 52%\n█████████████▒▒▒▒▒▒▒▒▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 84%\n█████████████████████▒▒▒▒ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nFile Scanned... 100%\n█████████████████████████ `",
        "`Quick Heal Total Security Checkup\n\n\nSubscription: Pru User\nValid Until: 31/12/2099\n\nTask: 01 of 01 Files Scanned...\n\nReault:⚠️Virus Found⚠️\nMore Info: Torzan, Spyware, Adware`",
    ]
    # Same sequence as the original range(11)/mod-11 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@register(outgoing=True, pattern="^.plane(?: |$)(.*)")
async def _(event):
    """`.plane` — slide a plane across a 14-character track, then delete."""
    if event.fwd_from:
        return
    track = 13  # dashes surrounding the plane; total frame width is 14
    # Emit the identical 14 frames the original spelled out by hand:
    # the plane moves one position right per edit, with no delay between edits.
    for pos in range(track + 1):
        await event.edit("-" * pos + "✈" + "-" * (track - pos))
    await asyncio.sleep(3)
    await event.delete()
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.jio` — fake "network booster" signal-bar animation."""
    if event.fwd_from:
        return
    delay = 1
    command = event.pattern_match.group(1)
    if command != "jio":
        return
    await event.edit(command)
    frames = [
        "`Connecting To Jio Network...`",
        "`█ ▇ ▆ ▅ ▄ ▂ ▁`",
        "`▒ ▇ ▆ ▅ ▄ ▂ ▁`",
        "`▒ ▒ ▆ ▅ ▄ ▂ ▁`",
        "`▒ ▒ ▒ ▅ ▄ ▂ ▁`",
        "`▒ ▒ ▒ ▒ ▄ ▂ ▁`",
        "`▒ ▒ ▒ ▒ ▒ ▂ ▁`",
        "`▒ ▒ ▒ ▒ ▒ ▒ ▁`",
        "`▒ ▒ ▒ ▒ ▒ ▒ ▒`",
        "*Optimising Network...*",
        "`▒ ▒ ▒ ▒ ▒ ▒ ▒`",
        "`▁ ▒ ▒ ▒ ▒ ▒ ▒`",
        "`▁ ▂ ▒ ▒ ▒ ▒ ▒`",
        "`▁ ▂ ▄ ▒ ▒ ▒ ▒`",
        "`▁ ▂ ▄ ▅ ▒ ▒ ▒`",
        "`▁ ▂ ▄ ▅ ▆ ▒ ▒`",
        "`▁ ▂ ▄ ▅ ▆ ▇ ▒`",
        "`▁ ▂ ▄ ▅ ▆ ▇ █`",
        "**Jio Network Boosted....**",
    ]
    # Same sequence as the original range(19)/mod-19 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@register(outgoing=True, pattern="^.dump(?: |$)(.*)")
async def _(message):
    # `.dump [xyz]` — appears to animate a "<(^_^ <)" kaomoji carrying three
    # items, one at a time, toward a trash bin. With no argument (or one that
    # is not exactly 3 characters) it falls back to three food emoji.
    try:
        obj = message.pattern_match.group(1)
        # Exactly three characters are required; anything else trips the
        # fallback via the deliberately-raised IndexError below.
        if len(obj) != 3:
            raise IndexError
        inp = ' '.join(obj)
    except IndexError:
        inp = "🥞 🎂 🍫"
    # u: the three items; t: trash bin; g/o: kaomoji facing right/left;
    # s: braille-blank spacer; n: newline (assigned but not used below).
    u, t, g, o, s, n = inp.split(), '🗑', '<(^_^ <)', '(> ^_^)>', '⠀ ', '\n'
    # h: successive states of the item row — all three items, then two, then one.
    h = [(u[0], u[1], u[2]), (u[0], u[1], ''), (u[0], '', '')]
    # For each state, generate the 7 sub-frames of the kaomoji picking up and
    # carrying one item to the bin, then play the whole thing in reverse state
    # order so items disappear one by one. NOTE(review): the exact frame layout
    # depends on the tuple arithmetic below — verified only by inspection.
    for something in reversed([y for y in ([''.join(x) for x in (
        f + (s, g, s + s * f.count(''), t), f + (g, s * 2 + s * f.count(''), t),
        f[:i] + (o, f[i], s * 2 + s * f.count(''), t), f[:i] + (s + s * f.count(''), o, f[i], s, t),
        f[:i] + (s * 2 + s * f.count(''), o, f[i], t), f[:i] + (s * 3 + s * f.count(''), o, t),
        f[:i] + (s * 3 + s * f.count(''), g, t))] for i, f in enumerate(reversed(h)))]):
        for something_else in something:
            await asyncio.sleep(0.3)
            try:
                await message.edit(something_else)
            # The message may have been deleted mid-animation; stop quietly.
            except MessageIdInvalidError:
                return
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.fadmin` — fake "promote to admin" permission-checklist animation."""
    if event.fwd_from:
        return
    delay = 1
    command = event.pattern_match.group(1)
    if command != "fadmin":
        return
    await event.edit(command)
    frames = [
        "**Promoting User As Admin...**",
        "**Enabling All Permissions To User...**",
        "**(1) Send Messages: ☑️**",
        "**(1) Send Messages: ✅**",
        "**(2) Send Media: ☑️**",
        "**(2) Send Media: ✅**",
        "**(3) Send Stickers & GIFs: ☑️**",
        "**(3) Send Stickers & GIFs: ✅**",
        "**(4) Send Polls: ☑️**",
        "**(4) Send Polls: ✅**",
        "**(5) Embed Links: ☑️**",
        "**(5) Embed Links: ✅**",
        "**(6) Add Users: ☑️**",
        "**(6) Add Users: ✅**",
        "**(7) Pin Messages: ☑️**",
        "**(7) Pin Messages: ✅**",
        "**(8) Change Chat Info: ☑️**",
        "**(8) Change Chat Info: ✅**",
        "**Permission Granted Successfully**",
        "**pRoMooTeD SuCcEsSfUlLy**",
    ]
    # Same sequence as the original range(20)/mod-20 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.fleave` — fake "exporting chat history, then leaving" animation."""
    if event.fwd_from:
        return
    animation_interval = 1
    input_str = event.pattern_match.group(1)
    if input_str == "fleave":
        await event.edit(input_str)
        animation_chars = [
            "⬛⬛⬛\n⬛⬛⬛\n⬛⬛⬛",
            "⬛⬛⬛\n⬛🔄⬛\n⬛⬛⬛",
            "⬛⬆️⬛\n⬛🔄⬛\n⬛⬛⬛",
            "⬛⬆️↗️\n⬛🔄⬛\n⬛⬛⬛",
            "⬛⬆️↗️\n⬛🔄➡️\n⬛⬛⬛",
            "⬛⬆️↗️\n⬛🔄➡️\n⬛⬛↘️",
            "⬛⬆️↗️\n⬛🔄➡️\n⬛⬇️↘️",
            "⬛⬆️↗️\n⬛🔄➡️\n↙️⬇️↘️",
            "⬛⬆️↗️\n⬅️🔄➡️\n↙️⬇️↘️",
            "↖️⬆️↗️\n⬅️🔄➡️\n↙️⬇️↘️",
            "**Chat Message Exported To** `./Inpu/`",
            "**Chat Message Exported To** `./Inpu/homework/`",
            "**Chat Message Exported To** `./Inpu/homework/groupchat.txt`",
            "__Legend is leaving this chat.....! Bye geys..__",
            "__Legend is leaving this chat.....! Bye geys..__",
        ]
        # BUG FIX: the original looped over range(0, 17) and indexed with
        # `i % 17` into this 15-item list, raising IndexError on the 16th
        # iteration. Play each frame exactly once instead.
        for frame in animation_chars:
            await asyncio.sleep(animation_interval)
            await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.snake` — a white square "snake" crawls around a 5x5 grid and fades out."""
    if event.fwd_from:
        return
    delay = 0.3
    command = event.pattern_match.group(1)
    if command != "snake":
        return
    await event.edit(command)
    frames = [
        "◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◼️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◼️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◻️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◼️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◼️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◼️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◻️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◻️◻️\n◻️◼️◼️◻️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◻️◻️\n◻️◼️◻️◻️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◼️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️\n◻️◻️◻️◻️◻️",
        "◻️◻️◻️◻️◻️\n◻️◼️◻️◼️◻️\n◻️◻️◻️◻️◻️\n◻️◼️◼️◼️◻️\n◻️◻️◻️◻️◻️",
    ]
    # Same sequence as the original range(27)/mod-27 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.think` — rapidly scramble the word THINKING, ending with an emoji."""
    if event.fwd_from:
        return
    animation_interval = 0.01
    animation_ttl = range(0, 288)
    input_str = event.pattern_match.group(1)
    if input_str == "think":
        await event.edit(input_str)
        # The original list spelled this 7-frame scramble cycle out five times;
        # building it programmatically yields the identical 36-item list.
        scramble = [
            "THINKING",
            "THI&K#N₹",
            "T+IN@I?G",
            "¿H$NK∆NG",
            "¶H×NK&N*",
            "NGITHKIN",
            "T+I#K@₹G",
        ]
        animation_chars = scramble * 5 + ["THINKING... 🤔"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            # BUG FIX: the index was `i % 72` against a 36-item list, which
            # raised IndexError as soon as i reached 36. Wrap on the real length.
            await event.edit(animation_chars[i % len(animation_chars)])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.wtf` — build up "What The F Brah" word by word, then link an image."""
    if event.fwd_from:
        return
    delay = 0.3
    command = event.pattern_match.group(1)
    if command != "wtf":
        return
    await event.edit(command)
    frames = [
        "What",
        "What The",
        "What The F",
        "What The F Brah",
        "What The F Brah\nhttps://telegra.ph//file/f3b760e4a99340d331f9b.jpg",
    ]
    # Same sequence as the original range(5)/mod-5 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.music` — fake music-player UI with an advancing 10-second progress bar."""
    if event.fwd_from:
        return
    delay = 1.5
    command = event.pattern_match.group(1)
    if command != "music":
        return
    await event.edit(command)
    frames = [
        "⬤⬤⬤ 81% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:00** ▱▱▱▱▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `▶️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤⬤ 81% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:01** ▰▱▱▱▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤⬤ 81% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:02** ▰▰▱▱▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤⬤ 81% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:03** ▰▰▰▱▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:04** ▰▰▰▰▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:05** ▰▰▰▰▱▱▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:06** ▰▰▰▰▰▰▱▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:07** ▰▰▰▰▰▰▰▱▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:08** ▰▰▰▰▰▰▰▰▱▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:09** ▰▰▰▰▰▰▰▰▰▱ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏸️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
        "⬤⬤◯ 80% ⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀⠀`✖️`\n\n⠀⠀⠀⠀⠀[cee jay Music Player](tg://user?id=689811472)\n\n⠀⠀⠀⠀**Now Playing:Kamasutra BGM**\n\n**00:10** ▰▰▰▰▰▰▰▰▰▰ **00:10**\n\n⠀⠀⠀⠀⠀`🔂` `⏮️` `⏪️` `⏺️` `⏩️` `⏭️`\n\n**⠀Next Song:** __I Am Sexy And I Know It.__\n\n⠀⠀⠀⠀**⠀Device: Nokia 1100**",
    ]
    # Same sequence as the original range(11)/mod-11 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """`.call` — act out a fake phone call to Telegram HQ / Pavel Durov."""
    if event.fwd_from:
        return
    delay = 3
    command = event.pattern_match.group(1)
    if command != "call":
        return
    await event.edit(command)
    frames = [
        "`Connecting To Telegram Headquarters...`",
        "`Call Connected.`",
        "`Telegram: Hello This is Telegram HQ. Who is this?`",
        f"`Me: Yo this is`@{DEFAULTUSER},`Please Connect me to my lil bro,Pavel Durov`",
        "`User Authorised.`",
        "`Calling Pavel Durov` `At +916969696969`",
        "`Private Call Connected...`",
        "`Me: Hello Sir, Please Ban This Telegram Account.`",
        "`Pavel: May I Know Who Is This?`",
        f"`Me: Yo Brah, I Am` @{DEFAULTUSER} ",
        "`Pavel: OMG!!! Long time no see, Wassup Brother...\nI'll Make Sure That Guy Account Will Get Blocked Within 24Hrs.`",
        "`Me: Thanks, See You Later Brah.`",
        "`Pavel: Please Don't Thank Brah, Telegram Is Our's. Just Gimme A Call When You Become Free.`",
        "`Me: Is There Any Issue/Emergency???`",
        "`Pavel: Yes Sur, There Is A Bug In Telegram v69.6.9.\nI Am Not Able To Fix It. If Possible, Please Help Fix The Bug.`",
        "`Me: Send Me The App On My Telegram Account, I Will Fix The Bug & Send You.`",
        "`Pavel: Sure Sur \nTC Bye Bye :)`",
        "`Private Call Disconnected.`",
    ]
    # Same sequence as the original range(18)/mod-18 loop: each frame once.
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.belo", outgoing=True))
async def _(event):
    """On ``.belo``, replace the message with one random "shower thought".

    Fix: the original ``if x == n`` ladder drew ``random.randrange(1, 96)``
    (values 1..95) while branches existed up to ``x == 97``, so the last two
    quotes were unreachable dead code.  A flat tuple plus ``random.choice``
    makes every quote selectable and removes the 97-branch ladder.
    """
    if event.fwd_from:
        return
    await event.edit("Typing...")
    await asyncio.sleep(2)
    # All quotes are equally likely; each was a separate if-branch before.
    quotes = (
        "`\"Underwater bubbles and raindrops are total opposites of each other.\"`",
        "`\"If you buy an eraser you are literally paying for your mistakes.\"`",
        "`\"The Person you care for most has the potential to destroy you the most.\"`",
        "`\"If humans colonize the moon, it will probably attract retirement homes as the weaker gravity will allow the elderly to feel stronger.\"`",
        "`\"Any video with “wait for it” in the title is simply too long.\"`",
        "`\"Your age in years is how many times you’ve circled the Sun, but your age in months is how many times the Moon has circled you.\"`",
        "`\"Biting your tongue while eating is a perfect example of how you can still screw up, even with decades of experience.\"`",
        "`\"Saying that your home is powered by a wireless Nuclear fusion reactor that is 93 Million miles away sounds way cooler than just saying you have solar panels on your roof.\"`",
        "`\"The most crushing feeling is when someone smiles at you on the street and you don’t react fast enough to smile back.\"`",
        "`\"Teeth constantly require maintenance to prevent their decay when alive, and yet they manage to survive for thousands of years buried as fossils.\"`",
        "`\"A folder is for things that you don't want to fold.\"`",
        "`\"Waking up in the morning sometimes feels like resuming a shitty movie you decided to quit watching.\"`",
        "`\"If everything goes smoothly, you probably won't remember today.\"`",
        "`\"When you meet new people in real life, you unlock more characters for your dream world.\"`",
        "`\"Maybe if they renamed sunscreen to “anti-cancer cream” more people would wear it.\"`",
        "`\"200 years ago, people would never have guessed that humans in the future would communicate by silently tapping on glass.\"`",
        "`\"Parents worry about what their sons download and worry about what their daughters upload.\"`",
        "`\"It's crazy how you can be the same age as someone, but at a completely different stage in your life.\"`",
        "`\"When you think you wanna die, you really don't wanna die, you just don't wanna live like this.\"`",
        "`\"Technically, no one has ever been in an empty room.\"`",
        "`\"An onion is the bass player of food. You would probably not enjoy it solo, but you’d miss it if it wasn’t there.\"`",
        "`\"We run everywhere in videogames because we're too lazy to walk, but In real life we walk everywhere because we're too lazy to run.\"`",
        "`\"Every single decision you ever made has brought you to read this sentence.\"`",
        "`\"The word 'quiet' is often said very loud.\"`",
        "`\"Everybody wants you to work hard, but nobody wants to hear about how hard you work.\"`",
        "`\"We brush our teeth with hair on a stick and brush our hair with teeth on a stick.\"`",
        "`\"No one remembers your awkward moments but they’re too busy remembering their own.\"`",
        "`\"Dumb people try to say simple ideas as complex as possible while smart people try to say complex ideas as simple as possible.\"`",
        "`\"Some people think they're better than you because they grew up richer. Some people think they're better than you because they grew up poorer.\"`",
        "`\"The biggest irony is that computers & mobiles were invented to save out time!\"`",
        "`\"After honey was first discovered, there was likely a period where people were taste testing any available slime from insects.\"`",
        "`\"You know you’re getting old when your parents start disappointing you, instead of you disappointing them.\"`",
        "`\"Humans are designed to learn through experience yet the education system has made it so we get no experience.\"`",
        "`\"By focusing on blinking, you blink slower... Same for breathing.\"`",
        "`\"Drivers in a hurry to beat traffic usually cause the accidents which create the traffic they were trying to avoid.\"`",
        "`\"Characters that get married in fiction were literally made for each other.\"`",
        "`\"Babies are a clean hard drive that can be programmed with any language.\"`",
        "`\"There could be a miracle drug that cures every disease to man, that we'll never know about because it doesn't work on rats.\"`",
        "`\"Rhinos evolved to grow a horn for protection, but it's what's making them go extinct.\"`",
        "`\"Maybe we don't find time travelers because we all die in 25-50 years.\"`",
        "`\"Sleep is the trial version of death, It even comes with ads based on your activity.\"`",
        "`\"The most unrealistic thing about Spy movies is how clean the air ventilation system is!\"`",
        "`\"In games we play through easy modes to unlock hard modes. In life we play through hard modes to unlock easy modes.\"`",
        "`\"Silent people seem smarter than loud people, because they keep their stupid thoughts to themselves.\"`",
        "`\"If Greenland actually turns green, we're all screwed.\"`",
        "`\"If someone says clever things in your dream, it actually shows your own cleverness.\"`",
        "`\"Famous movie quotes are credited to the actor and not the actual writer who wrote them.\"`",
        "`\"No one actually teaches you how to ride a bicycle. They just hype you up until you work it out.\"`",
        "`\"Ask yourself why the the brain ignores the second the.\"`",
        "`\"You’ve probably forgot about 80% of your entire life and most of the memories you do remember are not very accurate to what actually happened.\"`",
        "`\"It will be a lot harder for kids to win against their parents in video games in the future.\"`",
        "`\"Everyone has flaws, if you don't recognize yours, you have a new one.\"`",
        "`\"Raising a child is training your replacement.\"`",
        "`\"'O'pen starts with a Closed circle, and 'C'lose starts with an open circle.\"`",
        "`\"There's always someone who hated you for no reason, and still does.\"`",
        "`\"After popcorn was discovered, there must have been a lot of random seeds that were roasted to see if it would have the same effect.\"`",
        "`\"The more important a good night's sleep is, the harder it is to fall asleep.\"`",
        "`\"Blessed are those that can properly describe the type of haircut they want to a new stylist.\"`",
        "`\"Too many people spend money they haven't earned, to buy things they don't want, to impress people they don't like!\"`",
        "`\"Theme park employees must be good at telling the difference between screams of horror and excitement.\"`",
        "`\"6 to 6:30 feels more half-an-hour than 5:50 to 6:20\"`",
        "`\"Getting your password right on the last login attempt before lockout is the closest thing to disarming a bomb at the last minute that most of us will experience.\"`",
        "`\"Listening to podcasts before bed is the adult version of story-time.\"`",
        "`\"If all criminals stopped robbing then the security industry would fall in which they could then easily go back to robbing.\"`",
        "`\"A ton of whales is really only like half a whale.\"`",
        "`\"When you get old, the old you is technically the new you, and your young self is the old you.\"`",
        "`\"You probably won't find many negative reviews of parachutes on the Internet.\"`",
        "`\"We show the most love and admiration for people when they're no longer around to appreciate it.\"`",
        "`\"We've practiced sleeping thousands of times, yet can't do it very well or be consistent.\"`",
        "`\"Humans are more enthusiastic about moving to another planet with hostile environment than preserving earth - the planet they are perfectly shaped for.\"`",
        "`\"The happiest stage of most people's lives is when their brains aren't fully developed yet.\"`",
        "`\"The most effective alarm clock is a full bladder.\"`",
        "`\"You probably just synchronized blinks with millions of people.\"`",
        "`\"Since we test drugs on animals first, rat medicine must be years ahead of human medicine.\"`",
        "`\"Night before a day off is more satisfying than the actual day off.\"`",
        "`\"We put paper in a folder to keep it from folding.\"`",
        "`\"Somewhere, two best friends are meeting for the first time.\"`",
        "`\"Our brain simultaneously hates us, loves us, doesn't care about us, and micromanages our every move.\"`",
        "`\"Being a male is a matter of birth. Being a man is a matter of age. But being a gentleman is a matter of choice.\"`",
        "`\"Soon the parents will be hiding their social account from their kids rather than kids hiding their accounts from the parents.\"`",
        "`\"Wikipedia is what the internet was meant to be.\"`",
        "`\"A theme park is the only place that you can hear screams in the distance and not be concerned.\"`",
        "`\"A wireless phone charger offers less freedom of movement than a wired one.\"`",
        "`\"If you repeatedly criticize someone for liking something you don't, they won't stop liking it. They'll stop liking you.\"`",
        "`\"Somewhere there is a grandmother, whose grandson really is the most handsome boy in the world.\"`",
        "`\"If someday human teleportation becomes real, people will still be late for work.\"`",
        "`\"The first humans who ate crabs must have been really hungry to try and eat an armored sea spider\"`",
        "`\"Doing something alone is kind of sad, but doing it solo is cool af.\"`",
        "`\"Your brain suddenly becomes perfect at proofreading after you post something.\"`",
        "`\"There's always that one song in your playlist that you always skip but never remove.\"`",
        "`\"Kids next century will probably hate us for taking all the good usernames.\"`",
        "`\"Bubbles are to fish what rain is to humans.\"`",
        "`\"The more people you meet, the more you realise and appreciate how well your parents raised you.\"`",
        "`\"A comma is a short pause, a coma is a long pause.\"`",
        "`\"Someday you will either not wake up or not go to sleep.\"`",
        "`\"Bermuda Triangle might be the exit portal of this simulation.\"`",
        "`\"If we put solar panels above parking lots, then our cars wouldn't get hot and we would have a lot of clean energy.\"`",
    )
    await event.edit(random.choice(quotes))
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.human``, animate a 7x8 emoji grid (car drives by, stick figure
    dances) by editing the message through 16 frames, 0.5 s apart."""
    if event.fwd_from:
        return
    command = event.pattern_match.group(1)
    if command != "human":
        return
    await event.edit(command)
    delay = 0.5  # seconds between successive edits
    frames = [
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛🚗\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛🚗⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛🚗⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛🚗⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛🚗⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛🚗⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n🚗⬛⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬜⬜⬜⬜⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬜⬜⬜😊⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛😊⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬜⬛⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛😊⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬜⬛⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬛⬜⬛⬛⬜⬛\n⬛⬛⬜⬛⬛⬛⬛\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛😊⬛⬛⬛\n⬛⬜⬜⬜⬜⬜⬛\n⬛⬛⬛⬜⬛⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬛⬛⬛⬛⬛⬛\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬜⬛😊⬛⬜⬛\n⬛⬛⬜⬜⬜⬛⬛\n⬛⬛⬛⬜⬛⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬜⬛⬛⬛⬜⬛\n⬛⬛⬛⬛⬛⬛⬛\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛😊⬛⬛⬛\n⬛⬛⬜⬜⬜⬛⬛\n⬛⬜⬛⬜⬛⬜⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n⬛⬛⬜⬛⬜⬛⬛\n🔲🔲🔲🔲🔲🔲🔲",
        "⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬛⬛⬛⬛⬛⬛⬛\n⬜⬜⬜😊⬜⬜⬜\n⬜⬜⬜⬜⬜⬜⬜\n🔲🔲🔲🔲🔲🔲🔲",
    ]
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.qs", outgoing=True))
async def _(event):
    """On ``.qs``, replace the message with one random icebreaker question.

    Fix: the original ``if x == n`` ladder drew ``random.randrange(1, 60)``
    (values 1..59) while 60 questions were listed, so the last question was
    unreachable.  A flat tuple plus ``random.choice`` makes every question
    selectable and removes the 60-branch ladder.
    """
    if event.fwd_from:
        return
    await event.edit("selecting question...")
    await asyncio.sleep(2)
    questions = (
        "`\"Arrange them in descending order of importance – MONEY, LOVE, FAMILY, CAREER, FRIENDS.\"`",
        "`\"If you had to change your name, what would your new name be, and why would you choose that name?\"`",
        "`\"What’s the most interesting thing you’ve read or seen this week?\"`",
        "`\"What scene from a TV show will you never forget?\"`",
        "`\"If you could become a master in one skill, what skill would you choose?\"`",
        "`\"What three words can describe you?\"`",
        "`\"If you had to delete one app from your phone, what would it be?\"`",
        "`\"Would you go out with me if I was the last person on earth?\"`",
        "`\"If you switched genders for the day, what would you do?\"`",
        "`\"If you could eat lunch with someone here. Who would you choose?\"`",
        "`\"If you were told you only had one week left to live, what would you do?\"`",
        "`\"What's number one item you would save from your burning house?\"`",
        "`\"If you could only text one person for the rest of your life, but you could never talk to that person face to face, who would that be?\"`",
        "`\"How many kids do you want to have in the future?\"`",
        "`\"Who in this group would be the worst person to date? Why?\"`",
        "`\"What does your dream boy or girl look like?\"`",
        "`\"What would be in your web history that you’d be embarrassed if someone saw?\"`",
        "`\"Do you sing in the shower?\"`",
        "`\"What’s the right age to get married?\"`",
        "`\"What are your top 5 rules for life?\"`",
        "`\"If given an option, would you choose a holiday at the beach or in the mountains?\"`",
        "`\"If you are made the president of your country, what would be the first thing that you will do?\"`",
        "`\"If given a chance to meet 3 most famous people on the earth, who would it be, answer in order of preference.\"`",
        "`\"Have you ever wished to have a superpower, if so, what superpower you would like to have?\"`",
        "`\"Can you spend an entire day without phone and internet? If yes, what would you do?\"`",
        "`\"Live-in relation or marriage, what do you prefer?\"`",
        "`\"What is your favorite cuisine or type of food?\"`",
        "`\"What are some good and bad things about the education system in your country?\"`",
        "`\"What do you think of online education?\"`",
        "`\"What are some goals you have failed to accomplish?\"`",
        "`\"Will technology save the human race or destroy it?\"`",
        "`\"What was the best invention of the last 50 years?\"`",
        "`\"Have you travelled to any different countries? Which ones?\"`",
        "`\"Which sport is the most exciting to watch? Which is the most boring to watch?\"`",
        "`\"What’s the most addictive mobile game you have played?\"`",
        "`\"How many apps do you have on your phone?\"`",
        "`\"What was the last song you listened to?\"`",
        "`\"Do you prefer to watch movies in the theater or in the comfort of your own home?\"`",
        "`\"Do you like horror movies? Why or why not?\"`",
        "`\"How often do you help others? Who do you help? How do you help?\"`",
        "`\"What song do you play most often?\"`",
        "`\"Suggest a new rule that should be added in this group!\"`",
        "`\"What app on your phone do you think I should get?\"`",
        "`\"What website or app has completely changed your life for better or for worse?\"`",
        "`\"What isn’t real but you desperately wish it was?\"`",
        "`\"What thing do you really wish you could buy right now?\"`",
        "`\"If you could ban an admin from this group. Who would you prefer ?\"`",
        "`\"What would you do if someone left a duffle bag filled with $2,000,000 on your back porch?\"`",
        "`\"Who is the luckiest person you know?\"`",
        "`\"If you could visit someone's house in this group, who would it be ?\"`",
        "`\"What are you tired of hearing about?\"`",
        "`\"If you died today, what would your greatest achievement be?\"`",
        "`\"What method will you choose to kill yourself?\"`",
        "`\"What’s the best news you've heard in the last 24 hours?\"`",
        "`\"What is the most important change that should be made to your country’s education system?\"`",
        "`\"Send your favourite sticker pack.\"`",
        "`\"Send your favourite animated sticker pack.\"`",
        "`\"Send your favourite video or gif.\"`",
        "`\"Send your favourite emojies\"`",
        "`\"What’s something you misunderstood as a child and only realized much later was wrong?\"`",
    )
    await event.edit(random.choice(questions))
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.load``, loop a 4-frame bar animation (100 edits, 0.1 s apart).

    Fix: a missing comma made the original list end with ``"▭" ""`` —
    implicit string concatenation, so the trailing empty string was dead
    text fused onto the last frame rather than a fifth frame.  The stray
    literal is removed; the visible animation is unchanged.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    animation_ttl = range(0, 100)
    input_str = event.pattern_match.group(1)
    if input_str == "load":
        await event.edit(input_str)
        animation_chars = ["▮", "▯", "▬", "▭"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            await event.edit(animation_chars[i % 4])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.square``, alternate two half-filled squares (100 edits, 0.1 s apart).

    Fix: a missing comma made the original list end with ``"◨" ""`` —
    implicit string concatenation, so the trailing empty string was dead
    text fused onto the last frame rather than a fifth frame.  The stray
    literal is removed; the visible animation is unchanged.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    animation_ttl = range(0, 100)
    input_str = event.pattern_match.group(1)
    if input_str == "square":
        await event.edit(input_str)
        animation_chars = ["◧", "◨", "◧", "◨"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            await event.edit(animation_chars[i % 4])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.up``, alternate up/down box-drawing marks (100 edits, 0.1 s apart).

    Fix: a missing comma made the original list end with ``"╻" ""`` —
    implicit string concatenation, so the trailing empty string was dead
    text fused onto the last frame rather than a fifth frame.  The stray
    literal is removed; the visible animation is unchanged.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    animation_ttl = range(0, 100)
    input_str = event.pattern_match.group(1)
    if input_str == "up":
        await event.edit(input_str)
        animation_chars = ["╹", "╻", "╹", "╻"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            await event.edit(animation_chars[i % 4])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.round``, cycle shrinking circles (100 edits, 0.1 s apart).

    Fix: a missing comma made the original list end with ``"∘" ""`` —
    implicit string concatenation, so the trailing empty string was dead
    text fused onto the last frame rather than a fifth frame.  The stray
    literal is removed; the visible animation is unchanged.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    animation_ttl = range(0, 100)
    input_str = event.pattern_match.group(1)
    if input_str == "round":
        await event.edit(input_str)
        animation_chars = ["⚫", "⬤", "●", "∘"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            await event.edit(animation_chars[i % 4])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.heart``, alternate black and red hearts (100 edits, 0.1 s apart).

    Fix: a missing comma made the original list end with ``"❤️" ""`` —
    implicit string concatenation, so the trailing empty string was dead
    text fused onto the last frame rather than a fifth frame.  The stray
    literal is removed; the visible animation is unchanged.
    """
    if event.fwd_from:
        return
    animation_interval = 0.1
    animation_ttl = range(0, 100)
    input_str = event.pattern_match.group(1)
    if input_str == "heart":
        await event.edit(input_str)
        animation_chars = ["🖤", "❤️", "🖤", "❤️"]
        for i in animation_ttl:
            await asyncio.sleep(animation_interval)
            await event.edit(animation_chars[i % 4])
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.anim``, cycle face emojis twice then show a punchline.

    Fix: the list holds 9 frames but the loop ran ``range(0, 11)`` indexing
    ``animation_chars[i % 11]``, which raises ``IndexError`` at i=9 (index 9
    on a 9-element list).  The loop now walks the frames directly, showing
    each exactly once and ending on the punchline.
    """
    if event.fwd_from:
        return
    animation_interval = 1  # seconds between successive edits
    input_str = event.pattern_match.group(1)
    if input_str == "anim":
        await event.edit(input_str)
        animation_chars = [
            "😁",
            "😧",
            "😡",
            "😢",
            "😁",
            "😧",
            "😡",
            "😢",
            "__**...BOY oh BOY! i feel like a wamen....**__",
        ]
        for frame in animation_chars:
            await asyncio.sleep(animation_interval)
            await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.fnl``, lighten a grin emoji's skin tone step by step, ending on
    a "Fair & Lovely" punchline (6 frames, 2 s apart)."""
    if event.fwd_from:
        return
    command = event.pattern_match.group(1)
    if command != "fnl":
        return
    await event.edit(command)
    delay = 2  # seconds between successive edits
    frames = [
        "😁🏿",
        "😁🏾",
        "😁🏽",
        "😁🏼",
        "😁",
        "**Fair & Lovely GeNg Is BeHiNd You....**",
    ]
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.monkey``, cycle see/hear/speak-no-evil monkeys, ending on an
    "OPPA MONEKEYY Style" punchline (6 frames, 2 s apart)."""
    if event.fwd_from:
        return
    command = event.pattern_match.group(1)
    if command != "monkey":
        return
    await event.edit(command)
    delay = 2  # seconds between successive edits
    frames = [
        "🐵",
        "🙉",
        "🙈",
        "🙊",
        "🖕🐵🖕",
        "**OPPA MONEKEYY Style....**",
    ]
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.hand``, cycle through hand-gesture emojis (1 s apart).

    Fix: the list holds 13 frames but the loop ran ``range(0, 14)`` indexing
    ``animation_chars[i % 14]``, which raises ``IndexError`` at i=13 (index
    13 on a 13-element list).  The loop now walks the frames directly,
    showing each exactly once.
    """
    if event.fwd_from:
        return
    animation_interval = 1  # seconds between successive edits
    input_str = event.pattern_match.group(1)
    if input_str == "hand":
        await event.edit(input_str)
        animation_chars = [
            "👈",
            "👉",
            "☝️",
            "👆",
            "🖕",
            "👇",
            "✌️",
            "🤞",
            "🖖",
            "🤘",
            "🤙",
            "🖐️",
            "👌",
        ]
        for frame in animation_chars:
            await asyncio.sleep(animation_interval)
            await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.cnt``, count down from 10 to 0 with keycap emojis, ending on 🆘
    (1 s apart).

    Fix: the list holds 12 frames but the loop ran ``range(0, 13)`` indexing
    ``animation_chars[i % 13]``, which raises ``IndexError`` at i=12 (index
    12 on a 12-element list).  The loop now walks the frames directly,
    showing each exactly once.
    """
    if event.fwd_from:
        return
    animation_interval = 1  # seconds between successive edits
    input_str = event.pattern_match.group(1)
    if input_str == "cnt":
        await event.edit(input_str)
        animation_chars = [
            "🔟",
            "9️⃣",
            "8️⃣",
            "7️⃣",
            "6️⃣",
            "5️⃣",
            "4️⃣",
            "3️⃣",
            "2️⃣",
            "1️⃣",
            "0️⃣",
            "🆘",
        ]
        for frame in animation_chars:
            await asyncio.sleep(animation_interval)
            await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.wupload``, play a fake "upload to WhatsApp" progress sequence
    (15 frames, 9 s apart)."""
    if event.fwd_from:
        return
    command = event.pattern_match.group(1)
    if command != "wupload":
        return
    await event.edit(command)
    delay = 9  # seconds between successive edits
    frames = [
        "Uploading File From Telegram To Whatsapp...",
        " User Online: True\nTelegram API Access: True\nWhatsapp API Access: True\nRead Storage: True ",
        "DOWNLOADING STARTED... \n\n0% [░░░░░░░░░░░░░░░░░░░░]\n`Connecting To WhatsApp API...`\nETA: 0m, 20s",
        "DOWNLOADING... \n\n11.07% [██░░░░░░░░░░░░░░░░░░]\n\nETA: 0m, 18s",
        "DOWNLOADING... \n\n20.63% [███░░░░░░░░░░░░░░░░░]\n\nETA: 0m, 16s",
        "FILE DOWNLOADED, UPLOADING TO ADMIN'S WHATSAPP GROUP [CHUTIYA GENG BOYS]... \n\n34.42% [█████░░░░░░░░░░░░░░░]\n\nETA: 0m, 14s",
        "UPLOADING... \n\n42.17% [███████░░░░░░░░░░░░░]\n\nETA: 0m, 12s",
        "UPLOADING... \n\n55.30% [█████████░░░░░░░░░░░]\n\nETA: 0m, 10s",
        "UPLOADING... \n\n64.86% [███████████░░░░░░░░░]\n\nETA: 0m, 08s",
        "UPLOADED TO ADMIN'S WHATSAPP GROUP SERVER ... \n\n74.02% [█████████████░░░░░░░]\n\nETA: 0m, 06s",
        "SPLITTING FILE IN WHATSAPP SUPPORTED SIZE & UPLOADING IT ... 86.21% [███████████████░░░░░]\n\nETA: 0m, 04s",
        "SPLITTING FILE IN WHATSAPP SUPPORTED SIZE & UPLOADING IT... 93.50% [█████████████████░░░]\n\nETA: 0m, 02s",
        "UPLOADING TO ADMIN'S WHATSAPP GROUP [CHUTIYA GANG BOYS]... 100% [████████████████████]\n`Scanning file...`\nETA: 0m, 00s",
        "UPLOADING FILE TO WHATSAPP GROUP COMPLETED!\nFILE VERIFIED: ✅",
        "API TERMINATED UNTIL FURTHER USAGE...",
    ]
    for frame in frames:
        await asyncio.sleep(delay)
        await event.edit(frame)
@bot.on(events.NewMessage(pattern=r"\.(.*)", outgoing=True))
async def _(event):
    """On ``.admeme``, shrink a stretched "@admin" string letter by letter.

    Fix: the list holds 13 frames but the loop ran ``range(0, 103)`` indexing
    ``animation_chars[i % 103]``, which raises ``IndexError`` at i=13 (index
    13 on a 13-element list).  The loop now walks the frames directly,
    showing each exactly once.

    NOTE(review): unlike the other animations this one has no sleep between
    edits (kept as-is to preserve behavior), so it fires 13 edits
    back-to-back — flood-wait risk.
    """
    if event.fwd_from:
        return
    input_str = event.pattern_match.group(1)
    if input_str == "admeme":
        await event.edit(input_str)
        animation_chars = [
            "@aaaaaaaaaaaaadddddddddddddmmmmmmmmmmmmmiiiiiiiiiiiiinnnnnnnnnnnnn",
            "@aaaaaaaaaaaaddddddddddddmmmmmmmmmmmmiiiiiiiiiiiinnnnnnnnnnnn",
            "@aaaaaaaaaaadddddddddddmmmmmmmmmmmiiiiiiiiiiinnnnnnnnnnn",
            "@aaaaaaaaaaddddddddddmmmmmmmmmmiiiiiiiiiinnnnnnnnnn",
            "@aaaaaaaaadddddddddmmmmmmmmmiiiiiiiiinnnnnnnnn",
            "@aaaaaaaaddddddddmmmmmmmmiiiiiiiinnnnnnnn",
            "@aaaaaaadddddddmmmmmmmiiiiiiinnnnnnn",
            "@aaaaaaddddddmmmmmmiiiiiinnnnnn",
            "@aaaaadddddmmmmmiiiiinnnnn",
            "@aaaaddddmmmmiiiinnnn",
            "@aaadddmmmiiinnn",
            "@aaddmmiinn",
            "@admin",
        ]
        for frame in animation_chars:
            await event.edit(frame)
@register(outgoing=True, pattern="^.gotm(?: |$)(.*)")
async def _(event):
    """On ``.gotm``, reply with a random Game of Thrones meme (caption +
    telegra.ph image link, preview enabled).

    Fix: the original ``if x == n`` ladder drew ``random.randrange(1, 30)``
    (values 1..29) while 30 memes were listed, so the last meme was
    unreachable.  A flat tuple plus ``random.choice`` makes every meme
    selectable and removes the 30-branch ladder.
    """
    if event.fwd_from:
        return
    await event.edit("Thinking... 🤔")
    await asyncio.sleep(2)
    memes = (
        "[To your teachers on failing you in all your papers confidently, every time...](https://telegra.ph/file/431d178780f9bff353047.jpg)",
        "[A shift from the mainstream darling, sweetheart, jaanu, and what not...](https://telegra.ph/file/6bbb86a6c7d2c4a61e102.jpg)",
        "[To the guy who's friendzone-ing you...](https://telegra.ph/file/8930b05e9535e9b9b8229.jpg)",
        "[When your friend asks for his money back...](https://telegra.ph/file/2df575ab38df5ce9dbf5e.jpg)",
        "[A bad-ass reply to who do you think you are?](https://telegra.ph/file/3a35a0c37f4418da9f702.jpg)",
        "[When the traffic police stops your car and asks for documents...](https://telegra.ph/file/52612d58d6a61315a4c3a.jpg)",
        "[ When your friend asks about the food he/she just cooked and you don't want to break his/her heart...](https://telegra.ph/file/702df36088f5c26fef931.jpg)",
        "[When you're out of words...](https://telegra.ph/file/ba748a74bcab4a1135d2a.jpg)",
        "[When you realize your wallet is empty...](https://telegra.ph/file/a4508324b496d3d4580df.jpg)",
        "[When shit is about to happen...](https://telegra.ph/file/e15d9d64f9f25e8d05f19.jpg)",
        "[When that oversmart classmate shouts a wrong answer in class...](https://telegra.ph/file/1a225a2e4b7bfd7f7a809.jpg)",
        "[When things go wrong in a big fat Indian wedding...](https://telegra.ph/file/db69e17e85bb444caca32.jpg)",
        "[A perfect justification for breaking a promise...](https://telegra.ph/file/0b8fb8fb729d157844ac9.jpg)",
        "[When your friend just won't stop LOL-ing on something silly you said...](https://telegra.ph/file/247fa54106c32318797ae.jpg)",
        "[When someone makes a joke on you...](https://telegra.ph/file/2ee216651443524eaafcf.jpg)",
        "[When your professor insults you in front of the class...](https://telegra.ph/file/a2dc7317627e514a8e180.jpg)",
        "[When your job interviewer asks if you're nervous...](https://telegra.ph/file/9cc147d0bf8adbebf164b.jpg)",
        "[When you're sick of someone complaining about the heat outside...](https://telegra.ph/file/9248635263c52b968f968.jpg)",
        "[When your adda is occupied by outsiders...](https://telegra.ph/file/ef537007ba6d9d4cbd384.jpg)",
        "[When you don't have the right words to motivate somebody...](https://telegra.ph/file/2c932d769ae4c5fbed368.jpg)",
        "[When the bouncer won't let you and your group of friends in because you're all under-aged...](https://telegra.ph/file/6c8ca79f1e20ebd04391c.jpg)",
        "[To the friend who wants you to take the fall for his actions...](https://telegra.ph/file/d4171b9bc9104b5d972d9.jpg)",
        "[When that prick of a bully wouldn't take your words seriously...](https://telegra.ph/file/188d73bd24cf866d8d8d0.jpg)",
        "[ When you're forced to go shopping/watch a football match with your partner...](https://telegra.ph/file/6e129f138c99c1886cb2b.jpg)",
        "[To the large queue behind you after you get the last concert/movie ticket...](https://telegra.ph/file/2423f213dd4e4282a31ea.jpg)",
        "[When your parents thought you'd fail but you prove them wrong...](https://telegra.ph/file/39cc5098466f622bf21e3.jpg)",
        "[A justification for not voting!](https://telegra.ph/file/87d475a8f9a8350d2450e.jpg)",
        "[When your partner expects you to do too many things...](https://telegra.ph/file/68bc768d36e08862bf94e.jpg)",
        "[When your friends cancel on the plan you made at the last minute...](https://telegra.ph/file/960b58c8f625b17613307.jpg)",
        "[For that friend of yours who does not like loud music and head banging...](https://telegra.ph/file/acbce070d3c52b921b2bd.jpg)",
    )
    await event.edit(random.choice(memes), link_preview=True)
@register(outgoing=True, pattern="^.gott(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
await event.edit("Typing...")
await asyncio.sleep(2)
x = (random.randrange(1, 40))
if x == 1:
await event.edit("`\"The man who passes the sentence should swing the sword.\"`")
if x == 2:
await event.edit("`\"When the snows fall and the white winds blow, the lone wolf dies but the pack survives!\"`")
if x == 3:
await event.edit("`\"The things I do for love!\"`")
if x == 4:
await event.edit("`\"I have a tender spot in my heart for cripples, bastards and broken things.\"`")
if x == 5:
await event.edit("`\"Death is so terribly final, while life is full of possibilities.\"`")
if x == 6:
await event.edit("`\"Once you’ve accepted your flaws, no one can use them against you.\"`")
if x == 7:
await event.edit("`\"If I look back I am lost.\"`")
if x == 8:
await event.edit("`\"When you play the game of thrones, you win or you die.\"`")
if x == 9:
await event.edit("`\"I grew up with soldiers. I learned how to die a long time ago.\"`")
if x == 10:
await event.edit("`\"What do we say to the Lord of Death?\nNot Today!\"`")
if x == 11:
await event.edit("`\"Every flight begins with a fall.\"`")
if x == 12:
await event.edit("`\"Different roads sometimes lead to the same castle.\"`")
if x == 13:
await event.edit("`\"Never forget what you are. The rest of the world will not. Wear it like armour, and it can never be used to hurt you.\"`")
if x == 14:
await event.edit("`\"The day will come when you think you are safe and happy, and your joy will turn to ashes in your mouth.\"`")
if x == 15:
await event.edit("`\"The night is dark and full of terrors.\"`")
if x == 16:
await event.edit("`\"You know nothing, Jon Snow.\"`")
if x == 17:
await event.edit("`\"Night gathers, and now my watch begins!\"`")
if x == 18:
await event.edit("`\"A Lannister always pays his debts.\"`")
if x == 19:
await event.edit("`\"Burn them all!\"`")
if x == 20:
await event.edit("`\"What do we say to the God of death?\"`")
if x == 21:
await event.edit("`\"There's no cure for being a c*nt.\"`")
if x == 22:
await event.edit("`\"Winter is coming!\"`")
if x == 23:
await event.edit("`\"That's what I do: I drink and I know things.\"`")
if x == 24:
await event.edit("`\"I am the dragon's daughter, and I swear to you that those who would harm you will die screaming.\"`")
if x == 25:
await event.edit("`\"A lion does not concern himself with the opinion of sheep.\"`")
if x == 26:
await event.edit("`\"Chaos isn't a pit. Chaos is a ladder.\"`")
if x == 27:
await event.edit("`\"I understand that if any more words come pouring out your c*nt mouth, I'm gonna have to eat every f*cking chicken in this room.\"`")
if x == 28:
await event.edit("`\"If you think this has a happy ending, you haven't been paying attention.\"`")
if x == 29:
await event.edit("`\"If you ever call me sister again, I'll have you strangled in your sleep.\"`")
if x == 30:
await event.edit("`\"A girl is Arya Stark of Winterfell. And I'm going home.\"`")
if x == 31:
await event.edit("`\"Any man who must say 'I am the King' is no true King.\"`")
if x == 32:
await event.edit("`\"If I fall, don't bring me back.\"`")
if x == 33:
await event.edit("`\"Lannister, Targaryen, Baratheon, Stark, Tyrell... they're all just spokes on a wheel. This one's on top, then that one's on top, and on and on it spins, crushing those on the ground.\"`")
if x == 34:
await event.edit("`\"Hold the door!`")
if x == 35:
await event.edit("`\"When people ask you what happened here, tell them the North remembers. Tell them winter came for House Frey.\"`")
if x == 36:
await event.edit("`\"Nothing f*cks you harder than time.\"`")
if x == 37:
await event.edit("`\"There is only one war that matters. The Great War. And it is here.\"`")
if x == 38:
await event.edit("`\"Power is power!\"`")
if x == 39:
await event.edit("`\"I demand a trial by combat!\"`")
if x == 40:
await event.edit("`\"I wish I was the monster you think I am!\"`")
@register(outgoing=True, pattern="^.hp(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
await event.edit("Casting spell...")
await asyncio.sleep(2)
x = (random.randrange(1, 76))
if (x == 1):
await event.edit("**Crucio**")
if (x == 2):
await event.edit("**Sectumsempra**")
if (x == 3):
await event.edit("**Morsmordre**")
if (x == 4):
await event.edit("**Tarantallegra**")
if (x == 5):
await event.edit("*Fiendfyre Curse**")
if (x == 6):
await event.edit("**Petrificus Totalus**")
if (x == 7):
await event.edit("**Furnunculus**")
if (x == 8):
await event.edit("**Rictusempra**")
if (x == 9):
await event.edit("**Deletrius**")
if (x == 10):
await event.edit("*Incarcarous**")
if (x == 11):
await event.edit("**Confringo**")
if (x == 12):
await event.edit("**Densaugeo**")
if (x == 13):
await event.edit("**Mobilicorpus**")
if (x == 14):
await event.edit("**Liberacorpus**")
if (x == 15):
await event.edit("**Levicorpus**")
if (x == 16):
await event.edit("**Deprimo**")
if (x == 17):
await event.edit("**Sonorus**")
if (x == 18):
await event.edit("**Duro**")
if (x == 19):
await event.edit("**Legilimens**")
if (x == 20):
await event.edit("**Expulso**")
if (x == 21):
await event.edit("**Oppugno**")
if (x == 22):
await event.edit("**Fidelius**")
if (x == 23):
await event.edit("**Imperio**")
if (x == 24):
await event.edit("**Piertotum Locomotor**")
if (x == 25):
await event.edit("**Aparecium**")
if (x == 26):
await event.edit("**Defodio**")
if (x == 27):
await event.edit("**Descendo**")
if (x == 28):
await event.edit("**Specialis Revelio**")
if (x == 29):
await event.edit("**Protego Totalum**")
if (x == 30):
await event.edit("**Meteolojinx Recanto**")
if (x == 31):
await event.edit("**Cave Inimicum**")
if (x == 32):
await event.edit("**Impedimenta**")
if (x == 33):
await event.edit("**Obscuro**")
if (x == 34):
await event.edit("**Reducto**")
if (x == 35):
await event.edit("**Anapneo**")
if (x == 36):
await event.edit("**Locomotor Mortis**")
if (x == 37):
await event.edit("**Geminio**")
if (x == 38):
await event.edit("** Aguamenti**")
if (x == 39):
await event.edit("**Avada Kedavra**")
if (x == 40):
await event.edit("**Repelo Muggletum**")
if (x == 41):
await event.edit("**Stupefy**")
if (x == 42):
await event.edit("**Diffindo**")
if (x == 43):
await event.edit("**Erecto**")
if (x == 44):
await event.edit("**Finite Incantatem**")
if (x == 45):
await event.edit("**Prior Incantato**")
if (x == 46):
await event.edit("**Expulso**")
if (x == 47):
await event.edit("**Incendio**")
if (x == 48):
await event.edit("**Nox**")
if (x == 49):
await event.edit("**Colloportus**")
if (x == 50):
await event.edit("**Evanesco**")
if (x == 51):
await event.edit("**Ferula**")
if (x == 52):
await event.edit("**Expecto Patronum**")
if (x == 53):
await event.edit("**Confundo**")
if (x == 54):
await event.edit("**Relashio**")
if (x == 55):
await event.edit("**Tergeo**")
if (x == 56):
await event.edit("**Episkey**")
if (x == 57):
await event.edit("**Oblivate**")
if (x == 58):
await event.edit("**Expelliarmus**")
if (x == 59):
await event.edit("*Silencio**")
if (x == 60):
await event.edit("**Muffliato**")
if (x == 61):
await event.edit("**Protego**")
if (x == 62):
await event.edit("**Expulso**")
if (x == 63):
await event.edit("**Riddikulus**")
if (x == 64):
await event.edit("**Pack**")
if (x == 65):
await event.edit("**Reducio**")
if (x == 66):
await event.edit("**Reparo**")
if (x == 67):
await event.edit("**Lumos**")
if (x == 68):
await event.edit("**Orchideous**")
if (x == 69):
await event.edit("**Portus**")
if (x == 70):
await event.edit("**Scourgify**")
if (x == 71):
await event.edit("**Wingardium Leviosa**")
if (x == 71):
await event.edit("**Impervius**")
if (x == 73):
await event.edit("**Engorgio**")
if (x == 74):
await event.edit("**Glisseo**")
if (x == 75):
await event.edit("**Accio**")
@register(outgoing=True, pattern="^.suits(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
await event.edit("Typing...")
await asyncio.sleep(2)
x = (random.randrange(1, 43))
if x == 1:
await event.edit("`\"The only time success comes before work is in the dictionary.\"`")
if x == 2:
await event.edit("`\"That’s the difference between you and me, you wanna lose small, I wanna win big.\"`")
if x == 3:
await event.edit("`\"When you are backed against the wall, break the goddamn thing down.\"`")
if x == 4:
await event.edit("`\"If they think you care, they’ll walk all over you.\"`")
if x == 5:
await event.edit("`\"I don’t have dreams, I have goals.\"`")
if x == 6:
await event.edit("`\"It’s going to happen, because I am going to make it happen.\"`")
if x == 7:
await event.edit("`\"Ever loved someone so much, you would do anything for them? Yeah, well make that someone yourself and do whatever the hell you want.\"`")
if x == 8:
await event.edit("`\"I like to smile at people who don’t like me.\"`")
if x == 9:
await event.edit("`\"Don’t raise your voice, improve your argument.\"`")
if x == 10:
await event.edit("`\"You want to change your life? Change the way you think.\"`")
if x == 11:
await event.edit("`\"Have goals so big you get uncomfortable telling small minded people.\"`")
if x == 12:
await event.edit("`\"Kill them with success. Bury them with a smile.\"`")
if x == 13:
await event.edit("`\"Winners don’t make excuses.\"`")
if x == 14:
await event.edit("`\"It's not a problem if you always win.\"`")
if x == 15:
await event.edit("`\"I don’t play the odds I play the man.\"`")
if x == 16:
await event.edit("`\"You always have a choice.\"`")
if x == 17:
await event.edit("`\"Sorry, I can’t hear you over the sound of how awesome I am.\"`")
if x == 18:
await event.edit("`\"Anyone can do my job, but no one can be me.\"`")
if x == 19:
await event.edit("`\"I believe in work, I don’t fuck with luck.\"`")
if x == 20:
await event.edit("`\"It’s not bragging if it’s true.\"`")
if x == 21:
await event.edit("`\"Win a no win situation by rewriting the rules.\"`")
if x == 22:
await event.edit("`\"Let them hate, just make sure they spell your name right.\"`")
if x == 23:
await event.edit("`\"That’s the difference between you and me. You wanna lose small, I wanna win big.\"`")
if x == 24:
await event.edit("`\"Oh you have no idea how Donna I am.\"`")
if x == 25:
await event.edit("`\"It is so much easier to criticize someone else than it is to acknowledge your own shortcomings.\"`")
if x == 26:
await event.edit("`\"And if you think I'm smarter than you? You're damn right I do. But if you think that means I can't kick your ass up and down this floor, take a swing. See what happens.\"`")
if x == 27:
await event.edit("`\"If they had did it once, they will do it again.\"`")
if x == 28:
await event.edit("`\"He goes, I go.\"`")
if x == 29:
await event.edit("`\"First impressions last. You start behind the eight ball, you'll never get in front.\"`")
if x == 30:
await event.edit("`\"I don't respond to threats. I make them.\"`")
if x == 31:
await event.edit("`\"Nobody does anything as a courtesy. They sent you where they want you to look. Listen, being a lawyer is a lot like being a doctor.\"`")
if x == 32:
await event.edit("`\"Sometimes I like to hangout with people that aren't that bright. You know, just to see how the other halves live.\"`")
if x == 33:
await event.edit("`\"Never destroy anyone in public when you can accomplish the same result in private.\"`")
if x == 34:
await event.edit("`\"You don’t send a puppy to clean up its own mess.\"`")
if x == 35:
await event.edit("`\"Gloating is fine, you just not have to suck at it.\"`")
if x == 36:
await event.edit("`\"It's not bragging if it's true.\"`")
if x == 37:
await event.edit("`\"Sometimes good guys gotta do bad things to make the bad guys pay.\"`")
if x == 38:
await event.edit("`\"I don't pave the way for people,people pave the way for me.\"`")
if x == 39:
await event.edit("`\"My respect isn't demanded, it's earnt.\"`")
if x == 40:
await event.edit("`\"I don't get lucky, I make my own luck.\"`")
if x == 41:
await event.edit("`\"This isn't elementary school. This is hard work, long hours, high pressure. I need a grown godamn man.\nYou give me this, and I'll work as hard as it takes to school those Harvard douches and become the best lawyer you have ever seen.\"`")
if x == 42:
await event.edit("`\"Love is a terrifying thing\nIt’s not safe. Because when you love someone, you have to face the fact that you can lose them.\nSometimes life throws an unexpected wrench in your way. It might be that you’re in jeopardy of losing your career, your freedom, or worst of all, you might even find out that a loved one has died\nThese things make you realize how precious life is, how important every second we have on this earth is, and how important the people we care about are to us.\"`")
@register(outgoing=True, pattern="^.kiss(?: |$)(.*)")
async def _(event):
if event.fwd_from:
return
deq = deque(list("😗😙😚😚😘"))
for _ in range(48):
await asyncio.sleep(0.1)
await event.edit("".join(deq))
deq.rotate(1)
# Register this plugin's help text under the "mixmemes" key; the long help
# string is built with backslash line-continuations, so the literal "\n"
# escapes (not the physical line breaks) produce the displayed newlines.
CMD_HELP.update({
"mixmemes":
"`.eye`\
\nUsage: see it yourself.\
\n\n`.earth`\
\nusage: spins like earth 🌎🌎\
\n\n`.bombs`\
\nUsage: For bombing tg 🤣🤣\
\n\n`.think`\
\nUsage: hmmm\
\n\n`.gotm` or `.gott`\
\nUsage: got sucks🤣\
\n\n`.snake`\
\nUsage: See it yourself\
\n\n`.call`\
\nUsage: call tg owner\
\n\n`.belo` or `.hp` or `.suits`\
\nUsage: kinda interesting\
\n\n`.admeme`\
\nUsage: spammy af\
\n\n`.qs`\
\nUsage: start conversation\
\n\n`.wtf`\
\nUsage: See it yourself\
\n\n`.load`,`.up`,`.square`,`.round`,`.heart`,`.monkey`,`.anim`,`.hand`,`.fnl`,`.cnt`,`.kiss`\
\nUsage: See it yourself\
\n\n`.human`\
\nUsage: Nothing interesting\
\n\n`.wupload`\
\nUsage: meh\
\n\n`.music`\
\nUsage: gey music player\
\n\n`.fleave`\
\nUsage: fake leaving\
\n\n`.fadmin`\
\nUsage: Fake admin 🤣🤣\
\n\n`.gift`\
\nUsage: Well it's a gift i can't say what's inside 😁😁!\
\n\n`.police`\
\nUsage: Time to go to jail 😔😔.\
\n\n`.kill`\
\nUsage: For killing your enemies 🔫🔫 !!\
\n\n`.os`\
\nUsage: see it yourself 🤐🤐.\
\n\n`.isro`\
\nUsage: For calling aliens 👽👽 :P\
\n\n`.gangstar`\
\nUsage:U becum gengstar 🤠🤠.\
\n\n`.hack`\
\nUsage: For hacking telegram🖥️🖥️.\
\n\n`.dump`\
\nUsage: For throwing waste xD\
\n\n`.hypno`\
\nUsage: Oh fek my eyes 👀\
\n\n`.whatsapp`\
\nUsage: Now you can hack whatsapp too 😂😂 \
\n\n`.solar`\
\nUsage: Our beautiful solar system 🌞🌞\
\n\n`.quickheal` or `.sqh` or `.vquickheal`\
\nUsage: Virus found ...Remove it using this 😂😂.\
\n\n`.plane`\
\nUsage: For travelling from one place to another ✈️✈️\
\n\n`.jio`\
\nUsage: Your network slow?? Boost it using this 🤣🤣\
\n\n\nWARNING⚠️⚠️: All this cmds will spam group recents.\nUse it in OT groups/Spam groups OR GET YOU A** KICKED😂😂."
})
| 43.607155 | 4,035 | 0.38995 |
acec5ab2cba050cee427d48725eab54ff8ad7fb0 | 2,592 | py | Python | afb/utils/misc.py | dave-msk/broker | f6fbb3ec78c25ee89a954231466599787cbc122f | [
"Apache-2.0"
] | 1 | 2021-08-28T04:46:30.000Z | 2021-08-28T04:46:30.000Z | afb/utils/misc.py | dave-msk/broker | f6fbb3ec78c25ee89a954231466599787cbc122f | [
"Apache-2.0"
] | 1 | 2019-03-08T08:28:41.000Z | 2019-03-08T13:00:55.000Z | afb/utils/misc.py | dave-msk/broker | f6fbb3ec78c25ee89a954231466599787cbc122f | [
"Apache-2.0"
] | 1 | 2019-02-27T09:12:49.000Z | 2019-02-27T09:12:49.000Z | # Copyright 2021 (David) Siu-Kei Muk. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import yaml
from afb.utils import proxy
SEP = "/"
_RESERVED = "afb"
NONE = object()
CONFIG_LOADER = {
'.yaml': yaml.safe_load,
'.yml': yaml.safe_load,
'.json': json.load,
}
_mfr_lib = proxy.ModuleProxy("afb.core.manufacturer")
_dep_lib = proxy.ModuleProxy("afb.utils.deprecation")
def create_mfr(cls, fct_fn_dict, keyword_mode=None):
    """Deprecated shim: forwards to `Manufacturer.from_dict`.

    Emits a deprecation warning on every call; `keyword_mode` is accepted
    only for backward compatibility and triggers its own warning when given.
    """
    message = ("`{}` is deprecated and will be removed in a future version. "
               "Use `Manufacturer.from_dict` instead."
               .format(qualname(create_mfr)))
    _dep_lib.warn(message)
    if keyword_mode is not None:
        _dep_lib.warn("`keyword_mode` is not used anymore.")
    return _mfr_lib.Manufacturer.from_dict(cls, fct_fn_dict)
def qualname_id(obj, sep="_"):
fmt = "%s" + sep + "%s"
return fmt % (obj.__module__.replace(".", sep), obj.__name__)
def qualname(obj):
    """Return *obj*'s dotted qualified name, omitting the `builtins` prefix."""
    if obj.__module__ == "builtins":
        return obj.__name__
    return "{}.{}".format(obj.__module__, obj.__name__)
def load_config(config):
    """Deserialize a YAML/JSON config file into a single `dict`.

    The parser is chosen from `CONFIG_LOADER` by file extension
    (`.yaml`/`.yml` -> YAML, `.json` -> JSON).  An empty file yields the
    placeholder ``{None: None}``.  Raises `TypeError` when the parsed
    content is not a `dict` (additional YAML documents are ignored by the
    loader itself).
    """
    ext = os.path.splitext(config)[-1].lower()
    loader = CONFIG_LOADER[ext]
    with open(config, 'rb') as f:
        data = loader(f) or {None: None}
    if not isinstance(data, dict):
        raise TypeError("File content is not a `dict`. Path: {}, Content: {}"
                        .format(config, data))
    return data
def is_reserved(name):
    """Return True when *name*'s first path component is the reserved `afb` prefix."""
    prefix, _, _ = name.partition(SEP)
    return prefix == _RESERVED
def join(*args):
    """Join the given name components with the module-level separator `SEP`."""
    return SEP.join(args)
def join_reserved(*args):
    """Join the given components under the reserved `afb` namespace prefix."""
    return join(_RESERVED, *args)
| 28.483516 | 80 | 0.680941 |
acec5afbdf5a5ac03cda531b77e83a9576bf9874 | 393 | py | Python | agents/Agent.py | sergiuionescu/gym-agents | c65b786a148a868c9ee922dca1cb592604e7e828 | [
"Apache-2.0"
] | null | null | null | agents/Agent.py | sergiuionescu/gym-agents | c65b786a148a868c9ee922dca1cb592604e7e828 | [
"Apache-2.0"
] | null | null | null | agents/Agent.py | sergiuionescu/gym-agents | c65b786a148a868c9ee922dca1cb592604e7e828 | [
"Apache-2.0"
] | null | null | null | class Agent(object):
def random_prediction(self):
    """No-op stub; concrete agents presumably return a random action here — TODO confirm."""
    pass

def prediction(self, observation):
    """No-op stub; concrete agents predict from *observation* — TODO confirm."""
    pass

def act(self, ob):
    """No-op stub; concrete agents choose an action for observation *ob*."""
    pass

def add_reward(self, observation, reward):
    """No-op stub; concrete agents record *reward* for *observation*."""
    pass

def get_learning_rate(self):
    """No-op stub; concrete agents report their learning rate."""
    pass

def reset(self, found):
    """No-op stub; concrete agents reset episode state (*found* semantics unclear from here)."""
    pass

def sleep(self):
    """No-op stub."""
    pass

def set_session(self, session):
    """No-op stub; concrete agents store an external *session* object."""
    pass
| 15.115385 | 46 | 0.569975 |
acec5b2b2cdc1009d35bef1166bd984408f34bef | 9,006 | py | Python | unittest/tests/modules/compare.py | simsab-ufcg/ndvi-finder | fb1a25dcca5b38a18bc51e8436e470bc9bbd464f | [
"MIT"
] | null | null | null | unittest/tests/modules/compare.py | simsab-ufcg/ndvi-finder | fb1a25dcca5b38a18bc51e8436e470bc9bbd464f | [
"MIT"
] | 17 | 2018-10-16T17:45:47.000Z | 2019-05-08T18:56:48.000Z | unittest/tests/modules/compare.py | simsab-ufcg/ndvi-finder | fb1a25dcca5b38a18bc51e8436e470bc9bbd464f | [
"MIT"
] | 1 | 2018-10-18T16:56:52.000Z | 2018-10-18T16:56:52.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#******************************************************************************
#
# Project: GDAL
# Purpose: Compare two files for differences and report.
# Author: Frank Warmerdam, warmerdam@pobox.com
#
#******************************************************************************
# Copyright (c) 2012, Frank Warmerdam <warmerdam@pobox.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#******************************************************************************
import os
import sys
import filecmp
from osgeo import gdal
from osgeo import osr
#######################################################
def compare_metadata(golden_md, new_md, id, options=None):
    """Compare two metadata dictionaries and print any differences.

    :param golden_md: reference metadata dict (may be None).
    :param new_md: metadata dict under test (may be None).
    :param id: label used in the printed messages (e.g. 'Dataset', 'Band 1').
    :param options: comparison options; unused here but kept for interface
        compatibility with the other compare_* helpers.  The original used a
        mutable default (`options=[]`).
    :return: number of differences found.
    """
    if golden_md is None and new_md is None:
        return 0

    # Fix: the original raised AttributeError when exactly one side was
    # None; report that as a single difference instead.
    if golden_md is None or new_md is None:
        print('Difference in %s metadata availability' % id)
        print(' Golden: ' + str(golden_md))
        print(' New: ' + str(new_md))
        return 1

    found_diff = 0

    if len(golden_md) != len(new_md):
        print('Difference in %s metadata key count' % id)
        print(' Golden Keys: ' + str(list(golden_md.keys())))
        print(' New Keys: ' + str(list(new_md.keys())))
        found_diff += 1

    for key in golden_md:
        if key not in new_md:
            print('New %s metadata lacks key \"%s\"' % (id, key))
            found_diff += 1
        elif new_md[key] != golden_md[key]:
            print('Metadata value difference for key "' + key + '"')
            print(' Golden: "' + golden_md[key] + '"')
            print(' New: "' + new_md[key] + '"')
            found_diff += 1

    return found_diff
#######################################################
# Review and report on the actual image pixels that differ.
def compare_image_pixels(golden_band, new_band, id, options=[]):
diff_count = 0
max_diff = 0
for line in range(golden_band.YSize):
golden_line = golden_band.ReadAsArray(0, line, golden_band.XSize, 1)[0]
new_line = new_band.ReadAsArray(0, line, golden_band.XSize, 1)[0]
diff_line = golden_line.astype(float) - new_line.astype(float)
max_diff = max(max_diff,abs(diff_line).max())
diff_count += len(diff_line.nonzero()[0])
print(' Pixels Differing: ' + str(diff_count))
print(' Maximum Pixel Difference: ' + str(max_diff))
#######################################################
def compare_band(golden_band, new_band, id, options=[]):
    """Compare two GDAL raster bands and return the number of differences.

    Checks pixel type, color interpretation, checksum (with a per-pixel
    report on mismatch), overviews (recursively) and band metadata.
    `id` labels the band in printed messages; `options` may contain
    'SKIP_METADATA' to suppress the metadata comparison.
    """
    found_diff = 0

    if golden_band.DataType != new_band.DataType:
        print('Band %s pixel types differ.' % id)
        print(' Golden: ' + gdal.GetDataTypeName(golden_band.DataType))
        print(' New: ' + gdal.GetDataTypeName(new_band.DataType))
        found_diff += 1

    # Nodata comparison deliberately disabled (kept for reference).
    #if golden_band.GetNoDataValue() != new_band.GetNoDataValue():
    # print('Band %s nodata values differ.' % id)
    # print(' Golden: ' + str(golden_band.GetNoDataValue()))
    # print(' New: ' + str(new_band.GetNoDataValue()))
    # found_diff += 1

    if golden_band.GetColorInterpretation() != new_band.GetColorInterpretation():
        print('Band %s color interpretation values differ.' % id)
        print(' Golden: ' + gdal.GetColorInterpretationName(golden_band.GetColorInterpretation()))
        print(' New: ' + gdal.GetColorInterpretationName(new_band.GetColorInterpretation()))
        found_diff += 1

    if golden_band.Checksum() != new_band.Checksum():
        print('Band %s checksum difference:' % id)
        print(' Golden: ' + str(golden_band.Checksum()))
        print(' New: ' + str(new_band.Checksum()))
        found_diff += 1
        # Only do the expensive per-pixel scan when the checksums disagree.
        compare_image_pixels(golden_band, new_band, id, options)

    # Check overviews: compare pyramid levels pairwise when counts match.
    if golden_band.GetOverviewCount() != new_band.GetOverviewCount():
        print('Band %s overview count difference:' % id)
        print(' Golden: ' + str(golden_band.GetOverviewCount()))
        print(' New: ' + str(new_band.GetOverviewCount()))
        found_diff += 1
    else:
        for i in range(golden_band.GetOverviewCount()):
            found_diff += compare_band(golden_band.GetOverview(i),
                                       new_band.GetOverview(i),
                                       id + ' overview ' + str(i),
                                       options)

    # Metadata
    if 'SKIP_METADATA' not in options:
        found_diff += compare_metadata(golden_band.GetMetadata(),
                                       new_band.GetMetadata(),
                                       'Band ' + id, options)

    # TODO: Color Table, gain/bias, units, blocksize, mask, min/max
    return found_diff
#######################################################
def compare_srs(golden_wkt, new_wkt):
    """Compare two spatial-reference WKT strings.

    Returns 0 when the WKT strings are textually identical, otherwise
    prints a report (including whether OSR still considers the two SRS
    equivalent) and returns 1.
    """
    if golden_wkt == new_wkt:
        return 0

    print('Difference in SRS!')
    reference_srs = osr.SpatialReference(golden_wkt)
    candidate_srs = osr.SpatialReference(new_wkt)
    if not reference_srs.IsSame(candidate_srs):
        print(' * IsSame() reports them as different.')
    else:
        print(' * IsSame() reports them as equivalent.')
    print(' Golden:')
    print(' ' + reference_srs.ExportToPrettyWkt())
    print(' New:')
    print(' ' + candidate_srs.ExportToPrettyWkt())
    return 1
#######################################################
def compare_db(golden_db, new_db, options=[]):
    """Compare two open GDAL datasets and return the number of differences.

    Compares SRS, geotransform, dataset metadata, band count and band
    dimensions; only when all of those match does it go on to compare the
    bands pixel-by-pixel.  `options` may contain 'SKIP_SRS',
    'SKIP_GEOTRANSFORM' and/or 'SKIP_METADATA' to suppress those checks.
    """
    found_diff = 0

    # SRS
    if 'SKIP_SRS' not in options:
        found_diff += compare_srs(golden_db.GetProjection(),
                                  new_db.GetProjection())

    # GeoTransform
    if 'SKIP_GEOTRANSFORM' not in options:
        golden_gt = golden_db.GetGeoTransform()
        new_gt = new_db.GetGeoTransform()
        if golden_gt != new_gt:
            print('GeoTransforms Differ:')
            print(' Golden: ' + str(golden_gt))
            print(' New: ' + str(new_gt))
            found_diff += 1

    # Metadata
    if 'SKIP_METADATA' not in options:
        found_diff += compare_metadata(golden_db.GetMetadata(),
                                       new_db.GetMetadata(),
                                       'Dataset', options)

    # Bands
    if golden_db.RasterCount != new_db.RasterCount:
        print('Band count mismatch (golden=%d, new=%d)' \
              % (golden_db.RasterCount, new_db.RasterCount))
        found_diff += 1

    # Dimensions
    # NOTE(review): this loop indexes new_db by golden_db's band count even
    # after a band-count mismatch was reported above, so it can fail when
    # new_db has fewer bands — confirm intended behavior.
    for i in range(golden_db.RasterCount):
        gSzX = golden_db.GetRasterBand(i+1).XSize
        nSzX = new_db.GetRasterBand(i+1).XSize
        gSzY = golden_db.GetRasterBand(i+1).YSize
        nSzY = new_db.GetRasterBand(i+1).YSize
        if gSzX != nSzX or gSzY != nSzY:
            print('Band size mismatch (band=%d golden=[%d,%d], new=[%d,%d])' %
                  (i, gSzX, gSzY, nSzX, nSzY))
            found_diff += 1

    # If so-far-so-good, then compare pixels
    if found_diff == 0:
        for i in range(golden_db.RasterCount):
            found_diff += compare_band(golden_db.GetRasterBand(i+1),
                                       new_db.GetRasterBand(i+1),
                                       str(i+1),
                                       options)

    return found_diff
#######################################################
def compare_sds(golden_db, new_db, options=None):
    """Compare matching subdatasets of two GDAL datasets.

    Opens each SUBDATASET_<n>_NAME pair from both datasets' 'SUBDATASETS'
    metadata domain and compares them with `compare_db`.

    :return: total number of differences found across all subdatasets.
    """
    if options is None:
        options = []
    found_diff = 0

    golden_sds = golden_db.GetMetadata('SUBDATASETS')
    new_sds = new_db.GetMetadata('SUBDATASETS')

    # Each subdataset contributes a _NAME and a _DESC key, so the subdataset
    # count is half the key count.  Fix: the original used "/" which yields
    # a float on Python 3, making range(count) raise TypeError.
    count = len(golden_sds) // 2

    for i in range(count):
        key = 'SUBDATASET_%d_NAME' % (i+1)
        sub_golden_db = gdal.Open(golden_sds[key])
        sub_new_db = gdal.Open(new_sds[key])
        sds_diff = compare_db(sub_golden_db, sub_new_db, options)
        found_diff += sds_diff
        if sds_diff > 0:
            print('%d differences found between:\n %s\n %s' \
                  % (sds_diff, golden_sds[key], new_sds[key]))

    return found_diff
#######################################################
def Usage():
    """Print the command-line usage string and terminate with exit code 1."""
    usage_line = 'Usage: gdalcompare.py [-sds] <golden_file> <new_file>'
    print(usage_line)
    sys.exit(1)
#######################################################
#
# Mainline
#
def compare_tiffs(golden_file, new_file):
    """Compare two raster files at the binary level and as GDAL datasets.

    :param golden_file: reference file path (may be a path not present on
        the local filesystem, in which case the binary compare is skipped).
    :param new_file: file path under test.
    :return: total number of differences found.
    """
    found_diff = 0

    # Compare raw binary files.  Fix: the original used a bare `except:`
    # which also swallowed filecmp errors (and even KeyboardInterrupt);
    # only a failing os.stat() on the golden file should skip this step.
    try:
        os.stat(golden_file)
    except OSError:
        print('Skipped binary file comparison, golden file not in filesystem.')
    else:
        if not filecmp.cmp(golden_file, new_file):
            print('Files differ at the binary level.')
            found_diff += 1

    # Compare as GDAL Datasets.
    golden_db = gdal.Open(golden_file)
    new_db = gdal.Open(new_file)

    found_diff += compare_db(golden_db, new_db)

    return found_diff
| 34.505747 | 96 | 0.603153 |
acec5d30fc9d892b74353cd63c415b949168ff00 | 776 | py | Python | 2021/13/script_2.py | gregueiras/advent-of-code | 3b363eee6c2761b6b3b0ab9f31e590f7442d0733 | [
"MIT"
] | null | null | null | 2021/13/script_2.py | gregueiras/advent-of-code | 3b363eee6c2761b6b3b0ab9f31e590f7442d0733 | [
"MIT"
] | null | null | null | 2021/13/script_2.py | gregueiras/advent-of-code | 3b363eee6c2761b6b3b0ab9f31e590f7442d0733 | [
"MIT"
] | null | null | null | from aux import Board
from pathlib import Path
path = Path(__file__).parent / "./input.txt"
dots = []
instructions = []
readingDots = True
with open(path) as fp:
Lines = fp.readlines()
for line in Lines:
line = line.strip()
if line == "":
readingDots = False
elif readingDots:
dots.append(list(map(int, line.split(","))))
elif not readingDots:
inst = line.split("=")
instructions.append((inst[0][-1], int(inst[1])))
board = Board(dots, instructions)
# board.instructions.reverse()
for inst in board.instructions:
board.fold(inst)
# print(board._printBoard(board.board))
print(board.visible())
print(board.visible())
print(board.dimX)
print(board.dimY)
print(len(board.board))
| 21.555556 | 60 | 0.628866 |
acec5d61fa1311ef9db13e1441cec71101da2ce0 | 290 | py | Python | Server/Python/src/dbs/dao/MySQL/Block/ListStats.py | vkuznet/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 8 | 2015-08-14T04:01:32.000Z | 2021-06-03T00:56:42.000Z | Server/Python/src/dbs/dao/MySQL/Block/ListStats.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 162 | 2015-01-07T21:34:47.000Z | 2021-10-13T09:42:41.000Z | Server/Python/src/dbs/dao/MySQL/Block/ListStats.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 16 | 2015-01-22T15:27:29.000Z | 2021-04-28T09:23:28.000Z | #!/usr/bin/env python
"""
This module provides Block.ListStats data access object.
Block parameters based on current conditions at DBS, are listed by this DAO
"""
from dbs.dao.Oracle.Block.ListStats import ListStats as OraBlockListStats
class ListStats(OraBlockListStats):
    """MySQL variant of the Block.ListStats DAO.

    Currently identical to the Oracle implementation — no MySQL-specific
    overrides are defined.
    """
    pass
| 24.166667 | 75 | 0.772414 |
acec5e8c923c319bfbe139efdd7d9018aa88441e | 87 | py | Python | sb3_contrib/tdqn/__init__.py | zappavignandrea/stable-baselines3-contrib | aca9c2871289f8961ee349befa374983108fae20 | [
"MIT"
] | null | null | null | sb3_contrib/tdqn/__init__.py | zappavignandrea/stable-baselines3-contrib | aca9c2871289f8961ee349befa374983108fae20 | [
"MIT"
] | null | null | null | sb3_contrib/tdqn/__init__.py | zappavignandrea/stable-baselines3-contrib | aca9c2871289f8961ee349befa374983108fae20 | [
"MIT"
] | null | null | null | from sb3_contrib.tdqn.policies import MlpPolicy
from sb3_contrib.tdqn.tdqn import TDQN
| 29 | 47 | 0.862069 |
acec5f73e615a4ac2673aebb24b0308f610b1a93 | 14,770 | py | Python | QUANTAXIS/QAData/financial_mean.py | wangyuefengGH/QUANTAXIS | 2e198b621680c5f3a609f9c202bd4ae583621d31 | [
"MIT"
] | 1 | 2018-07-18T04:42:46.000Z | 2018-07-18T04:42:46.000Z | QUANTAXIS/QAData/financial_mean.py | Alvin917/QUANTAXIS | 4f988776a4b08ac2bbd46c0cbef31a3112771920 | [
"MIT"
] | null | null | null | QUANTAXIS/QAData/financial_mean.py | Alvin917/QUANTAXIS | 4f988776a4b08ac2bbd46c0cbef31a3112771920 | [
"MIT"
] | 1 | 2018-09-11T11:50:23.000Z | 2018-09-11T11:50:23.000Z | # coding:utf-8
# Maps the numbered Chinese financial-statement field names (as shipped in
# the raw financial data files) to the English identifiers used downstream,
# e.g. as DataFrame column names.  Keys must stay exactly as in the source
# data; values are identifiers, so they must contain no stray whitespace or
# punctuation (three such typos are fixed below) and should be unique
# (known collisions are flagged with NOTE(review) comments but left
# unchanged to avoid breaking existing consumers).
financial_dict = {
    # 1. per-share indicators
    '001基本每股收益': 'EPS',
    '002扣除非经常性损益每股收益': 'deductEPS',
    '003每股未分配利润': 'undistributedProfitPerShare',
    '004每股净资产': 'netAssetsPerShare',
    '005每股资本公积金': 'capitalReservePerShare',
    '006净资产收益率': 'ROE',
    '007每股经营现金流量': 'operatingCashFlowPerShare',
    # 2. BALANCE SHEET
    # 2.1 assets
    # 2.1.1 current assets
    '008货币资金': 'moneyFunds',
    '009交易性金融资产': 'tradingFinancialAssets',
    '010应收票据': 'billsReceivables',
    '011应收账款': 'accountsReceivables',
    '012预付款项': 'prepayments',
    '013其他应收款': 'otherReceivables',
    '014应收关联公司款': 'interCompanyReceivables',
    '015应收利息': 'interestReceivables',
    '016应收股利': 'dividendsReceivables',
    '017存货': 'inventory',
    '018其中:消耗性生物资产': 'expendableBiologicalAssets',
    '019一年内到期的非流动资产': 'noncurrentAssetsDueWithinOneYear',
    '020其他流动资产': 'otherLiquidAssets',
    '021流动资产合计': 'totalLiquidAssets',
    # 2.1.2 non-current assets
    '022可供出售金融资产': 'availableForSaleSecurities',
    '023持有至到期投资': 'heldToMaturityInvestments',
    '024长期应收款': 'longTermReceivables',
    '025长期股权投资': 'longTermEquityInvestment',
    '026投资性房地产': 'investmentRealEstate',
    '027固定资产': 'fixedAssets',
    '028在建工程': 'constructionInProgress',
    '029工程物资': 'engineerMaterial',
    '030固定资产清理': 'fixedAssetsCleanUp',
    '031生产性生物资产': 'productiveBiologicalAssets',
    '032油气资产': 'oilAndGasAssets',
    '033无形资产': 'intangibleAssets',
    '034开发支出': 'developmentExpenditure',
    '035商誉': 'goodwill',
    '036长期待摊费用': 'longTermDeferredExpenses',
    '037递延所得税资产': 'deferredIncomeTaxAssets',
    '038其他非流动资产': 'otherNonCurrentAssets',
    '039非流动资产合计': 'totalNonCurrentAssets',
    '040资产总计': 'totalAssets',
    # 2.2 liabilities
    # 2.2.1 current liabilities
    '041短期借款': 'shortTermLoan',
    '042交易性金融负债': 'tradingFinancialLiabilities',
    '043应付票据': 'billsPayable',
    '044应付账款': 'accountsPayable',
    '045预收款项': 'advancedReceivable',
    '046应付职工薪酬': 'employeesPayable',
    '047应交税费': 'taxPayable',
    '048应付利息': 'interestPayable',
    '049应付股利': 'dividendPayable',
    '050其他应付款': 'otherPayable',
    '051应付关联公司款': 'interCompanyPayable',
    '052一年内到期的非流动负债': 'noncurrentLiabilitiesDueWithinOneYear',
    '053其他流动负债': 'otherCurrentLiabilities',
    '054流动负债合计': 'totalCurrentLiabilities',
    # 2.2.2 non-current liabilities
    '055长期借款': 'longTermLoans',
    '056应付债券': 'bondsPayable',
    '057长期应付款': 'longTermPayable',
    '058专项应付款': 'specialPayable',
    '059预计负债': 'estimatedLiabilities',
    '060递延所得税负债': 'defferredIncomeTaxLiabilities',
    '061其他非流动负债': 'otherNonCurrentLiabilities',
    '062非流动负债合计': 'totalNonCurrentLiabilities',
    '063负债合计': 'totalLiabilities',
    # 2.3 owners' equity
    '064实收资本(或股本)': 'totalShare',
    '065资本公积': 'capitalReserve',
    '066盈余公积': 'surplusReserve',
    '067减:库存股': 'treasuryStock',
    '068未分配利润': 'undistributedProfits',
    '069少数股东权益': 'minorityEquity',
    '070外币报表折算价差': 'foreignCurrencyReportTranslationSpread',
    '071非正常经营项目收益调整': 'abnormalBusinessProjectEarningsAdjustment',
    '072所有者权益(或股东权益)合计': 'totalOwnersEquity',
    '073负债和所有者(或股东权益)合计': 'totalLiabilitiesAndOwnersEquity',
    # 3. income statement
    '074其中:营业收入': 'operatingRevenue',
    '075其中:营业成本': 'operatingCosts',
    '076营业税金及附加': 'taxAndSurcharges',
    '077销售费用': 'salesCosts',
    '078管理费用': 'managementCosts',
    '079堪探费用': 'explorationCosts',
    '080财务费用': 'financialCosts',
    '081资产减值损失': 'assestsDevaluation',
    '082加:公允价值变动净收益': 'profitAndLossFromFairValueChanges',
    '083投资收益': 'investmentIncome',
    '084其中:对联营企业和合营企业的投资收益': 'investmentIncomeFromAffiliatedBusinessAndCooperativeEnterprise',
    '085影响营业利润的其他科目': 'otherSubjectsAffectingOperatingProfit',
    '086三、营业利润': 'operatingProfit',
    '087加:补贴收入': 'subsidyIncome',
    '088营业外收入': 'nonOperatingIncome',
    '089减:营业外支出': 'nonOperatingExpenses',
    '090其中:非流动资产处置净损失': 'netLossFromDisposalOfNonCurrentAssets',
    '091加:影响利润总额的其他科目': 'otherSubjectsAffectTotalProfit',
    '092四、利润总额': 'totalProfit',
    '093减:所得税': 'incomeTax',
    '094加:影响净利润的其他科目': 'otherSubjectsAffectNetProfit',
    '095五、净利润': 'netProfit',
    '096归属于母公司所有者的净利润': 'netProfitsBelongToParentCompanyOwner',
    '097少数股东损益': 'minorityProfitAndLoss',
    # 4. cash flow statement
    # 4.1 Operating
    '098销售商品、提供劳务收到的现金': 'cashFromGoodsSalesorOrRenderingOfServices',
    '099收到的税费返还': 'refundOfTaxAndFeeReceived',
    '100收到其他与经营活动有关的现金': 'otherCashRelatedBusinessActivitiesReceived',
    '101经营活动现金流入小计': 'cashInflowsFromOperatingActivities',
    '102购买商品、接受劳务支付的现金': 'buyingGoodsReceivingCashPaidForLabor',
    '103支付给职工以及为职工支付的现金': 'paymentToEmployeesAndCashPaidForEmployees',
    '104支付的各项税费': 'paymentsOfVariousTaxes',
    '105支付其他与经营活动有关的现金': 'paymentOfOtherCashRelatedToBusinessActivities',
    '106经营活动现金流出小计': 'cashOutflowsFromOperatingActivities',
    '107经营活动产生的现金流量净额': 'netCashFlowsFromOperatingActivities',
    # 4.2 Investment
    '108收回投资收到的现金': 'cashReceivedFromInvestmentReceived',
    '109取得投资收益收到的现金': 'cashReceivedFromInvestmentIncome',
    '110处置固定资产、无形资产和其他长期资产收回的现金净额': 'disposalOfNetCashForRecoveryOfFixedAssetsIntangibleAssetsAndOtherLongTermAssets',
    '111处置子公司及其他营业单位收到的现金净额': 'disposalOfNetCashReceivedFromSubsidiariesAndOtherBusinessUnits',
    '112收到其他与投资活动有关的现金': 'otherCashReceivedRelatingToInvestingActivities',
    '113投资活动现金流入小计': 'cashinFlowsFromInvestmentActivities',
    '114购建固定资产、无形资产和其他长期资产支付的现金': 'cashForThePurchaseConstructionPaymentOfFixedAssetsIntangibleAssetsAndOtherLongTermAssets',
    '115投资支付的现金': 'cashInvestment',
    '116取得子公司及其他营业单位支付的现金净额': 'acquisitionOfNetCashPaidBySubsidiariesAndOtherBusinessUnits',
    '117支付其他与投资活动有关的现金': 'otherCashPaidRelatingToInvestingActivities',
    '118投资活动现金流出小计': 'cashOutflowsFromInvestmentActivities',
    '119投资活动产生的现金流量净额': 'netCashFlowsFromInvestingActivities',
    # 4.3 Financing
    '120吸收投资收到的现金': 'cashReceivedFromInvestors',
    '121取得借款收到的现金': 'cashFromBorrowings',
    '122收到其他与筹资活动有关的现金': 'otherCashReceivedRelatingToFinancingActivities',
    '123筹资活动现金流入小计': 'cashInflowsFromFinancingActivities',
    '124偿还债务支付的现金': 'cashPaymentsOfAmountBorrowed',
    '125分配股利、利润或偿付利息支付的现金': 'cashPaymentsForDistrbutionOfDividendsOrProfits',
    '126支付其他与筹资活动有关的现金': 'otherCashPaymentRelatingToFinancingActivities',
    '127筹资活动现金流出小计': 'cashOutflowsFromFinancingActivities',
    '128筹资活动产生的现金流量净额': 'netCashFlowsFromFinancingActivities',
    # 4.4 exchange-rate changes
    '129四、汇率变动对现金的影响': 'effectOfForeignExchangRateChangesOnCash',
    '130四(2)、其他原因对现金的影响': 'effectOfOtherReasonOnCash',
    # 4.5 net increase in cash and cash equivalents
    '131五、现金及现金等价物净增加额': 'netIncreaseInCashAndCashEquivalents',
    '132期初现金及现金等价物余额': 'initialCashAndCashEquivalentsBalance',
    # 4.6 ending balance of cash and cash equivalents
    '133期末现金及现金等价物余额': 'theFinalCashAndCashEquivalentsBalance',
    # 4.x Supplementary Schedule:
    # cash flow statement appendix items (Indirect Method)
    # 4.x.1 Convert net profit to cash flow from operating activities
    '134净利润': 'netProfitFromOperatingActivities',
    '135资产减值准备': 'provisionForAssetsLosses',
    '136固定资产折旧、油气资产折耗、生产性生物资产折旧': 'depreciationForFixedAssets',
    '137无形资产摊销': 'amortizationOfIntangibleAssets',
    '138长期待摊费用摊销': 'amortizationOfLong-termDeferredExpenses',
    '139处置固定资产、无形资产和其他长期资产的损失': 'lossOfDisposingFixedAssetsIntangibleAssetsAndOtherLong-termAssets',
    '140固定资产报废损失': 'scrapLossOfFixedAssets',
    '141公允价值变动损失': 'lossFromFairValueChange',
    '142财务费用': 'financialExpenses',
    '143投资损失': 'investmentLosses',
    '144递延所得税资产减少': 'decreaseOfDeferredTaxAssets',
    '145递延所得税负债增加': 'increaseOfDeferredTaxLiabilities',
    '146存货的减少': 'decreaseOfInventory',
    '147经营性应收项目的减少': 'decreaseOfOperationReceivables',
    '148经营性应付项目的增加': 'increaseOfOperationPayables',
    '149其他': 'others',
    '150经营活动产生的现金流量净额2': 'netCashFromOperatingActivities2',
    # 4.x.2 Investing and financing activities not involved in cash
    '151债务转为资本': 'debtConvertedToCSapital',
    '152一年内到期的可转换公司债券': 'convertibleBondMaturityWithinOneYear',
    '153融资租入固定资产': 'leaseholdImprovements',
    # 4.x.3 Net increase of cash and cash equivalents
    '154现金的期末余额': 'cashEndingBal',
    '155现金的期初余额': 'cashBeginingBal',
    '156现金等价物的期末余额': 'cashEquivalentsEndingBal',
    '157现金等价物的期初余额': 'cashEquivalentsBeginningBal',
    '158现金及现金等价物净增加额': 'netIncreaseOfCashAndCashEquivalents',
    # 5. solvency analysis
    '159流动比率': 'liquidityRatio',  # current assets / current liabilities
    '160速动比率': 'acidTestRatio',  # (current assets - inventory) / current liabilities
    '161现金比率(%)': 'cashRatio',  # (cash + marketable securities) / current liabilities
    '162利息保障倍数': 'interestCoverageRatio',  # (total profit + interest part of financial expenses) / interest expense
    '163非流动负债比率(%)': 'noncurrentLiabilitiesRatio',
    '164流动负债比率(%)': 'currentLiabilitiesRatio',
    '165现金到期债务比率(%)': 'cashDebtRatio',  # net operating cash inflow / (long-term debt due this period + notes payable)
    '166有形资产净值债务率(%)': 'debtToTangibleAssetsRatio',
    '167权益乘数(%)': 'equityMultiplier',  # total assets / total shareholders' equity
    '168股东的权益/负债合计(%)': 'equityDebtRatio',  # equity-to-debt ratio
    # fix: value previously carried a trailing space ('tangibleAssetDebtRatio ')
    '169有形资产/负债合计(%)': 'tangibleAssetDebtRatio',  # tangible-assets-to-debt ratio
    '170经营活动产生的现金流量净额/负债合计(%)': 'netCashFlowsFromOperatingActivitiesDebtRatio',
    '171EBITDA/负债合计(%)': 'EBITDA/Liabilities',
    # 6. operating efficiency analysis
    # sales revenue / average accounts receivable
    # fix: value previously ended with a stray semicolon ('turnoverRatioOfReceivable;')
    '172应收帐款周转率': 'turnoverRatioOfReceivable',
    '173存货周转率': 'turnoverRatioOfInventory',
    # (days of inventory + days of receivables - days of payables
    #  + days of prepayments - days of advances received) / 365
    '174运营资金周转率': 'turnoverRatioOfOperatingAssets',
    '175总资产周转率': 'turnoverRatioOfTotalAssets',
    '176固定资产周转率': 'turnoverRatioOfFixedAssets',  # sales revenue / net fixed assets
    '177应收帐款周转天数': 'daysSalesOutstanding',  # days from booking a receivable until collected in cash
    '178存货周转天数': 'daysSalesOfInventory',  # days from acquiring inventory until consumed or sold
    '179流动资产周转率': 'turnoverRatioOfCurrentAssets',  # main business revenue / average total current assets
    '180流动资产周转天数': 'daysSalesofCurrentAssets',
    '181总资产周转天数': 'daysSalesofTotalAssets',
    '182股东权益周转率': 'equityTurnover',  # sales revenue / average shareholders' equity
    # 7. growth capability analysis
    '183营业收入增长率(%)': 'operatingIncomeGrowth',
    '184净利润增长率(%)': 'netProfitGrowthRate',  # NPGR: total profit - income tax
    '185净资产增长率(%)': 'netAssetsGrowthRate',
    '186固定资产增长率(%)': 'fixedAssetsGrowthRate',
    '187总资产增长率(%)': 'totalAssetsGrowthRate',
    '188投资收益增长率(%)': 'investmentIncomeGrowthRate',
    '189营业利润增长率(%)': 'operatingProfitGrowthRate',
    '190暂无': 'None1',
    '191暂无': 'None2',
    '192暂无': 'None3',
    # 8. profitability analysis
    '193成本费用利润率(%)': 'rateOfReturnOnCost',
    '194营业利润率': 'rateOfReturnOnOperatingProfit',
    '195营业税金率': 'rateOfReturnOnBusinessTax',
    '196营业成本率': 'rateOfReturnOnOperatingCost',
    '197净资产收益率': 'rateOfReturnOnCommonStockholdersEquity',
    '198投资收益率': 'rateOfReturnOnInvestmentIncome',
    '199销售净利率(%)': 'rateOfReturnOnNetSalesProfit',
    '200总资产报酬率': 'rateOfReturnOnTotalAssets',
    '201净利润率': 'netProfitMargin',
    '202销售毛利率(%)': 'rateOfReturnOnGrossProfitFromSales',
    '203三费比重': 'threeFeeProportion',
    '204管理费用率': 'ratioOfChargingExpense',
    '205财务费用率': 'ratioOfFinancialExpense',
    '206扣除非经常性损益后的净利润': 'netProfitAfterExtraordinaryGainsAndLosses',
    '207息税前利润(EBIT)': 'EBIT',
    '208息税折旧摊销前利润(EBITDA)': 'EBITDA',
    '209EBITDA/营业总收入(%)': 'EBITDA/GrossRevenueRate',
    # 9. capital structure analysis
    '210资产负债率(%)': 'assetsLiabilitiesRatio',
    # NOTE(review): same value as key '159流动比率' — collides if values are
    # used as unique column names; left unchanged for backward compatibility.
    '211流动资产比率': 'liquidityRatio',
    '212货币资金比率': 'monetaryFundRatio',
    '213存货比率': 'inventoryRatio',
    '214固定资产比率': 'fixedAssetsRatio',
    '215负债结构比': 'liabilitiesStructureRatio',
    '216归属于母公司股东权益/全部投入资本(%)': 'shareholdersOwnershipOfAParentCompany/TotalCapital',
    '217股东的权益/带息债务(%)': 'shareholdersInterest/InterestRateDebtRatio',
    '218有形资产/净债务(%)': 'tangibleAssets/NetDebtRatio',
    # 10. cash flow analysis
    # NOTE(review): same value as key '007每股经营现金流量' — see note on '211'.
    '219每股经营性现金流(元)': 'operatingCashFlowPerShare',
    '220营业收入现金含量(%)': 'cashOfOperatingIncome',
    '221经营活动产生的现金流量净额/经营活动净收益(%)': 'netOperatingCashFlow/netOperationProfit',
    '222销售商品提供劳务收到的现金/营业收入(%)': 'cashFromGoodsSales/OperatingRevenue',
    '223经营活动产生的现金流量净额/营业收入': 'netOperatingCashFlow/OperatingRevenue',
    '224资本支出/折旧和摊销': 'capitalExpenditure/DepreciationAndAmortization',
    '225每股现金流量净额(元)': 'netCashFlowPerShare',
    '226经营净现金比率(短期债务)': 'operatingCashFlow/ShortTermDebtRatio',
    '227经营净现金比率(全部债务)': 'operatingCashFlow/LongTermDebtRatio',
    '228经营活动现金净流量与净利润比率': 'cashFlowRateAndNetProfitRatioOfOperatingActivities',
    '229全部资产现金回收率': 'cashRecoveryForAllAssets',
    # 11. single-quarter indicators
    '230营业收入': 'operatingRevenueSingle',
    '231营业利润': 'operatingProfitSingle',
    '232归属于母公司所有者的净利润': 'netProfitBelongingToTheOwnerOfTheParentCompanySingle',
    '233扣除非经常性损益后的净利润': 'netProfitAfterExtraordinaryGainsAndLossesSingle',
    '234经营活动产生的现金流量净额': 'netCashFlowsFromOperatingActivitiesSingle',
    '235投资活动产生的现金流量净额': 'netCashFlowsFromInvestingActivitiesSingle',
    '236筹资活动产生的现金流量净额': 'netCashFlowsFromFinancingActivitiesSingle',
    '237现金及现金等价物净增加额': 'netIncreaseInCashAndCashEquivalentsSingle',
    # 12. share capital and shareholders
    '238总股本': 'totalCapital',
    '239已上市流通A股': 'listedAShares',
    '240已上市流通B股': 'listedBShares',
    '241已上市流通H股': 'listedHShares',
    '242股东人数(户)': 'numberOfShareholders',
    '243第一大股东的持股数量': 'theNumberOfFirstMajorityShareholder',
    '244十大流通股东持股数量合计(股)': 'totalNumberOfTopTenCirculationShareholders',
    '245十大股东持股数量合计(股)': 'totalNumberOfTopTenMajorShareholders',
    # 13. institutional holdings
    '246机构总量(家)': 'institutionNumber',
    '247机构持股总量(股)': 'institutionShareholding',
    '248QFII机构数': 'QFIIInstitutionNumber',
    '249QFII持股量': 'QFIIShareholding',
    '250券商机构数': 'brokerNumber',
    '251券商持股量': 'brokerShareholding',
    '252保险机构数': 'securityNumber',
    '253保险持股量': 'securityShareholding',
    '254基金机构数': 'fundsNumber',
    '255基金持股量': 'fundsShareholding',
    '256社保机构数': 'socialSecurityNumber',
    '257社保持股量': 'socialSecurityShareholding',
    '258私募机构数': 'privateEquityNumber',
    '259私募持股量': 'privateEquityShareholding',
    '260财务公司机构数': 'financialCompanyNumber',
    '261财务公司持股量': 'financialCompanyShareholding',
    '262年金机构数': 'pensionInsuranceAgencyNumber',
    '263年金持股量': 'pensionInsuranceAgencyShareholfing',
    # 14. newly added indicators
    # [note: in quarterly reports, if a shareholder also holds shares of a
    #  non-circulating-A nature (e.g. both circulating A and B shares), the
    #  figure includes those holdings as well]
    # NOTE(review): same value as key '244' — see note on '211'.
    '264十大流通股东中持有A股合计(股)': 'totalNumberOfTopTenCirculationShareholders',
    '265第一大流通股东持股量(股)': 'firstLargeCirculationShareholdersNumber',
    # [note: 1. free float = listed A shares minus the >=5% A-share holdings of
    #  the top-ten circulation shareholders; 2. in quarterly reports the >=5%
    #  holdings exclude non-circulating-A holdings, so the result may be
    #  slightly overstated; 3. figures are per reporting period — new listings
    #  only get data from the reporting period after the listing date]
    '266自由流通股(股)': 'freeCirculationStock',
    '267受限流通A股(股)': 'limitedCirculationAShares',
    '268一般风险准备(金融类)': 'generalRiskPreparation',
    '269其他综合收益(利润表)': 'otherComprehensiveIncome',
    '270综合收益总额(利润表)': 'totalComprehensiveIncome',
    # fix: value previously carried a trailing space
    '271归属于母公司股东权益(资产负债表)': 'shareholdersOwnershipOfAParentCompany',
    '272银行机构数(家)(机构持股)': 'bankInstutionNumber',
    '273银行持股量(股)(机构持股)': 'bankInstutionShareholding',
    '274一般法人机构数(家)(机构持股)': 'corporationNumber',
    '275一般法人持股量(股)(机构持股)': 'corporationShareholding',
    '276近一年净利润(元)': 'netProfitLastYear'
}
acec60601af98f669791144ea53c1c1c3449215b | 3,063 | py | Python | views/tasks.py | koskorya/love | cef74856893996a6d185c70ef2a72b0971a93d97 | [
"MIT"
] | null | null | null | views/tasks.py | koskorya/love | cef74856893996a6d185c70ef2a72b0971a93d97 | [
"MIT"
] | null | null | null | views/tasks.py | koskorya/love | cef74856893996a6d185c70ef2a72b0971a93d97 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from flask import request
from flask import Response
import json
from google.cloud import ndb
from logic.event import add_event
import logic.employee
import logic.notifier
import logic.love
import logic.love_count
import logic.love_link
from main import app
from models import Love
from util.decorators import appengineTaskOrCron
from main import oidc
# All tasks that are to be executed by cron need to use HTTP GET
# see https://cloud.google.com/appengine/docs/python/config/cron
@app.route('/tasks/employees/load/s3', methods=['GET'])
@appengineTaskOrCron
def load_employees_from_s3():
    """Reload the employee roster from S3 and schedule the follow-up tasks."""
    logic.employee.load_employees()
    # Departments may have changed, so both the love-count index and the
    # autocomplete data have to be refreshed afterwards.
    for follow_up_task in ('/tasks/love_count/rebuild',
                           '/tasks/employees/load/data/autocomplete'):
        add_event('load_S3', follow_up_task, {}, 'GET')
    return Response(status=200)
@app.route('/tasks/employees/load/data/autocomplete', methods=['GET'])
@appengineTaskOrCron
def load_mysql_data():
    """Task handler: refresh the MySQL-backed employee autocomplete data."""
    logic.employee.load_employees_into_mysql()
    return Response(status=200)
# This task has a web UI to trigger it, so let's use POST
@app.route('/tasks/employees/load/csv', methods=['POST'])
@oidc.require_login
def load_employees_from_csv():
    """Task handler: reload the employee roster from an uploaded CSV file."""
    logic.employee.load_employees_from_csv()
    # we need to rebuild the love count index as the departments may have changed.
    add_event('load_CSV', '/tasks/love_count/rebuild', {}, 'GET')
    return Response(status=200)
# One-off tasks are much easier to trigger using GET
@app.route('/tasks/employees/combine', methods=['GET'])
@oidc.require_login
def combine_employees():
    """One-off admin task: merge the history of user ``old`` into ``new``.

    Expects ``old`` and ``new`` usernames as query parameters; responds with
    400 and an explanatory message when either is missing or empty.
    """
    # .get() instead of ['...'] so a missing parameter produces the friendly
    # 400 message below instead of Flask's generic BadRequestKeyError page.
    old_username = request.args.get('old')
    new_username = request.args.get('new')
    if not old_username:
        return Response(response='{} is not a valid username'.format(old_username), status=400)
    elif not new_username:
        return Response(response='{} is not a valid username'.format(new_username), status=400)
    logic.employee.combine_employees(old_username, new_username)
    return Response(status=200)
@app.route('/tasks/love/email', methods=['POST'])
@appengineTaskOrCron
def email_love():
    """Task handler: send the notification email for one love, given its id."""
    raw = request.get_data(as_text=True)
    try:
        payload = json.loads(raw)
    except ValueError:
        # Legacy callers enqueue Python-repr dicts (single-quoted).  The old
        # unconditional replace("'", '"') corrupted any payload containing an
        # apostrophe, so only fall back to it when strict JSON parsing fails.
        payload = json.loads(raw.replace("'", '"'))
    love_id = int(payload['id'])
    love = ndb.Key(Love, love_id).get()
    logic.love.send_love_email(love)
    return Response(status=200)
@app.route('/tasks/love_count/rebuild', methods=['GET'])
@appengineTaskOrCron
def rebuild_love_count():
    """Task handler: rebuild the love-count index from scratch."""
    logic.love_count.rebuild_love_count()
    return Response(status=200)
@app.route('/tasks/subscribers/notify', methods=['POST'])
@appengineTaskOrCron
def notify_subscribers():
    """Task handler: dispatch a notifier for the event named in the payload."""
    raw = request.get_data(as_text=True)
    try:
        payload = json.loads(raw)
    except ValueError:
        # Legacy callers enqueue Python-repr dicts (single-quoted).  The old
        # unconditional replace("'", '"') corrupted any payload containing an
        # apostrophe, so only fall back to it when strict JSON parsing fails.
        payload = json.loads(raw.replace("'", '"'))
    notifier = logic.notifier.notifier_for_event(payload['event'])(**payload['options'])
    notifier.notify()
    return Response(status=200)
@app.route('/tasks/lovelinks/cleanup', methods=['GET'])
@appengineTaskOrCron
def lovelinks_cleanup():
    """Task handler: delete stale love links."""
    logic.love_link.love_links_cleanup()
    return Response(status=200)
| 33.293478 | 95 | 0.738492 |
acec61a79fcd04cee30aedfc6589463c20a31a97 | 751 | py | Python | tests/functional/modules/pyi_testmod_submodule_global_shadowed/submodule.py | yoda-vid/pyinstaller | 419f349dad721a253b19d9c596e251818132d6ba | [
"Apache-2.0"
] | 2 | 2017-02-08T22:22:09.000Z | 2020-10-08T12:28:36.000Z | tests/functional/modules/pyi_testmod_submodule_global_shadowed/submodule.py | 416426/pyinstaller | 0f2b2e921433ab5a510c7efdb21d9c1d7cfbc645 | [
"Apache-2.0"
] | 3 | 2020-04-06T15:48:37.000Z | 2021-03-23T10:22:21.000Z | tests/functional/modules/pyi_testmod_submodule_global_shadowed/submodule.py | 416426/pyinstaller | 0f2b2e921433ab5a510c7efdb21d9c1d7cfbc645 | [
"Apache-2.0"
] | 4 | 2018-06-04T20:40:37.000Z | 2020-10-13T22:38:40.000Z | # -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2021, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License (version 2
# or later) with exception for distributing the bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#
# SPDX-License-Identifier: (GPL-2.0-or-later WITH Bootloader-exception)
#-----------------------------------------------------------------------------
'''
Mock module of the same name as _and_ shadowed by a global variable defined by
the `__init__` submodule of this package.
This module is exercised by the `test_import_submodule_global_shadowed`
functional test.
'''
| 37.55 | 78 | 0.604527 |
acec61ba71f1c9f58597de27ee292176a6a23a1e | 11,644 | py | Python | Lib/tkinter/dnd.py | pxeger/cpython | 959580bd9ff8824590e8b24895bc2276f3f10b35 | [
"0BSD"
] | 52,316 | 2015-01-01T15:56:25.000Z | 2022-03-31T23:19:01.000Z | Lib/tkinter/dnd.py | dalakatt/cpython | 2f49b97cc5426087b46515254b9a97a22ee8c807 | [
"0BSD"
] | 25,286 | 2015-03-03T23:18:02.000Z | 2022-03-31T23:17:27.000Z | Lib/tkinter/dnd.py | dalakatt/cpython | 2f49b97cc5426087b46515254b9a97a22ee8c807 | [
"0BSD"
] | 31,623 | 2015-01-01T13:29:37.000Z | 2022-03-31T19:55:06.000Z | """Drag-and-drop support for Tkinter.
This is very preliminary. I currently only support dnd *within* one
application, between different windows (or within the same window).
I am trying to make this as generic as possible -- not dependent on
the use of a particular widget or icon type, etc. I also hope that
this will work with Pmw.
To enable an object to be dragged, you must create an event binding
for it that starts the drag-and-drop process. Typically, you should
bind <ButtonPress> to a callback function that you write. The function
should call Tkdnd.dnd_start(source, event), where 'source' is the
object to be dragged, and 'event' is the event that invoked the call
(the argument to your callback function). Even though this is a class
instantiation, the returned instance should not be stored -- it will
be kept alive automatically for the duration of the drag-and-drop.
When a drag-and-drop is already in process for the Tk interpreter, the
call is *ignored*; this normally averts starting multiple simultaneous
dnd processes, e.g. because different button callbacks all
dnd_start().
The object is *not* necessarily a widget -- it can be any
application-specific object that is meaningful to potential
drag-and-drop targets.
Potential drag-and-drop targets are discovered as follows. Whenever
the mouse moves, and at the start and end of a drag-and-drop move, the
Tk widget directly under the mouse is inspected. This is the target
widget (not to be confused with the target object, yet to be
determined). If there is no target widget, there is no dnd target
object. If there is a target widget, and it has an attribute
dnd_accept, this should be a function (or any callable object). The
function is called as dnd_accept(source, event), where 'source' is the
object being dragged (the object passed to dnd_start() above), and
'event' is the most recent event object (generally a <Motion> event;
it can also be <ButtonPress> or <ButtonRelease>). If the dnd_accept()
function returns something other than None, this is the new dnd target
object. If dnd_accept() returns None, or if the target widget has no
dnd_accept attribute, the target widget's parent is considered as the
target widget, and the search for a target object is repeated from
there. If necessary, the search is repeated all the way up to the
root widget. If none of the target widgets can produce a target
object, there is no target object (the target object is None).
The target object thus produced, if any, is called the new target
object. It is compared with the old target object (or None, if there
was no old target widget). There are several cases ('source' is the
source object, and 'event' is the most recent event object):
- Both the old and new target objects are None. Nothing happens.
- The old and new target objects are the same object. Its method
dnd_motion(source, event) is called.
- The old target object was None, and the new target object is not
None. The new target object's method dnd_enter(source, event) is
called.
- The new target object is None, and the old target object is not
None. The old target object's method dnd_leave(source, event) is
called.
- The old and new target objects differ and neither is None. The old
target object's method dnd_leave(source, event), and then the new
target object's method dnd_enter(source, event) is called.
Once this is done, the new target object replaces the old one, and the
Tk mainloop proceeds. The return value of the methods mentioned above
is ignored; if they raise an exception, the normal exception handling
mechanisms take over.
The drag-and-drop processes can end in two ways: a final target object
is selected, or no final target object is selected. When a final
target object is selected, it will always have been notified of the
potential drop by a call to its dnd_enter() method, as described
above, and possibly one or more calls to its dnd_motion() method; its
dnd_leave() method has not been called since the last call to
dnd_enter(). The target is notified of the drop by a call to its
method dnd_commit(source, event).
If no final target object is selected, and there was an old target
object, its dnd_leave(source, event) method is called to complete the
dnd sequence.
Finally, the source object is notified that the drag-and-drop process
is over, by a call to source.dnd_end(target, event), specifying either
the selected target object, or None if no target object was selected.
The source object can use this to implement the commit action; this is
sometimes simpler than to do it in the target's dnd_commit(). The
target's dnd_commit() method could then simply be aliased to
dnd_leave().
At any time during a dnd sequence, the application can cancel the
sequence by calling the cancel() method on the object returned by
dnd_start(). This will call dnd_leave() if a target is currently
active; it will never call dnd_commit().
"""
import tkinter
__all__ = ["dnd_start", "DndHandler"]
# The factory function
def dnd_start(source, event):
    """Factory: begin a drag-and-drop of *source*, triggered by *event*.

    Returns the live DndHandler, or None when no sequence could start
    (non-mouse button, or another dnd is already in progress).
    """
    handler = DndHandler(source, event)
    return handler if handler.root is not None else None
# The class that does the work
class DndHandler:
    """Drives one drag-and-drop sequence from button press to release.

    The instance registers itself on the Tk root (as ``root.__dnd``,
    name-mangled to ``_DndHandler__dnd``) so at most one dnd sequence can
    run per interpreter at a time.
    """
    root = None  # stays None when __init__ bails out early (see below)
    def __init__(self, source, event):
        # Only real mouse buttons (1-5) may start a drag.
        if event.num > 5:
            return
        root = event.widget._root()
        try:
            root.__dnd
            return # Don't start recursive dnd
        except AttributeError:
            root.__dnd = self
            self.root = root
        self.source = source
        self.target = None
        self.initial_button = button = event.num
        self.initial_widget = widget = event.widget
        # Releasing the same button that started the drag ends it.
        self.release_pattern = "<B%d-ButtonRelease-%d>" % (button, button)
        self.save_cursor = widget['cursor'] or ""
        widget.bind(self.release_pattern, self.on_release)
        widget.bind("<Motion>", self.on_motion)
        widget['cursor'] = "hand2"
    def __del__(self):
        # Drop the per-root marker so a later dnd sequence can start.
        root = self.root
        self.root = None
        if root is not None:
            try:
                del root.__dnd
            except AttributeError:
                pass
    def on_motion(self, event):
        """Locate the target under the pointer and fire enter/motion/leave."""
        x, y = event.x_root, event.y_root
        target_widget = self.initial_widget.winfo_containing(x, y)
        source = self.source
        new_target = None
        # Walk up the widget hierarchy until some widget's dnd_accept()
        # yields a target object, or we run past the root.
        while target_widget is not None:
            try:
                attr = target_widget.dnd_accept
            except AttributeError:
                pass
            else:
                new_target = attr(source, event)
                if new_target is not None:
                    break
            target_widget = target_widget.master
        old_target = self.target
        if old_target is new_target:
            if old_target is not None:
                old_target.dnd_motion(source, event)
        else:
            # Leave the old target (if any) before entering the new one.
            if old_target is not None:
                self.target = None
                old_target.dnd_leave(source, event)
            if new_target is not None:
                new_target.dnd_enter(source, event)
                self.target = new_target
    def on_release(self, event):
        # Button released over a target: commit the drop.
        self.finish(event, 1)
    def cancel(self, event=None):
        # Abort the sequence; dnd_commit() is never called.
        self.finish(event, 0)
    def finish(self, event, commit=0):
        """Tear down bindings and cursor, then notify target and source."""
        target = self.target
        source = self.source
        widget = self.initial_widget
        root = self.root
        try:
            del root.__dnd
            self.initial_widget.unbind(self.release_pattern)
            self.initial_widget.unbind("<Motion>")
            widget['cursor'] = self.save_cursor
            self.target = self.source = self.initial_widget = self.root = None
            if target is not None:
                if commit:
                    target.dnd_commit(source, event)
                else:
                    target.dnd_leave(source, event)
        finally:
            # The source always hears about the end, even if cleanup raised.
            source.dnd_end(target, event)
# ----------------------------------------------------------------------
# The rest is here for testing and demonstration purposes only!
class Icon:
    """Demo drag source: a Label shown as a window item on a Canvas."""
    def __init__(self, name):
        self.name = name
        self.canvas = self.label = self.id = None
    def attach(self, canvas, x=10, y=10):
        """Show the icon on *canvas* at (x, y), detaching from any old canvas."""
        if canvas is self.canvas:
            # Already on this canvas: just move the existing window item.
            self.canvas.coords(self.id, x, y)
            return
        if self.canvas is not None:
            self.detach()
        if canvas is None:
            return
        label = tkinter.Label(canvas, text=self.name,
                              borderwidth=2, relief="raised")
        id = canvas.create_window(x, y, window=label, anchor="nw")
        self.canvas = canvas
        self.label = label
        self.id = id
        label.bind("<ButtonPress>", self.press)
    def detach(self):
        """Remove the icon from its current canvas (no-op when unattached)."""
        canvas = self.canvas
        if canvas is None:
            return
        id = self.id
        label = self.label
        # Clear state before the Tk teardown so callbacks fired during
        # destruction see the icon as already detached.
        self.canvas = self.label = self.id = None
        canvas.delete(id)
        label.destroy()
    def press(self, event):
        """ButtonPress handler: start a drag-and-drop with this icon."""
        if dnd_start(self, event):
            # where the pointer is relative to the label widget:
            self.x_off = event.x
            self.y_off = event.y
            # where the widget is relative to the canvas:
            self.x_orig, self.y_orig = self.canvas.coords(self.id)
    def move(self, event):
        """Move the icon on its current canvas to follow the pointer."""
        x, y = self.where(self.canvas, event)
        self.canvas.coords(self.id, x, y)
    def putback(self):
        """Restore the position recorded when the drag started."""
        self.canvas.coords(self.id, self.x_orig, self.y_orig)
    def where(self, canvas, event):
        """Return the icon's (x, y) on *canvas* for the pointer in *event*."""
        # where the corner of the canvas is relative to the screen:
        x_org = canvas.winfo_rootx()
        y_org = canvas.winfo_rooty()
        # where the pointer is relative to the canvas widget:
        x = event.x_root - x_org
        y = event.y_root - y_org
        # compensate for initial pointer offset
        return x - self.x_off, y - self.y_off
    def dnd_end(self, target, event):
        # Required by the dnd protocol; this source needs no end-of-drag work.
        pass
class Tester:
    """Demo drop target: a Toplevel holding one Canvas that accepts icons."""
    def __init__(self, root):
        self.top = tkinter.Toplevel(root)
        self.canvas = tkinter.Canvas(self.top, width=100, height=100)
        self.canvas.pack(fill="both", expand=1)
        # Mark the canvas as a dnd target (protocol hook looked up by
        # DndHandler.on_motion while walking the widget tree).
        self.canvas.dnd_accept = self.dnd_accept
    def dnd_accept(self, source, event):
        # Always willing to accept; this Tester itself is the target object.
        return self
    def dnd_enter(self, source, event):
        """Draw an outline matching the dragged icon's bounding box."""
        self.canvas.focus_set() # Show highlight border
        x, y = source.where(self.canvas, event)
        x1, y1, x2, y2 = source.canvas.bbox(source.id)
        dx, dy = x2-x1, y2-y1
        self.dndid = self.canvas.create_rectangle(x, y, x+dx, y+dy)
        self.dnd_motion(source, event)
    def dnd_motion(self, source, event):
        """Move the outline rectangle to track the pointer."""
        x, y = source.where(self.canvas, event)
        x1, y1, x2, y2 = self.canvas.bbox(self.dndid)
        self.canvas.move(self.dndid, x-x1, y-y1)
    def dnd_leave(self, source, event):
        """Remove the outline when the drag leaves this canvas."""
        self.top.focus_set() # Hide highlight border
        self.canvas.delete(self.dndid)
        self.dndid = None
    def dnd_commit(self, source, event):
        """Drop: remove the outline and reattach the icon here."""
        self.dnd_leave(source, event)
        x, y = source.where(self.canvas, event)
        source.attach(self.canvas, x, y)
def test():
    """Demo: three icons draggable between three Tester canvases."""
    root = tkinter.Tk()
    root.geometry("+1+1")
    tkinter.Button(command=root.quit, text="Quit").pack()
    testers = []
    for geometry in ("+1+60", "+120+60", "+240+60"):
        tester = Tester(root)
        tester.top.geometry(geometry)
        testers.append(tester)
    for index, tester in enumerate(testers, start=1):
        Icon("ICON%d" % index).attach(tester.canvas)
    root.mainloop()
test()
| 35.827692 | 78 | 0.655531 |
acec641ce3f9d583822f4898c35f8491dfe065ab | 664 | py | Python | utils/config.py | kai13xd/ShihTzu-Bott | ebcf2dafb01cd3eb445d15385dfec2661cdd25c1 | [
"CC0-1.0"
] | null | null | null | utils/config.py | kai13xd/ShihTzu-Bott | ebcf2dafb01cd3eb445d15385dfec2661cdd25c1 | [
"CC0-1.0"
] | null | null | null | utils/config.py | kai13xd/ShihTzu-Bott | ebcf2dafb01cd3eb445d15385dfec2661cdd25c1 | [
"CC0-1.0"
] | null | null | null | import json
import boto3
import os
BUCKET_NAME = os.getenv('BUCKET_NAME')
def getconfig(filepath: str = 'ShihTzu/config.json') -> dict:
    """Download and parse the bot's JSON config from the S3 bucket.

    Re-raises the boto3 error when the object cannot be fetched: the
    original swallowed every exception and then crashed with a NameError
    on the undefined body variable (and printed 'Config loaded!' anyway).
    """
    s3 = boto3.client('s3')
    try:
        data = s3.get_object(Bucket=BUCKET_NAME, Key=filepath)
    except Exception:
        print("The filepath does not exist!")
        raise
    config = json.load(data['Body'])  # StreamingBody is file-like
    print('Config loaded!')
    return config
def saveconfig(config: dict, filepath: str = 'ShihTzu/config.json'):
    """Serialize *config* as pretty-printed JSON and upload it to S3.

    Re-raises the boto3 error on failure; the original swallowed it and
    then printed a (misspelled) success message regardless of outcome.
    """
    s3 = boto3.client('s3')
    try:
        s3.put_object(Body=json.dumps(config, indent=2),
                      Bucket=BUCKET_NAME, Key=filepath)
    except Exception:
        print('Failed to save to S3!')
        raise
    print('Saved Successfully!')
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.