import json, git, os, shutil
from . import db,config
from .errors import *
def exportSnapshot(inCaseName,inSnapshotName,inFile):
data = db.dump(inCaseName,inSnapshotName)
with open(inFile, 'w') as outfile:
json.dump(data, outfile)
def importSnapshot(inCaseName,inFile):
with open(inFile, 'r') as afile:
snapshot = json.loads(afile.read())
if snapshot["name"] in db.getSnapshotList(inCaseName):
raise SnapshotExists
db.addSnapshot(inCaseName,snapshot["name"],snapshot["path"],snapshot["date"])
db.setSnapshot(inCaseName,snapshot["name"])
    for snapfile in snapshot["files"]:  # renamed from "afile" to avoid shadowing the file handle above
        db.addFile(snapfile["name"],snapfile["path"],snapfile["size"],snapfile["hash"],snapfile["type"],snapfile["permissions"],snapfile["changeTime"],snapfile["modifiedTime"],snapfile["birthTime"])
        for aflag in snapfile["flags"]:
            db.addFlag(inCaseName,snapshot["name"],snapfile["path"],snapfile["name"],aflag["category"],aflag["key"],aflag["description"])
db.commit()
def upgrade():
try:
shutil.rmtree(config.home + "rules")
except FileNotFoundError:
pass
try:
shutil.rmtree(config.home + "jade")
except FileNotFoundError:
pass
git.Git(config.home).clone("https://github.com/tjr3xx/jade")
git.Git(config.home).clone("https://github.com/Yara-Rules/rules")
shutil.copyfile(config.home + "jade/docs/jade.yar", config.home + "rules/jade.yar")
try:
status = os.system("python3 " + config.home + "jade/setup.py install --user")
if status != 0:
os.system("python " + config.home + "jade/setup.py install --user")
    except Exception:
raise PermissionDenied
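# A minimal usage sketch (hypothetical case and snapshot names; assumes the
# case already exists in the database managed by this package):
#
#   exportSnapshot("case-001", "baseline", "/tmp/baseline.json")
#   importSnapshot("case-002", "/tmp/baseline.json")  # raises SnapshotExists on duplicates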
|
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
#
# This source code is licensed under the BSD license found in the
# LICENSE file in the root directory of this source tree.
# pylint: disable=missing-module-docstring
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
""" Test AdaScale with DDP. """
import tempfile
import numpy as np
import pytest
import torch
from torch import Tensor
import torch.distributed as dist
import torch.multiprocessing as mp
from torch.nn import Linear
from torch.nn.parallel import DistributedDataParallel as DDP
from torch.optim import SGD
from fairscale.optim import AdaScale
skip_if_single_gpu = pytest.mark.skipif(torch.cuda.device_count() < 2, reason="multiple GPUs are required")
def _dist_init(rank, world_size, tempfile_name, backend):
url = "file://" + tempfile_name
dist.init_process_group(init_method=url, backend=backend, rank=rank, world_size=world_size)
torch.cuda.set_device(rank)
def _test_basic_func(rank, world_size, tempfile_name):
_dist_init(rank, world_size, tempfile_name, backend="nccl") # Covers nccl
model = Linear(2, 2, bias=False)
model.to("cuda")
model = DDP(model, device_ids=[rank])
optim = AdaScale(SGD(model.parameters(), lr=0.1))
# iter 1
in_data = Tensor([0.0, 0.0])
in_data[rank] = 1.0
in_data = in_data.cuda()
out = model(in_data)
out.sum().backward()
assert np.allclose(optim.gain(), 2.0), optim.gain()
optim.step()
optim.zero_grad()
dist.destroy_process_group()
@skip_if_single_gpu
def test_basic():
"""Test adascale with DDP without gradient accumulation"""
world_size = 2
temp_file_name = tempfile.mkstemp()[1]
mp.spawn(_test_basic_func, args=(world_size, temp_file_name), nprocs=world_size, join=True)
def _test_grad_accum_func(rank, world_size, tempfile_name):
_dist_init(rank, world_size, tempfile_name, backend="gloo") # Covers gloo
model = Linear(4, 2, bias=False)
model.to("cuda")
model = DDP(model, device_ids=[rank])
optim = AdaScale(SGD(model.parameters(), lr=0.1), num_gradients_to_accumulate=2)
with model.no_sync():
# iter 1, input vectors are pointing dim0 and dim1
in_data = Tensor([0.0] * 4)
in_data[rank] = 1.0
in_data = in_data.cuda()
out = model(in_data)
out.sum().backward()
# iter 2, input vectors are pointing dim2 and dim3
in_data = Tensor([0.0] * 4)
in_data[rank + 2] = 1.0
in_data = in_data.cuda()
out = model(in_data)
out.sum().backward()
# since all inputs are orthogonal, the gain should be exactly 4.0.
assert np.allclose(optim.gain(), 4.0), optim.gain()
optim.step()
optim.zero_grad()
dist.destroy_process_group()
@skip_if_single_gpu
def test_grad_accum():
"""Test adascale with DDP + gradient accumulation using ddp.no_sync()"""
world_size = 2
temp_file_name = tempfile.mkstemp()[1]
mp.spawn(_test_grad_accum_func, args=(world_size, temp_file_name), nprocs=world_size, join=True)
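# Rough intuition for the gain values asserted above (this mirrors the idea,
# not fairscale's exact AdaScale estimator): with S orthogonal gradients of
# equal norm, E[|g_i|^2] stays fixed while |mean(g_i)|^2 shrinks by 1/S, so
# the ratio -- and hence the gain -- is S:
#
#   grads = np.eye(4)                                    # 4 orthogonal unit gradients
#   gain = np.mean((grads ** 2).sum(1)) / (grads.mean(0) ** 2).sum()
#   assert np.allclose(gain, 4.0)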
|
import pytest
import responses
import status
from django.urls import reverse
from apps.tickets.models import Purchase
pytestmark = pytest.mark.django_db
def test_event_list(admin_client, event):
url = reverse('tickets:event_list')
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert 'cart' in response.context
assert event in response.context['events']
def test_event_detail(admin_client, event):
url = reverse('tickets:event_detail', args=[event.id])
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert 'cart' in response.context
assert response.context['event'] == event
def test_cart_detail(admin_client):
url = reverse('tickets:cart_detail')
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert 'cart' in response.context
def test_cart_clear(admin_client, admin_user, cart_item):
url = reverse('tickets:cart_clear')
cart = cart_item.cart
assert cart.cart_items.count() == 1
response = admin_client.post(url, follow=True)
assert response.status_code == status.HTTP_200_OK
assert cart.cart_items.count() == 0
def test_cart_add_item(admin_client, cart, ticket):
url = reverse('tickets:cart_add_item')
assert cart.cart_items.count() == 0
response = admin_client.post(url, {'ticket': ticket.id, 'quantity': 1}, follow=True)
assert response.status_code == status.HTTP_200_OK
assert cart.cart_items.count() == 1
@responses.activate
def test_purchase_create(admin_client, cart_item, pagseguro_checkout_response):
responses.add(
responses.POST,
'https://ws.sandbox.pagseguro.uol.com.br/v2/checkout',
body=pagseguro_checkout_response,
status=200
)
url = reverse('tickets:purchase_create')
response = admin_client.post(url, follow=True)
assert response.status_code == status.HTTP_200_OK
purchase = Purchase.objects.filter(user=cart_item.cart.user).first()
assert purchase.status == 'pending'
assert purchase.pagseguro_redirect_url
def test_purchase_list(admin_client, purchase):
url = reverse('tickets:purchase_list')
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert purchase in response.context['purchases']
def test_purchase_detail(admin_client, purchase):
url = reverse('tickets:purchase_detail', args=[purchase.id])
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert response.context['purchase'] == purchase
|
from app.admin2.views import admin # noqa
|
from django.conf.urls import url
from . import views
app_name = 'tools'
urlpatterns = [
url(r'^$', views.overview, name='overview'),
url(r'^create/$', views.create_tool, name='create'),
url(r'^delete/(\d+)/$', views.delete_tool, name='delete'),
url(r'^edit/(\d+)/$', views.edit_tool, name='edit'),
url(r'^download/(\d+)/$', views.download_tool, name='download'),
]
|
import numpy as np
from imageio import imread
from pathlib import Path
from typing import List, Optional, Tuple, Union
from napari_imc.io.base import FileReaderBase, ImageDimensions
from napari_imc.models import IMCFileModel, IMCFileAcquisitionModel, IMCFilePanoramaModel
try:
import zarr
except ImportError:
zarr = None
class ImaxtFileReader(FileReaderBase):
def __init__(self, path: Union[str, Path]):
super(ImaxtFileReader, self).__init__(self._get_zarr_path(path))
self._zarr_group: Optional['zarr.hierarchy.Group'] = None
def _get_imc_file_panoramas(self, imc_file: IMCFileModel) -> List[IMCFilePanoramaModel]:
return [
IMCFilePanoramaModel(imc_file, panorama['id'], panorama['type'], panorama['description'])
for panorama in self._zarr_group.attrs['meta']['panoramas'] if panorama['type'] != 'Default'
]
def _get_imc_file_acquisitions(self, imc_file: IMCFileModel) -> List[IMCFileAcquisitionModel]:
return [
IMCFileAcquisitionModel(imc_file, acquisition['id'], acquisition['description'], [
channel['target'] for channel in acquisition['channels']
])
for acquisition in self._zarr_group.attrs['meta']['acquisitions']
]
def read_panorama(self, panorama_id: int) -> Tuple[ImageDimensions, np.ndarray]:
panorama = next(filter(lambda x: x['id'] == panorama_id, self._zarr_group.attrs['meta']['panoramas']))
data = imread(self._path / Path(panorama['file']))
xs_physical = [x for x, y in panorama['slide_pos_um']]
ys_physical = [y for x, y in panorama['slide_pos_um']]
x_physical, y_physical = min(xs_physical), min(ys_physical)
w_physical, h_physical = max(xs_physical) - x_physical, max(ys_physical) - y_physical
if x_physical != xs_physical[0]:
data = data[:, ::-1, :]
if y_physical != ys_physical[0]:
data = data[::-1, :, :]
return (x_physical, y_physical, w_physical, h_physical), data
def read_acquisition(self, acquisition_id: int, channel_label: str) -> Tuple[ImageDimensions, np.ndarray]:
acquisition = next(filter(lambda x: x['id'] == acquisition_id, self._zarr_group.attrs['meta']['acquisitions']))
channel_index = [channel['target'] for channel in acquisition['channels']].index(channel_label)
xs_physical = [acquisition['roi_start_pos_um'][0] / 1000, acquisition['roi_end_pos_um'][0]]
ys_physical = [acquisition['roi_start_pos_um'][1] / 1000, acquisition['roi_end_pos_um'][1]]
x_physical, y_physical = min(xs_physical), min(ys_physical)
w_physical, h_physical = max(xs_physical) - x_physical, max(ys_physical) - y_physical
acquisition_group = self._zarr_group[acquisition['group']]
channel_index = list(acquisition_group['channel']).index(channel_index)
data = acquisition_group[acquisition['group']][channel_index]
if x_physical != xs_physical[0]:
data = data[:, ::-1]
if y_physical != ys_physical[0]:
data = data[::-1, :]
return (x_physical, y_physical, w_physical, h_physical), data
def __enter__(self) -> 'FileReaderBase':
self._zarr_group = zarr.open_group(str(self._path), mode='r')
return self
def __exit__(self, exc_type, exc_val, exc_tb):
pass
@classmethod
def accepts(cls, path: Union[str, Path]) -> bool:
if zarr:
try:
with zarr.open_group(str(cls._get_zarr_path(path)), mode='r') as zarr_group:
meta = zarr_group.attrs.get('meta')
if meta is not None:
return meta.get('scan_type') == 'IMC'
            except Exception:
pass # ignored intentionally
return False
@staticmethod
def _get_zarr_path(path: Union[str, Path]) -> Path:
path = Path(path)
if path.is_file() and path.name == 'mcd_schema.xml':
path = path.parent
return path
|
import tensorflow as tf
class MaskedACC(tf.keras.metrics.Metric):
    """Accuracy over the unmasked positions of a [batch, seq, 2] target,
    where channel 0 holds the labels and channel 1 the mask."""
    def __init__(self, name="Accuracy", **kwargs):
        super(MaskedACC, self).__init__(name=name, **kwargs)
        # Metric state must live in a variable so it works under graph tracing.
        self.acc_value = self.add_weight(name="masked_acc", initializer="zeros")
    def update_state(self, y_true, y_pred, sample_weight=None):
        real = tf.slice(y_true, [0, 0, 0], [-1, -1, 1])
        mask = tf.slice(y_true, [0, 0, 1], [-1, -1, 1])
        real = tf.cast(real, dtype=tf.int64)
        mask = tf.cast(mask, dtype=tf.bool)
        real = tf.squeeze(real)
        mask = tf.squeeze(mask)
        accuracies = tf.equal(real, tf.argmax(y_pred, axis=2))
        accuracies = tf.math.logical_and(mask, accuracies)
        accuracies = tf.cast(accuracies, dtype=tf.float32)
        mask = tf.cast(mask, dtype=tf.float32)
        # Stores the accuracy of the latest batch (not a running average),
        # matching the original behaviour.
        self.acc_value.assign(tf.reduce_sum(accuracies) / tf.reduce_sum(mask))
    def result(self):
        return self.acc_value
    def reset_state(self):
        self.acc_value.assign(0.0)
class CustomAccuracy(tf.keras.metrics.Metric):
    """Categorical accuracy for one-hot targets of shape [batch, classes]."""
    def __init__(self, name="Accuracy", **kwargs):
        super(CustomAccuracy, self).__init__(name=name, **kwargs)
        self.acc_value = self.add_weight(name="acc", initializer="zeros")
    def update_state(self, y_true, y_pred, sample_weight=None):
        y_pred_labels = tf.argmax(y_pred, axis=1)
        y_real_labels = tf.argmax(y_true, axis=1)
        accuracies = tf.equal(y_real_labels, y_pred_labels)
        accuracies = tf.cast(accuracies, dtype=tf.float32)
        self.acc_value.assign(tf.reduce_mean(accuracies))
    def result(self):
        return self.acc_value
    def reset_state(self):
        self.acc_value.assign(0.0)
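# A quick smoke test of the masked metric on toy tensors (labels in channel 0,
# mask in channel 1 of y_true; hypothetical shapes):
#
#   y_true = tf.constant([[[1., 1.], [0., 0.]]])      # [batch=1, seq=2, 2]
#   y_pred = tf.constant([[[0.1, 0.9], [0.9, 0.1]]])
#   m = MaskedACC()
#   m.update_state(y_true, y_pred)
#   print(float(m.result()))  # 1.0 -- only the unmasked position counts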
|
# -*- coding: utf-8 -*-
"""Version-check script."""
import os
import sys
import codecs
from art.art_param import *
Failed = 0
VERSION = "5.2"
README_ITEMS = ['<td align="center">{0}</td>'.format(str(FONT_COUNTER)),
'<img src="https://img.shields.io/badge/Art List-{0}-orange.svg">'.format(str(ART_COUNTER)),
'<img src="https://img.shields.io/badge/Font List-{0}-blue.svg">'.format(str(FONT_COUNTER)),
'<td align="center">{0}</td>'.format(str(ART_COUNTER)),
'<td align="center">{0}</td>'.format(str(DECORATION_COUNTER)),
'<img src="https://img.shields.io/badge/Decor List-{0}-green.svg">'.format(str(DECORATION_COUNTER))]
SETUP_ITEMS = [
"version='{0}'"]
INSTALL_ITEMS = [
"[Version {0}](https://github.com/sepandhaghighi/art/archive/v{0}.zip)",
"pip install art=={0}",
"pip3 install art=={0}",
'easy_install "art=={0}"']
CHANGELOG_ITEMS = [
"## [{0}]",
"https://github.com/sepandhaghighi/art/compare/v{0}...dev",
"[{0}]:"]
ART_LIST_ITEMS = ["### Version : {0}"]
FONT_LIST_ITEMS = ["### Version : {0}"]
PARAMS_ITEMS = ['ART_VERSION = "{0}"']
FILES = {
"setup.py": SETUP_ITEMS,
"INSTALL.md": INSTALL_ITEMS,
"CHANGELOG.md": CHANGELOG_ITEMS,
"FontList.ipynb": FONT_LIST_ITEMS,
"ArtList.ipynb": ART_LIST_ITEMS,
os.path.join(
"art",
"art_param.py"): PARAMS_ITEMS}
TEST_NUMBER = len(FILES.keys()) + 1
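# The "+ 1" accounts for the separate README.md counter check performed below.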
def print_result(failed=False):
"""
Print final result.
:param failed: failed flag
:type failed: bool
:return: None
"""
message = "Version/Counter tag tests "
if not failed:
print("\n" + message + "passed!")
else:
print("\n" + message + "failed!")
print("Passed : " + str(TEST_NUMBER - Failed) + "/" + str(TEST_NUMBER))
if __name__ == "__main__":
for file_name in FILES.keys():
try:
file_content = codecs.open(
file_name, "r", "utf-8", "ignore").read()
for test_item in FILES[file_name]:
if file_content.find(test_item.format(VERSION)) == -1:
print("Incorrect version tag in " + file_name)
Failed += 1
break
except Exception as e:
Failed += 1
print("Error in " + file_name + "\n" + "Message : " + str(e))
try:
readme_file_content = codecs.open(
"README.md", "r", "utf-8", "ignore").read()
for test_item in README_ITEMS:
if readme_file_content.find(
test_item) == -1:
print("Incorrect counter in " + "README.md")
Failed += 1
break
except Exception as e:
Failed += 1
print("Error in README.md" + "\n" + "Message : " + str(e))
if Failed == 0:
print_result(False)
sys.exit(0)
else:
print_result(True)
sys.exit(1)
|
def language_page(lang):
return "https://www.opensubtitles.org/de/search/sublanguageid-{}".format(lang)
def movie_page(movie):
return "https://www.opensubtitles.org/en/subtitles/{}".format(movie)
|
# Copyright 2022 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The actual logic of a agent."""
import functools
from typing import Tuple
import chex
import haiku as hk
import jax
from ml_collections import config_dict
from emergent_communication_at_scale import types
from emergent_communication_at_scale.losses import listeners as listener_losses
from emergent_communication_at_scale.losses import speakers as speaker_losses
from emergent_communication_at_scale.networks import listeners
from emergent_communication_at_scale.networks import speakers
class SpeakerListenerGame:
"""Plays a speaker/listener game with multi symbol."""
def __init__(self, config: config_dict.ConfigDict) -> None:
# Prepares constructor.
speaker = functools.partial(speakers.Speaker, **config.speaker)
listener = functools.partial(listeners.Listener, **config.listener)
# hk.transform requires lambda to be built a posteriori in a pmap
# pylint: disable=unnecessary-lambda
# pylint: disable=g-long-lambda
self._speaker = hk.transform_with_state(
lambda games, training_mode, actions_to_follow=None: speaker()(
games=games,
training_mode=training_mode,
actions_to_follow=actions_to_follow,
))
self._listener = hk.transform_with_state(
lambda games, training_mode: listener()(
games=games,
training_mode=training_mode,
))
# pylint: enable=unnecessary-lambda
# pylint: enable=g-long-lambda
if config.loss.get('speaker', False):
self._speaker_loss = speaker_losses.speaker_loss_factory(
**config.loss.speaker)
else:
# We do not have speaker loss in EOL
self._speaker_loss = None
self._listener_loss = listener_losses.listener_loss_factory(
**config.loss.listener)
self._config = config
@property
def speaker(self):
return self._speaker
@property
def listener_loss(self):
return self._listener_loss
def init(
self,
rng: chex.PRNGKey,
init_games: types.GamesInputs,
training_mode: types.TrainingMode,
) -> Tuple[types.Params, types.States]:
"""Returns speaker and listener params."""
games = types.Games(
speaker_inp=init_games.speaker_inp, labels=init_games.labels)
speaker_rng, target_speaker_rng, listener_rng = jax.random.split(rng, 3)
params_speaker, states_speaker = self._speaker.init(
speaker_rng,
games=games,
training_mode=training_mode,
)
speaker_outputs, _ = self._speaker.apply(
params_speaker,
states_speaker,
speaker_rng,
games=games,
training_mode=training_mode,
)
target_params_speaker, target_states_speaker = self._speaker.init(
target_speaker_rng,
games=games,
training_mode=types.TrainingMode.FORCING,
actions_to_follow=speaker_outputs.action,
)
games = games._replace(speaker_outputs=speaker_outputs)
params_listener, state_listener = self._listener.init(
listener_rng,
games=games,
training_mode=training_mode,
)
joint_states = types.States(
speaker=states_speaker,
listener=state_listener,
target_speaker=target_states_speaker)
joint_params = types.Params(
speaker=params_speaker,
listener=params_listener,
target_speaker=target_params_speaker,
)
return joint_params, joint_states
def unroll(
self,
params: types.Params,
states: types.States,
rng: chex.PRNGKey,
games: types.GamesInputs,
training_mode: types.TrainingMode,
) -> types.Games:
"""Unrolls the game for the forward pass."""
# Prepares output.
speaker_rng, listener_rng, rng = jax.random.split(rng, 3)
games = types.Games(speaker_inp=games.speaker_inp, labels=games.labels)
    # Step 1: Speaker play.
speaker_outputs, _ = self._speaker.apply(
params.speaker,
states.speaker,
speaker_rng,
games=games,
training_mode=training_mode,
)
target_speaker_outputs, _ = self._speaker.apply(
params.target_speaker,
states.target_speaker,
speaker_rng,
games=games,
training_mode=types.TrainingMode.FORCING,
actions_to_follow=speaker_outputs.action,
)
games = games._replace(
speaker_outputs=speaker_outputs,
target_speaker_outputs=target_speaker_outputs,
)
    # Step 2: Listener play.
listener_outputs, _ = self._listener.apply(
params.listener,
states.listener,
listener_rng,
games=games,
training_mode=training_mode,
)
games = games._replace(listener_outputs=listener_outputs)
return games
def compute_loss(
self,
games: types.Games,
rng: chex.PRNGKey,
) -> types.AgentLossOutputs:
"""Computes Listener and Speaker losses."""
# Computes listener loss and stats.
listener_loss_outputs = self._listener_loss.compute_listener_loss(
games=games,
rng=rng,
)
loss = listener_loss_outputs.loss
stats = listener_loss_outputs.stats
    # Computes speaker loss and stats (if necessary).
if self._speaker_loss is not None:
speaker_loss_outputs = self._speaker_loss.compute_speaker_loss(
games=games,
reward=listener_loss_outputs.reward,
)
loss += speaker_loss_outputs.loss
stats.update(speaker_loss_outputs.stats)
return types.AgentLossOutputs(
loss=loss,
probs=listener_loss_outputs.probs,
stats=stats,
)
|
emp1 = [8, 24]
emp2 = [8, 12]
totalhours = [0, 24]
filledhours = [0, 0]
|
from datetime import datetime
from typing import Dict
import attr
import consts
import tags
@attr.s()
class ModelMeta:
id = attr.ib(validator=attr.validators.instance_of(str))
title = attr.ib(validator=attr.validators.instance_of(str))
author = attr.ib(validator=attr.validators.instance_of(str))
summary = attr.ib()
github_link = attr.ib(
validator=attr.validators.instance_of(str)
) # TODO change to URL validator
tags = attr.ib()
update_time = attr.ib(
default=None,
init=False,
validator=attr.validators.optional(attr.validators.instance_of(datetime)),
)
@summary.validator
    def summary_validator(self, attribute, value):
if not isinstance(value, dict):
raise ValueError(
"summary must be a mapping from language to text, not {}".format(
type(value)
)
)
for k, v in value.items():
if k not in consts.ALL_LANGUAGES:
raise ValueError("summary key must be a supported language")
if not isinstance(v, str):
raise ValueError("summary value must be text")
@tags.validator
    def tags_validator(self, attribute, value):
if not isinstance(value, list):
raise ValueError("tags must be a list")
supported_tags = set(map(lambda t: t.id, tags.TAGS))
for t in value:
if t not in supported_tags:
raise ValueError(
"tag '{}' is not in tag list, please update list in tags.py".format(
t
)
)
def to_jsondict(self):
return {
"id": self.id,
"title": self.title,
"author": self.author,
"summary": self.summary,
"githubLink": self.github_link,
"tags": self.tags,
"updateTime": self.update_time,
}
@attr.s(auto_attribs=True)
class ModelContent:
sample_code: str
full_text: Dict[str, str]
def to_jsondict(self):
return {
"sampleCode": self.sample_code,
"fullText": self.full_text,
}
@attr.s(auto_attribs=True)
class Model:
meta: ModelMeta
content: ModelContent
def to_jsondict(self):
return {"meta": self.meta.to_jsondict(), "content": self.content.to_jsondict()}
|
from collections import defaultdict
from contextlib import ExitStack
from datetime import date, datetime
from unittest.mock import Mock, patch
import pytest
import pytz
from todoist.models import Item, Project
from synctogit.service import ServiceAPIError, ServiceTokenExpiredError
from synctogit.todoist import models
from synctogit.todoist.todoist import Todoist
def ultimate_defaultdict():
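    """A recursively nested defaultdict, so state paths like
    state["user"]["tz_info"]["timezone"] can be assigned without KeyErrors."""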
return defaultdict(ultimate_defaultdict)
@pytest.fixture
def todoist_user_timezone_name():
return "Europe/Moscow"
@pytest.fixture
def todoist_user_timezone(todoist_user_timezone_name):
return pytz.timezone(todoist_user_timezone_name)
@pytest.fixture
def todoist(todoist_user_timezone_name):
with patch("synctogit.todoist.todoist.Todoist._create_api", Mock()):
todoist = Todoist("", None)
todoist.api.state = ultimate_defaultdict()
todoist.api.state["user"]["tz_info"]["timezone"] = todoist_user_timezone_name
return todoist
@pytest.mark.parametrize(
"date_raw, date_expected",
[
("2017-09-02T14:12:53+01:00", (2017, 9, 2, 16, 12, 53)),
("2017-10-22T14:12:53Z", (2017, 10, 22, 17, 12, 53)),
],
)
def test_parse_datetime(todoist, date_raw, date_expected, todoist_user_timezone):
datetime_expected = todoist_user_timezone.localize(datetime(*date_expected))
assert todoist._parse_datetime(date_raw) == datetime_expected
def test_get_projects(todoist):
todoist.api.state["projects"] = [
Project(
{
"collapsed": 0,
"color": 48,
"has_more_notes": False,
"id": 1111,
"inbox_project": True,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 0,
"item_order": 0,
"name": "Inbox",
"parent_id": None,
"shared": False,
},
None,
),
Project(
{
"collapsed": 0,
"color": 35,
"has_more_notes": False,
"id": 23167531,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 1,
"item_order": 4,
"name": "Green Favorite Проект",
"parent_id": None,
"shared": False,
},
None,
),
Project(
{
"collapsed": 0,
"color": 48,
"has_more_notes": False,
"id": 333333,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 0,
"item_order": 1,
"name": "Grey root project",
"parent_id": None,
"shared": False,
},
None,
),
Project(
{
"collapsed": 0,
"color": 48,
"has_more_notes": False,
"id": 555555,
"indent": 2,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 0,
"item_order": 2,
"name": "Grey child project",
"parent_id": 333333,
"shared": False,
},
None,
),
Project(
{
"collapsed": 0,
"color": 48,
"has_more_notes": False,
"id": 66666,
"indent": 3,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 0,
"item_order": 3,
"name": "Grey subchild project",
"parent_id": 555555,
"shared": False,
},
None,
),
Project(
{
"shared": False,
"is_deleted": 0,
"parent_id": None,
"name": "New API",
"id": 99999922222,
"is_archived": 0,
"is_favorite": 0,
"collapsed": 0,
"child_order": 10,
"color": 32,
},
None,
),
]
expected_projects = [
models.TodoistProject(
id=1111,
color="rgb(184, 184, 184)",
is_favorite=False,
is_inbox=True,
name="Inbox",
subprojects=[],
),
models.TodoistProject(
id=333333,
color="rgb(184, 184, 184)",
is_favorite=False,
is_inbox=False,
name="Grey root project",
subprojects=[
models.TodoistProject(
id=555555,
color="rgb(184, 184, 184)",
is_favorite=False,
is_inbox=False,
name="Grey child project",
subprojects=[
models.TodoistProject(
id=66666,
color="rgb(184, 184, 184)",
is_favorite=False,
is_inbox=False,
name="Grey subchild project",
subprojects=[],
)
],
)
],
),
models.TodoistProject(
id=23167531,
color="rgb(126, 204, 73)",
is_favorite=True,
is_inbox=False,
name="Green Favorite Проект",
subprojects=[],
),
models.TodoistProject(
id=99999922222,
color="rgb(255, 153, 51)",
is_favorite=False,
is_inbox=False,
name="New API",
subprojects=[],
),
]
assert todoist.get_projects() == expected_projects
@pytest.mark.parametrize("project_extra", [dict(is_archived=1), dict(is_deleted=1)])
def test_hidden_projects(todoist, project_extra):
project = {
"collapsed": 0,
"color": 35,
"has_more_notes": False,
"is_archived": 0,
"is_deleted": 0,
"is_favorite": 1,
"shared": False,
}
todoist.api.state["projects"] = [
Project(
{
"id": 1234,
"indent": 1,
"item_order": 4,
"name": "DELETED PROJECT",
"parent_id": None,
**project,
**project_extra,
},
None,
),
Project(
{
"id": 2222,
"indent": 2,
"item_order": 5,
"name": "CHILD PROJECT",
"parent_id": 1234,
**project,
},
None,
),
]
expected_projects = []
assert todoist.get_projects() == expected_projects
def test_get_todo_items(todoist, todoist_user_timezone):
todoist.api.state["items"] = [
Item(
{
"all_day": True,
"assigned_by_uid": 999,
"checked": 0,
"collapsed": 0,
"content": "Child",
"date_added": "2018-09-25T22:42:56Z",
"date_completed": None,
"date_lang": "en",
"date_string": "28 Sep",
"day_order": 6,
"due_date_utc": None,
"has_more_notes": False,
"id": 3456,
"in_history": 0,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"item_order": 822,
"labels": [],
"parent_id": 2345,
"priority": 1,
"project_id": 999,
"responsible_uid": None,
"sync_id": None,
"user_id": 999,
},
None,
),
Item(
{
"all_day": True,
"assigned_by_uid": 999,
"checked": 0,
"collapsed": 0,
"content": "Subchild",
"date_added": "2018-09-25T22:42:56Z",
"date_completed": None,
"date_lang": "en",
"date_string": "28 Sep",
"day_order": 10,
"due_date_utc": None,
"has_more_notes": False,
"id": 4567,
"in_history": 0,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"item_order": 1822,
"labels": [],
"parent_id": 3456,
"priority": 1,
"project_id": 999,
"responsible_uid": None,
"sync_id": None,
"user_id": 999,
},
None,
),
Item(
{
"all_day": False,
"assigned_by_uid": 999,
"checked": 0,
"collapsed": 0,
"content": "Root",
"date_added": "2018-09-25T22:42:56Z",
"date_completed": None,
"date_lang": "en",
"date_string": "28 Sep",
"day_order": 6,
"due": {
"date": "2018-09-29T13:00:00Z",
"is_recurring": False,
"lang": "en",
"string": "29 Sep 16:00",
"timezone": None,
},
"due_date_utc": None,
"has_more_notes": False,
"id": 2345,
"in_history": 0,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"item_order": 821,
"labels": [],
"parent_id": None,
"priority": 4,
"project_id": 999,
"responsible_uid": None,
"sync_id": None,
"user_id": 999,
},
None,
),
Item(
{
"all_day": True,
"assigned_by_uid": 999,
"checked": 0,
"collapsed": 0,
"content": "All day Привет",
"date_added": "2018-09-25T22:42:56Z",
"date_completed": None,
"date_lang": "en",
"date_string": "28 Sep",
"day_order": 6,
"due_date_utc": None,
"has_more_notes": False,
"id": 1234,
"in_history": 0,
"indent": 1,
"is_archived": 0,
"is_deleted": 0,
"item_order": 820,
"labels": [],
"parent_id": None,
"priority": 1,
"project_id": 999,
"responsible_uid": None,
"sync_id": None,
"user_id": 999,
},
None,
),
]
expected_items = {
999: [
models.TodoistTodoItem(
id=1234,
all_day=True,
content="All day Привет",
added_datetime=todoist_user_timezone.localize(
datetime(2018, 9, 26, 1, 42, 56)
),
due_date=None,
due_datetime=None,
priority=models.TodoistItemPriority.p1,
subitems=[],
),
models.TodoistTodoItem(
id=2345,
all_day=False,
content="Root",
added_datetime=todoist_user_timezone.localize(
datetime(2018, 9, 26, 1, 42, 56)
),
due_date=date(2018, 9, 29),
due_datetime=todoist_user_timezone.localize(
datetime(2018, 9, 29, 16, 0, 0)
),
priority=models.TodoistItemPriority.p4,
subitems=[
models.TodoistTodoItem(
id=3456,
all_day=True,
content="Child",
added_datetime=todoist_user_timezone.localize(
datetime(2018, 9, 26, 1, 42, 56)
),
due_date=None,
due_datetime=None,
priority=models.TodoistItemPriority.p1,
subitems=[
models.TodoistTodoItem(
id=4567,
all_day=True,
content="Subchild",
added_datetime=todoist_user_timezone.localize(
datetime(2018, 9, 26, 1, 42, 56)
),
due_date=None,
due_datetime=None,
priority=models.TodoistItemPriority.p1,
subitems=[],
)
],
)
],
),
]
}
assert todoist.get_todo_items() == expected_items
@pytest.mark.parametrize(
"item_extra", [dict(is_archived=1), dict(is_deleted=1), dict(checked=1)]
)
def test_hidden_todo_items(todoist, item_extra):
item = {
"all_day": True,
"assigned_by_uid": 999,
"checked": 0,
"collapsed": 0,
"date_added": "2018-09-25T22:42:56Z",
"date_completed": None,
"date_lang": "en",
"date_string": "28 Sep",
"day_order": 6,
"due_date_utc": None,
"has_more_notes": False,
"in_history": 0,
"is_archived": 0,
"is_deleted": 0,
"labels": [],
"priority": 1,
"project_id": 999,
"responsible_uid": None,
"sync_id": None,
"user_id": 999,
}
todoist.api.state["items"] = [
Item(
{
"content": "All day Привет",
"id": 1234,
"indent": 1,
"item_order": 820,
"parent_id": None,
**item,
**item_extra,
},
None,
),
Item(
{
"content": "Non-hidden child",
"id": 2234,
"indent": 2,
"item_order": 821,
"parent_id": 1234,
**item,
},
None,
),
]
expected_items = {}
assert todoist.get_todo_items() == expected_items
@pytest.mark.parametrize(
"todo_item, due_date, due_datetime",
[
({}, None, None),
(
{
"due_date_utc": None,
"due": {
"date": "2018-09-29T13:00:00Z", # fake
"is_recurring": False,
"lang": "en",
"string": "29 Sep 16:00",
"timezone": None,
},
},
date(2018, 9, 29),
datetime(2018, 9, 29, 16, 0, 0),
),
(
{
"due_date_utc": None,
"due": {
"date": "2018-09-29T16:00:00", # actual
"is_recurring": False,
"lang": "en",
"string": "29 Sep 16:00",
"timezone": None,
},
},
date(2018, 9, 29),
datetime(2018, 9, 29, 16, 0, 0),
),
(
{
"due_date_utc": None,
"due": {
"date": "2018-09-29T16:00:00+03:00", # fake
"is_recurring": False,
"lang": "en",
"string": "29 Sep 16:00",
"timezone": None,
},
},
date(2018, 9, 29),
datetime(2018, 9, 29, 16, 0, 0),
),
(
{
"due_date_utc": None,
"due": {
"date": "2018-09-29",
"string": "29 Sep",
"is_recurring": False,
"lang": "en",
"timezone": None,
},
},
date(2018, 9, 29),
None,
),
(
{
"date_string": None,
"due": {
"lang": "en",
"string": "30 Sep",
"timezone": None,
"is_recurring": False,
"date": "2018-09-30",
},
"due_date_utc": None,
},
date(2018, 9, 30),
None,
),
],
)
def test_parse_due_time(
todoist, todo_item, due_date, due_datetime, todoist_user_timezone
):
if due_datetime is not None:
due_datetime = todoist_user_timezone.localize(due_datetime)
assert todoist._parse_due_date_time(todo_item) == (due_date, due_datetime)
@pytest.mark.parametrize(
"todo_item",
# fmt: off
[
{"due_date_utc": "2018-09-29T16:00:00"}, # must be None
],
# fmt: on
)
def test_parse_due_time_raises(todoist, todo_item):
with pytest.raises(AssertionError):
todoist._parse_due_date_time(todo_item)
@pytest.mark.parametrize(
"exc, response",
[
(ServiceAPIError, "Error!"),
(
ServiceTokenExpiredError,
{
"error_extra": {"retry_after": 3, "access_type": "access_token"},
"http_code": 403,
"error_code": 401,
"error_tag": "AUTH_INVALID_TOKEN",
"error": "Invalid token",
},
),
(
None,
{
"notes": [],
"projects": [],
"sync_token": "ZZZZ",
"items": [],
"filters": [],
},
),
],
)
def test_sync_error_processing(todoist, exc, response):
with patch.object(todoist.api, "sync", return_value=response):
with ExitStack() as stack:
if exc is not None:
stack.enter_context(pytest.raises(exc))
todoist.sync()
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from .agent import DDPG
from .amc_env import AMCEnv
|
""" Handles scheduling of data and news updates requested from the front end.
Attributes:
log (Logger): The logger for the covid_dashboard.
scheduler (scheduler): The sched scheduler object.
scheduled_events (list): A list of dictionaries of scheduled events.
"""
import sched
import time
import logging
from datetime import timedelta, datetime
from utils import time_until, get_settings
from covid_data_handler import get_covid_data
from covid_news_handling import update_news
scheduler = sched.scheduler(time.time, time.sleep)
scheduled_events: list = []
log = logging.getLogger("covid_dashboard")
def schedule_event(
target_time: timedelta,
label: str,
repeat: bool,
data: bool,
news: bool,
new: bool = True,
) -> None:
"""Schedule a new event to occur at a given time.
Args:
target_time (timedelta): The target time the scheduled event should run, as a timedelta.
label (str): The name of the scheduled event.
repeat (bool): If the scheduled event should repeat in 24 hours or not.
        data (bool): If the scheduled event should update the dashboard's COVID data.
        news (bool): If the scheduled event should update the dashboard's news data.
new (bool, optional): If the event is new (has just been added). Defaults to True.
"""
log.info( # pylint: disable=logging-fstring-interpolation
f"Scheduling {'new' if new else 'repeating'} event {label} at"
f" {target_time} with {repeat = } for {data = } and {news = }"
)
label_exists = any(event["title"] == label for event in scheduled_events)
if not label_exists:
time_delta = time_until(target_time)
sched_event = scheduler.enter(
time_delta.seconds, 1, call_event, (label, target_time, repeat, data, news)
)
new_event = {
"title": label,
"target_time": target_time,
"repeat": repeat,
"data": data,
"news": news,
"sched_event": sched_event,
"new": new,
}
# Insert in time order
if not scheduled_events:
log.info("Appending %s to scheduled_events", label)
scheduled_events.append(new_event)
else:
for index, event in enumerate(scheduled_events):
if time_until(target_time) < time_until(event["target_time"]):
scheduled_events.insert(index, new_event)
break
if index == len(scheduled_events) - 1:
log.info("Appending %s to scheduled_events", label)
scheduled_events.append(new_event)
break
else:
log.warning("Scheduled update with label = %s already exists", label)
def call_event(
label: str, target_time: timedelta, repeat: bool, data: bool, news: bool
) -> None:
"""Called when the time comes for a scheduled event to run. Utilises the necessary COVID and
news function and deals with repeated events.
Args:
label (str): Label of event to be run.
target_time (timedelta): The time that the event should run, as a timedelta.
repeat (bool): If the event should repeat in 24 hours or not.
data (bool): If the event should update the COVID data.
news (bool): If the event should update the news.
"""
log.info( # pylint: disable=logging-fstring-interpolation
f"Running scheduled event {label} with {repeat = } for {data = } and {news = }"
)
log.info(
"This event was scheduled to run at %s, it is currently %s",
target_time,
datetime.now(),
)
remove_event(label)
if data:
location, nation = get_settings( # pylint: disable=unbalanced-tuple-unpacking
"location", "nation"
)
get_covid_data(location, nation, force_update=True)
if news:
update_news()
if repeat:
log.info("Repeating event for %s", target_time)
schedule_event(target_time, label, repeat, data, news, new=False)
def remove_event(title: str) -> None:
"""Removes an event from the scheduler.
Args:
title (str): The title of the event to be removed
"""
log.info("Removing event %s", title)
global scheduled_events
scheduled_events[:] = [
event for event in scheduled_events if event["title"] != title
]
def keep_alive() -> None:
"""This function does nothing but print a smiley face to the console every 10 seconds. If you
    are thinking that this is completely pointless, you are correct. However, without this function,
    the whole scheduling system breaks in the most spectacular ways. Indeed, even removing the
    print statement alone stops the scheduler from working at all. It's highly likely that it does
    not need to be a smiley face that is printed to the console in order for this to work, but at
    this point I am simply too scared to even begin to modify this function.
"""
scheduler.enter(10, 1, keep_alive)
#################
# DO NOT REMOVE #
#################
print(":)")
# This is terrible, but sched has forced my hand.
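# A minimal usage sketch (commented out so nothing runs on import; assumes
# utils.time_until accepts a timedelta target, as schedule_event does above):
#
#   schedule_event(timedelta(hours=10), "morning-refresh", repeat=True, data=True, news=False)
#   keep_alive()
#   scheduler.run(blocking=True)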
|
'''This program provides functions used in census_geocoder.py'''
import urllib.request, urllib.parse, urllib.error
import ssl
import json
import numpy as np
import pandas as pd
def ignore_ssl():
'''Returns context to ignore SSL certificate errors'''
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
return ctx
def open_connection(df, i, baseurl, parameters):
'''Opens connection to Census Geocoder for address corresponding
to the given index (i) in the given dataframe (df)'''
parameters['street'] = df['addressline1'][i]
parameters['city'] = df['city'][i]
parameters['state'] = df['state'][i]
parameters['zipcode'] = df['zipcode'][i]
url = baseurl + urllib.parse.urlencode(parameters)
try:
connection = urllib.request.urlopen(url, context = ignore_ssl())
return connection
    except Exception:
return None
def retrieve_geodata(connection):
'''Uses Census Geocoder connection to return a dictionary with
matched address, latitude, longitude, census tract code, and county code
(returns empty dictionary if there is no match data)'''
try:
datastring = connection.read().decode()
js = json.loads(datastring)
geodata = {}
if len(js['result']['addressMatches']) == 0:
return geodata
geodata['matchedaddress'] = js['result']['addressMatches'][0]['matchedAddress']
geodata['longitude'] = js['result']['addressMatches'][0]['coordinates']['x']
geodata['latitude'] = js['result']['addressMatches'][0]['coordinates']['y']
geodata['countycode'] = js['result']['addressMatches'][0]['geographies']['Counties'][0]['GEOID']
geodata['censustractcode'] = js['result']['addressMatches'][0]['geographies']['Census Tracts'][0]['GEOID']
return geodata
    except Exception:
        return None
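# A minimal usage sketch (hypothetical DataFrame and parameters; the endpoint
# details below are illustrative, not verified against the Geocoder docs):
#
#   baseurl = "https://geocoding.geo.census.gov/geocoder/geographies/address?"
#   parameters = {"benchmark": "Public_AR_Current", "vintage": "Current_Current", "format": "json"}
#   connection = open_connection(df, 0, baseurl, parameters)
#   if connection is not None:
#       print(retrieve_geodata(connection))
|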
# @Author: charles
# @Date: 2021-06-05 11:06:90
# @Email: charles.berube@polymtl.ca
# @Last modified by: charles
# @Last modified time: 2021-06-05 11:06:27
import numpy as np
import matplotlib.pyplot as plt
from pydlc import dense_lines
if __name__ == "__main__":
# Generate random synthetic time series
x = np.linspace(0, 90, 25)
ys = []
for _ in range(10000):
ys.append(np.random.randn(1)*np.exp(-x/100))
# Plot here
fig, axs = plt.subplots(1, 2, figsize=(8, 3), sharey=True, sharex=True)
axs[0].plot(x, np.array(ys).T, lw=1) # this is slow and cluttered
axs[0].set_title('Line Chart')
im = dense_lines(ys, x=x, ax=axs[1], cmap='magma') # this is fast and clean
axs[1].set_title('Density Lines Chart')
fig.colorbar(im)
fig.tight_layout()
plt.savefig('./figures/example.png', dpi=144, bbox_inches='tight')
plt.show()
|
import logging
import requests
class SomethingWrongError(Exception):
pass
def _extract_bike_location(bike, lon_abbrev='lon'):
"""
Standardize the bike location data from GBFS. Some have extra fields,
and some are missing fields.
Arguments:
bike (dict[str, str]): A GBFS bike object as it appears in free_bike_status.json
lon_abbrev (str): The abbreviation used for `longitude`
Returns:
dict[str, str]: A normalized GBFS bike object
"""
output = {key: bike.get(key) for key in ['bike_id', 'lat', 'is_reserved', 'is_disabled']}
output['lon'] = bike.get(lon_abbrev)
return output
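# For example (hypothetical values), a record using 'lng' normalizes as:
#   _extract_bike_location({'bike_id': 'b1', 'lat': 38.9, 'lng': -77.0}, lon_abbrev='lng')
#   -> {'bike_id': 'b1', 'lat': 38.9, 'is_reserved': None, 'is_disabled': None, 'lon': -77.0}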
def _pull_from_gbfs(url):
"""
Pull data from a GBFS URL.
Arguments:
url (str): The URL of free_bike_status.json
Returns:
list[dict[str, str]]: The GBFS position data of bikes
"""
r = requests.get(url)
return [_extract_bike_location(bike) for bike in r.json()['data']['bikes']]
def pull_jump():
data = _pull_from_gbfs('https://dc.jumpmobility.com/opendata/free_bike_status.json')
logging.info('Got data on %s bikes', len(data))
return data
def pull_lime():
r = requests.get('https://lime.bike/api/partners/v1/bikes?region=Washington%20DC%20Proper',
headers={'Authorization': 'Bearer limebike-PMc3qGEtAAXqJa'})
data = [{
'bike_id': bike['id'],
'lat': bike['attributes']['latitude'],
'lon': bike['attributes']['longitude'],
'is_reserved': None,
'is_disabled': None
} for bike in r.json()['data']]
logging.info('Got data on %s bikes', len(data))
return data
def pull_ofo():
payload = {
'token': 'c902b87e3ce8f9f95f73fe7ee14e81fe',
'name': 'Washington',
'lat': 38.894432,
'lng': -77.013655
}
r = requests.post('http://ofo-global.open.ofo.com/api/bike', data=payload)
data = [_extract_bike_location(bike, lon_abbrev='lng') for bike in r.json()['values']['cars']]
logging.info('Got data on %d bikes', len(data))
return data
def pull_spin():
data = _pull_from_gbfs('https://web.spin.pm/api/gbfs/v1/free_bike_status')
logging.info('Got data on %d bikes', len(data))
return data
def for_provider(provider):
if provider == 'jump':
return pull_jump
if provider == 'lime':
return pull_lime
if provider == 'ofo':
return pull_ofo
if provider == 'spin':
return pull_spin
|
import os
import time
import random
import pygame
pygame.font.init()
# Main window
VERSION = "1.0"
WIDTH, HEIGHT = 750, 750
WIN = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Space Invaders")
# loading images
RED_SPACE_SHIP = pygame.image.load(
os.path.join("assets/", "pixel_ship_red_small.png"))
BLUE_SPACE_SHIP = pygame.image.load(
os.path.join("assets/", "pixel_ship_blue_small.png"))
GREEN_SPACE_SHIP = pygame.image.load(
os.path.join("assets/", "pixel_ship_green_small.png"))
# player ship
YELLOW_SPACE_SHIP = pygame.image.load(
os.path.join("assets/", "pixel_ship_yellow.png"))
# loading lasers
RED_LASER = pygame.image.load(os.path.join("assets/", "pixel_laser_red.png"))
BLUE_LASER = pygame.image.load(os.path.join("assets/", "pixel_laser_blue.png"))
GREEN_LASER = pygame.image.load(
os.path.join("assets/", "pixel_laser_green.png"))
YELLOW_LASER = pygame.image.load(
os.path.join("assets/", "pixel_laser_yellow.png"))
# Background images
BG = pygame.transform.scale(pygame.image.load(os.path.join(
"assets/", "background-black.png")), (WIDTH, HEIGHT))
class Laser:
def __init__(self, x, y, img):
self.x = x
self.y = y
self.img = img
self.mask = pygame.mask.from_surface(self.img)
def draw(self, window):
window.blit(self.img, (self.x, self.y))
def move(self, vel):
self.y += vel
def off_screen(self, height):
        return not (0 <= self.y <= height)
def collision(self, obj):
return collide(obj, self)
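# Pixel-perfect collision: overlap the two sprite masks, offsetting obj2's
# mask by the difference between the objects' top-left positions.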
def collide(obj1, obj2):
offset_x = obj2.x - obj1.x
offset_y = obj2.y - obj1.y
    return obj1.mask.overlap(obj2.mask, (offset_x, offset_y)) is not None
class Ship:
COOLDOWN = 30
def __init__(self, x, y, health=100):
self.x = x
self.y = y
self.health = health
self.ship_img = None
self.laser_img = None
self.lasers = []
self.cool_down_counter = 0
def draw(self, window):
window.blit(self.ship_img, (self.x, self.y))
for laser in self.lasers:
laser.draw(window)
def get_width(self):
return self.ship_img.get_width()
def get_height(self):
return self.ship_img.get_height()
def move_lasers(self, vel, obj):
self.cooldown()
        # Iterate over a copy: removing items from a list while iterating it skips elements.
        for laser in self.lasers[:]:
laser.move(vel)
if laser.off_screen(HEIGHT):
self.lasers.remove(laser)
elif laser.collision(obj):
obj.health -= 10
self.lasers.remove(laser)
def cooldown(self):
if self.cool_down_counter >= self.COOLDOWN:
self.cool_down_counter = 0
elif self.cool_down_counter > 0:
self.cool_down_counter += 1
def shoot(self):
if self.cool_down_counter == 0:
laser = Laser(self.x, self.y, self.laser_img)
self.lasers.append(laser)
self.cool_down_counter = 1
class Player(Ship):
def __init__(self, x, y, health=100):
super().__init__(x, y, health)
self.ship_img = YELLOW_SPACE_SHIP
self.laser_img = YELLOW_LASER
self.mask = pygame.mask.from_surface(self.ship_img)
self.max_health = health
def move_lasers(self, vel, objs):
self.cooldown()
        # Iterate over a copy: removing items from a list while iterating it skips elements.
        for laser in self.lasers[:]:
laser.move(vel)
if laser.off_screen(HEIGHT):
self.lasers.remove(laser)
else:
                for obj in objs[:]:  # copy: objs is mutated during iteration
if laser.collision(obj):
objs.remove(obj)
if laser in self.lasers:
self.lasers.remove(laser)
def draw(self, window):
super().draw(window)
self.healthbar(window)
def healthbar(self, window):
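        # Red rect is the full-width background; the green rect drawn on top is
        # scaled by the current health fraction.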
pygame.draw.rect(window, (255,0,0), (self.x, self.y + self.ship_img.get_height() + 10, self.ship_img.get_width(), 10))
pygame.draw.rect(window, (0,255,0), (self.x, self.y + self.ship_img.get_height() + 10, self.ship_img.get_width() * (self.health/self.max_health), 10))
class Enemy(Ship):
COLOR_MAP = {
"red": (RED_SPACE_SHIP, RED_LASER),
"blue": (BLUE_SPACE_SHIP, BLUE_LASER),
"green": (GREEN_SPACE_SHIP, GREEN_LASER)
}
def __init__(self, x, y, color, health=100): # "red", "green", "blue"
super().__init__(x, y, health)
self.ship_img, self.laser_img = self.COLOR_MAP[color]
self.mask = pygame.mask.from_surface(self.ship_img)
def move(self, vel):
self.y += vel
def shoot(self):
if self.cool_down_counter == 0:
laser = Laser(self.x-25, self.y, self.laser_img)
self.lasers.append(laser)
            self.cool_down_counter = 1
|
#
# PySNMP MIB module RIVERSTONE-INVENTORY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/RIVERSTONE-INVENTORY-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:49:09 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint")
entPhysicalIndex, = mibBuilder.importSymbols("ENTITY-MIB", "entPhysicalIndex")
riverstoneMibs, = mibBuilder.importSymbols("RIVERSTONE-SMI-MIB", "riverstoneMibs")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
ObjectIdentity, iso, MibIdentifier, Counter32, NotificationType, TimeTicks, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, ModuleIdentity, Gauge32, Counter64, Unsigned32, Integer32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "iso", "MibIdentifier", "Counter32", "NotificationType", "TimeTicks", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ModuleIdentity", "Gauge32", "Counter64", "Unsigned32", "Integer32", "Bits")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
rsInventoryMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 5567, 2, 40))
rsInventoryMIB.setRevisions(('2001-08-22 00:00', '2001-06-19 00:00', '2001-06-11 00:00', '2001-06-01 00:00', '2001-05-22 00:00',))
if mibBuilder.loadTexts: rsInventoryMIB.setLastUpdated('200108220000Z')
if mibBuilder.loadTexts: rsInventoryMIB.setOrganization('Riverstone Networks, Inc')
rsInventoryMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1))
rsInventoryOther = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 1))
rsInventoryUnknown = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 2))
rsInventoryChassis = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 3))
rsInventoryBackplane = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 4))
rsInventoryPowerSupply = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 6))
rsInventoryFan = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 7))
rsInventorySensor = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 8))
rsInventoryModule = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9))
rsInventoryPort = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 10))
rsInventoryStack = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 11))
class RSMemorySize(TextualConvention, Integer32):
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 2147483647)
class RSModuleServiceString(TextualConvention, OctetString):
status = 'current'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(0, 255)
rsInventoryChassisTable = MibTable((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 3, 1), )
if mibBuilder.loadTexts: rsInventoryChassisTable.setStatus('current')
rsInventoryChassisEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 3, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"))
if mibBuilder.loadTexts: rsInventoryChassisEntry.setStatus('current')
rsInventoryChassisId = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 3, 1, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsInventoryChassisId.setStatus('current')
rsInventoryChassisAssetCLEI = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 3, 1, 1, 2), SnmpAdminString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(10, 10), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsInventoryChassisAssetCLEI.setStatus('current')
rsInventoryPowerSupplyTable = MibTable((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 6, 1), )
if mibBuilder.loadTexts: rsInventoryPowerSupplyTable.setStatus('current')
rsInventoryPowerSupplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 6, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"))
if mibBuilder.loadTexts: rsInventoryPowerSupplyEntry.setStatus('current')
rsInventoryPowerSupplyAssetCLEI = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 6, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(10, 10), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsInventoryPowerSupplyAssetCLEI.setStatus('current')
rsInventoryFanTable = MibTable((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 7, 1), )
if mibBuilder.loadTexts: rsInventoryFanTable.setStatus('current')
rsInventoryFanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 7, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"))
if mibBuilder.loadTexts: rsInventoryFanEntry.setStatus('current')
rsInventoryFanAssetCLEI = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 7, 1, 1, 1), SnmpAdminString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(10, 10), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsInventoryFanAssetCLEI.setStatus('current')
rsInventoryModuleTable = MibTable((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9, 1), )
if mibBuilder.loadTexts: rsInventoryModuleTable.setStatus('current')
rsInventoryModuleEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9, 1, 1), ).setIndexNames((0, "ENTITY-MIB", "entPhysicalIndex"))
if mibBuilder.loadTexts: rsInventoryModuleEntry.setStatus('current')
rsInventoryModuleMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9, 1, 1, 1), RSMemorySize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsInventoryModuleMemorySize.setStatus('current')
rsInventoryModuleServiceString = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9, 1, 1, 2), RSModuleServiceString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rsInventoryModuleServiceString.setStatus('current')
rsInventoryModuleAssetCLEI = MibTableColumn((1, 3, 6, 1, 4, 1, 5567, 2, 40, 1, 9, 1, 1, 3), SnmpAdminString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(10, 10), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rsInventoryModuleAssetCLEI.setStatus('current')
rsInventoryConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3))
rsInventoryCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 1))
rsInventoryGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 2))
rsInventoryCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 1, 1)).setObjects(("RIVERSTONE-INVENTORY-MIB", "rsChassisGroup"), ("RIVERSTONE-INVENTORY-MIB", "rsModuleGroup"), ("RIVERSTONE-INVENTORY-MIB", "rsEnvGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rsInventoryCompliance = rsInventoryCompliance.setStatus('current')
rsChassisGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 2, 1)).setObjects(("RIVERSTONE-INVENTORY-MIB", "rsInventoryChassisId"), ("RIVERSTONE-INVENTORY-MIB", "rsInventoryChassisAssetCLEI"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rsChassisGroup = rsChassisGroup.setStatus('current')
rsModuleGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 2, 2)).setObjects(("RIVERSTONE-INVENTORY-MIB", "rsInventoryModuleMemorySize"), ("RIVERSTONE-INVENTORY-MIB", "rsInventoryModuleServiceString"), ("RIVERSTONE-INVENTORY-MIB", "rsInventoryModuleAssetCLEI"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rsModuleGroup = rsModuleGroup.setStatus('current')
rsEnvGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 5567, 2, 40, 3, 2, 3)).setObjects(("RIVERSTONE-INVENTORY-MIB", "rsInventoryPowerSupplyAssetCLEI"), ("RIVERSTONE-INVENTORY-MIB", "rsInventoryFanAssetCLEI"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
rsEnvGroup = rsEnvGroup.setStatus('current')
mibBuilder.exportSymbols("RIVERSTONE-INVENTORY-MIB", rsInventoryChassisAssetCLEI=rsInventoryChassisAssetCLEI, rsInventoryFanTable=rsInventoryFanTable, rsInventoryGroups=rsInventoryGroups, rsModuleGroup=rsModuleGroup, rsInventoryModuleTable=rsInventoryModuleTable, rsChassisGroup=rsChassisGroup, rsInventoryFanEntry=rsInventoryFanEntry, rsInventoryMIBObjects=rsInventoryMIBObjects, rsInventoryPort=rsInventoryPort, rsInventoryModuleEntry=rsInventoryModuleEntry, rsInventoryChassis=rsInventoryChassis, rsInventoryModuleMemorySize=rsInventoryModuleMemorySize, rsInventoryBackplane=rsInventoryBackplane, rsInventoryChassisId=rsInventoryChassisId, rsInventorySensor=rsInventorySensor, rsInventoryConformance=rsInventoryConformance, rsInventoryMIB=rsInventoryMIB, rsInventoryCompliances=rsInventoryCompliances, rsInventoryModuleServiceString=rsInventoryModuleServiceString, rsInventoryStack=rsInventoryStack, rsInventoryPowerSupplyAssetCLEI=rsInventoryPowerSupplyAssetCLEI, PYSNMP_MODULE_ID=rsInventoryMIB, rsInventoryPowerSupply=rsInventoryPowerSupply, rsInventoryModuleAssetCLEI=rsInventoryModuleAssetCLEI, rsInventoryPowerSupplyEntry=rsInventoryPowerSupplyEntry, rsInventoryModule=rsInventoryModule, rsInventoryChassisTable=rsInventoryChassisTable, RSMemorySize=RSMemorySize, rsInventoryCompliance=rsInventoryCompliance, rsInventoryChassisEntry=rsInventoryChassisEntry, rsInventoryFanAssetCLEI=rsInventoryFanAssetCLEI, rsInventoryPowerSupplyTable=rsInventoryPowerSupplyTable, RSModuleServiceString=RSModuleServiceString, rsInventoryFan=rsInventoryFan, rsInventoryUnknown=rsInventoryUnknown, rsEnvGroup=rsEnvGroup, rsInventoryOther=rsInventoryOther)
|
from django.db import models
class Publisher(models.Model):
name = models.CharField(max_length=40)
def __str__(self):
return "%s" % (self.name)
class Author(models.Model):
first_name = models.CharField(max_length=25)
last_name = models.CharField(max_length=25)
def __str__(self):
return "%s %s" % (self.first_name, self.last_name)
class Book(models.Model):
title = models.CharField(max_length=40)
genre = models.CharField(max_length=25)
pub_date = models.IntegerField(default=0)
publisher = models.ForeignKey(Publisher, on_delete=models.CASCADE)
author = models.ForeignKey(Author, on_delete=models.CASCADE)
def __str__(self):
return "%s" % (self.title) |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-01 19:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('instaapp', '0002_auto_20190901_1701'),
]
operations = [
migrations.AddField(
model_name='profile',
name='name',
field=models.CharField(default='Anonym', max_length=50),
),
migrations.AlterField(
model_name='profile',
name='bio',
field=models.CharField(blank=True, max_length=80),
),
]
|
#!/usr/bin/python3
from socket import * #importing all
host = "127.0.0.1" # setting the host on port
port = 9999
# open the socket
s = socket(AF_INET, SOCK_STREAM, 0)
# where AF_INET is the type of addresses that socket can communicate with = IPv4
# SOCK_STREAM - Connection based protocol -> TCP
# third param: 0 (default protocol)
s.bind((host, port))  # bind the socket to the given host and port
s.listen(5)  # backlog: up to 5 queued connections
while True:
c, addr = s.accept() # accepting connections on the server
rmsg = c.recv(1024) # received message on TCP connections, where 1024 is bufsize
print(rmsg.decode('ascii')) # printing the received message decoding via ASCII
msg = "This is a message for the server"
c.send(msg.encode('ascii')) # sending message
break
s.close()
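# A minimal companion client (a sketch, assuming the server above is running
# on the same machine and port) to exercise this exchange:
#
#     from socket import socket, AF_INET, SOCK_STREAM
#     c = socket(AF_INET, SOCK_STREAM)
#     c.connect(("127.0.0.1", 9999))
#     c.send("hello server".encode('ascii'))
#     print(c.recv(1024).decode('ascii'))
#     c.close()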
|
from mbed.generation import make_line_mesh
from mbed.preprocessing import refine
import numpy as np
def test():
coords = np.array([[0, 0], [1, 0], [1, 0.2], [1, 0.5], [1, 0.7], [1, 1], [0, 1.]])
mesh = make_line_mesh(coords, close_path=True)
rmesh, mapping = refine(mesh, threshold=0.6)
x = mesh.coordinates()
y = rmesh.coordinates()
assert np.linalg.norm(x - y[:len(x)]) < 1E-13
e2v, E2V = mesh.topology()(1, 0), rmesh.topology()(1, 0)
for c in range(mesh.num_cells()):
x0, x1 = x[e2v(c)]
e = x1 - x0
e = e/np.linalg.norm(e)
for C in np.where(mapping == c)[0]:
y0, y1 = y[E2V(C)]
E = y1 - y0
E = E/np.linalg.norm(E)
assert abs(1-abs(np.dot(e, E))) < 1E-13
|
#
# Copyright (c) 2016, deepsense.io
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from future.builtins import object
import ast
import io
import re
import yaml
from neptune.internal.cli.exceptions.job_config_exceptions import (
InvalidJobConfigException,
JobConfigFileNotYAMLException,
NeptuneReadConfigException,
ParameterYAMLInvalidDefinitionException
)
from neptune.internal.cli.exceptions.params_exceptions import RangeParameterParseException
from neptune.internal.common.config.neptune_config_validator import NeptuneConfigValidator
from neptune.internal.common.models.key_value_properties_utils import properties_from_config_file
from neptune.internal.common.parsers.tracked_parameter_parser import TrackedParameterParser
from neptune.internal.common.parsers.tracked_parameter_regexes import (
range_regex_part,
range_brackets_regex_part,
list_brackets_regex_part
)
from neptune.internal.common.utils.str import to_bytestring
from neptune.internal.common.utils.system import IS_WINDOWS
class ConfigKeys(object):
# Profile
PROFILE = 'profile'
# Project
PROJECT = 'project'
# Experiment
NAME = 'name'
DESCRIPTION = 'description'
TAGS = 'tags'
REQUIREMENTS = 'requirements' # probably to be deprecated and eventually removed
PIP_REQUIREMENTS_FILE = 'pip-requirements-file'
PROPERTIES = 'properties'
PARAMETERS = 'parameters'
METRIC = 'metric'
# Neptune connection
OPEN_WEBBROWSER = 'open-webbrowser'
LOG_CHANNELS = 'log-channels'
EXCLUDE = 'exclude'
COMMAND = 'command'
NOTEBOOK_FILE = 'notebook-file'
POSITIONAL_NOTEBOOK = 'positional_notebook'
BACKUP = 'backup'
DISABLE_STDOUT_CHANNEL = 'disable-stdout-channel'
DISABLE_STDERR_CHANNEL = 'disable-stderr-channel'
WORKER = "worker"
ENVIRONMENT = "environment"
INPUT = "input"
ML_FRAMEWORK = 'ml-framework'
DEBUG = 'debug'
TRACKING = 'tracking'
NEPTUNE_CONFIG = [
OPEN_WEBBROWSER
]
EXPERIMENT_CONFIG = [
NAME,
DESCRIPTION,
TAGS,
REQUIREMENTS,
PIP_REQUIREMENTS_FILE,
PROPERTIES,
EXCLUDE,
COMMAND,
PROJECT,
LOG_CHANNELS,
ML_FRAMEWORK,
DISABLE_STDOUT_CHANNEL,
DISABLE_STDERR_CHANNEL,
BACKUP,
NOTEBOOK_FILE,
POSITIONAL_NOTEBOOK,
WORKER,
ENVIRONMENT,
INPUT
]
BOOLEAN_PARAMETERS = [
OPEN_WEBBROWSER
]
class JobConfig(object):
def __init__(self, config_path):
self.path = config_path
self.neptune_config_validator = NeptuneConfigValidator(self.path)
try:
config_path_loc = config_path
if not IS_WINDOWS:
config_path_loc = to_bytestring(config_path_loc)
with io.open(config_path_loc, 'r', encoding='utf-8') as configfile:
                config = yaml.safe_load(configfile.read())  # safe_load avoids executing arbitrary YAML tags
except yaml.YAMLError:
raise JobConfigFileNotYAMLException(self.path)
except IOError as io_error:
raise NeptuneReadConfigException(io_error)
if config is None:
config = dict()
elif not isinstance(config, dict):
raise InvalidJobConfigException(
job_config_path=self.path,
cause='The structure of the file is incorrect.')
if ConfigKeys.PROPERTIES not in config:
config[ConfigKeys.PROPERTIES] = []
config[ConfigKeys.PROPERTIES] = self._properties_from_config(config_path, config)
self.neptune_config_validator.validate(
'neptune_config_schema.yaml', source_data=dict(config))
parameters = config.get(ConfigKeys.PARAMETERS, {})
config[ConfigKeys.PARAMETERS] = self._pre_process_params(parameters)
self._config = dict(config)
def _pre_process_params(self, raw_params):
params = {}
raw_params = raw_params or {}
for name, param in raw_params.items():
if not isinstance(param, dict):
                param = {'value': param}
if 'value' not in param and 'description' not in param:
raise ParameterYAMLInvalidDefinitionException(name)
# Make sure that we have empty dicts instead of None values.
param = param or {}
if 'value' in param:
param_to_string = str(param['value']).replace("'", "\"")
matched_range_brackets = re.match(r'^' + range_brackets_regex_part() + r'$', param_to_string)
matched_list_brackets = re.match(r'^' + list_brackets_regex_part(with_api_part=False) + r'$',
param_to_string)
if matched_range_brackets and not param_to_string.startswith("\""):
matched_range = re.match(r'^' + range_regex_part() + r'$', param_to_string)
if not matched_range:
raise RangeParameterParseException(name)
param['repr'] = str(param['value'])
param['value'] = param_to_string
elif matched_list_brackets and isinstance(param['value'], list):
array = ast.literal_eval(matched_list_brackets.group('value'))
array = [u'' + TrackedParameterParser.process_quotable_parameter_field(str(a)) for a in array]
param['repr'] = str(param['value'])
param['value'] = array
else:
if re.search(r'\s', str(param['value'])) and not str(param['value']).startswith("\""):
param['repr'] = '\"' + str(param['value']) + '\"'
else:
param['repr'] = str(param['value'])
self._check_parameter_default_value(param_to_string)
param['value'] = param_to_string
params[name] = param
return params
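    # Example (a rough sketch of the normalisation above): a plain scalar such
    # as {'x': 5} ends up as {'x': {'value': '5', 'repr': '5'}}, while bracketed
    # strings are routed through the range/list regex helpers instead.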
def _check_parameter_default_value(self, value):
if isinstance(value, dict):
self.neptune_config_validator.validate('param_gridable_default.yaml', source_data=value)
else:
self.neptune_config_validator.validate('param_simple_default.yaml', source_data=value)
@staticmethod
def _properties_from_config(config_path, config):
raw_properties = config.get(ConfigKeys.PROPERTIES)
try:
return properties_from_config_file(raw_properties)
except KeyError:
raise InvalidJobConfigException(
job_config_path=config_path,
cause="Section `properties` has invalid structure. "
"Each property should consist of fields: 'key', 'value'.")
def get(self, obj, objtype=None):
return self._config.get(obj, objtype)
def get_config(self):
return self._config
def __getitem__(self, item):
return self._config.__getitem__(item)
def __str__(self):
return self._config.__str__()
def __contains__(self, item):
return self._config.__contains__(item)
|
from __future__ import division, print_function
import json
import os
import sys
from video_jpg_wedding import VIDAUG_BATCHES
def convert_csv_to_dict(csv_path, subset):
keys = []
key_labels = []
with open(csv_path, 'r') as fp:
lines = fp.read().splitlines()
for l in lines:
tokens = list(filter(None, l.split(',')))
src_file = tokens[0]
class_name = tokens[1].strip()
folder, file_name_and_ext = os.path.split(src_file)
name, ext = os.path.splitext(file_name_and_ext)
key = f"{folder}__{name}___{subset}_{class_name}"
keys.append(key)
if subset != 'testing':
key_labels.append(class_name)
for batch_index in range(VIDAUG_BATCHES):
key = f"{folder}_vidaug_{batch_index}__{name}___{subset}_{class_name}"
keys.append(key)
if subset != 'testing':
key_labels.append(class_name)
database = {}
for i in range(len(keys)):
key = keys[i]
database[key] = {}
database[key]['subset'] = subset
if subset != 'testing':
label = key_labels[i]
database[key]['annotations'] = {'label': label}
else:
database[key]['annotations'] = {}
return database
def load_labels(train_csv_path, val_csv_path):
class_names = set()
for path in [train_csv_path, val_csv_path]:
with open(path, 'r') as fp:
lines = fp.read().splitlines()
for l in lines:
tokens = list(filter(None, l.split(',')))
src_file = tokens[0]
class_name = tokens[1].strip()
class_names.add(class_name)
class_names = list(class_names)
class_names.sort()
return class_names
def convert_kinetics_csv_to_activitynet_json(train_csv_path, val_csv_path, test_csv_path, dst_json_path):
labels = load_labels(train_csv_path, val_csv_path)
train_database = convert_csv_to_dict(train_csv_path, 'training')
val_database = convert_csv_to_dict(val_csv_path, 'validation')
test_database = convert_csv_to_dict(test_csv_path, 'testing')
dst_data = {}
dst_data['labels'] = labels
dst_data['database'] = {}
dst_data['database'].update(train_database)
dst_data['database'].update(val_database)
dst_data['database'].update(test_database)
print(f"{len(labels)} classes.")
with open(dst_json_path, 'w') as dst_file:
json.dump(dst_data, dst_file)
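# Expected CSV row format (a sketch inferred from the parsing above):
#   some/folder/video001.mp4,class_name
# i.e. the source file path in the first column and the class label in the second.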
if __name__ == "__main__":
train_csv_path = sys.argv[1]
val_csv_path = sys.argv[2]
test_csv_path = sys.argv[3]
dst_json_path = sys.argv[4]
convert_kinetics_csv_to_activitynet_json(
train_csv_path, val_csv_path, test_csv_path, dst_json_path)
|
"""
This module calculates the Turbulence and Systemic Risk indicators.
"""
class Calculate:
"""
Calculates the Turbulence and Systemic Risk indicators.
"""
def __init__(self):
self.recession_start_dates = ['2001-04-01', '2008-01-01', '2020-03-01']
self.recession_end_dates = ['2001-11-01', '2009-06-01', '2020-04-01']
self.turbulence = {}
self.systemic_risk = {}
def exponential_smoother(self, raw_data, half_life):
"""
        Purpose: performs exponential smoothing on "raw_data". The recurrence
        starts from the first data item (i.e. assumes that the data in
        "raw_data" is listed in chronological order).
        Output: a list containing the smoothed values of "raw_data".
"raw_data": iterable, the data to be smoothed.
"half_life": float, the half-life for the smoother. The smoothing factor
(alpha) is calculated as alpha = 1 - exp(ln(0.5) / half_life)
"""
import math
raw_data = list(raw_data)
half_life = float(half_life)
smoothing_factor = 1 - math.exp(math.log(0.5) / half_life)
smoothed_values = [raw_data[0]]
for index in range(1, len(raw_data)):
previous_smooth_value = smoothed_values[-1]
new_unsmooth_value = raw_data[index]
new_smooth_value = ((smoothing_factor * new_unsmooth_value)
+ ((1 - smoothing_factor) * previous_smooth_value))
smoothed_values.append(new_smooth_value)
        return smoothed_values
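    # Worked example: with half_life=1, alpha = 1 - exp(ln(0.5)) = 0.5, so
    # exponential_smoother([0, 1, 1], 1) returns [0, 0.5, 0.75].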
def append_recession_series(self, current_date, dataframe):
"""
Purpose: determine if the "current_date" is in a recessionary time period.
Appends the result (100 if in recession, 0 if not) to the "Recession"
series of the "dataframe".
"""
recession_value = 0
for recession_instance in range(0, len(self.recession_end_dates)):
recession_start = self.recession_start_dates[recession_instance]
recession_end = self.recession_end_dates[recession_instance]
after_start_date = current_date >= recession_start
before_end_date = current_date <= recession_end
if after_start_date and before_end_date:
recession_value = 100
break
dataframe['Recession'].append(recession_value)
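    # The turbulence measure below follows the Kritzman & Li (2010) definition:
    # the squared Mahalanobis distance of the current period's returns from the
    # historical mean, computed over an expanding estimation window.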
def calculate_turbulence(self, returns, initial_window_size=250):
"""
Purpose: calculate the Turbulence of the asset pool.
Output: a dictionary containing the Turbulence values and their date-stamps.
"returns": dataframe, the returns of the asset pool (in reverse chronological
order)
"initial window_size": integer, the initial window size used to calculate
the covariance matrix. This window size grows as the analysis proceeds
across time.
"""
        import numpy as np
        from scipy.spatial import distance
        window_size = int(initial_window_size)
self.turbulence = {'Dates': [], 'Raw Turbulence': [], 'Recession': []}
chronological_returns = returns.iloc[::-1]
while window_size < len(chronological_returns):
historical_sample = chronological_returns.iloc[:window_size, 1:]
historical_sample_means = historical_sample.mean()
inverse_covariance_matrix = np.linalg.pinv(historical_sample.cov())
current_data = chronological_returns.iloc[[window_size]]
current_date = current_data['Dates'].values[0]
mahalanobis_distance = distance.mahalanobis(u=current_data.iloc[:, 1:],
v=historical_sample_means,
VI=inverse_covariance_matrix)
turbulence = mahalanobis_distance**2
self.turbulence['Raw Turbulence'].append(turbulence)
self.turbulence['Dates'].append(current_date)
self.append_recession_series(current_date=current_date,
dataframe=self.turbulence)
window_size += 1
self.turbulence['Turbulence'] = self.exponential_smoother(raw_data=self.turbulence['Raw Turbulence'],
half_life=12)
        return self.turbulence
def gini(self, values):
"""
Purpose: calculate the Gini coefficient for a one-dimensional set of values.
Output: the Gini coefficient, as a float object.
"values": iterable, the values on which to calculate the Gini coefficient.
The data in "values" does not have to be sorted in ascending or descending order.
"""
values = list(values)
values.sort()
minimum_value = values[0]
lorenz_curve_value = minimum_value
average_input = sum(values)/len(values)
line_of_equality = [average_input]
gap_area = [line_of_equality[0] - lorenz_curve_value]
for index in range(1, len(values)):
lorenz_curve_value += values[index]
line_of_equality.append(line_of_equality[index - 1] + average_input)
gap_area.append(line_of_equality[index - 1] + average_input
- lorenz_curve_value)
        return sum(gap_area) / sum(line_of_equality)
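    # Sanity check: gini([1, 1, 1]) returns 0.0 (perfect equality); more
    # concentrated distributions push the coefficient toward 1.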
def calculate_systemic_risk(self, returns, window_size=250):
"""
Purpose: calculate the Systemic Risk of the asset pool.
Output: a dictionary containing the Systemic Risk values and their date-stamps.
"returns": dataframe, the returns of the asset pool (in reverse chronological
order)
"window_size": integer, the window size used to calculate
the covariance matrix. This window size shifts forward as the analysis proceeds
across time.
"""
        import numpy as np
        self.systemic_risk = {'Dates': [], 'Systemic Risk': [], 'Recession': []}
        chronological_returns = returns.iloc[::-1]
        window_endpoint = int(window_size)
while window_endpoint < len(chronological_returns):
covariance_matrix = chronological_returns.iloc[window_endpoint + 1 - window_size : window_endpoint + 1].cov()
eigenvalues = np.sort(np.linalg.eig(covariance_matrix)[0])
current_date = chronological_returns.iloc[window_endpoint, 0]
self.systemic_risk['Systemic Risk'].append(self.gini(values=eigenvalues))
self.systemic_risk['Dates'].append(current_date)
self.append_recession_series(current_date=current_date,
dataframe=self.systemic_risk)
window_endpoint += 1
        return self.systemic_risk
|
##########################################################################
#
# Copyright (c) 2007-2010, Image Engine Design Inc. All rights reserved.
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios),
# its affiliates and/or its licensors.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import tempfile
import IECore
class SkeletonPrimitiveTests(unittest.TestCase):
def setUp(self):
mts = IECore.M44fVectorData()
mts.resize(10)
ids = IECore.IntVectorData()
ids.resize(10)
for i in range(0, 10):
ids[i] = i-1
vec = IECore.V3f(0-i, i+1, 2*i)
mts[i] = mts[i].setTranslation(vec)
self.skeletonPrimitive = IECore.SkeletonPrimitive(mts, ids, IECore.SkeletonPrimitive.Space.World)
def testDefaultInitialisation(self):
sp = IECore.SkeletonPrimitive()
        self.assertEqual( sp.numJoints(), 0 ) # check we have no joints
self.assertEqual( sp.getJointNames(), IECore.StringVectorData() )
self.assertTrue( sp.isEqualTo(sp) )
self.assertTrue( sp.isSimilarTo(sp.copy()) )
    def testCopyConstructor(self):
sp = IECore.SkeletonPrimitive(self.skeletonPrimitive)
self.assertTrue( sp.isSimilarTo(self.skeletonPrimitive) )
def testMatricesInitialisation(self):
mts = IECore.M44fVectorData()
mts.resize(10)
ids = IECore.IntVectorData()
ids.resize(10)
for i in range(0, 10):
ids[i] = i-1
mts[i] = mts[i].setTranslation(IECore.V3f(0, i+10, 0))
# In World space
sp = IECore.SkeletonPrimitive(mts, ids, IECore.SkeletonPrimitive.Space.World)
assert sp.numJoints() == 10
for i in range(0, 10):
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.World).value.translation().y
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Reference).value.translation().y
if i == 0:
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Local).value.translation().y
else:
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Local).value.translation().y
# In Reference space
sp = IECore.SkeletonPrimitive(mts, ids, IECore.SkeletonPrimitive.Space.Reference)
assert sp.numJoints() == 10
for i in range(0, 10):
if i==0:
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.World).value.translation().y
else:
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.World).value.translation().y
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Reference).value.translation().y
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Local).value.translation().y
# In Local space
sp = IECore.SkeletonPrimitive(mts, ids, IECore.SkeletonPrimitive.Space.Local)
assert sp.numJoints() == 10
for i in range(0, 10):
if i==0:
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.World).value.translation().y
else:
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.World).value.translation().y
assert mts[i].translation().y != sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Reference).value.translation().y
assert mts[i].translation().y == sp.getJointPose(i, IECore.SkeletonPrimitive.Space.Local).value.translation().y
def testSaveLoad(self):
tempFile = os.tempnam()+".cob"
writer = IECore.Writer.create(self.skeletonPrimitive, tempFile)
writer.write()
reader = IECore.Reader.create(tempFile)
otherSp = reader.read()
self.assertTrue( self.skeletonPrimitive.isSimilarTo(otherSp) )
def testExtractingParentIds(self):
        self.assertEqual( self.skeletonPrimitive.getParentId(0), -1 ) # check the parent id of joint 0 is -1
        self.assertEqual( len( self.skeletonPrimitive.getParentIds() ), 10 ) # check we get a list of 10 ids
def testExtractingMatrices(self):
        self.assertEqual( self.skeletonPrimitive.getJointPoses(IECore.SkeletonPrimitive.Space.World).size(), 10 )
        self.assertEqual( self.skeletonPrimitive.getJointPoses(IECore.SkeletonPrimitive.Space.Local).size(), 10 )
        self.assertEqual( self.skeletonPrimitive.getJointPoses(IECore.SkeletonPrimitive.Space.Reference).size(), 10 )
testedMat = self.skeletonPrimitive.getJointPoses()[4] # default for getTransforms is world space
testerMat = IECore.M44f().setTranslation( IECore.V3f(-4, 5, 8) )
# check the matrix returned
for i in range(0, 4):
for j in range(0, 4):
self.assertEqual(testedMat[i,j], testerMat[i,j])
def testCopy(self):
s = IECore.SkeletonPrimitive()
s.setAsCopyOf(self.skeletonPrimitive)
s.isSimilarTo(self.skeletonPrimitive)
if __name__ == "__main__":
    print('Testing SkeletonPrimitive\n----------------------------------------------------------------------\n')
import sys
t = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
unittest.main(testRunner=t)
|
"""create an address table
Revision ID: b2ec3f7fb212
Revises:
Create Date: 2017-10-14 15:35:18.647291
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b2ec3f7fb212'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'address',
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('address', sa.String(50), nullable=False),
sa.Column('city', sa.String(50), nullable=False),
sa.Column('state', sa.String(20), nullable=False),
)
def downgrade():
op.drop_table('address')
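# Typical command-line usage (assuming a configured alembic.ini):
#   alembic upgrade head    # apply this migration
#   alembic downgrade -1    # revert it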
|
"""Settings"""
import sys
import os
import logging
from logging.handlers import RotatingFileHandler
from flask import Flask
from config import BaseConfiguration
# For visibility of folder
sys.path.insert(0, os.path.join('./../algorithms'))
def register_logger(app, logs_dir):
"""Setting up logger"""
log_file_format = """
[%(levelname)s] - %(asctime)s - %(name)s - : %(message)s in %(pathname)s:[%(funcName)s]:%(lineno)d
"""
log_console_format = "[%(levelname)s]: %(message)s"
console_handler = logging.StreamHandler()
console_handler.setFormatter(logging.Formatter(log_console_format))
file_handler = RotatingFileHandler(
os.path.join(
logs_dir,
'api.log'),
        maxBytes=10**6,  # RotatingFileHandler expects an int byte count
backupCount=10,
encoding='utf-8')
file_handler.setFormatter(logging.Formatter(log_file_format))
app.logger.addHandler(file_handler)
log_werkzeug = logging.getLogger('werkzeug')
log_werkzeug.addHandler(file_handler)
log_werkzeug.addHandler(console_handler)
def create_app(config_class=BaseConfiguration):
"""Setting up app"""
app = Flask(__name__)
app.config.from_object(config_class)
from app.api import bp as api_bp
app.register_blueprint(api_bp, url_prefix='/api')
base_dir = os.path.join(app.root_path, '..')
logs_dir = os.path.join(base_dir, 'logs')
if not os.path.exists(logs_dir):
os.mkdir(logs_dir)
register_logger(app, logs_dir)
app.logger.setLevel(logging.INFO)
app.logger.info('Krasnodar-Road-Network API startup')
return app
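# A minimal run sketch (assuming this module is the `app` package's __init__):
#   from app import create_app
#   create_app().run(debug=True)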
|
'''
Bubbles Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re
import urllib
import kodi
import log_utils # @UnusedImport
import dom_parser2
import json
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import XHR
from salts_lib.utils2 import i18n
import scraper
import urlparse
# cleantitle, client and directstream are used below but were not imported;
# they are assumed to live in the add-on's shared modules (Exodus-style layout).
from resources.lib.modules import cleantitle, client, directstream
logger = log_utils.Logger.get_logger()
class source:
def __init__(self):
self.priority = 0
self.language = ['en']
self.domains = ['sockshare.biz']
self.base_link = 'http://sockshare.biz'
def movie(self, imdb, title, localtitle, year):
try:
url = {'imdb': imdb, 'title': title, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def tvshow(self, imdb, tvdb, tvshowtitle, localtitle, year):
try:
url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
try:
            if url is None: return
url = urlparse.parse_qs(url)
url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
url = urllib.urlencode(url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
            if url is None: return sources
if not str(url).startswith('http'):
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
if 'tvshowtitle' in data:
y = str((int(data['year']) + int(data['season'])) - 1)
url = '/watch/%s-s%02d-%s-online.html' % (cleantitle.geturl(title), int(data['season']), y)
episode = '%01d' % int(data['episode'])
else:
url = '/watch/%s-%s-online.html' % (cleantitle.geturl(title), data['year'])
year = data['year']
episode = None
url = urlparse.urljoin(self.base_link, url)
r = client.request(url, output='geturl')
if 'error.html' in r: raise Exception()
else:
try: url, episode = re.findall('(.+?)\?episode=(\d*)$', url)[0]
except: episode = None
try: episode = '%01d' % int(data['episode'])
except: pass
r = client.request(url)
h = {'User-Agent': client.agent(), 'X-Requested-With': 'XMLHttpRequest'}
ip = client.parseDOM(r, 'input', ret='value', attrs = {'name': 'phimid'})[0]
            ep = episode if episode is not None else '1'
p = {'ip_film': ip, 'ip_name': ep, 'ipplugins': '1', 'ip_server': '11'}
p = urllib.urlencode(p)
u = '/ip.file/swf/plugins/ipplugins.php'
u = urlparse.urljoin(self.base_link, u)
r = client.request(u, post=p, headers=h, referer=url)
r = json.loads(r)
u = '/ip.file/swf/ipplayer/ipplayer.php'
u = urlparse.urljoin(self.base_link, u)
p = {'u': r['s'], 's': r['v'], 'w': '100%', 'h': '360', 'n':'0'}
p = urllib.urlencode(p)
r = client.request(u, post=p, headers=h, referer=url)
r = json.loads(r)['data']
u = [i['files'] for i in r if 'files' in i]
for i in u:
try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False})
except: pass
return sources
except:
return sources
def resolve(self, url):
return directstream.googlepass(url)
|
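# Advent of Code 2015, day 3: each move is encoded as a unit step in the
# complex plane (^ = +1j, > = +1, v = -1j, < = -1), so every visited house is
# just a running sum of moves.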
dirs = [{"^": 1j, ">": 1, "v": -1j, "<": -1}[c] for c in open("input_3", "r").readline().strip()]
# Part 1:
print("Part 1:", len(set(sum(dirs[:i]) for i in range(len(dirs) + 1))))
# Part 2:
visited_by_santa = set(sum(dirs[: 2 * i : 2]) for i in range(len(dirs[::2]) + 1))
visited_by_robos = set(sum(dirs[1 : 2 * i : 2]) for i in range(len(dirs[1::2]) + 1))
print("Part 2:", len(visited_by_santa | visited_by_robos))
|
from BusinessLogicLayer.cluster.master import *
class ActionXjCloud(ActionMasterGeneral):
def __init__(self, register_url='https://www.xjycloud.xyz/auth/register', silence=True):
super(ActionXjCloud, self).__init__(register_url=register_url, silence=silence, email='@qq.com', life_cycle=62,
hyper_params={'anti_slider': True})
|
bl_info = {
"name": "keQuickScale",
"author": "Kjell Emanuelsson",
"category": "Modeling",
"version": (1, 0, 3),
"blender": (2, 80, 0),
}
import bpy
from mathutils import Vector
from .ke_utils import average_vector
class VIEW3D_OT_ke_quickscale(bpy.types.Operator):
bl_idname = "view3d.ke_quickscale"
bl_label = "Quick Scale"
bl_description = "Set size and scale axis to fit. Object(s) & Edit mode (selection)"
bl_options = {'REGISTER', 'UNDO'}
user_axis : bpy.props.EnumProperty(
items=[("0", "X", "", 1),
("1", "Y", "", 2),
("2", "Z", "", 3)],
name="Axis",
default=1)
each : bpy.props.EnumProperty(
items=[("SEPARATE", "Separate", "Each object uses its own Local Transform", 1),
("COMBINED", "Combined", "Using combined BBox in Global Transform (via Edit Mesh)", 2)],
name="Process Objects",
default=1)
unit_size : bpy.props.BoolProperty(name="Unit Size", default=False,
description="Unit Size: Scales all axis as needed "
"for selected axis to be desired size")
user_value : bpy.props.FloatProperty(name="Size", default=1)
def draw(self, context):
layout = self.layout
layout.use_property_split = True
layout.use_property_decorate = False
row = layout.row(align=True)
row.prop(self, "each", expand=True)
row = layout.row(align=True)
row.prop(self, "user_axis", expand=True)
row = layout.row(align=True)
row.prop(self, "user_value")
row = layout.row(align=True)
row.prop(self, "unit_size", toggle=True)
layout.separator()
@classmethod
def poll(cls, context):
return (context.object is not None and
context.object.type == 'MESH')
def scale_object(self, obj, dimension, user_axis, other_axis):
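        # dimension[axis] / scale[axis] is the object's unscaled extent on that
        # axis, so user_value divided by that extent is the absolute scale
        # needed to reach the requested size.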
obj_val = self.user_value / (dimension[user_axis] / obj.scale[user_axis])
factor = obj_val / obj.scale[user_axis]
new_scale = [1, 1, 1]
if self.unit_size:
new_scale[user_axis] = obj_val
new_scale[other_axis[0]] = factor * obj.scale[other_axis[0]]
new_scale[other_axis[1]] = factor * obj.scale[other_axis[1]]
obj.scale = (new_scale[0], new_scale[1], new_scale[2])
else:
obj.scale[user_axis] = obj_val
def scale_mesh(self, new_dimensions, c_o):
bpy.ops.transform.resize(value=new_dimensions, orient_type='GLOBAL', orient_matrix_type='GLOBAL',
center_override=c_o,
constraint_axis=(False, False, False), mirror=False, use_proportional_edit=False,
use_proportional_connected=False, use_proportional_projected=False, snap=False,
gpencil_strokes=False, texture_space=False, remove_on_cancel=False,
release_confirm=False)
def calc_bbox(self, vpos):
x, y, z = [], [], []
for i in vpos:
x.append(i[0])
y.append(i[1])
z.append(i[2])
x, y, z = sorted(x), sorted(y), sorted(z)
return (x[-1] - x[0], y[-1] - y[0], z[-1] - z[0]), (x, y, z)
def execute(self, context):
user_axis = int(self.user_axis)
        other_axis = [0, 1, 2]
other_axis.pop(user_axis)
# Check selections
sel_obj = [obj for obj in context.selected_objects if obj.type == "MESH"]
if not sel_obj:
sel_obj = [context.object]
            sel_obj[0].select_set(True)
if len(sel_obj) == 1:
self.each = "SEPARATE"
tot_bb = []
for obj in sel_obj:
if self.each == "SEPARATE":
vpos = []
if context.mode == "OBJECT":
dimension = obj.dimensions
else:
obj.update_from_editmode()
sel_verts = [v.index for v in obj.data.vertices if v.select]
if not sel_verts:
break
vpos.extend([obj.matrix_world @ obj.data.vertices[v].co for v in sel_verts])
dimension, bb = self.calc_bbox(vpos)
if dimension[user_axis] == 0:
self.report({'INFO'}, "Cancelled: Zero dimension on selected axis")
return {'CANCELLED'}
if context.mode == "OBJECT":
self.scale_object(obj, dimension, user_axis, other_axis)
else:
edit_val = self.user_value / dimension[user_axis]
new_dimensions = [edit_val, edit_val, edit_val]
c = average_vector(vpos)
c_o = (c[0], c[1], bb[1][-1])
if not self.unit_size:
new_dimensions[other_axis[0]] = 1
new_dimensions[other_axis[1]] = 1
self.scale_mesh(new_dimensions, c_o)
else:
obj.update_from_editmode()
if context.mode == "OBJECT":
bb = [obj.matrix_world @ Vector(co) for co in obj.bound_box]
tot_bb.extend(bb)
else:
sel_verts = [v.index for v in obj.data.vertices if v.select]
if sel_verts:
tot_bb.extend([obj.matrix_world @ obj.data.vertices[v].co for v in sel_verts])
if self.each == "COMBINED":
dimension, bb = self.calc_bbox(tot_bb)
edit_val = self.user_value / dimension[user_axis]
new_dimensions = [edit_val, edit_val, edit_val]
c_o = average_vector(tot_bb)
if not self.unit_size:
new_dimensions[other_axis[0]] = 1
new_dimensions[other_axis[1]] = 1
# Meh! Just gonna mashem up in edit mode ;P
if context.mode == "OBJECT":
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action="SELECT")
self.scale_mesh(new_dimensions, c_o)
bpy.ops.object.editmode_toggle()
else:
self.scale_mesh(new_dimensions, c_o)
# Classic - To avoid undo issues n stuff
bpy.ops.object.editmode_toggle()
bpy.ops.object.editmode_toggle()
# Update stored values ( if tweaked in redo)
bpy.context.scene.kekit.qs_user_value = self.user_value
bpy.context.scene.kekit.qs_unit_size = self.unit_size
return {'FINISHED'}
# -------------------------------------------------------------------------------------------------
# Class Registration & Unregistration
# -------------------------------------------------------------------------------------------------
def register():
bpy.utils.register_class(VIEW3D_OT_ke_quickscale)
def unregister():
bpy.utils.unregister_class(VIEW3D_OT_ke_quickscale)
if __name__ == "__main__":
register()
|
"""
setup.py
written in Python3
author: C. Lockhart <chris@lockhartlab.org>
"""
# import setuptools  # noqa
from numpy.distutils.core import Extension, setup
from numpy.distutils.misc_util import Configuration
# Read version
with open('version.yml', 'r') as f:
data = f.read().splitlines()
version_dict = dict([element.split(': ') for element in data])
# Convert version_dict into a version string
version = '.'.join([str(version_dict[key]) for key in ['major', 'minor']])
if str(version_dict['micro']) != '0':  # values parsed from the file are strings, not ints
version += '.' + version_dict['micro']
print(version)
# Read in requirements.txt
with open('requirements.txt', 'r') as buffer:
requirements = buffer.read().splitlines()
# Long description
with open('README.rst', 'r') as buffer:
long_description = buffer.read()
# First make sure numpy is installed
# _setup(install_requires=['numpy'])
# Create configuration
def configuration(parent_package='', top_path=None):
config = Configuration('pathogen', parent_package, top_path)
# config.add_data_dir(('_include', 'pathetic/_include')) # not sure why this wasn't working with manifest.in
return config
# Then, install molecular
setup(
# name='pathogen',
version=version,
author='C. Lockhart',
author_email='chris@lockhartlab.org',
description='Helper functions for paths',
long_description=long_description,
url="https://www.lockhartlab.org",
packages=[
'pathogen',
],
install_requires=requirements,
# include_package_data=True,
configuration=configuration,
zip_safe=True,
)
|
# -*- coding: utf-8 -*-
import unittest
from pythonrv import rv
class TestEventNext(unittest.TestCase):
def test_event_next_called_should_be(self):
class M(object):
def m(self):
pass
@rv.monitor(m=M.m)
def spec(event):
event.next_called_should_be(event.fn.m)
a = M()
a.m()
a.m()
a.m()
def test_event_next_called_should_be_more(self):
class M(object):
def m(self):
pass
def n(self):
pass
@rv.monitor(m=M.m, n=M.n)
def spec(event):
event.next_called_should_be(event.fn.m)
a = M()
a.n()
a.m()
a.m()
with self.assertRaises(AssertionError) as e:
a.n()
self.assertEquals(e.exception.message, "Next function called should have been m")
def test_event_general_next(self):
class M(object):
def m(self):
pass
def n(self):
pass
@rv.monitor(m=M.m, n=M.n)
def spec(event):
event.next(after)
def after(event):
raise AssertionError("turtle power")
a = M()
a.m()
        with self.assertRaises(AssertionError) as e:
            a.m()
        self.assertEqual(e.exception.message, "turtle power")
        with self.assertRaises(AssertionError) as e:
            a.m()
        self.assertEqual(e.exception.message, "turtle power")
        with self.assertRaises(AssertionError) as e:
            a.m()
        self.assertEqual(e.exception.message, "turtle power")
class TestMonitorNext(unittest.TestCase):
def test_monitor_next_simple(self):
class M(object):
def m(self):
pass
def raise_error(event):
raise ValueError("buffy")
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error)
a = M()
a.m()
with self.assertRaises(ValueError) as e:
a.m()
self.assertEquals(e.exception.message, "buffy")
def test_monitor_next_multiple(self):
class M(object):
def m(self):
pass
def n(self):
pass
def raise_error(event):
raise ValueError("buffy")
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error)
a = M()
a.m()
a.n()
a.n()
a.n()
with self.assertRaises(ValueError) as e:
a.m()
self.assertEquals(e.exception.message, "buffy")
def test_monitor_next_args(self):
class M(object):
def m(self):
pass
def raise_error(event, x, y, **kwargs):
raise ValueError(x + y + kwargs.get('z', -1))
@rv.monitor(m=M.m)
def spec(event):
event.fn.m.next(raise_error, (1, 2), dict(z=15))
a = M()
a.m()
with self.assertRaises(ValueError) as e:
a.m()
        self.assertEqual(e.exception.message, 18)
|
###############################################################################
#
# Copyright 2014 by Shoobx, Inc.
#
###############################################################################
import unittest
import tempfile
import os
import shutil
import textwrap
from migrant import repository
class RepositoryTest(unittest.TestCase):
def setUp(self):
self.dir = tempfile.mkdtemp("migrant")
self.slistfname = os.path.join(self.dir, "scripts.lst")
def tearDown(self):
shutil.rmtree(self.dir)
def test_create_list(self):
repo = repository.DirectoryRepository(self.dir)
repo.init()
newrev = repo.new_script("Hello, World")
revids = repo.list_script_ids()
self.assertEqual(revids, [newrev.split("_")[0]])
def test_is_valid_scriptname(self):
repo = repository.DirectoryRepository(self.dir)
self.assertTrue(repo.is_valid_scriptname("xxxx_hello.py"))
self.assertFalse(repo.is_valid_scriptname("hello.py"))
self.assertFalse(repo.is_valid_scriptname("# Some Comment"))
self.assertFalse(repo.is_valid_scriptname("# Some_Comment"))
def test_weird_list(self):
repo = repository.DirectoryRepository(self.dir)
repo.init()
SCRIPTSLST = textwrap.dedent(
"""
# This is some Comment
a24bc_first.py
weird line
<<<< HEAD
d724a_second.py
====
1babe_third.py
>>>> CONFLICT MARKER
"""
).lstrip()
with open(self.slistfname, "w") as lf:
lf.write(SCRIPTSLST)
revids = repo.list_script_ids()
self.assertEqual(revids, ["a24bc", "d724a", "1babe"])
|
"""APIs from ml.vision.tranforms and ml.audio.transforms
"""
from .api import *
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Atari."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
import tensorflow as tf
ATARI_GAMES = collections.OrderedDict([
# ('BeamRider-v0', 'BeamRider-v0'),
# ('Breakout-v0', 'Breakout-v0'),
('Boxing-v0', 'Boxing-v0'),
# ('Pong-v0', 'Pong-v0'),
# ('Qbert-v0', 'Qbert-v0'),
# ('Seaquest-v0', 'Seaquest-v0'),
# ('SpaceInvaders-v0', 'SpaceInvaders-v0'),
])
HUMAN_SCORES_ATARI = {
# 'BeamRider-v0': 16926.5,
# 'Breakout-v0': 30.5,
'Boxing-v0': 6.0,
# 'Pong-v0': 14.6,
# 'Qbert-v0': 13455.0,
# 'Seaquest-v0': 42054.7,
# 'SpaceInvaders-v0': 1668.7,
}
RANDOM_SCORES_ATARI = {
# 'BeamRider-v0': 0.5,
# 'Breakout-v0': 1.0,
'Boxing-v0': 0.5,
# 'Pong-v0': 1.2,
# 'Qbert-v0': 232.0,
# 'Seaquest-v0': 101.0,
# 'SpaceInvaders-v0': 42.0,
}
ALL_LEVELS_ATARI = frozenset([
'BeamRider-v0',
'Breakout-v0',
'Pong-v0',
'Qbert-v0',
'Seaquest-v0',
'SpaceInvaders-v0',
])
def _transform_level_returns(level_returns):
"""Converts training level names to test level names."""
new_level_returns = {}
  for level_name, returns in level_returns.items():  # .iteritems() is Python 2 only
new_level_returns[ATARI_GAMES.get(level_name, level_name)] = returns
test_set = set(ATARI_GAMES.values())
diff = test_set - set(new_level_returns.keys())
if diff:
raise ValueError('Missing levels: %s' % list(diff))
  for level_name, returns in new_level_returns.items():
if level_name in test_set:
if not returns:
raise ValueError('Missing returns for level: \'%s\': ' % level_name)
else:
tf.logging.info('Skipping level %s for calculation.', level_name)
# print("(dmlab30.py) level returns: ", new_level_returns)
return new_level_returns
def compute_human_normalized_score(level_returns, per_level_cap):
"""Computes human normalized score.
  Levels that have different training and test versions will use the returns
  for the training level to calculate the score, e.g. returns for
  'env_id_train' will be used for 'env_id_test'.
Args:
level_returns: A dictionary from level to list of episode returns.
per_level_cap: A percentage cap (e.g. 100.) on the per level human
normalized score. If None, no cap is applied.
Returns:
A float with the human normalized score in percentage.
Raises:
ValueError: If a level is missing from `level_returns` or has no returns.
"""
new_level_returns = _transform_level_returns(level_returns)
def human_normalized_score(level_name, returns):
score = np.mean(returns)
# Checks if we are in atari
if "v0" in level_name:
human = HUMAN_SCORES_ATARI[level_name]
# print("(dmlab30.py) human score: ", human)
random = RANDOM_SCORES_ATARI[level_name]
# print("(dmlab30.py) random score: ", random)
human_normalized_score = (score - random) / (human - random) * 100
# print("(dmlab30.py) normalized score: ", human_normalized_score)
if per_level_cap is not None:
human_normalized_score = min(human_normalized_score, per_level_cap)
# print("(dmlab30.py) normalized score per level cap: ", human_normalized_score)
return human_normalized_score
new_score = [human_normalized_score(k, v) for k, v in new_level_returns.items()]
# print("(dmlab30.py) new score: ", new_score)
return np.mean(new_score)
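# Worked example using the constants above: a mean Boxing-v0 return of 3.0
# gives (3.0 - 0.5) / (6.0 - 0.5) * 100, i.e.
# compute_human_normalized_score({'Boxing-v0': [3.0]}, per_level_cap=None)
# is roughly 45.5.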
def compute_baseline_loss(advantages):
# Loss for the baseline, summed over the time dimension.
# Multiply by 0.5 to match the standard update rule:
# d(loss) / d(baseline) = advantage
return .5 * tf.reduce_sum(tf.square(advantages))
def compute_entropy_loss(logits):
policy = tf.nn.softmax(logits)
log_policy = tf.nn.log_softmax(logits)
entropy_per_timestep = tf.reduce_sum(-policy * log_policy, axis=-1)
return -tf.reduce_sum(entropy_per_timestep)
def compute_policy_gradient_loss(logits, actions, advantages):
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=actions, logits=logits)
advantages = tf.stop_gradient(advantages)
policy_gradient_loss_per_timestep = cross_entropy * advantages
return tf.reduce_sum(policy_gradient_loss_per_timestep)
|
#coding: utf-8
from unittest import TestCase
from django.core.cache import cache
from cache_utils.decorators import cached
from cache_utils.utils import sanitize_memcached_key, _func_type, _func_info
def foo(a,b):
pass
class Foo(object):
def foo(self, a, b):
pass
@classmethod
def bar(cls, x):
pass
class FuncTypeTest(TestCase):
def assertFuncType(self, func, tp):
self.assertEqual(_func_type(func), tp)
def test_func(self):
self.assertFuncType(foo, 'function')
def test_method(self):
self.assertFuncType(Foo.foo, 'method')
def test_classmethod(self):
self.assertFuncType(Foo.bar, 'classmethod')
class FuncInfoTest(TestCase):
def assertFuncInfo(self, func, args_in, name, args_out):
info = _func_info(func, args_in)
self.assertEqual(info[0], name)
self.assertEqual(info[1], args_out)
def test_func(self):
self.assertFuncInfo(foo, [1,2], 'cache_utils.tests.foo', [1,2])
def test_method(self):
foo_obj = Foo()
self.assertFuncInfo(Foo.foo, [foo_obj, 1, 2],
'cache_utils.tests.Foo.foo', [1,2])
def test_classmethod(self):
self.assertFuncInfo(Foo.bar, [Foo, 1],
'cache_utils.tests.Foo.bar', [1])
class SanitizeTest(TestCase):
def test_sanitize_keys(self):
key = "12345678901234567890123456789012345678901234567890"
self.assertTrue(len(key) >= 40)
key = sanitize_memcached_key(key, 40)
self.assertTrue(len(key) <= 40)
class ClearMemcachedTest(TestCase):
def tearDown(self):
cache._cache.flush_all()
def setUp(self):
cache._cache.flush_all()
class InvalidationTest(ClearMemcachedTest):
def test_group_invalidation(self):
cache.set('vasia', 'foo', 60, group='names')
cache.set('petya', 'bar', 60, group='names')
cache.set('red', 'good', 60, group='colors')
self.assertEqual(cache.get('vasia', group='names'), 'foo')
self.assertEqual(cache.get('petya', group='names'), 'bar')
self.assertEqual(cache.get('red', group='colors'), 'good')
cache.invalidate_group('names')
self.assertEqual(cache.get('petya', group='names'), None)
self.assertEqual(cache.get('vasia', group='names'), None)
self.assertEqual(cache.get('red', group='colors'), 'good')
cache.set('vasia', 'foo', 60, group='names')
self.assertEqual(cache.get('vasia', group='names'), 'foo')
def test_func_invalidation(self):
self.call_count = 0
@cached(60)
def my_func(a, b):
self.call_count += 1
return self.call_count
self.assertEqual(my_func(1,2), 1)
self.assertEqual(my_func(1,2), 1)
self.assertEqual(my_func(3,2), 2)
self.assertEqual(my_func(3,2), 2)
my_func.invalidate(3,2)
self.assertEqual(my_func(1,2), 1)
self.assertEqual(my_func(3,2), 3)
self.assertEqual(my_func(3,2), 3)
def test_method_invalidation(self):
self.call_count = 0
this = self
class Foo(object):
@cached(60)
def bar(self, x):
this.call_count += 1
return this.call_count
foo = Foo()
self.assertEqual(foo.bar(1), 1)
self.assertEqual(foo.bar(1), 1)
Foo.bar.invalidate(1)
self.assertEqual(foo.bar(1), 2)
def test_invalidate_nonexisting(self):
@cached(60)
def foo(x):
return 1
foo.invalidate(5) # this shouldn't raise exception
class DecoratorTest(ClearMemcachedTest):
def test_decorator(self):
self._x = 0
@cached(60, group='test-group')
def my_func(params=""):
self._x = self._x + 1
return "%d%s" % (self._x, params)
self.assertEqual(my_func(), "1")
self.assertEqual(my_func(), "1")
self.assertEqual(my_func("x"), "2x")
self.assertEqual(my_func("x"), "2x")
self.assertEqual(my_func("Василий"), "3Василий")
self.assertEqual(my_func("Василий"), "3Василий")
self.assertEqual(my_func("й"*240), "4"+"й"*240)
self.assertEqual(my_func("й"*240), "4"+"й"*240)
self.assertEqual(my_func("Ы"*500), "5"+"Ы"*500)
self.assertEqual(my_func("Ы"*500), "5"+"Ы"*500)
|
import numpy as np
import cv2
#image = cv2.imread("_data/blue_marker.jpg")
def detect_color(image, color):
# convert image from BGR to HSV (hue-saturation value)
hsv_image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
# set ranges for red
red_lower = np.array([136, 87, 111], np.uint8)
red_upper = np.array([180, 255, 255], np.uint8)
red_mask = cv2.inRange(hsv_image, red_lower, red_upper)
# green
green_lower = np.array([25, 52, 72], np.uint8)
green_upper = np.array([102, 255, 255], np.uint8)
green_mask = cv2.inRange(hsv_image, green_lower, green_upper)
# blue
blue_lower = np.array([94, 80, 2], np.uint8)
blue_upper = np.array([120, 255, 255], np.uint8)
blue_mask = cv2.inRange(hsv_image, blue_lower, blue_upper)
    # Morphological transform: dilation for each color; the bitwise_and
    # operator between the image and a mask keeps only the targeted color
    kernel = np.ones((5, 5), "uint8")
    if color == "red":
        red_mask = cv2.dilate(red_mask, kernel)
res_red = cv2.bitwise_and(image, image, mask=red_mask)
# Creating contour to track red color
contours, hierarchy = cv2.findContours(red_mask,
cv2.RETR_TREE,
cv2.CHAIN_APPROX_SIMPLE)
for pic, contour in enumerate(contours):
area = cv2.contourArea(contour)
if(area > 300):
# redundant code for display (not used here)
x, y, w, h = cv2.boundingRect(contour)
imageFrame = cv2.rectangle(image, (x, y),
(x + w, y + h),
(0, 0, 255), 2)
cv2.putText(image, "Red Colour", (x, y),
cv2.FONT_HERSHEY_SIMPLEX, 1.0,
(0, 0, 255))
return True
elif (color == "green"):
        green_mask = cv2.dilate(green_mask, kernel)
res_green = cv2.bitwise_and(image, image, mask=green_mask)
contours, hierarchy = cv2.findContours(green_mask,
cv2.RETR_TREE,
cv2.CHAIN_APPROX_SIMPLE)
for pic, contour in enumerate(contours):
area = cv2.contourArea(contour)
if (area > 300):
# redundant code for display (not used here)
x, y, w, h = cv2.boundingRect(contour)
imageFrame = cv2.rectangle(image, (x, y),
(x + w, y + h),
(0, 0, 255), 2)
cv2.putText(image, "Green Colour", (x, y),
cv2.FONT_HERSHEY_SIMPLEX, 1.0,
(0, 255, 0))
return True
elif (color == "blue"):
        blue_mask = cv2.dilate(blue_mask, kernel)
res_blue = cv2.bitwise_and(image, image,
mask=blue_mask)
contours, hierarchy = cv2.findContours(blue_mask,
cv2.RETR_TREE,
cv2.CHAIN_APPROX_SIMPLE)
for pic, contour in enumerate(contours):
area = cv2.contourArea(contour)
if (area > 300):
# redundant code for display (not used here)
x, y, w, h = cv2.boundingRect(contour)
imageFrame = cv2.rectangle(image, (x, y),
(x + w, y + h),
(0, 0, 255), 2)
cv2.putText(image, "Blue Colour", (x, y),
cv2.FONT_HERSHEY_SIMPLEX, 1.0,
(255, 0, 0))
return True
#cv2.imshow("HIGH", image)
else:
print("Invalid color")
return False
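# Example usage (a sketch, assuming the sample image commented out above exists):
#   image = cv2.imread("_data/blue_marker.jpg")
#   print(detect_color(image, "blue"))  # True when a sufficiently large blue contour is found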
|
import time, os, sys, shutil
# for math and plotting
import pandas as pd
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
#%matplotlib notebook
#%matplotlib widget
from itertools import compress # for list selection with logical
from tqdm import tqdm
from multiprocessing import Process
# also JIT stuff
from numba import jit, njit
# and pytorch
import torch
import matplotlib
# Say, "the default sans-serif font is COMIC SANS"
matplotlib.rcParams['font.sans-serif'] = "Liberation Sans"
# Then, "ALWAYS use sans-serif fonts"
matplotlib.rcParams['font.family'] = "sans-serif"
matplotlib.rc('font', family='sans-serif')
matplotlib.rc('text', usetex='false')
matplotlib.rcParams.update({'font.size': 13})
from palettable.cmocean.sequential import Algae_6
cmpl = Algae_6.mpl_colors
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
spine.set_position(('outward', 10)) # outward by 10 points
spine.set_smart_bounds(True)
else:
spine.set_color('none') # don't draw spine
# turn off ticks where there is no spine
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
# no yaxis ticks
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
# no xaxis ticks
ax.xaxis.set_ticks([])
#%% A few geometry functions
# @njit
def unit_vector(v):
if np.sum(v) != 0:
v = v/np.sqrt(v[0]**2+v[1]**2+v[2]**2 )
return v
#@njit
def angle_between(v1, v2):
""" Returns the SIGNED!! angle in radians between vectors 'v1' and 'v2'::
>>> angle_between((1, 0, 0), (0, 1, 0))
1.5707963267948966
>>> angle_between((1, 0, 0), (1, 0, 0))
0.0
>>> angle_between((1, 0, 0), (-1, 0, 0))
3.141592653589793
"""
v1_u = unit_vector(v1)
v2_u = unit_vector(v2)
angle = np.arccos(np.dot(v1_u, v2_u))
cross_product = np.cross(v1_u,v2_u)
ez = np.array([0,0,1.])
# check for the sign!
if np.dot(cross_product,ez)< 0:
return -angle
else:
return angle
def click_one_mouse(positions):
import cmocean
###############
# Show a 2D plot and ask for two clicks
###############
plt.figure(figsize = (5,5))
plt.scatter(positions[:,0],positions[:,1],c=positions[:,2]/np.max(positions[:,2]),s=5,cmap=cmocean.cm.algae_r)
    ax = plt.gca()
    ax.set_aspect('equal', 'datalim')
plt.title('click center of hip, then mid, then head of mouse!')
w,h = 570,800
# plt.get_current_fig_manager().window.setGeometry(1920-w-10,60,w,h)
click_points = np.asanyarray(plt.ginput(3))
hip_click = click_points[0]
mid_click = click_points[1]
nose_click = click_points[2]
# plt.show()
###############
# Now calculate a reference direction
###############
v_click = nose_click-hip_click
# and add to the plot
def add_vec_from_point(c_mid_est,v_ref_est):
data = np.vstack((c_mid_est,c_mid_est+v_ref_est))
plt.plot(data[:,0],data[:,1],c='red')
plt.plot(data[0,0],data[0,1],c='red',marker='o')
plt.figure()
plt.scatter(positions[:,0],positions[:,1],c=positions[:,2]/np.max(positions[:,2]),s=5)
    ax = plt.gca()
    ax.set_aspect('equal', 'datalim')
add_vec_from_point(hip_click,v_click)
plt.axhline(y=0)
plt.axvline(x=0)
plt.xlabel('x [m]')
plt.ylabel('y [m]')
plt.title('estimated hip and heading direction')
w,h = 570,800
# plt.get_current_fig_manager().window.setGeometry(1920-w-10,60,w,h)
plt.show()
return hip_click,mid_click,nose_click
#%% make a handy function to click the first frame_
def good_guess(hip_click,mid_click,nose_click):
# translation vector, which moves the center of the mouse body
z_guess = 0.022 # guess the z as around 2 cm
t_body = np.append(hip_click,z_guess)
# set the scaling
s = 0.8
# guess the rotation as the body model:
# - alpha is around x axis, none, I guess
alpha = 0
# - beta is around y axis, so elevation
beta = 0
# - gamma is around z, so in the xy plane, the left-right angle of the mouse wrt. e^hat_x
# get the vector
v_click = mid_click-hip_click
# make it a 3d vector to use with the handy function
target = np.append(v_click,0)
angle_with_x = angle_between(np.array([1.,0,0]),target)
gamma = angle_with_x
# same with the head
theta = 0
v_click = nose_click-mid_click
# make it a 3d vector to use with the handy function
target = np.append(v_click,0)
angle_with_x = angle_between(np.array([1.,0,0]),target)
phi = angle_with_x - gamma # NB since phi is with respect to x', not x
psi = 0
return alpha,beta,gamma,s,psi,theta,phi,t_body
def click_mouse_body(positions):
hip_click,mid_click,nose_click = click_one_mouse(positions)
#convert the clicks to a guess
alpha,beta,gamma,s,psi,theta,phi,t_body = good_guess(hip_click,mid_click,nose_click)
# and save the best guess
x0_guess = np.hstack((alpha,beta,gamma,s,psi,theta,phi,t_body))
return x0_guess,hip_click,mid_click,nose_click
def initialize_x0(positions,click_start=True):
if click_start:
# open a start frame, plot, and accept clicks
x0_mouse0,hip_click0,mid_click0,nose_click0 = click_mouse_body(positions)
x0_mouse1,hip_click1,mid_click1,nose_click1 = click_mouse_body(positions)
x0_start = np.hstack((x0_mouse0,x0_mouse1))
click_holder = [hip_click0,mid_click0,nose_click0,hip_click1,mid_click1,nose_click1]
    else:
        # the starting guess, in numpy on cpu
        x0_start = np.array([0., 2.90604767, 0.5, 0., -0.18267935,
                             -0.00996607, -0.00510009, 0.022, 0., 2.53930531,
                             0.5, 0., 0.28053679, 0.09732097, 0.05387669,
                             0.022])
        click_holder = []  # no clicks recorded when starting from the stored guess
    return x0_start, click_holder
|
import arabic_reshaper
import math
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
import pickle
import seaborn as sns
import sys
import warnings
from bidi.algorithm import get_display
from matplotlib.backends import backend_gtk3
from matplotlib.patches import Rectangle
from iran_stock import get_iran_stock_network
from settings import OUTPUT_DIR
warnings.filterwarnings('ignore', module=backend_gtk3.__name__)
sns.set()
IRAN_STOCK_NETWORK = get_iran_stock_network()
# Algorithm Settings
SINDY_ITERATIONS = 10
MAX_PAST_DAYS = 25
MAX_FUTURE_DAYS = 10
MAX_POWER = 2
LAMBDA_RANGE = [10 ** -14, 2 * 10 ** -2] # empirical
LAMBDA_STEP = 2
# Calculated Settings
CANDIDATE_LAMBDAS = [
LAMBDA_STEP ** i for i in range(
int(math.log(abs(LAMBDA_RANGE[0])) / math.log(LAMBDA_STEP)),
int(math.log(abs(LAMBDA_RANGE[1])) / math.log(LAMBDA_STEP))
)
]
def _get_theta(x):
time_frames = x.shape[0]
column_list = [np.ones(time_frames)]
for i in range(x.shape[1]):
x_i = x[:time_frames, i]
for power in range(1, MAX_POWER + 1):
column_list.append(x_i ** power)
for j in range(x.shape[1]):
if j > i:
x_j = x[:time_frames, j]
for first_power in range(1, MAX_POWER + 1):
for second_power in range(1, MAX_POWER + 1):
column_list.append((x_i ** first_power) * (x_j ** second_power))
theta = np.column_stack(column_list)
return theta
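# _sindy below implements the sequentially thresholded least-squares regression
# at the core of SINDy (Brunton et al., 2016): coefficients smaller than the
# candidate lambda are zeroed and the surviving terms are refit, repeated for
# SINDY_ITERATIONS rounds.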
def _sindy(x_dot, theta, candidate_lambda):
xi = np.zeros((x_dot.shape[1], theta.shape[1]))
for i in range(x_dot.shape[1]):
ith_derivative = x_dot[:, i]
ith_xi = np.linalg.lstsq(theta, ith_derivative, rcond=None)[0]
for j in range(SINDY_ITERATIONS):
small_indices = np.flatnonzero(np.absolute(ith_xi) < candidate_lambda)
big_indices = np.flatnonzero(np.absolute(ith_xi) >= candidate_lambda)
ith_xi[small_indices] = 0
ith_xi[big_indices] = np.linalg.lstsq(theta[:, big_indices], ith_derivative, rcond=None)[0]
xi[i] = ith_xi
return xi
def _optimum_sindy(x_dot, theta):
least_cost = sys.maxsize
best_xi = None
for candidate_lambda in CANDIDATE_LAMBDAS:
xi = _sindy(x_dot, theta, candidate_lambda)
complexity = np.count_nonzero(xi)
x_dot_hat = np.matmul(theta, xi.T)
mse = np.square(x_dot - x_dot_hat).mean()
if complexity: # zero would mean no statements
cost = mse * complexity
if cost < least_cost:
least_cost = cost
best_xi = xi
return best_xi
def _least_squares(x_dot, theta):
return np.linalg.lstsq(theta, x_dot, rcond=None)[0].T
def _get_x_dot(x):
x_dot = (x[1:] - x[:len(x) - 1])
return x_dot
def _get_success_rates():
x = IRAN_STOCK_NETWORK.x
x_dot = _get_x_dot(x)
theta = _get_theta(x)
success_rates = np.zeros((MAX_PAST_DAYS + 1, MAX_FUTURE_DAYS + 1, x.shape[1])) # includes 0
for past in range(1, MAX_PAST_DAYS + 1):
for future in range(1, MAX_FUTURE_DAYS + 1):
# progress bar
sys.stdout.write('\r[%d/%d]' % ((past - 1) * MAX_FUTURE_DAYS + future, MAX_PAST_DAYS * MAX_FUTURE_DAYS))
sys.stdout.flush()
cached_path = os.path.join(OUTPUT_DIR, '%d_%d_success_rate.p' % (past, future))
if os.path.exists(cached_path):
with open(cached_path, 'rb') as cached_file:
success_rate = pickle.load(cached_file)
else:
success = np.zeros(x.shape[1])
total_predictions = x.shape[0] - future - past
for today in range(past, x.shape[0] - future):
# progress bar
sys.stdout.write('\r[%d/%d]' % (today - past + 1, total_predictions))
sys.stdout.flush()
past_x_dot = x_dot[today - past:today]
past_theta = theta[today - past:today]
# xi = _optimum_sindy(past_x_dot, past_theta) # TODO uncomment
xi = _least_squares(past_x_dot, past_theta) # TODO remove
future_x = np.copy(x[today:today + 1]) # deep copy, because we will modify it
future_x_dot = None
for _ in range(future):
future_theta = _get_theta(future_x)
future_x_dot = np.matmul(future_theta, xi.T)
future_x += future_x_dot
success += (np.sign(future_x_dot[0]) == np.sign(x_dot[today + future - 1]))
print() # newline
success_rate = success / total_predictions
with open(cached_path, 'wb') as cached_file:
pickle.dump(success_rate, cached_file)
success_rates[past, future] = success_rate
print() # newline
return success_rates
def _create_heat_maps():
success_rates = _get_success_rates() * 100 # convert to percent
for node_id in range(success_rates.shape[2]):
success_matrix = success_rates[1:, 1:, node_id]
hot_spot = None
max_success = 0
for i in range(1, success_matrix.shape[0] - 1):
for j in range(1, success_matrix.shape[1] - 1):
if np.all(success_matrix[i-1:i+2, j-1:j+2] > 50) and success_matrix[i, j] > max_success:
hot_spot = (j, i)
max_success = success_matrix[i, j]
data_set = pd.DataFrame(
success_matrix,
columns=list(range(1, success_rates.shape[1])),
index=list(range(1, success_rates.shape[0]))
)
plt.subplots(figsize=(10, 10))
ax = sns.heatmap(data_set, annot=True, fmt=".1f", vmin=40, vmax=60, center=50, linewidths=3)
if hot_spot:
ax.add_patch(Rectangle(hot_spot, 1, 1, fill=False, edgecolor='blue', lw=5))
node_instrument_id, node_name = IRAN_STOCK_NETWORK.node_labels[node_id].split('_')
reshaped_name = arabic_reshaper.reshape(node_name)
name_display = get_display(reshaped_name)
ax.set(xlabel='Future Days', ylabel='Past Days', title=name_display)
plt.savefig(os.path.join(OUTPUT_DIR, 'node_%d_%s_heat_map.png' % (node_id, node_instrument_id)))
plt.close('all')
def run():
_create_heat_maps()
if __name__ == '__main__':
run()
|
a = int(input('Enter the number:\n'))
count = 0
if a == 0:
count += 1
else:
a = abs(a) # a negative number has as many digits as its absolute value
while a > 0:
a //= 10 # integer division avoids the float rounding risk of a *= 0.1
count += 1
print(count) # e.g. 12345 -> 5
|
from django.contrib import admin
from article.models import Article, ArticleHeading, ArticleFeedback, ArticleEdit, ArticlePost
# Register your models here.
class ChoiceInline(admin.StackedInline):
model = ArticleHeading
extra = 1
class ArticleAdmin(admin.ModelAdmin):
fieldsets = [
(None, {'fields': ['views', 'searchtags', 'publish']}),
('Article Detail', {'fields': ['title', 'author', 'slug', 'content']}),
('Date Information', {'fields': ['timeStamp']}),
('Image', {'fields': ['image_title', 'image']}),
]
list_display = ('title', 'author', 'timeStamp', 'views', 'publish')
list_filter = ['timeStamp', 'publish', 'views', 'author']
search_fields = ['title']
inlines = [ChoiceInline]
admin.site.register(Article, ArticleAdmin)
class ArticleFeedbackAdmin(admin.ModelAdmin):
list_display = ('article', 'user', 'timeStamp')
list_filter = ['timeStamp', 'article', 'user']
admin.site.register(ArticleFeedback, ArticleFeedbackAdmin)
class ArticleEditAdmin(admin.ModelAdmin):
list_display = ('article', 'user', 'timeStamp')
list_filter = ['timeStamp', 'article', 'user']
admin.site.register(ArticleEdit, ArticleEditAdmin)
class ArticlePostAdmin(admin.ModelAdmin):
list_display = ('title', 'user', 'timeStamp')
list_filter = ['timeStamp', 'user']
admin.site.register(ArticlePost, ArticlePostAdmin) |
from Mongo import *
from Mongo.Gaming import *
from Mongo.Gaming.Imports import *
|
from flask import Flask, request
import bs4 as bs
import requests
import json
import os
import datetime
import time
import pytz
import threading
app = Flask(__name__)
debug_print_on = False
######################### Global Variables #########################
f = open('./data.json', "r")
db = json.loads(f.read())
f.close()
users = db["users"]
botToken = db["config"]["botToken"]
extrasPhotoId = db["config"]["extrasPhotoId"]
botUsername = db["config"]["botUsername"]
appUrl = db["config"]["appUrl"]
debugId = db["config"]["debugId"]
secretSauce = db["config"]["secretSauce"]
messUrl = db["config"]["messUrl"]
types = {4: "Current Menu", 1: "Breakfast", 2: "Lunch", 3: "Dinner", 5: "Full Menu"}
BLDString = {0:{1:None, 2:None, 3:None, 4:None, 5:None}, 1:{1:None, 2:None, 3:None, 4:None, 5:None}}
inlineResults = {0:[], 1:[]}
replyMarkup = '{"keyboard":[["/dh1_menu","/dh2_menu"],["/dh1_extras","/dh2_extras"],["/dh1_notifs","/dh2_notifs"],["/both_notifs","/deregister"],["/refresh", "/help"]]}'
helpString = """UPDATE: The notifications have been disabled due to vacations. Remind @vishaaaal to turn them back on if you want! \n\n\nHi. This bot will send you notifications with Menu of SNU Mess of your choice - three times a day: 7:30AM, 11:30AM, 7:30PM.\n\n\nYou can also interact with it here or use it inline in any chat/groups by typing "@snumessbot".\n\n\n/dh1\_menu - Get today's DH1 Menu\n/dh2\_menu - Get today's DH2 Menu\n/dh1\_extras - DH1's Ala-Carte, Evening, Drinks menu.\n/dh2\_extras - DH2's Rollu, Evening, Drinks menu.\n/dh1\_notifs - Daily notifications for DH1\n/dh2\_notifs - Daily notifications for DH2\n/both\_notifs - Daily notifications for BOTH\n/deregister - NO daily notifications\n/refresh - Update menu from SNU website\n/help - Display this help menu\n\nGithub repo: https://github.com/FlameFractal/SNU-Mess-Menu-Notifs/\n\n\nTo report a bug, suggest improvements, ask anything - msg @vishaaaal."""
######################### Some important functions #########################
def debug_print(debug_message):
if debug_print_on:
print(str(debug_message))
def update_db():
try:
f = open('./data.json', "w")
db["users"] = users
debug_print(json.dumps(db)) # avoid printing the full database (it contains the bot token) outside debug mode
f.write(json.dumps(db)) # update users database
f.close()
except:
debug_print('error updating db')
def sendMessage(user_id, msg):
users[user_id]["last_botReply"] = (requests.get('https://api.telegram.org/bot'+botToken+'/sendMessage?parse_mode=Markdown&chat_id='+str(user_id)+'&text='+msg+'&reply_markup='+replyMarkup+'&disable_web_page_preview=TRUE').text).replace('"',"'")
update_db()
return users[user_id]["last_botReply"]
def sendPhoto(user_id, photo, caption=''):
users[user_id]["last_botReply"] = (requests.get('https://api.telegram.org/bot'+botToken+'/sendPhoto?chat_id='+str(user_id)+'&photo='+str(photo)+'&caption='+str(caption)+'&replyMarkup='+replyMarkup).text).replace('"',"'")
update_db()
return users[user_id]["last_botReply"]
def answerInlineQuery(user_id, query_id, mess):
users[user_id]["last_botReply"] = (requests.get('https://api.telegram.org/bot'+botToken+'/answerInlineQuery?inline_query_id='+str(query_id)+'&switch_pm_text=Slow net? Try inside&switch_pm_parameter=help&results='+json.dumps(inlineResults[mess])).text).replace('"',"'")
update_db()
return users[user_id]["last_botReply"]
def getDishes(menuItems, mess_choice, t): # here t can only be 1, 2 or 3 -- handle 4 and 5 separately
s = "*DH"+str(mess_choice+1)+" "+types[t]+"*\n----------------\n"
for dish in menuItems[t].find_all('p'):
s = s+(dish.text).replace('"','').title().strip()+"\n" # strip the stray quote that appears in some DH2 dish names (e.g. '"toast') and other artefacts
return s
def fetchMenuItems():
try:
global inlineResults
global BLDString
time = datetime.datetime.now(pytz.timezone('Asia/Kolkata'))
datestamp = "*"+time.strftime("%A")+", "+time.strftime("%d")+" "+time.strftime("%B")+" "+str(time.year)+"*\n\n"
for mess_choice in (0,1): # construct strings for all 10 types of menus
try:
menuItems= ((bs.BeautifulSoup(requests.get(messUrl, timeout=1).text,'lxml')).find_all(id='dh2MenuItems'))[mess_choice].find_all('td')
if('No Menu' in menuItems[0].text.strip()):
raise requests.exceptions.RequestException("_No Menu Available!_")
for t in types:
if t==1 or t==2 or t==3:
BLDString[mess_choice][t] = datestamp + getDishes(menuItems, mess_choice, t)
if t==4: # get according to current time
if time.hour<=10: #breakfast - midnight to 10:59am #send entire menu at breakfast
BLDString[mess_choice][t] = datestamp + getDishes(menuItems, mess_choice,1)+"\n"+getDishes(menuItems, mess_choice,2)+"\n"+getDishes(menuItems, mess_choice,3)
elif 11<=time.hour<=15: #lunch - 11am to 3:59pm
BLDString[mess_choice][t] = datestamp + getDishes(menuItems, mess_choice,2)
else: # dinner - 4pm to midnight
BLDString[mess_choice][t] = datestamp + getDishes(menuItems, mess_choice,3)
if t==5:
BLDString[mess_choice][t] = datestamp + getDishes(menuItems, mess_choice,1)+"\n"+getDishes(menuItems, mess_choice,2)+"\n"+getDishes(menuItems, mess_choice,3)
except requests.exceptions.RequestException as e:
for t in types:
BLDString[mess_choice][t] = datestamp+"*DH"+str(mess_choice+1)+" "+"*\n----------------\n"+"Oops. Error. Verify at "+messUrl+", and to refresh my menu send /refresh.\n\n*ERROR:* _"+str(e)+"_\n"
# construct strings for fast inline response
counter = 0
inlineResults = {0:[], 1:[]}
for mess_choice in (0,1):
for t in types:
inlineResults[mess_choice].append({"type":"article","id":str(counter),"title":"DH"+str(mess_choice+1)+" - "+types[t],"input_message_content":{"message_text":BLDString[mess_choice][t], "parse_mode": "Markdown"}})
counter = counter+1
inlineResults[mess_choice].append({"type":"photo","id":str(counter),"title":"DH"+str(mess_choice+1)+" - Extras Menu","photo_file_id":str(extrasPhotoId[mess_choice]),"description":"DH"+str(mess_choice+1)+" - Extras Menu","caption":"DH"+str(mess_choice+1)+" - Extras Menu"})
counter = counter + 1
# debug_print(str(BLDString)+"\n\n\n"+str(inlineResults))
return "I'm up to date with SNU website now. Thanks!"
except:
return "Error fetching menu"
def sendCurrentMenuAllUsers():
fetchMenuItems()
for user_id in users:
if "mess_choice" in users[user_id] and users[user_id]["mess_choice"] >= 0: # send only if registered for notifications
if users[user_id]["mess_choice"] == 2:
sendMessage(user_id, BLDString[0][4]+BLDString[1][4])
else:
sendMessage(user_id, BLDString[users[user_id]["mess_choice"]][4])
debug_print("sent notification to "+user_id)
return('')
def att(attended, conducted, percentage='75'):
attended = int(attended)
conducted = int(conducted)
percentage = int(percentage)
if attended <= 0 or conducted <= 0 or attended > conducted or not 100 >= percentage >= 0: # full attendance (attended == conducted) is valid
return '/att \[attended] \[conducted] \[req att % eg. 75] (optional)'
temp = conducted
temp2 = attended
# can't miss
if (attended/conducted < percentage/100):
while(temp2/temp <= percentage/100):
temp2 = temp2 + 1
temp = temp + 1
s = "You need to attend "+str(temp2-attended)+" more classes for final of %0.2f" %((temp2*100)/(temp))
return s+"% = "+str(temp2)+"/"+str(temp)
else: # can miss
while(attended/temp>=percentage/100):
temp = temp+1
s = "You can miss "+str(temp-1-conducted)+" more classes for final of %0.2f" %((attended*100)/(temp-1))
return s +"% = "+str(attended)+"/"+str(temp-1)
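# Worked example: att(20, 30) with the default 75% target finds the smallest
# k with (20 + k) / (30 + k) > 0.75, i.e. k = 11, and reports
# "You need to attend 11 more classes for final of 75.61% = 31/41".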
def webhook_handler(response):
debug_print(response)
try:
requests.get('https://api.telegram.org/bot'+botToken+'/sendMessage?parse_mode=Markdown&chat_id='+debugId+'&text='+str(response), timeout=1)
except:
debug_print('')
if("inline_query" in response):
field = "inline_query"
user_id = str(response[field]["from"]["id"]) # get the user id
if user_id == '376483850':
return 'spam blocked', 200
query_id = str(response[field]["id"]) # get the query id
query_msg = str(response[field]["query"]) # get the message
if user_id in users:
users[user_id]["last_query"] = str(response)
else:
users[user_id] = {}
users[user_id]["last_query"] = str(response)
users[user_id]["name"] = response[field]["from"]["first_name"] if "first_name" in response[field]["from"] else 'unknown' # get the first name
if query_msg != '1' and query_msg != '2': # if not typed 1 or 2, show default
query_msg = users[user_id]["mess_choice"]+1 if "mess_choice" in users[user_id] else "1" # if user hasnt entered query, show him his mess_choice menu !
return(answerInlineQuery(user_id, query_id, int(query_msg)-1))
if("message" in response):
field = "message"
elif("edited_message" in response):
field = "edited_message"
else:
return str(response)
# extract user information
user_msg = str(response[field]["text"]) if "text" in response[field] else '' # get the message
user_id = str(response[field]["from"]["id"]) if "id" in response[field]["from"] else '999' # get the id
if user_id == '376483850':
return 'spam blocked', 200
if user_id not in users:
users[user_id] = {}
users[user_id]["name"] = response[field]["from"]["first_name"] if "first_name" in response[field]["from"] else 'unknown' # get the first name
users[user_id]["name"] = users[user_id]["name"] + " " + response[field]["chat"]["last_name"] if "last_name" in response[field]["from"] else users[user_id]["name"] # get the last name
users[user_id]["username"] = response[field]["chat"]["username"] if "username" in response[field]["chat"] else 'unknown' # get the username
users[user_id]["last_query"] = str(response)
if "mess_choice" not in users[user_id]:
users[user_id]["mess_choice"] = 1
botReply = ""
if '/start' in user_msg :
users[user_id]["mess_choice"] = 1
botReply = "Hello there! Welcome!\nYour request for notifications has been registered.\nDefault Mess DH-2 selected.\nType '/help' to switch mess!"
elif user_msg == '/dh1_notifs':
users[user_id]["mess_choice"] = 0
botReply = "Your request for notifications has been registered.\nMess DH-1 selected.\nThank you!"
elif user_msg == '/dh2_notifs':
users[user_id]["mess_choice"] = 1
botReply = "Your request for notifications has been registered.\nMess DH-2 selected.\nThank you!"
elif user_msg == '/both_notifs':
users[user_id]["mess_choice"] = 2
botReply = "Your request for notifications has been registered.\nMess DH-1 and DH-2 selected.\nThank you!"
elif user_msg == '/deregister':
users[user_id]["mess_choice"] = -1
botReply = "Your request for deregistering for notifications has been noted.\nThank you!"
elif user_msg == '/dh1_menu':
botReply = BLDString[0][5]
elif user_msg == '/dh2_menu':
botReply = BLDString[1][5]
elif user_msg == '/dh1_extras':
return(sendPhoto(user_id, extrasPhotoId[0], 'DH1 Extras Menu'))
elif user_msg == '/dh2_extras':
return(sendPhoto(user_id, extrasPhotoId[1], 'DH2 Extras Menu'))
elif user_msg == '/refresh':
botReply = '*'+str(fetchMenuItems())+'*'
elif '/att' in user_msg:
a = user_msg.split(' ')
botReply = '/att \[attended] \[conducted] \[req att % eg. 75] (optional)' if len(a)<3 else att(a[1],a[2]) if len(a)==3 else att(a[1],a[2],a[3])
elif '/adhoc_update'+secretSauce in user_msg: # admin function
time = datetime.datetime.now(pytz.timezone('Asia/Kolkata'))
datestamp = "*"+time.strftime("%A")+", "+time.strftime("%d")+" "+time.strftime("%B")+" "+str(time.year)+"*\n\n"
new_menu = user_msg.replace('/adhoc_update'+secretSauce,'')
if '/dh1' in new_menu:
new_menu = "*Menu for DH1*\n\n" + new_menu.replace('/dh1 ','').strip()
for user_id in users:
if "mess_choice" in users[user_id] and (users[user_id]["mess_choice"] == 0 or users[user_id]["mess_choice"] == 2):
sendMessage(user_id, datestamp+new_menu.strip())
elif '/dh2' in new_menu:
new_menu = "*Menu for DH2*\n\n" + new_menu.replace('/dh2 ','')
for user_id in users:
if "mess_choice" in users[user_id] and (users[user_id]["mess_choice"] == 1 or users[user_id]["mess_choice"] == 2):
sendMessage(user_id, datestamp+new_menu.strip())
else:
sendMessage(user_id,"Oops. Did not understand that.")
return str(response)
elif user_msg == '/help':
botReply = helpString
else:
botReply = "Oops! I don't understand that yet!\nType '/help' to see all the commands."
sendMessage(user_id, botReply)
return str(response)
######################### APIs to talk to the bot #########################
@app.route('/botWebhook'+botToken, methods=['POST'])
def fn():
try:
debug_print("starting new thread for webhook")
threading.Thread(target=webhook_handler, args=[request.get_json()]).start()
except:
debug_print("coudln't start thread, responsing webhook normally")
webhook_handler(request.get_json())
return ' '
@app.route('/fetchMenuItems'+botToken, methods=['GET'])
def fn2():
try:
debug_print("starting new thread for fetching menu items")
threading.Thread(target=fetchMenuItems).start()
except:
debug_print("coudln't start thread, fetching menu items normally")
fetchMenuItems()
return ' '
@app.route('/sendCurrentMenuAllUsers'+botToken, methods=['GET'])
def fn3():
try:
debug_print("started thread for sending notifications")
threading.Thread(target=sendCurrentMenuAllUsers).start()
except:
debug_print("coudln't start thread, sending notifications normally")
sendCurrentMenuAllUsers()
return ' '
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
return "<a href='http://t.me/"+botUsername+"'>http://t.me/"+botUsername+"</a>"
######################### Start the flask server! #########################
debug_print('webhook set - '+str(requests.get('https://api.telegram.org/bot'+botToken+'/setWebhook?url='+appUrl+'/botWebhook'+botToken))) #set bot webhook automatically
fetchMenuItems()
if __name__ == "__main__":
app.run(threaded=True, host='0.0.0.0', port=int(os.environ.get('PORT', 5000)))
|
BATCH_SIZE = 128
LEARNING_RATE = 0.1
CRUSH_RATE = 0.0001
NUM_CORNERS = 4
ISIZE = 32
CHANNELS = 1
NUM_CLASSES = 13 * 4
SAVE_PATH = "./model"
SETS = ["train", "test"]
NUM_THREADS = 12 |
from django.shortcuts import render, redirect, reverse
from django.http import HttpResponseRedirect
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.decorators import login_required
from . import forms
from . models import Ticket
# Create your views here.
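# Ticket.status values used throughout these views (inferred from usage):
# 'FIRST' = new, 'SECOND' = in progress, 'THIRD' = done, 'FOURTH' = discarded.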
@login_required
def home(request):
ticket_new = Ticket.objects.filter(status='FIRST')
ticket_inprogress = Ticket.objects.filter(status='SECOND')
ticket_done = Ticket.objects.filter(status='THIRD')
ticket_discard = Ticket.objects.filter(status='FOURTH')
context = {'first': ticket_new,
'second': ticket_inprogress, 'third': ticket_done,
'fourth': ticket_discard}
return render(request, 'home.html', context)
@login_required
def user_view(request, id):
html = 'userdata.html'
created_ticket = Ticket.objects.filter(filed_by=id)
assigned_ticket = Ticket.objects.filter(assigned_to=id)
completed_ticket = Ticket.objects.filter(completed_by=id)
return render(request, html, {'created_ticket': created_ticket, 'assigned_ticket': assigned_ticket, 'completed_ticket': completed_ticket})
@login_required
def inserticket(request):
form = forms.Ticketform()
if request.method == 'POST':
form = forms.Ticketform(request.POST)
if form.is_valid():
post = form.save(commit=False)
post.filed_by = request.user
post.save()
return redirect('home')
return render(request, 'insert.html', {'form': form})
@login_required
def ticket_edit(request, id):
ticket = Ticket.objects.get(id=id)
if request.method == 'POST':
form = forms.EditForm(request.POST, instance=ticket)
if form.is_valid():
post = form.save(commit=False)
if post.assigned_to is None and post.completed_by is None and post.filed_by is not None:
post.status = 'FIRST'
post.save()
return redirect(home)
elif post.assigned_to is not None and post.completed_by is None:
post.status = 'SECOND'
post.save()
return redirect(home)
elif post.assigned_to is not None and post.completed_by is not None:
post.status = 'THIRD'
post.save()
return redirect(home)
post.save()
return redirect(home)
form = forms.EditForm(instance=ticket)
return render(request, 'ticket_edit.html', {'form': form})
@login_required
def set_invalid(request, id):
ticket = Ticket.objects.get(id=id)
ticket.status = 'FOURTH'
ticket.save()
return redirect(home)
@login_required
def assigned_you(request, id):
ticket = Ticket.objects.get(id=id)
ticket.assigned_to = request.user
ticket.status = 'SECOND'
ticket.save()
return redirect(home)
@login_required
def completed_you(request, id):
ticket = Ticket.objects.get(id=id)
ticket.completed_by = request.user
ticket.status = 'THIRD'
ticket.save()
return redirect(home)
@login_required
def ticket_detail(request, id):
ticket = Ticket.objects.get(id=id)
return render(request, 'details.html', {'ticket': ticket})
@login_required
def signup(request):
form = forms.SignUpForm()
if request.method == 'POST':
form = forms.SignUpForm(request.POST)
if form.is_valid():
user = form.save()
user.set_password(user.password)
user.save()
return redirect('/')
else:
form = forms.SignUpForm()
return render(request, 'signup.html', {'form': form})
def logout_action(request):
logout(request)
return redirect(request.GET.get("next", reverse('login')))
|
def main() -> None:
import sys
from .scanner import PEiDScanner, format_as_katc_peid
if len(sys.argv) != 2:
print(f"Usage: {sys.argv[0]} input_file")
return
peid_scanner = PEiDScanner()
scan_result = peid_scanner.scan_file(sys.argv[1])
print(format_as_katc_peid(scan_result))
if __name__ == "__main__":
main()
|
import pandas as pd
import matplotlib.pyplot as plt
import ipywidgets as widgets
import numpy as np
def heatmap(x, y, size):
'''
https://towardsdatascience.com/better-heatmaps-and-correlation-matrix-plots-in-python-41445d0f2bec
'''
fig, ax = plt.subplots()
# Mapping from column names to integer coordinates
x_labels = [v for v in sorted(x.unique())]
y_labels = [v for v in sorted(y.unique())]
x_to_num = {p[1]:p[0] for p in enumerate(x_labels)}
y_to_num = {p[1]:p[0] for p in enumerate(y_labels)}
size_scale = 500
ax.scatter(
x=x.map(x_to_num), # Use mapping for x
y=y.map(y_to_num), # Use mapping for y
s=size * size_scale, # Vector of square sizes, proportional to size parameter
marker='s' # Use square as scatterplot marker
)
margin = 0.5
ax.set_xlim([- margin, len(x_labels) - 1 + margin])
ax.set_ylim([- margin, len(y_labels) - 1 + margin])
# Show column labels on the axes
ax.set_xticks([x_to_num[v] for v in x_labels])
ax.set_xticklabels(x_labels, rotation=45, horizontalalignment='right')
ax.set_yticks([y_to_num[v] for v in y_labels])
ax.set_yticklabels(y_labels)
plt.show(fig)
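# Example usage (hypothetical numeric DataFrame `df`): melt the correlation
# matrix into long form and pass the absolute correlation as the size, as in
# the article linked above:
#
#   corr = df.corr().stack().reset_index()
#   corr.columns = ['x', 'y', 'value']
#   heatmap(x=corr['x'], y=corr['y'], size=corr['value'].abs())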
def make_occur_plot(X, Y, df, force_dtype={}):
output = df.apply(lambda row: (row[X], row[Y]), axis=1)
values, counts = np.unique(output, return_counts=True)
x = pd.Series([value[0] for value in values])
y = pd.Series([value[1] for value in values])
return heatmap(x, y, counts)
def make_scatter_plot(Xs, Y, df, force_dtype={}, **params):
fig, ax = plt.subplots(
figsize=(params['Figsize_x'], params['Figsize_y'])
)
if isinstance(Xs, str):
Xs = [Xs]
if len(Xs) == 1:
ax.scatter(df[Xs[0]], df[Y],
s=params['markersize'],
alpha=params['alpha'],
color=params['color'],
)
elif len(Xs) == 2:
values = sorted(df[Xs[0]].unique())
for X1_unique in values:
ordered = df[df[Xs[0]] == X1_unique]
ax.scatter(ordered[Xs[1]], ordered[Y],
s=params['markersize'],
alpha=params['alpha'],
)
ax.legend(values)
ax.set_xlabel(params['xlabel'])
ax.set_ylabel(params['ylabel'])
if 'Title' in params:
ax.set_title(params['Title'])
if 'xmin' in params:
ax.set_xlim(left=params['xmin'])
if 'xmax' in params:
ax.set_xlim(right=params['xmax'])
if 'ymin' in params:
ax.set_ylim(bottom=params['ymin'])
if 'ymax' in params:
ax.set_ylim(top=params['ymax'])
if 'half_spine' in params:
ax.spines["top"].set_visible(not params['half_spine'])
ax.spines["right"].set_visible(not params['half_spine'])
plt.show(fig)
if params['save']:
fig.savefig('saved_scatter_plot.png')
def make_interactive_scatter_plot(Xs, Y, df, force_dtype={}):
def plot(**params):
make_scatter_plot(Xs, Y, df, force_dtype, **params)
key = Xs if isinstance(Xs, str) else Xs[1]
widgets.interact(
plot,
Title="",
xlabel=key,
ylabel=Y,
xmin=widgets.FloatSlider(
min=df[key].min() - (df[key].max() - df[key].min())/10,
max=df[key].min(),
value=df[key].min() - (df[key].max() - df[key].min())/20,
step=max(df[key].max() - df[key].min(),1)/100,
),
xmax=widgets.FloatSlider(
min=df[key].max(),
max=df[key].max() + (df[key].max() - df[key].min())/10,
value=df[key].max() + (df[key].max() - df[key].min())/20,
step=(df[key].max() - df[key].min())/100,
),
ymin=widgets.FloatSlider(
min=df[Y].min() - (df[Y].max() - df[Y].min())/10,
max=df[Y].min(),
value=df[Y].min()-(df[Y].max() - df[Y].min())/20,
step=(df[Y].max() - df[Y].min())/100,
),
ymax=widgets.FloatSlider(
min=df[Y].max(),
max=df[Y].max() + (df[Y].max() - df[Y].min())/10,
value=df[Y].max()+(df[Y].max() - df[Y].min())/20,
step=(df[Y].max() - df[Y].min())/100,
),
Figsize_x=widgets.IntSlider(
min=1,
max=10,
value=4,
),
Figsize_y=widgets.IntSlider(
min=1,
max=10,
value=3,
),
markersize=widgets.IntSlider(
min=1,
max=100,
value=10,
step=1,
),
color=[
('steelblue','steelblue'),
('blue','b'),
('green','g'),
('red','r'),
('cyan','c'),
('magenta','m'),
('yellow','y'),
('black','k'),
('white','w'),
],
alpha=widgets.FloatSlider(
min=0,
max=1,
value=1,
step=0.01,
),
half_spine=True,
save=False,
)
def make_line_plot(Xs, Y, df, force_dtype={}, **params):
fig, ax = plt.subplots(
figsize=(params['Figsize_x'], params['Figsize_y'])
)
if isinstance(Xs, str):
Xs = [Xs]
if len(Xs) == 1:
ordered = df.sort_values(Xs[0])
ax.plot(ordered[Xs[0]], ordered[Y],
marker=params['marker'],
markersize=params['markersize'],
linewidth=params['linewidth'],
color=params['color'],
)
elif len(Xs) == 2:
values = sorted(df[Xs[0]].unique())
for X1_unique in values:
ordered = df[df[Xs[0]] == X1_unique].sort_values(Xs[1])
ax.plot(
ordered[Xs[1]],
ordered[Y],
markersize=params['markersize'],
linewidth=params['linewidth'],
)
ax.legend(values)
else:
raise NotImplementedError()
ax.set_xlabel(params['xlabel'])
ax.set_ylabel(params['ylabel'])
if 'Title' in params:
ax.set_title(params['Title'])
if 'xmin' in params:
ax.set_xlim(left=params['xmin'])
if 'xmax' in params:
ax.set_xlim(right=params['xmax'])
if 'ymin' in params:
ax.set_ylim(bottom=params['ymin'])
if 'ymax' in params:
ax.set_ylim(top=params['ymax'])
if 'half_spine' in params:
ax.spines["top"].set_visible(not params['half_spine'])
ax.spines["right"].set_visible(not params['half_spine'])
plt.show(fig)
if params['save']:
fig.savefig('saved_line_plot.png')
def make_interactive_line_plot(Xs, Y, df, force_dtype={}):
def plot(**params):
make_line_plot(Xs, Y, df, force_dtype, **params)
key = Xs if isinstance(Xs, str) else Xs[1]
widgets.interact(
plot,
Title="",
xlabel=key,
ylabel=Y,
xmin=widgets.FloatSlider(
min=df[key].min() - (df[key].max() - df[key].min())/10,
max=df[key].min(),
value=df[key].min() - (df[key].max() - df[key].min())/20,
step=max(df[key].max() - df[key].min(),1)/100,
),
xmax=widgets.FloatSlider(
min=df[key].max(),
max=df[key].max() + (df[key].max() - df[key].min())/10,
value=df[key].max() + (df[key].max() - df[key].min())/20,
step=(df[key].max() - df[key].min())/100,
),
ymin=widgets.FloatSlider(
min=df[Y].min() - (df[Y].max() - df[Y].min())/10,
max=df[Y].min(),
value=df[Y].min()-(df[Y].max() - df[Y].min())/20,
step=(df[Y].max() - df[Y].min())/100,
),
ymax=widgets.FloatSlider(
min=df[Y].max(),
max=df[Y].max() + (df[Y].max() - df[Y].min())/10,
value=df[Y].max()+(df[Y].max() - df[Y].min())/20,
step=(df[Y].max() - df[Y].min())/100,
),
Figsize_x=widgets.IntSlider(
min=1,
max=10,
value=4,
),
Figsize_y=widgets.IntSlider(
min=1,
max=10,
value=3,
),
marker=[('Point', '.'), ('Square', "s"), ("circle", 'o'), ('star', '*')],
markersize=widgets.IntSlider(
min=1,
max=100,
value=10,
step=1,
),
linewidth=widgets.FloatSlider(
min=0.5,
max=10,
value=2,
step=0.5,
),
color=[
('steelblue','steelblue'),
('blue','b'),
('green','g'),
('red','r'),
('cyan','c'),
('magenta','m'),
('yellow','y'),
('black','k'),
('white','w'),
],
half_spine=True,
save=False,
)
def make_pie_chart(X, Y, df, force_dtype={}, **params):
fig, ax = plt.subplots()
if 'shadow' not in params:
params['shadow'] = True
if 'startangle' not in params:
params['startangle'] = 90 # assumed default; the original referenced an undefined name `startangle`
ax.pie(
df[Y],
labels=df[X],
autopct='%1.1f%%',
shadow=params['shadow'],
startangle=params['startangle'],
)
ax.axis('equal') # Equal aspect ratio ensures that pie is drawn as a circle.
plt.show(fig)
def make_violin_plot(X, Y, df, force_dtype={}, **params):
pass
def make_box_plot(X, Y, df, force_dtype={}, **params):
pass |
from deepdab.ai import *
class TDOneTabularPolicy(TabularPolicy):
def __init__(self, board_size, learning_rate=0.0, gamma=0.0, epsilon=0.0, initial_state_value=0.0,
table_file_path=None, softmax_action=False, temperature=0.0):
super(TDOneTabularPolicy, self).__init__(board_size=board_size, epsilon=epsilon,
initial_state_value=initial_state_value, table_file_path=table_file_path,
softmax_action=softmax_action, temperature=temperature)
self._learning_rate = learning_rate
self._gamma = gamma
def update_value(self, reward, backups):
for i in reversed(range(len(backups))):
state_string = self._find_state_string(backups[i])
value = self._value_table[state_string]
if i == len(backups) - 1:
next_value = 0
r = reward
else:
next_value = self._get_value(backups[i + 1])
r = 0
self._value_table[state_string] = value + self._learning_rate * (r + self._gamma * next_value - value)
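# The backward sweep above is a TD-style update applied from the terminal
# state to the first: the last backup receives the raw reward (next_value = 0),
# and each earlier backup bootstraps from the already-updated value of its
# successor, V(s) <- V(s) + lr * (r + gamma * V(s') - V(s)).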
def get_learning_rate(self):
return self._learning_rate
def set_learning_rate(self, lr):
self._learning_rate = lr
def _get_value(self, state):
state_string = self._find_state_string(state)
return self._value_table[state_string]
|
from decimal import Decimal
import itertools
import pandas
import collections
import collections.abc
from adt import AnnualChange
import random
def namedtuple_with_defaults(typename, field_names, default_values=()):
T = collections.namedtuple(typename, field_names)
T.__new__.__defaults__ = (None,) * len(T._fields)
if isinstance(default_values, collections.abc.Mapping): # Mapping lives in collections.abc (removed from collections in Python 3.10)
prototype = T(**default_values)
else:
prototype = T(*default_values)
T.__new__.__defaults__ = tuple(prototype)
return T
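# Example (hypothetical type, not used elsewhere in this module):
#
#   Point = namedtuple_with_defaults('Point', ['x', 'y'], default_values=[0, 0])
#   Point()       # Point(x=0, y=0)
#   Point(y=3)    # Point(x=0, y=3)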
def constant_returns(stocks=Decimal('.04'), bonds=Decimal('.02'), inflation=Decimal('.02')):
return itertools.repeat(AnnualChange(year = 0, stocks = stocks, bonds = bonds, inflation = inflation))
def nirp_returns():
return constant_returns(stocks=Decimal('.02'), bonds=Decimal('-.01'), inflation=Decimal('.02'))
def zirp_returns():
return constant_returns(stocks=Decimal('.04'), bonds=Decimal('0'), inflation=Decimal('.02'))
def big_drop(after=10):
# 20 years of ZIRP to exhaust bonds under Prime Harvesting
for i in range(after):
yield AnnualChange(year=0, stocks=Decimal('.04'), bonds=Decimal('0'), inflation=Decimal('.02'))
# big drop of stocks to allow rebalancing to "buy stocks cheap"
yield AnnualChange(year=0, stocks=Decimal('-.25'), bonds=Decimal('0'), inflation=Decimal('.02'))
# now we just have normal (but not amazing) returns.....
while True:
yield AnnualChange(year=0, stocks=Decimal('.08'), bonds=Decimal('.04'), inflation=Decimal('.03'))
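# big_drop is an infinite generator; take a finite window with islice, e.g.:
#
#   first_15 = list(itertools.islice(big_drop(after=10), 15))
#   # 10 flat ZIRP years, one -25% stock year, then 4 years of normal returns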
class PortfolioCharts_1927:
""" All of the other code only deals with stocks and bonds. Rewriting all
of it to deal with arbitrary asset classes is not appealing at the moment.
I also don't know of an actual use case for it. So we'll do a bit of a hack.
You need to pass in a Weights tuple that tells this how to weight the returns
of the various subclasses. That means the asset allocation is fixed and can't
vary over time. But it also means I don't have to rewrite everything right now. """
asset_classes = [x + y for x in ("LC", "MC", "SC") for y in ("B", "G", "V")]
Weights = namedtuple_with_defaults("Weights", asset_classes, default_values=[0] * len(asset_classes))
def __init__(self, weights):
self.dataframe = pandas.read_csv('stock-index-calculator-20160620-v2.csv')
self.years_of_data = len(self.dataframe)
self.weights = weights
def fmt(self, row):
stock_performance = [row[x] * self.weights._asdict()[x] for x in self.asset_classes]
stock_performance = sum(stock_performance)
return AnnualChange(
year=row['Year'],
stocks=Decimal(stock_performance) / 100,
bonds=Decimal(row['IT Bonds']) / 100,
inflation=Decimal(row['CPI-U']) / 100
)
def random_year(self):
i = random.randint(0, len(self.dataframe) - 1)
return self.fmt(self.dataframe.iloc[i])
def get_year(self, year):
return self.fmt(self.dataframe.iloc[year-1927])
def iter_from(self, year, length=None):
start = year - 1927
assert start >= 0
count = 0
for row in self.dataframe.iloc[start:].iterrows():
yield self.fmt(row[1])
count += 1
if length is not None and count >= length:
return # PEP 479: raising StopIteration inside a generator is an error since Python 3.7
class JST:
""" The Jordà-Schularick-Taylor Macrohistory Database stata file containing
country data from "The Return of Everything"
http://www.macrohistory.net/data/
"""
Countries = ['AUS', 'BEL', 'CAN', 'DNK', 'FIN', 'FRA', 'DEU', 'ITA', 'JPN',
'NLD', 'NOR', 'PRT', 'ESP', 'SWE', 'CHE', 'GBR', 'USA']
def __init__(self, country):
df = pandas.read_stata('JSTdatasetR4.dta')
# the year comes through as a float for some reason...coerce back to int
df['year'] = df['year'].astype(int)
df = df[df['iso'] == country]
df = df[['year', 'iso', 'eq_tr', 'bond_tr', 'cpi']]
# the 'cpi' is a CPI-level...not a year-over-year change. ugh.
df['cpi'] = (df['cpi'] / df['cpi'].shift(1)) - 1
df = df.dropna()
self.start_year = df['year'].min()
self.last_year = df['year'].max()
self.country = country
self.data = df
def __len__(self):
return len(self.data)
def fmt(self, row):
# 'cpi' was converted from an index level to a year-over-year change in __init__
return AnnualChange(
year=row.year,
stocks=Decimal(row.eq_tr),
bonds=Decimal(row.bond_tr),
inflation=Decimal(row.cpi)
)
def iter_from(self, year, length=None):
assert year >= self.start_year
if length:
assert (year+length) <= self.last_year
count = 0
for row in self.data[self.data['year'] >= year].iterrows():
yield self.fmt(row[1])
count += 1
if length != None and count >= length:
return
class UK1900:
def __init__(self, wrap=False):
self.start_year = 1900
self.dataframe = pandas.read_csv('uk1900.csv', index_col=0, parse_dates=True)
if wrap:
self.dataframe = pandas.concat([self.dataframe, self.dataframe]) # repeat the rows to wrap around (+= would add values element-wise)
def __len__(self):
return len(self.dataframe)
def random_year(self):
i = random.randint(0, len(self.dataframe) - 1)
return self.fmt(self.dataframe.iloc[i])
def fmt(self, row):
(stocks, bonds, inflation) = (Decimal(row[x]) for x in ("Real Equity", "Real Gilt", "Inflation"))
return AnnualChange(
year=row.name.year,
stocks=stocks + inflation,
bonds=bonds + inflation,
inflation=inflation
)
def iter_from(self, year, length=None):
start = year - 1900
assert start >= 0
count = 0
for row in self.dataframe.iloc[start:].iterrows():
yield self.fmt(row[1])
count += 1
if length != None and count >= length:
return
class US_1871_Monthly:
def __init__(self):
self.data = pandas.read_csv('US_1871_Monthly.csv', index_col=0, parse_dates=True)
def fmt(self, row):
return AnnualChange(
year=row.name.year,
stocks=Decimal(row['S&P %']),
bonds=Decimal(row['Bond %']),
inflation=0 # it is already reported in real terms
)
def iter_from(self, date, length=None):
count = 0
for row in self.data.loc[date:].iterrows():
yield self.fmt(row[1])
count += 1
if length and count >= length:
return # PEP 479: raising StopIteration inside a generator is an error since Python 3.7
class Japan_1957:
def __init__(self):
self.start_year = 1957
self.dataframe = pandas.read_csv('japan-1957-2016.csv',
index_col=0,
dtype={'Date': int,
'CPI Japan': object,
'Spliced Bond': object,
'NIKKEI225' : object},
converters={'CPI Japan': Decimal,
'Spliced Bond' : Decimal,
'NIKKEI225' : Decimal})
def __len__(self):
return len(self.dataframe)
def random_year(self):
i = random.randint(0, len(self.dataframe) - 1)
return self.fmt(self.dataframe.iloc[i])
def fmt(self, row):
return AnnualChange(
year=row.name,
stocks=row['NIKKEI225'],
bonds=row['Spliced Bond'],
inflation=row['CPI Japan']
)
def iter_from(self, year, length=None):
start = year - 1957
assert start >= 0
assert start < 2016
count = 0
for row in self.dataframe.iloc[start:].iterrows():
yield self.fmt(row[1])
count += 1
if length and count >= length:
return
class Japan_1975:
def __init__(self):
self.dataframe = pandas.read_csv('japan-1975-2016.csv',
index_col=0,
dtype={'Year': int,
'CPI': object,
'IT Bond': object,
'Equities' : object},
converters={'CPI': Decimal,
'IT Bond' : Decimal,
'Equities' : Decimal})
def fmt(self, row):
return AnnualChange(
year=row.name,
stocks=row['Equities'],
bonds=row['IT Bond'],
inflation=row['CPI']
)
def iter_from(self, year, length=None):
start = year - 1975
assert start >= 0
assert start < 2016
count = 0
for row in self.dataframe.iloc[start:].iterrows():
yield self.fmt(row[1])
count += 1
if length and count >= length:
return
class Returns_US_1871:
def __init__(self, wrap=False):
# import US based data from 1871 from the simba backtesting
# spreadsheet found on bogleheads.org
# https://www.bogleheads.org/forum/viewtopic.php?p=2753812#p2753812
self.start_year = 1871
self.dataframe = pandas.read_csv('1871_returns.csv')
self.years_of_data = len(self.dataframe)
if wrap:
self.dataframe = pandas.concat([self.dataframe, self.dataframe]) # repeat the rows to wrap around (+= would add values element-wise)
def __len__(self):
return len(self.dataframe)
def __iter__(self):
return self.iter_from(1871)
def get_year(self, index):
return self.dataframe.iloc[index]['Year']
def shuffle(self):
index = list(self.dataframe.index)
random.shuffle(index)
self.dataframe = self.dataframe.loc[index] # .ix was removed from pandas
self.dataframe = self.dataframe.reset_index(drop=True) # reset_index returns a copy; assign it back
return self
def random_year(self):
i = random.randint(0, len(self.dataframe) - 1)
return self.fmt(self.dataframe.iloc[i])
def fmt(self, row):
(stocks, bonds, inflation) = (Decimal(row[x]) / 100 for x in ("VFINX", "IT Bonds", "CPI-U"))
return AnnualChange(
year=row['Year'],
stocks=stocks,
bonds=bonds,
inflation=inflation
)
def iter_from(self, year, length=None):
start = year - 1871
assert start >= 0
assert start < 2016
count = 0
for row in self.dataframe.iloc[start:].iterrows():
yield self.fmt(row[1])
count += 1
if length and count >= length:
return
|
import consumer
import sensor
import analyzer
import threading
import dashboard
import logging
import sys
import sched, time
import ids
import terminaloutput
#logging.getLogger().setLevel(logging.INFO)
IDS_CHECK_INTERVAL = 5
class Monitor:
def __init__(self):
self.consumers = []
s = sensor.Sensor()
hc = consumer.HttpConsumer(5*60)
hc.plugto(s)
fa = analyzer.FrequencyAnalyzer( "HTTP_URL_FREQUENCY_10S", "hostname", 30)
fa.plugto(hc)
fa2 = analyzer.FrequencyAnalyzer( "HTTP_URL_FREQUENCY_2M", "hostname", 30)
fa2.plugto(hc)
self.ids = ids.IDS()
self.ids.plugto("HTTP_URL_FREQUENCY_10S")
self.ids.plugto("HTTP_URL_FREQUENCY_2M")
self.ids.plug(self)
self.terminal = terminaloutput.TerminalOutput()
self.terminal.addsection(terminaloutput.screensection("ALERT", "IDS Alerts", 10))
self.terminal.addsection(terminaloutput.screensection("NOTIFICATION", "IDS Notifications", 20))
self.terminal.start()
self.consumers.append(hc)
def notify(self, msg):
self.terminal.addstr( msg.messagetype, msg.message, msg.timestamp)
def visitids(self):
self.ids.check()
threading.Timer(IDS_CHECK_INTERVAL, self.visitids).start()
def start(self):
threading.Timer(IDS_CHECK_INTERVAL, self.visitids).start()
for c in self.consumers:
c.start()
if __name__=="__main__":
m = Monitor()
m.start()
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for creating tests of implementations of the Face layer."""
# unittest is referenced from specification in this module.
import unittest # pylint: disable=unused-import
# test_interfaces is referenced from specification in this module.
from tests.unit.framework.interfaces.face import _blocking_invocation_inline_service
from tests.unit.framework.interfaces.face import _future_invocation_asynchronous_event_service
from tests.unit.framework.interfaces.face import _invocation
from tests.unit.framework.interfaces.face import test_interfaces # pylint: disable=unused-import
_TEST_CASE_SUPERCLASSES = (
_blocking_invocation_inline_service.TestCase,
_future_invocation_asynchronous_event_service.TestCase,
)
def test_cases(implementation):
"""Creates unittest.TestCase classes for a given Face layer implementation.
Args:
implementation: A test_interfaces.Implementation specifying creation and
destruction of a given Face layer implementation.
Returns:
A sequence of subclasses of unittest.TestCase defining tests of the
specified Face layer implementation.
"""
test_case_classes = []
for invoker_constructor in _invocation.invoker_constructors():
for super_class in _TEST_CASE_SUPERCLASSES:
test_case_classes.append(
type(
invoker_constructor.name() + super_class.NAME,
(super_class,), {
'implementation': implementation,
'invoker_constructor': invoker_constructor,
'__module__': implementation.__module__,
}))
return test_case_classes
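# Example (hypothetical): a concrete test module would typically inject the
# generated classes into its own namespace so that unittest discovery finds
# them, along the lines of:
#
#   for _test_case_class in test_cases(_my_implementation):
#       globals()[_test_case_class.__name__] = _test_case_class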
|
import numpy as np
from rlgym.utils import RewardFunction, math
from rlgym.utils.common_values import BALL_RADIUS, CAR_MAX_SPEED
from rlgym.utils.gamestates import GameState, PlayerData
class LiuDistancePlayerToBallReward(RewardFunction):
def reset(self, initial_state: GameState):
pass
def get_reward(self, player: PlayerData, state: GameState, previous_action: np.ndarray) -> float:
# Compensate for inside of ball being unreachable (keep max reward at 1)
dist = np.linalg.norm(player.car_data.position - state.ball.position) - BALL_RADIUS
return np.exp(-0.5 * dist / CAR_MAX_SPEED) # Inspired by https://arxiv.org/abs/2105.12196
class VelocityPlayerToBallReward(RewardFunction):
def __init__(self, use_scalar_projection=False):
super().__init__()
self.use_scalar_projection = use_scalar_projection
def reset(self, initial_state: GameState):
pass
def get_reward(self, player: PlayerData, state: GameState, previous_action: np.ndarray) -> float:
vel = player.car_data.linear_velocity
pos_diff = state.ball.position - player.car_data.position
if self.use_scalar_projection:
# Vector version of v=d/t <=> t=d/v <=> 1/t=v/d
# Max value should be max_speed / ball_radius = 2300 / 92.75 = 24.8
# Used to guide the agent towards the ball
inv_t = math.scalar_projection(vel, pos_diff)
return inv_t
else:
# Regular component velocity
norm_pos_diff = pos_diff / np.linalg.norm(pos_diff)
norm_vel = vel / CAR_MAX_SPEED
return float(np.dot(norm_pos_diff, norm_vel))
class FaceBallReward(RewardFunction):
def reset(self, initial_state: GameState):
pass
def get_reward(self, player: PlayerData, state: GameState, previous_action: np.ndarray) -> float:
pos_diff = state.ball.position - player.car_data.position
norm_pos_diff = pos_diff / np.linalg.norm(pos_diff)
return float(np.dot(player.car_data.forward(), norm_pos_diff))
class TouchBallReward(RewardFunction):
def __init__(self, aerial_weight=0.):
self.aerial_weight = aerial_weight
def reset(self, initial_state: GameState):
pass
def get_reward(self, player: PlayerData, state: GameState, previous_action: np.ndarray) -> float:
if player.ball_touched:
# Default just rewards 1, set aerial weight to reward more depending on ball height
return ((state.ball.position[2] + BALL_RADIUS) / (2 * BALL_RADIUS)) ** self.aerial_weight
return 0
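# Usage sketch (hypothetical weights; how rewards are combined depends on the
# rlgym version / training setup):
#
#   touch = TouchBallReward(aerial_weight=0.5)
#   velocity = VelocityPlayerToBallReward(use_scalar_projection=True)
#   # total = 0.7 * velocity.get_reward(player, state, action) \
#   #       + 0.3 * touch.get_reward(player, state, action)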
|
import numpy as np
import cv2
import imutils
import math
from gpio_controller.ServoController import ServoController
import argparse
ap = argparse.ArgumentParser()
ap.add_argument('-v', '--video', default=1)
args = vars(ap.parse_args())
servo_controller = ServoController()
servo_controller.init()
theta = 0
speed = 0
old_theta = 0
old_speed = 0
SCREEN_WIDTH=240
SCREEN_HEIGHT=320
CAMERA_MIDDLE = 2
CAMERA_LEFT = 1
CAMERA_RIGHT = 3
# camera index mapping (matches the constants above):
# 1 - left, 2 - middle, 3 - right
cap_left = cv2.VideoCapture(CAMERA_LEFT)
cap_left.set(3, SCREEN_WIDTH)
cap_left.set(4, SCREEN_HEIGHT)
cap_middle = cv2.VideoCapture(CAMERA_MIDDLE)
cap_middle.set(3, SCREEN_WIDTH)
cap_middle.set(4, SCREEN_HEIGHT)
cap_right = cv2.VideoCapture(CAMERA_RIGHT)
cap_right.set(3, SCREEN_WIDTH)
cap_right.set(4, SCREEN_HEIGHT)
screen_width = int(cap_middle.get(3))
screen_height = int(cap_middle.get(4))
x_screen_center = int(screen_width/2)
y_screen_center = int(screen_height/2)
def get_theta(dx, dy):
# the frame is mirrored, so flip the horizontal offset
dx = -dx
return math.degrees(math.atan2(dx, dy)) # atan2 avoids a ZeroDivisionError when dy == 0
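# e.g. a ball 50 px right of the reference with 100 px of depth gives
# math.degrees(math.atan2(-50, 100)) ~= -26.57 degrees after the mirror flip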
def get_theta_from_camera(image, camera_pos):
blur = cv2.GaussianBlur(image, (11, 11), 0)
hsv = cv2.cvtColor( blur, cv2.COLOR_BGR2HSV)
lower = np.array([10, 100, 150])
upper = np.array([25, 255, 255])
mask = cv2.inRange( hsv, lower, upper)
mask = cv2.erode(mask, None, iterations=2)
mask = cv2.dilate(mask, None, iterations=2)
ball_cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
ball_cnts = imutils.grab_contours(ball_cnts)
ball_center = None
x_ball_center = -1
y_ball_center = -1
if len(ball_cnts) > 0:
c = max(ball_cnts, key=cv2.contourArea)
((x, y), radius) = cv2.minEnclosingCircle(c)
M = cv2.moments(c)
x_ball_center = int(M["m10"] / M["m00"])
y_ball_center = int(M["m01"] / M["m00"])
ball_center = (x_ball_center,y_ball_center)
# only proceed if the radius meets a minimum size
if radius > 10:
# draw the circle and centroid on the frame,
# then update the list of tracked points
cv2.circle(image, (int(x), int(y)), int(radius), (0, 255, 255), 2)
cv2.circle(image, ball_center, 5, (0, 0, 255), -1)
theta = None
if x_ball_center == -1:
return image, theta
if camera_pos == CAMERA_MIDDLE:
distance = x_ball_center - x_screen_center
dx = distance
dy = (screen_height - y_ball_center)
# if (dy > screen_width/2):
# dy = screen_width/2
theta = get_theta(dx, dy)
elif camera_pos == CAMERA_LEFT:
dx = x_ball_center - screen_width * 3/2
dy = (screen_height - y_ball_center)
theta = get_theta(dx, dy)
elif camera_pos == CAMERA_RIGHT:
dx = x_ball_center + screen_width * 1/2
dy = (screen_height - y_ball_center)
theta = get_theta(dx, dy)
return image, theta
while True:
ret, frame_middle = cap_middle.read()
ret, frame_left = cap_left.read()
ret, frame_right = cap_right.read()
#flip miror
frame_middle = cv2.flip(frame_middle, 1)
frame_left = cv2.flip(frame_left, 1)
frame_right = cv2.flip(frame_right, 1)
cv2.line(frame_middle,(x_screen_center,0),(x_screen_center,screen_height),(255,0,0),5)
theta = None
frame_middle, theta_middle = get_theta_from_camera(frame_middle, CAMERA_MIDDLE)
frame_left, theta_left = get_theta_from_camera(frame_left, CAMERA_LEFT)
frame_right, theta_right = get_theta_from_camera(frame_right, CAMERA_RIGHT)
if theta_middle is not None:
theta = theta_middle
elif theta_left is not None:
theta = theta_left
elif theta_right is not None:
theta = theta_right
distance = 0
if theta is not None:
speed = 20
else:
speed = 0
if old_speed != speed:
servo_controller.set_speed(speed)
old_speed = speed # track the last commanded speed locally
if (theta is not None) and (old_theta != theta):
servo_controller.set_steer(theta)
old_theta = theta # track the last commanded steering angle (the original double assignment discarded the first)
cv2.imshow('middle', frame_middle)
cv2.imshow('left', frame_left)
cv2.imshow('right', frame_right)
# cv2.imshow('mask', mask)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
cap_left.release() # `cap` was never defined; release all three captures
cap_middle.release()
cap_right.release()
cv2.destroyAllWindows() |
class PyGridClientException(Exception):
def __init__(self, message: str) -> None:
super().__init__(message)
class RequestAPIException(Exception):
def __init__(self, message: str) -> None:
message = "Something went wrong during this request: " + message
super().__init__(message)
|
import sys
import os
sys.path.append(os.pardir)
import reg3d_transformations as reg3d_T
from numpy import linalg as LA
import transformations as tf
#******************** BH ********************
# cvg_root = "/Users/isa/Experiments/reg3d_eval/cvg_eo_data"
# dan_root = "/Users/isa/Experiments/reg3d_eval/BH_2006/original"
# Paper with Vishal
vsi_root = "/Users/isa/Dropbox/MultiModalRegPaper/ground_truth/aerial-lidar/BH_VSI"
dan_root = "/Users/isa/Dropbox/MultiModalRegPaper/ground_truth/aerial-lidar/BH_2006"
# # #Load f2-lidar
# f2_lidar_file = cvg_root + "/flight2_sites/site_1/Hs_geo.txt"
# f2_geo = reg3d_T.geo_transformation(f2_lidar_file)
# #Load f4-f2
# f4_f2_file = cvg_root + "/flight4_sites/site_1/f4-f2_Hs.txt"
# f4_f2 = reg3d_T.gt_transformation(f4_f2_file)
# #Load f4-2006
# f4_2006_file = cvg_root + "/flight4_sites/site_1/f4-2006_Hs.txt"
# f4_2006 = reg3d_T.gt_transformation(f4_2006_file)
# #Load cvg-f4-2006
# f4_2006_file = cvg_root + "/flight4_sites/site_1/f4-2006_Hs.txt"
# f4_2006 = reg3d_T.gt_transformation(f4_2006_file)
#Load vsi-2006; this is only used for Vishal's paper setup
f4_dan_file = vsi_root + "/vsi-2006_Hs.txt"
f4_dan = reg3d_T.gt_transformation(f4_dan_file)
dan_lidar_file = dan_root + "/Hs_geo.txt"
dan_geo = reg3d_T.geo_transformation(dan_lidar_file)
vsi_geo_Hs = dan_geo.Hs_geo.dot(f4_dan.Hs)
vsi_geo = reg3d_T.gt_transformation(vsi_geo_Hs)
vsi_geo.save_to_file(vsi_root + "/Hs_geo.txt")
# #Compute missing ones
# #2006-lidar
# dan_geo_Hs = f2_geo.Hs_geo.dot(f4_f2.Hs.dot(LA.inv(f4_2006.Hs)))
# dan_geo = reg3d_T.gt_transformation(dan_geo_Hs)
# dan_geo.save_to_file(dan_root + "/Hs_geo")
# import pdb; pdb.set_trace()
#******************** Capitol ********************
# root_2011 = "/Users/isa/Experiments/reg3d_eval/capitol_2011/original"
# root_2006 = "/Users/isa/Experiments/reg3d_eval/capitol_2006/original"
# #Load 2011-2006
# Hfile_2011_2006 = root_2011 + "/2011-2006_Hs.txt"
# T_2011_2006 = reg3d_T.gt_transformation(Hfile_2011_2006)
# #Compute missing ones
# #2006-2011
# Hs_2006_2011 = LA.inv(T_2011_2006.Hs)
# T_2006_2011 = reg3d_T.gt_transformation(Hs_2006_2011)
# T_2006_2011.save_to_file(root_2006 + "/2006-2011_Hs")
#Load 2006-lidar
# Hfile_geo_2006 = "/data/lidar_providence/capitol/capitol-dan_Hs.txt"
# Tgeo_2006 = reg3d_T.geo_transformation(Hfile_geo_2006)
# Hs_geo_2011 = Tgeo_2006.Hs_geo.dot(T_2011_2006.Hs)
# Tgeo_2011 = reg3d_T.gt_transformation(Hs_geo_2011)
# Tgeo_2011.save_to_file(root_2011 + "/Hs_geo")
#******************** Downtown ********************
#
# root_cvg ="/Users/isa/Experiments/reg3d_eval/cvg_eo_data/flight5_sites/site_5"
# root_2011 = "/Users/isa/Experiments/reg3d_eval/downtown_2011/original"
# root_2006 = "/Users/isa/Experiments/reg3d_eval/downtown_2006/original"
# #Load 2011-2006
# Hfile_2011_2006 = root_2011 + "/2011-2006_Hs.txt"
# T_2011_2006 = reg3d_T.gt_transformation(Hfile_2011_2006)
# Hfile_cvg_2006 = root_cvg + "/f5-2006_Hs.txt"
# T_cvg_2006 = reg3d_T.gt_transformation(Hfile_cvg_2006)
# #Load 2006-lidar
# Hfile_geo_2006 = "/data/lidar_providence/downtown_offset-1-financial-dan-Hs.txt"
# Tgeo_2006 = reg3d_T.geo_transformation(Hfile_geo_2006)
# Hs_geo_2011 = Tgeo_2006.Hs_geo.dot(T_2011_2006.Hs)
# Tgeo_2011 = reg3d_T.gt_transformation(Hs_geo_2011)
# Tgeo_2011.save_to_file(root_2011 + "/Hs_geo")
# Hs_geo_cvg = Tgeo_2006.Hs_geo.dot(T_cvg_2006.Hs)
# Tgeo_cvg = reg3d_T.gt_transformation(Hs_geo_cvg)
# Tgeo_cvg.save_to_file(root_cvg + "/Hs_geo")
# import pdb; pdb.set_trace() |
# -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at https://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from typing import List, Optional
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import permissions
from apps.node_man import models
from apps.node_man.constants import IamActionType
from apps.node_man.exceptions import CloudNotExistError
from apps.node_man.handlers.cmdb import CmdbHandler
from apps.node_man.handlers.iam import IamHandler
from apps.utils.local import get_request_username
class GlobalSettingPermission(permissions.BasePermission):
"""
全局配置权限控制
"""
message = _("您没有权限执行操作")
def has_permission(self, request, view):
# access point edit / delete / create permissions
if view.action in ["list", "ap_is_using"]:
# actions that do not require authentication
return True
# if the IAM permission center is not in use
if not settings.USE_IAM:
if IamHandler.is_superuser(get_request_username()):
return True
else:
self.message = _("您没有该接入点的权限")
return False
# the IAM permission center is in use
perms = IamHandler().fetch_policy(
get_request_username(),
[IamActionType.ap_edit, IamActionType.ap_delete, IamActionType.ap_create, IamActionType.ap_view],
)
if view.action in ["create", "test"] and perms[IamActionType.ap_create]:
return True
if view.action == "update" and int(view.kwargs.get("pk", 0)) in perms[IamActionType.ap_edit]:
return True
if view.action == "destroy" and int(view.kwargs.get("pk", 0)) in perms[IamActionType.ap_delete]:
return True
if view.action == "retrieve" and int(view.kwargs.get("pk", 0)) in perms[IamActionType.ap_view]:
return True
message_dict = {
"create": _("您没有创建接入点的权限"),
"update": _("您没有编辑该接入点的权限"),
"destroy": _("您没有删除该接入点的权限"),
"view": _("您没有查看该接入点详情的权限"),
}
self.message = message_dict.get(view.action, _("您没有相应操作的权限")) # wrap the fallback in _() like the other messages
return False
class CloudPermission(permissions.BasePermission):
"""
云区域权限控制
"""
message = _("您没有权限执行操作")
def has_permission(self, request, view):
# cloud area view / edit / delete / create permissions
if view.action == "list":
# the list endpoint does not require authentication
return True
# **if the IAM permission center is not in use**
if not settings.USE_IAM:
# anyone may create a cloud area
if view.action == "create":
return True
# only the creator and superusers may view details of, edit or delete a cloud area
try:
cloud = models.Cloud.objects.get(pk=int(view.kwargs.get("pk", -1)))
except models.Cloud.DoesNotExist:
raise CloudNotExistError(_("不存在ID为: {bk_cloud_id} 的云区域").format(bk_cloud_id=int(view.kwargs.get("pk"))))
if get_request_username() in cloud.creator or IamHandler.is_superuser(get_request_username()):
return True
else:
return False
# **the IAM permission center is in use**
perms = IamHandler().fetch_policy(
get_request_username(),
[
IamActionType.cloud_view,
IamActionType.cloud_edit,
IamActionType.cloud_delete,
IamActionType.cloud_create,
],
)
if perms[IamActionType.cloud_create] and view.action == "create":
return True
if int(view.kwargs.get("pk", 0)) in perms[IamActionType.cloud_view] and view.action == "retrieve":
return True
if int(view.kwargs.get("pk", 0)) in perms[IamActionType.cloud_edit] and view.action == "update":
return True
if int(view.kwargs.get("pk", 0)) in perms[IamActionType.cloud_delete] and view.action == "destroy":
return True
message_dict = {
"create": _("您没有创建云区域的权限"),
"update": _("您没有编辑该云区域的权限"),
"destroy": _("您没有删除该云区域的权限"),
"retrieve": _("您没有查看该云区域详情的权限"),
}
self.message = message_dict.get(view.action, _("您没有相应操作的权限")) # wrap the fallback in _() like the other messages
return False
class InstallChannelPermission(permissions.BasePermission):
"""
安装通道权限控制,复用云区域的编辑权限
"""
message = _("您没有权限执行操作,请申请云区域编辑权限")
def has_permission(self, request, view):
perms = IamHandler().fetch_policy(
get_request_username(),
[
IamActionType.cloud_edit,
],
)
if view.action == "list":
# the list endpoint does not require authentication
return True
if request.method == "GET":
bk_cloud_id = request.query_params.get("bk_cloud_id")
else:
bk_cloud_id = request.data.get("bk_cloud_id")
if bk_cloud_id in perms[IamActionType.cloud_edit]:
return True
return False
class DebugPermission(permissions.BasePermission):
"""
Debug接口权限控制
"""
message = _("您没有查询Debug接口的权限")
def has_permission(self, request, view):
# only superusers may call the debug APIs
if IamHandler.is_superuser(get_request_username()):
return True
else:
return False
class PackagePermission(permissions.BasePermission):
"""
插件包权限控制
"""
message = _("您没有该操作的权限")
def has_permission(self, request, view):
if view.action in ["list", "retrieve", "operate", "fetch_package_deploy_info"]:
# 不需要鉴权的action
return True
if IamHandler().is_superuser(get_request_username()):
return True
if not settings.USE_IAM:
return True
# **使用了权限中心**
if settings.USE_IAM:
perms = IamHandler().fetch_policy(
get_request_username(), [IamActionType.plugin_pkg_operate, IamActionType.plugin_pkg_import]
)
if view.action in ["update", "history"]:
return int(view.kwargs.get("pk", 0)) in perms[IamActionType.plugin_pkg_operate]
if view.action in ["plugin_status_operation"]:
return not set(request.data.get("id", [])) - set(perms[IamActionType.plugin_pkg_operate])
if view.action in ["package_status_operation"]:
plugin_package_ids = request.data.get("id", [])
plugin_names = list(
models.Packages.objects.filter(id__in=plugin_package_ids).values_list("project", flat=True)
)
plugin_ids = list(
models.GsePluginDesc.objects.filter(name__in=plugin_names).values_list("id", flat=True)
)
return not set(plugin_ids) - set(perms[IamActionType.plugin_pkg_operate])
if view.action in ["upload", "parse", "create_register_task", "query_register_task"]:
return perms[IamActionType.plugin_pkg_import]
if view.action in ["create_export_task", "query_export_task", "fetch_config_variables", "list_plugin_host"]:
return True
return False
class PolicyPermission(permissions.BasePermission):
"""
    Policy permission control
"""
message = _("您没有该操作的权限")
def has_permission(self, request, view):
if view.action in ["search"]:
return True
if IamHandler().is_superuser(get_request_username()):
return True
if not settings.USE_IAM:
return True
        # The IAM permission center is enabled beyond this point
if view.action in [
"update",
"retrieve",
"update_policy",
"run_policy",
"upgrade_preview",
"operate",
"rollback_preview",
]:
perms = IamHandler().fetch_policy(get_request_username(), [IamActionType.strategy_operate])
operate_perms = perms[IamActionType.strategy_operate]
pk = int(view.kwargs.get("pk", 0))
if view.action in ["operate", "rollback_preview"]:
pk = int(request.data.get("policy_id", 0))
try:
policy = models.Subscription.objects.get(id=pk)
except models.Subscription.DoesNotExist:
return False
return policy.pid in operate_perms if policy.pid != -1 else policy.id in operate_perms
if view.action in [
"fetch_common_variable",
"fetch_policy_topo",
"selected_preview",
"plugin_preselection",
"fetch_policy_abnormal_info",
"migrate_preview",
]:
            # Actions that do not require authentication
            return True
if view.action in ["host_policy"]:
            bk_host_id: Optional[str] = request.query_params.get("bk_host_id")
bk_biz_scope: List[int] = list(
models.Host.objects.filter(bk_host_id=bk_host_id).values_list("bk_biz_id", flat=True)
)
return CmdbHandler().check_biz_permission(bk_biz_scope, IamActionType.strategy_view)
if view.action in ["create_policy"]:
bk_biz_scope: List[int] = list(set([node["bk_biz_id"] for node in request.data["scope"]["nodes"]]))
return CmdbHandler().check_biz_permission(bk_biz_scope, IamActionType.strategy_create)
return False
|
""" Plotting class for IRR/IFR
Plots the output of :py:func:`~agg_dyn.stats.utils.score_points`
Classes:
* :py:class:`ReliabilityPlotter`: Plot IRR and IFR barplots
Functions:
* :py:func:`plot_roc_curve`: Make ROC curve plots
* :py:func:`plot_pr_curve`: Make P/R curve plots
"""
# Imports
import json
# 3rd party
import numpy as np
import matplotlib.pyplot as plt
# Our own imports
from .consts import PLOT_STYLE, BARPLOT_ORIENT, SUFFIX, PALETTE, FIGSIZE, LINEWIDTH
from .styling import set_plot_style, colorwheel
from .cat_plot import add_barplot
from .split_axes import SplitAxes
from ..utils import calc_pairwise_significance
# Classes
class ReliabilityPlotter(object):
""" Plot reliability for different scores
:param DataFrame score_data:
The score data frame with multiple condition columns
:param Path outdir:
The output directory to write the plots to
:param str score_type:
One of "irr" or "ifr"
"""
def __init__(self, score_data, outdir, score_type,
plot_style=PLOT_STYLE,
barplot_orient=BARPLOT_ORIENT,
suffix=SUFFIX,
palette=PALETTE,
for_paper=False,
annotator_column='Annotator',
figsize=FIGSIZE):
self.score_data = score_data
self.outdir = outdir
self.score_type = score_type.lower()
self.for_paper = for_paper
if self.for_paper:
plot_style = 'light'
barplot_orient = 'vertical'
suffix = '.svg'
palette = 'Set1'
# Data frame column names
self.annotator_column = annotator_column
self.pct_column = 'PercentLabeled'
self.tile_column = 'Tile'
self.ylabel = {
'irr': '% Matches to Consensus',
'ifr': '% Matches Between Frames',
}[self.score_type]
self.ycolumn = {
'irr': 'IRR',
'ifr': 'IFR',
}[self.score_type]
# Make sure we're plotting percents and check bounds
if np.max(self.score_data[self.ycolumn]) <= 1.0:
# Convert to percent
self.score_data[self.ycolumn] *= 100
score_min = np.min(self.score_data[self.ycolumn])
score_max = np.max(self.score_data[self.ycolumn])
if score_min < 0 or score_max > 100:
err = 'Score {} should be between 0 and 100, got {} to {}'
err = err.format(self.ycolumn, score_min, score_max)
raise ValueError(err)
self.xlimits = None
self.ylimits = [(0, 5), (65, 100)]
# Styling
self.plot_style = plot_style
self.barplot_orient = barplot_orient
self.suffix = suffix
self.palette = palette
self.fig_height = 8 # in - height of the figure
self.bar_width = 1.25 # in - width of individual bars
self.bar_padding = 1 # in - width of the left padding to add to the figure size
# Annotator plots
self.annotator_label = {
'Annotator': 'Rater',
'Detector': '',
}[self.annotator_column]
# Percent plots
self.pct_label = 'Percent Labeled'
self.pct_order = ['10', '30', '100']
self.pct_xticklabels = [f'{pct}%' for pct in self.pct_order]
def calc_figsize(self, score_data, xcolumn, hue_column=None):
""" Work out how big of a figure to generate
:returns:
A tuple of (figure width, figure height) in inches
"""
categories = np.unique(score_data[xcolumn])
num_categories = len(categories)
if num_categories < 2:
err = 'Got {} unique categories for column {}: {}'
err = err.format(num_categories, xcolumn, categories)
raise ValueError(err)
if hue_column is None:
num_hue_categories = 1
else:
hue_categories = np.unique(score_data[hue_column])
num_hue_categories = len(hue_categories)
if num_hue_categories < 2:
err = 'Got {} unique categories for column {}: {}'
err = err.format(num_hue_categories, hue_column, hue_categories)
raise ValueError(err)
num_bars = num_categories * num_hue_categories
return (num_bars * self.bar_width + self.bar_padding, self.fig_height)
def swap_limits(self, limits, orient=None):
""" Swap the order of the tuple in limits when the orientation is reversed
:param tuple limits:
A tuple with elements corresponding to the order when vertical
:returns:
The elements swapped when horizontal
"""
if limits is None:
return None
if orient is None:
orient = self.barplot_orient
if orient.startswith('h'):
limits = tuple(reversed(limits))
return limits
def subset_score_data(self, subset=None, xcolumn=None, order=None):
""" Subset the data
:param dict subset:
If not None, a dictionary of column: values to subset the dataframe by
:returns:
A new dataframe with only rows where (column in values)
"""
if subset in (None, {}):
return self.score_data, order
score_data = self.score_data
print('Size before filtering: {} rows'.format(score_data.shape[0]))
for key, values in subset.items():
            # NumPy removed the np.bool alias; use the builtin bool as the dtype
            mask = np.zeros((score_data.shape[0], ), dtype=bool)
if isinstance(values, (str, int, float)):
values = [values]
# Only keep values matching the selected categories
column_data = score_data[key]
for value in values:
mask = np.logical_or(mask, column_data == value)
score_data = score_data[mask]
print('After filtering "{}": {} rows'.format(key, score_data.shape[0]))
# Mask the order as well if we're filtering the relevant category
if order is not None and xcolumn is not None and key == xcolumn:
order = [o for o in order if o in values]
print('After filtering order for "{}": {}'.format(key, order))
# Final score filter
print('Size after filtering: {} rows'.format(score_data.shape[0]))
return score_data, order
def plot_comparison(self, xcolumn, xlabel,
subset=None, prefix='', order=None,
hue=None, hue_order=None,
barplot_orient=None,
xticklabels=None,
pathname=None,
sig_category=None):
""" Plot an arbitrary comparison between different samples
:param str xcolumn:
Name of the column to use for the categorical data
:param str xlabel:
Label for the categorical data
"""
if barplot_orient is None:
barplot_orient = self.barplot_orient
if pathname is None:
pathname = xcolumn.lower()
if sig_category is None:
sig_category = xcolumn
if subset in (None, {}):
print('Scoring {} vs {}...'.format(xcolumn, self.ycolumn))
else:
print('Scoring filtered {} vs {}...'.format(xcolumn, self.ycolumn))
# Filter the data and calculate a figure size
score_data, order = self.subset_score_data(subset=subset,
xcolumn=xcolumn,
order=order)
figsize = self.calc_figsize(score_data, xcolumn=xcolumn, hue_column=hue)
# Swap all the axes when horizontal
figsize = self.swap_limits(figsize, orient=barplot_orient)
xlimits, ylimits = self.swap_limits((self.xlimits, self.ylimits),
orient=barplot_orient)
order = self.swap_limits(order, orient=barplot_orient)
xticklabels = self.swap_limits(xticklabels, orient=barplot_orient)
significance = calc_pairwise_significance(score_data,
category=sig_category,
score=self.ycolumn)
with set_plot_style(self.plot_style) as style:
outfile = '{}_vs_{}{}'.format(self.score_type, pathname, self.suffix)
if prefix not in ('', None):
outfile = prefix + '_' + outfile
outfile = self.outdir / outfile
with SplitAxes(figsize=figsize, xlimits=xlimits, ylimits=ylimits) as ax:
add_barplot(y=self.ycolumn, x=xcolumn, order=order,
data=score_data, ax=ax, significance=significance,
orient=barplot_orient, palette=self.palette,
xlabel=xlabel, xticklabels=xticklabels, ylabel=self.ylabel,
hue=hue, hue_order=hue_order,
savefile=outfile)
style.show(outfile, transparent=True)
def plot_annotator(self, subset=None, prefix='', hue=None, hue_order=None, pathname=None, order=None, sig_category=None):
""" Make an annotator plot """
self.plot_comparison(xcolumn=self.annotator_column,
xlabel=self.annotator_label,
subset=subset,
prefix=prefix,
order=order,
hue=hue,
hue_order=hue_order,
pathname=pathname,
sig_category=sig_category)
def plot_percent(self, subset=None, prefix='', hue=None, hue_order=None, pathname=None, order=None, sig_category=None):
""" Make a percent labeled plot """
if self.for_paper:
barplot_orient = 'horizontal'
else:
barplot_orient = self.barplot_orient
if order is None:
order = self.pct_order
if pathname is None:
pathname = 'pct'
self.plot_comparison(xcolumn=self.pct_column,
xlabel=self.pct_label,
subset=subset,
prefix=prefix,
hue=hue,
hue_order=hue_order,
barplot_orient=barplot_orient,
order=order,
xticklabels=self.pct_xticklabels,
pathname=pathname,
sig_category=sig_category)
def plot_tiles(self, subset=None, prefix='', hue=None, hue_order=None, pathname=None, order=None, sig_category=None):
""" Make a plot for the individual tiles """
self.plot_comparison(xcolumn=self.tile_column,
xlabel=self.tile_column,
subset=subset,
prefix=prefix,
order=order,
hue=hue,
hue_order=hue_order,
pathname=pathname,
sig_category=sig_category)
# Functions
def plot_roc_curve(roc_lines, xlines=None, linewidth=LINEWIDTH, outfile=None,
plot_style=PLOT_STYLE, figsize=FIGSIZE, palette=PALETTE,
savefile=None):
""" Plot the ROC curves for sets of train/test pairs
:param list[ROCData] roc_lines:
The list of ROC data objects to plot
:param list[tuple] xlines:
List of (yvalue, label) lines
:param float linewidth:
Width of the lines to plot
:param Path outfile:
If not None, the path to save the ROC curve to
:param str plot_style:
The style to use for the plots
:param tuple[float] figsize:
The size in inches of the figure
:param Path savefile:
If not None, the JSON file to write containing all the plot data
"""
if xlines is None:
xlines = []
savedata = []
with set_plot_style(plot_style) as style:
fig, ax = plt.subplots(1, 1, figsize=figsize)
wheel = colorwheel(palette, n_colors=6)
for roc_line, color in zip(sorted(roc_lines, key=lambda x: x.label), wheel):
roc_line.plot_roc(ax=ax, color=color, linewidth=linewidth, annotate_knee=False)
if savefile is not None:
savedata.append(roc_line.to_plotdata())
ax.plot([0, 1], [0, 1], color='navy', linewidth=linewidth, linestyle='--',
label=None)
for (xvalue, xlabel), color in zip(xlines, colorwheel()):
ax.plot([0, 1], [xvalue, xvalue], color=color, linewidth=linewidth,
linestyle='--', label=xlabel)
ax.set_xlim([-0.01, 1.01])
ax.set_ylim([-0.01, 1.01])
ax.set_xlabel('False Positive Rate')
ax.set_ylabel('True Positive Rate')
ax.legend(loc="lower right")
style.show(outfile, transparent=True)
# Cache the final output data
if savefile is not None:
savefile = savefile.parent / (savefile.stem + '.json')
savefile.parent.mkdir(parents=True, exist_ok=True)
print('Saving plot data to {}'.format(savefile))
with savefile.open('wt') as fp:
for rec in savedata:
fp.write(json.dumps(rec) + '\n')
def plot_pr_curve(pr_lines, xlines=None, linewidth=LINEWIDTH, outfile=None,
plot_style=PLOT_STYLE, figsize=FIGSIZE, palette=PALETTE,
savefile=None):
""" Plot the P/R curves for sets of train/test pairs
:param list[ROCData] pr_lines:
        The list of ROC data objects whose precision/recall curves to plot
:param list[tuple] xlines:
List of (yvalue, label) lines
:param float linewidth:
Width of the lines to plot
:param Path outfile:
If not None, the path to save the P/R curve to
:param str plot_style:
The style to use for the plots
:param tuple[float] figsize:
        The size in inches of the figure
    :param Path savefile:
        If not None, the JSON file to write containing all the plot data
    """
if xlines is None:
xlines = []
savedata = []
with set_plot_style(plot_style) as style:
fig, ax = plt.subplots(1, 1, figsize=figsize)
wheel = colorwheel(palette, n_colors=6)
for pr_line, color in zip(sorted(pr_lines, key=lambda x: x.label), wheel):
pr_line.plot_precision_recall(ax=ax, color=color, linewidth=linewidth, annotate_knee=False)
if savefile is not None:
savedata.append(pr_line.to_plotdata())
for (xvalue, xlabel), color in zip(xlines, colorwheel()):
ax.plot([0, 1], [xvalue, xvalue], color=color, linewidth=linewidth,
linestyle='--', label=xlabel)
ax.set_xlim([-0.01, 1.01])
ax.set_ylim([-0.01, 1.01])
ax.set_xlabel('Recall')
ax.set_ylabel('Precision')
ax.legend(loc="lower left")
style.show(outfile, transparent=True)
# Cache the final output data
if savefile is not None:
savefile = savefile.parent / (savefile.stem + '.json')
savefile.parent.mkdir(parents=True, exist_ok=True)
print('Saving plot data to {}'.format(savefile))
with savefile.open('wt') as fp:
for rec in savedata:
fp.write(json.dumps(rec) + '\n')
|
import numpy as np
import tensorbackends
from .statevector import StateVector
def computational_zeros(nsite, *, backend='numpy'):
backend = tensorbackends.get(backend)
tensor = backend.zeros((2,)*nsite, dtype=complex)
tensor[(0,)*nsite] = 1
return StateVector(tensor, backend)
def computational_ones(nsite, *, backend='numpy'):
backend = tensorbackends.get(backend)
tensor = backend.zeros((2,)*nsite, dtype=complex)
tensor[(1,)*nsite] = 1
return StateVector(tensor, backend)
def computational_basis(nsite, bits, *, backend='numpy'):
backend = tensorbackends.get(backend)
bits = np.asarray(bits).reshape(nsite)
tensor = backend.zeros((2,)*nsite, dtype=complex)
tensor[tuple(bits)] = 1
return StateVector(tensor, backend)
def random(nsite, *, backend='numpy'):
backend = tensorbackends.get(backend)
shape = (2,)*nsite
tensor = backend.random.uniform(-1,1,shape) + 1j * backend.random.uniform(-1,1,shape)
tensor /= backend.norm(tensor)
return StateVector(tensor, backend)
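# Quick usage sketch (numpy backend; illustrative only):
#
#     psi = computational_zeros(3)              # |000>
#     phi = computational_basis(3, [1, 0, 1])   # |101>
#     chi = random(3)                           # normalized random 3-qubit state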
|
import logging
import os
from pathlib import Path
from uvicorn.supervisors.basereload import BaseReload
logger = logging.getLogger("uvicorn.error")
class StatReload(BaseReload):
def __init__(self, config, target, sockets):
super().__init__(config, target, sockets)
self.reloader_name = "statreload"
self.mtimes = {}
def should_restart(self):
for filename in self.iter_py_files():
try:
mtime = os.path.getmtime(filename)
except OSError: # pragma: nocover
continue
old_time = self.mtimes.get(filename)
if old_time is None:
self.mtimes[filename] = mtime
continue
elif mtime > old_time:
display_path = os.path.normpath(filename)
if Path.cwd() in Path(filename).parents:
display_path = os.path.normpath(os.path.relpath(filename))
message = "StatReload detected file change in '%s'. Reloading..."
logger.warning(message, display_path)
return True
return False
def iter_py_files(self):
for reload_dir in self.config.reload_dirs:
for subdir, dirs, files in os.walk(reload_dir):
if (
self.config.reload_exclude_dirs
and Path(subdir).absolute() in self.config.reload_exclude_dirs
):
continue
for file in files:
if file.endswith(".py"):
yield subdir + os.sep + file
|
# -*- coding: utf-8 -*-
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Integer, ForeignKey, DateTime, Text, Boolean, UniqueConstraint
from datetime import datetime
Base = declarative_base()
_engine = None
_session = sessionmaker()
USERLEVELS = {
'none': 0,
'user': 10,
'admin': 20,
}
MEDIATYPES = {
'video': 0,
'audio': 1
}
MEDIASTATUS = {
'not_started': 0,
'fetching_metadata': 1,
'downloading': 2,
'finished': 3,
'error': 4,
}
class Event(Base):
__tablename__ = "event"
id = Column(Integer, primary_key=True)
name = Column(String(32))
visible = Column(Boolean)
def serialize(self):
return {
'id': self.id,
'name': self.name,
'visible': self.visible
}
class Player(Base):
__tablename__ = "player"
id = Column(Integer, primary_key=True)
token = Column(String(16), unique=True, index=True)
event = Column(ForeignKey('event.id'))
name = Column(String(32))
last = Column(ForeignKey('media.id'), default=None)
status = Column(Integer, default=0)
def serialize(self, show_token=False):
return {
'id': self.id,
'token': self.token if show_token else None,
'name': self.name,
'last': self.last,
'event_id': self.event,
'status': self.status
}
class Skip(Base):
__tablename__ = "skip"
id = Column(Integer, primary_key=True)
user = Column(ForeignKey('user.id'))
media = Column(ForeignKey('media.id'))
player = Column(ForeignKey('player.id'))
__table_args__ = (
UniqueConstraint('user', 'media', 'player', name='_user_media_player_uc'),
)
class SourceQueue(Base):
__tablename__ = "sourcequeue"
id = Column(Integer, primary_key=True)
user = Column(ForeignKey('user.id'))
target = Column(ForeignKey('player.id'))
    # Pass the callable (not its result) so the timestamp is computed per row
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
# Allow only one source queue per user per target player
__table_args__ = (
UniqueConstraint('user', 'target', name='_user_target_uc'),
)
def serialize(self):
s = db_session()
        # Note: no trailing comma here, which would wrap the list in a 1-tuple
        items = [media.serialize() for media in s.query(Media).filter_by(queue=self.id).all()]
s.close()
return {
'id': self.id,
'user': self.user,
'target': self.target,
'items': items
}
class Source(Base):
__tablename__ = "source"
id = Column(Integer, primary_key=True)
hash = Column(String(64), default='')
file_name = Column(String(256), default='')
file_ext = Column(String(4))
mime_type = Column(String(32))
size_bytes = Column(Integer, default=0)
media_type = Column(Integer, default=0)
youtube_hash = Column(String(32), default='')
other_url = Column(String(512), default='')
length_seconds = Column(Integer, default=0)
    created_at = Column(DateTime(timezone=True), default=datetime.utcnow)
title = Column(String(100), default='')
description = Column(Text, default='')
status = Column(Integer, default=0)
message = Column(String(64), default='')
video_codec = Column(String(16), default='')
video_bitrate = Column(Integer, default=0)
video_w = Column(Integer, default=0)
video_h = Column(Integer, default=0)
audio_codec = Column(String(16), default='')
audio_bitrate = Column(Integer, default=0)
def serialize(self):
return {
'id': self.id,
'youtube_hash': self.youtube_hash,
'other_url': self.other_url,
'status': self.status,
'title': self.title,
'description': self.description,
'file_ext': self.file_ext,
'mime_type': self.mime_type,
'size_bytes': self.size_bytes,
'media_type': self.media_type,
'length_seconds': self.length_seconds,
'message': self.message,
'audio': {
'codec': self.audio_codec,
'bitrate': self.audio_bitrate
},
'video': {
'codec': self.video_codec,
'bitrate': self.video_bitrate,
'width': self.video_w,
'height': self.video_h
}
}
class Media(Base):
__tablename__ = "media"
id = Column(Integer, primary_key=True)
source = Column(ForeignKey('source.id'), nullable=True, default=None)
user = Column(ForeignKey('user.id'))
queue = Column(ForeignKey('sourcequeue.id'))
def serialize(self):
s = db_session()
source_entry = s.query(Source).filter_by(id=self.source).one().serialize() if self.source else None
s.close()
return {
'id': self.id,
'source_id': self.source,
'source': source_entry,
'user_id': self.user
}
class Setting(Base):
__tablename__ = "setting"
id = Column(Integer, primary_key=True)
user = Column(ForeignKey('user.id'))
key = Column(String(32))
value = Column(String(32))
type = Column(Integer)
max = Column(Integer)
min = Column(Integer)
def serialize(self):
return {
'id': self.id,
'key': self.key,
'value': self.value,
'type': self.type,
'max': self.max,
'min': self.min
}
class User(Base):
__tablename__ = "user"
id = Column(Integer, primary_key=True)
username = Column(String(32), unique=True)
password = Column(String(255))
nickname = Column(String(32))
email = Column(String(128))
level = Column(Integer, default=USERLEVELS['none'])
def serialize(self):
return {
'id': self.id,
'username': self.username,
'nickname': self.nickname,
'email': self.email,
'level': self.level
}
class Session(Base):
__tablename__ = "session"
key = Column(String(32), primary_key=True)
user = Column(ForeignKey('user.id'))
    start = Column(DateTime(timezone=True), default=datetime.utcnow)
def db_init(engine_str):
    global _engine  # rebind the module-level engine rather than a local
    _engine = create_engine(engine_str + '?charset=utf8', pool_recycle=3600)
    _session.configure(bind=_engine)
# Base.metadata.create_all(_engine)
def db_session():
return _session()
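# Illustrative wiring (hypothetical DSN, shown only to demonstrate db_init/db_session):
#
#     db_init('mysql://user:password@localhost/mediadb')
#     session = db_session()
#     users = session.query(User).all()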
|
import logging
import re
import time
from urllib.parse import parse_qs
from urllib.parse import urlparse
import pandas as pd
import requests
from bs4 import BeautifulSoup
from covidata import config
from covidata.persistencia.dao import persistir_dados_hierarquicos
def pt_PortoAlegre():
url = config.url_pt_PortoAlegre
parsed_uri = urlparse(url)
url_base = '{uri.scheme}://{uri.netloc}/'.format(uri=parsed_uri)
page = requests.get(url)
parser = BeautifulSoup(page.content, 'lxml')
proximas_paginas = __get_paginacao(parser, url_base)
nomes_colunas_documentos = []
linhas_df = []
linhas_df_documentos = []
linhas_df_itens = []
linhas_df_documentos, linhas_df_itens, nomes_colunas_documentos = __processar_pagina(linhas_df,
linhas_df_documentos,
linhas_df_itens,
nomes_colunas_documentos,
parser, url_base)
for pagina in proximas_paginas:
parser = __parse(pagina)
linhas_df_documentos, linhas_df_itens, nomes_colunas_documentos = __processar_pagina(linhas_df,
linhas_df_documentos,
linhas_df_itens,
nomes_colunas_documentos,
parser, url_base)
nomes_colunas = ['Número da licitação', 'Objeto', 'Tipo', 'Status', 'Aplicar o Decreto 10.024/2019',
'Início de propostas', 'Final de propostas', 'Limite para impugnações', 'Data de Abertura',
'Pregoeiro', 'Autoridade Competente', 'Apoio', 'Origem dos Recursos', 'Operação']
df = pd.DataFrame(linhas_df, columns=nomes_colunas)
df_documentos = pd.DataFrame(linhas_df_documentos, columns=nomes_colunas_documentos)
    nomes_colunas_itens = ['Número da Licitação', 'Item', 'Descrição', 'Unidade', 'Quantidade', 'Melhor Lance (R$)',
                           'Situação', 'Modalidade']
df_itens = pd.DataFrame(linhas_df_itens, columns=nomes_colunas_itens)
persistir_dados_hierarquicos(df, {'Documentos': df_documentos, 'Itens': df_itens}, 'portal_transparencia',
'Licitacoes', 'RS', 'Porto Alegre')
def __get_paginacao(parser, url_base):
tag_paginacao = parser.findAll("ul", {"class": "pagination"})
if len(tag_paginacao) > 0:
paginador = tag_paginacao[0]
paginas = paginador.findAll('li')
proximas_paginas = []
for pagina in paginas[1:]:
link = url_base + pagina.findAll('a')[0].attrs['href']
proximas_paginas.append(link)
return proximas_paginas
else:
return []
def __get_paginacao_itens(parser, url_base, codigo_interno_licitacao):
tag_paginacao = parser.findAll("ul", {"class": "pagination"})
proximas_paginas = []
if len(tag_paginacao) > 0:
paginador = tag_paginacao[0]
paginas = paginador.findAll('li')
numero_pagina = 2
for _ in paginas[1:]:
link = url_base + f'18/SessaoPublica/lances/htmlAba.asp?param=0&ttTipo=0&ttCD_LICITACAO=' \
f'{codigo_interno_licitacao}&ttPagina={numero_pagina}'
numero_pagina += 1
proximas_paginas.append(link)
else:
link = url_base + f'18/SessaoPublica/lances/htmlAba.asp?param=0&ttTipo=0&ttCD_LICITACAO=' \
f'{codigo_interno_licitacao}&ttPagina=1'
proximas_paginas.append(link)
return proximas_paginas
def __processar_pagina(linhas_df, linhas_df_documentos, linhas_df_itens, nomes_colunas_documentos, parser, url_base):
registros = parser.findAll('div', {'class': 'item-registro xs-col-12'})
for registro in registros:
textos = re.split('\r\n|\n\t|\t\n', registro.findAll('h2')[0].get_text().strip())
numero_licitacao = textos[0]
conteudo = registro.findAll('div')[0]
divs = conteudo.findAll('div')
div_descricao_registro = divs[0]
objeto = div_descricao_registro.findAll('p')[0].get_text().strip()
div_detalhes = divs[1]
spans = div_detalhes.findAll('span')
tipo = spans[0].get_text().strip()
tipo = tipo[tipo.find('\n') + 1:len(tipo)].strip()
status = spans[1].get_text().strip()
status = status[status.find('\n') + 1:len(status)].strip()
        # Details page
parser_detalhes, codigo_interno_licitacao = __processar_detalhes(numero_licitacao, linhas_df, objeto, spans,
status, tipo, url_base)
        # Documents
linhas_df_documentos, nomes_colunas_documentos = __processar_documentos(numero_licitacao, linhas_df_documentos,
parser_detalhes, url_base)
        # Items
linhas_df_itens = __processar_itens(numero_licitacao, linhas_df_itens, parser_detalhes, url_base,
codigo_interno_licitacao)
return linhas_df_documentos, linhas_df_itens, nomes_colunas_documentos
def __processar_detalhes(numero_licitacao, linhas_df, objeto, spans, status, tipo, url_base):
dados_adicionais = spans[2]
url_detalhes = dados_adicionais.findAll('a', {'title': 'Dados do Processo'})[0].attrs['href']
codigo_interno_licitacao = url_detalhes[url_detalhes.rfind('-') + 1:url_detalhes.rfind('/')]
url_detalhes = url_base + url_detalhes
parser_detalhes = __parse(url_detalhes)
dados_licitacao = parser_detalhes.findAll('p', {'class': 'ff1 f400 fs16 fcW'})[0]
bs = dados_licitacao.findAll('b')
aplicar_decreto = bs[1].next_sibling.string.strip()
inicio_propostas = bs[3].next_sibling.string.strip()
final_propostas = bs[4].next_sibling.string.strip()
limite_impugnacoes = bs[5].next_sibling.string.strip()
data_abertura = bs[6].next_sibling.string.strip()
pregoeiro = bs[7].next_sibling.string.strip()
autoridade_competente = bs[8].next_sibling.string.strip()
apoio = bs[9].next_sibling.string.strip()
origem_recursos = ''
operacao = ''
i = 11
if 'Origem dos Recursos:' in bs[i].get_text():
origem_recursos = bs[i].next_sibling.string.strip()
i += 1
if 'Operação:' in bs[i].get_text():
operacao = bs[i].next_sibling.string.strip()
linhas_df.append([numero_licitacao, objeto, tipo, status, aplicar_decreto, inicio_propostas, final_propostas,
limite_impugnacoes, data_abertura, pregoeiro, autoridade_competente, apoio, origem_recursos,
operacao])
return parser_detalhes, codigo_interno_licitacao
def __parse(url):
page = requests.get(url)
return BeautifulSoup(page.content, 'lxml')
def __processar_documentos(numero_licitacao, linhas_df_documentos, parser_detalhes, url_base):
div_documentos = parser_detalhes.findAll('div', {'class': 'col-xs-12 col-sm-6 bgMidGreen'})[0]
url_documentos = div_documentos.findAll('a', {'title': 'Atas e demais documentos do processo'})[0].attrs['href']
url_documentos = url_base + url_documentos
parsed_url_documentos = urlparse(url_documentos)
chave = parse_qs(parsed_url_documentos.query)['ttCD_CHAVE'][0]
parser_documentos = __parse(url_documentos)
span = parser_documentos.findAll('span', {'class': 'green'})
numero_processo_interno = span[0].findAll('b')[0].get_text()
tabela = parser_documentos.findAll('table')[0]
cabecalho = tabela.findAll('th')
nomes_colunas_documentos = ['Número da Licitação', 'Número do Processo Interno'] + \
[coluna.get_text() for coluna in cabecalho] + \
['Comentário']
linhas_documentos = tabela.findAll('tbody')[0].findAll('tr')
for linha_documento in linhas_documentos:
tds = linha_documento.findAll('td')
a = tds[0].findAll('a')
if len(a) > 0 and a[0].attrs['title'] == 'Clique para expandir':
comentario = a[0].get_text()
            # Executes the call the page would have made if the row were actually clicked
onclick = linha_documento.attrs['onclick']
codigo_fornecedor = onclick[onclick.find('(') + 1:onclick.find(',')]
url_documentos_fornecedores = url_base + f'18/Atas/DocumentoFornecedor/?ttCD_LICITACAO={chave}' \
f'&ttCD_FORNECEDOR={codigo_fornecedor}'
parser_documentos_fornecedor = __parse(url_documentos_fornecedores)
links = parser_documentos_fornecedor.findAll('a')
for link in links:
nome_arquivo = link.get_text().strip()
if nome_arquivo != '':
linhas_df_documentos.append(
[numero_licitacao, numero_processo_interno, nome_arquivo, '', url_base + link.attrs['href'],
comentario])
else:
linhas_df_documentos.append(
[numero_licitacao, numero_processo_interno, tds[0].get_text(), tds[1].get_text(),
tds[2].findAll('a')[0].attrs['href'], ''])
return linhas_df_documentos, nomes_colunas_documentos
def __processar_itens(numero_licitacao, linhas_df_itens, parser, url_base, codigo_interno_licitacao):
proximas_paginas = __get_paginacao_itens(parser, url_base, codigo_interno_licitacao)
for pagina in proximas_paginas:
parser = __parse(pagina)
__processar_pagina_itens(numero_licitacao, linhas_df_itens, parser)
return linhas_df_itens
def __processar_pagina_itens(numero_licitacao, linhas_df_itens, parser):
    try:
        div_itens = parser.findAll('div', {'class': 'list-itens-processo'})[0]
    except IndexError:
        # No item list on this page; nothing to collect
        return
    divs_itens = div_itens.findAll('div', {'class': 'item-processo'})
for div_item in divs_itens:
spans = div_item.findAll('span')
codigo_item = spans[0].contents[1].string
descricao = spans[1].get_text()
unidade = spans[2].contents[2].string
quantidade = spans[3].contents[2].string
melhor_lance = spans[4].contents[2].string
svgs = spans[5].findAll('svg')
situacao = ''
modalidade = ''
if len(svgs) > 0:
situacao = svgs[0].get_text()
modalidade = svgs[1].find('title').get_text()
linhas_df_itens.append(
[numero_licitacao, codigo_item, descricao, unidade, quantidade, melhor_lance, situacao, modalidade])
def main():
logger = logging.getLogger('covidata')
logger.info('Portal de transparência da capital...')
start_time = time.time()
pt_PortoAlegre()
logger.info("--- %s segundos ---" % (time.time() - start_time))
if __name__ == '__main__':
    main()
|
import numpy as np
def fcann2_train(X, Y, nhidden=5, param_niter=1e5, param_delta=0.05, param_lambda=1e-3):
w1 = np.random.randn(X.shape[1], nhidden) * 0.01
b1 = np.random.randn(1, nhidden) * 0.01
w2 = np.random.randn(nhidden, max(Y) + 1) * 0.01
b2 = np.random.randn(1, max(Y) + 1) * 0.01
Y_ = np.stack([1 - Y, Y], axis=1)
for i in range(int(param_niter)):
S1 = X @ w1 + b1
H1 = relu(S1)
S2 = H1 @ w2 + b2
P = softmax(S2)
Gs2 = P - Y_
        if i % 100 == 0:
            # Mean cross-entropy: index each row by its true class label
            print(-np.sum(np.log(P[np.arange(len(X)), Y])) / len(X))
gradW2 = Gs2.T @ H1
gradb2 = np.sum(Gs2, axis=0, keepdims=True)
#print(Gs2.shape, w2.T.shape)
Gh1 = Gs2 @ w2.T
Gs1 = Gh1 * (S1 > 0).astype(np.float32)
gradW1 = Gs1.T @ X
#print(gradW1.shape, w1.shape)
gradb1 = np.sum(Gs1, axis=0, keepdims=True)
#print(w1.shape, b1.shape, w2.shape, b2.shape)
w1 = w1 - gradW1.T * param_delta - param_delta * param_lambda * w1
b1 = b1 - param_delta * gradb1
w2 = w2 - gradW2.T * param_delta - param_delta * param_lambda * w2
b2 = b2 - param_delta * gradb2
#print(w1.shape, b1.shape, w2.shape, b2.shape)
return [w1, w2], [b1, b2]
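# Minimal usage sketch (hypothetical toy data; binary labels, as the Y_ stacking above assumes):
#
#     X = np.random.randn(100, 2)
#     Y = (X[:, 0] * X[:, 1] > 0).astype(int)
#     W, b = fcann2_train(X, Y, nhidden=5, param_niter=1e4)
#     probs = fcann2_classify(X, W, b)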
def fcann2_classify(X, W, b):
S1 = X @ W[0] + b[0]
H1 = relu(S1)
S2 = H1 @ W[1] + b[1]
P = softmax(S2)
return P
def relu(Y):
return np.maximum(0, Y)
def softmax(Y):
    # Shift by the row-wise max for numerical stability before exponentiating
    e = np.exp(Y - np.max(Y, axis=1, keepdims=True))
    return e / np.sum(e, axis=1, keepdims=True) |
from unittest import TestCase
from camunda.utils.auth_basic import AuthBasic, obfuscate_password
class TestAuthBasic(TestCase):
def test_auth_basic(self):
auth_basic = AuthBasic(**{
"username": "test",
"password": "test",
})
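        # 'Basic dGVzdDp0ZXN0' encodes base64("test:test"), the standard HTTP Basic scheme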
self.assertEqual(auth_basic.token, 'Basic dGVzdDp0ZXN0')
def test_obfuscate_password(self):
default_config = {
"auth_basic": {"username": "demo", "password": "demo"},
"maxTasks": 1,
"lockDuration": 10000,
"asyncResponseTimeout": 0,
"isDebug": True,
}
obfuscate_config = {
"auth_basic": {"username": "demo", "password": "***"},
"maxTasks": 1,
"lockDuration": 10000,
"asyncResponseTimeout": 0,
"isDebug": True,
}
self.assertEqual(obfuscate_password(default_config), obfuscate_config)
|
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: disable=too-many-statements
# pylint: disable=too-many-locals
# pylint: disable=line-too-long
from azure.cli.core.commands import CliCommandType
def load_command_table(self, _):
from ..generated._client_factory import cf_virtual_machine
vm_virtual_machine = CliCommandType(
operations_tmpl='azure.mgmt.compute.operations._virtual_machines_operations#VirtualMachinesOperations.{}',
client_factory=cf_virtual_machine,
)
with self.command_group(
'vm virtual-machine', vm_virtual_machine, client_factory=cf_virtual_machine, is_experimental=True
) as g:
g.custom_command('assess-patch', 'vm_virtual_machine_assess_patch', minApi='2020-06-01')
from ..generated._client_factory import cf_virtual_machine_scale_set_vm_extension
vm_virtual_machine_scale_set_vm_extension = CliCommandType(
operations_tmpl='azure.mgmt.compute.operations._virtual_machine_scale_set_vm_extensions_operations#VirtualMachineScaleSetVmExtensionsOperations.{}',
client_factory=cf_virtual_machine_scale_set_vm_extension,
)
with self.command_group(
'vm virtual-machine-scale-set-vm-extension',
vm_virtual_machine_scale_set_vm_extension,
client_factory=cf_virtual_machine_scale_set_vm_extension,
) as g:
g.custom_command('list', 'vm_virtual_machine_scale_set_vm_extension_list')
g.custom_show_command('show', 'vm_virtual_machine_scale_set_vm_extension_show')
g.custom_command('create', 'vm_virtual_machine_scale_set_vm_extension_create', supports_no_wait=True)
g.custom_wait_command('wait', 'vm_virtual_machine_scale_set_vm_extension_show')
|
from django.shortcuts import render
from django.http import HttpResponse
from django.http import JsonResponse
from .models import Document, Create_page, Demo_page
import json
from datetime import date
import datetime
# Create your views here.
def send_html_element_from_python_function(request):
if request.method == 'GET':
Value = request.GET['value']
Value1 = "<div class='form-group' id='username1' draggable='true' ondragstart='dragStart(event)'> <label for='usr'><b>Name:</b></label> <input type='text' class='form-control' id='usr1'></div>"
return HttpResponse(Value1)
def request_element(request):
if request.method == 'GET':
pk1 = request.GET['value']
file = Document.objects.get(pk=pk1)
return HttpResponse(file.document)
def Home(request):
return render(request, "home.html")
def save_file(request, filename):
file_name = filename
if request.method == 'GET':
htmlfile = request.GET['html_page']
file = Create_page.objects.create(Name_of_page=file_name, document = htmlfile)
file.save()
return HttpResponse("File saved")
def Show_pages(request):
all_pages = Create_page.objects.all()
context = {'all_pages': all_pages}
return render(request, 'show_pages.html', context)
def Open_page(request):
if request.method == 'GET':
pk1 = request.GET['page_id']
html_page = Create_page.objects.get(pk = pk1)
html_page1 = html_page.document
return HttpResponse(html_page1)
# VvvebJs
import os
import re
from django.utils import text
MAX_FILE_LIMIT = 1024 * 1024 * 2 # 2 Megabytes max html file size
def sanitizeFileName(file):
    # Sanitize: strip GET parameters, collapse double dots, and drop unsafe characters
    file = re.sub(r'\?.*$', '', file)
    file = re.sub(r'\.{2,}', '', file)
    file = re.sub(r'[^\/\\a-zA-Z0-9\-\._]', '', file)
    return file
from urllib.request import urlopen
from django.conf import settings
HOSTPOT_DIR = getattr(settings, "HOSTPOT_DIR", None)
def save(request):
    html = ''
    if request.POST.get('startTemplateUrl'):
        startTemplateUrl = sanitizeFileName(request.POST['startTemplateUrl'])
        html = urlopen(startTemplateUrl).read().decode('utf-8')
    elif request.POST.get('html'):
        html = request.POST['html'][0:MAX_FILE_LIMIT]
    # allow_unicode=True replaces the undefined `is_unicode` flag from the original
    file = os.path.join(HOSTPOT_DIR, text.slugify(request.POST['file'], allow_unicode=True))
    if not os.path.exists(file):
        with open(file, 'w') as html_file:
            html_file.write(html)
    return HttpResponse("File saved")
from django.template.loader import render_to_string
def demo(request, id):
if request.method == 'GET':
demo_page = Demo_page.objects.get(pk = id)
html_page = demo_page.file
return render(request, html_page) |
from setuptools import setup, find_packages
setup(
name='frasco-angular',
version='0.7.1',
url='http://github.com/frascoweb/frasco-angular',
license='MIT',
author='Maxime Bouroumeau-Fuseau',
author_email='maxime.bouroumeau@gmail.com',
description="Angular integration for Frasco",
packages=find_packages(),
package_data={
'frasco_angular': ['templates/*.html', 'static/*.js']
},
zip_safe=False,
platforms='any',
install_requires=[
'frasco>=0.3',
'frasco-assets>=0.1.2',
'htmlmin'
]
)
|
from contracting_process.resource_level.consistent.period_duration_in_days import calculate
item_test_undefined = {
"tender": {
"tenderPeriod": {"startDate": "2019-08-14T16:47:36+01:00"},
"enquiryPeriod": {"startDate": "2019-08-14T16:47:36+01:00", "endDate": None, "durationInDays": 2},
}
}
def test_undefined():
result = calculate({})
assert result["result"] is None
assert result["application_count"] is None
assert result["pass_count"] is None
assert result["meta"] == {"reason": "there are no values with check-specific properties"}
result = calculate(item_test_undefined)
assert result["result"] is None
assert result["application_count"] is None
assert result["pass_count"] is None
assert result["meta"] == {"reason": "there are no values with check-specific properties"}
item_test_passed1 = {
"awards": [
{
"contractPeriod": {
"startDate": "2019-08-14T16:47:36+01:00",
"endDate": None,
"maxExtentDate": "2019-08-15T16:47:36+01:00",
"durationInDays": 1,
}
}
],
"contracts": [
{
"period": {
"startDate": "2019-12-31T16:47:36+01:00",
"endDate": "2020-01-02T16:47:36+01:00",
"durationInDays": 2,
}
},
{
"period": {
"startDate": "2019-01-01T16:47:36+01:00",
"endDate": "2019-02-01T16:47:36+01:00",
"durationInDays": 31,
}
},
],
}
item_test_passed2 = {
"tender": {
"enquiryPeriod": {
"startDate": "2019-08-14T16:47:36+01:00",
"endDate": "2019-08-24T00:00:00+01:00",
"durationInDays": 10,
}
}
}
item_test_passed3 = {
"tender": {
"enquiryPeriod": {
"startDate": "2019-08-14T16:47:36+01:00",
"endDate": "2019-08-24T00:00:00+01:00",
"durationInDays": 9,
}
}
}
def test_passed():
result = calculate(item_test_passed1)
assert result["result"] is True
assert result["application_count"] == 3
assert result["pass_count"] == 3
assert result["meta"] == {
"periods": [
{"path": "awards[0].contractPeriod", "result": True},
{"path": "contracts[0].period", "result": True},
{"path": "contracts[1].period", "result": True},
]
}
result = calculate(item_test_passed2)
assert result["result"] is True
assert result["application_count"] == 1
assert result["pass_count"] == 1
assert result["meta"] == {"periods": [{"path": "tender.enquiryPeriod", "result": True}]}
result = calculate(item_test_passed3)
assert result["result"] is True
assert result["application_count"] == 1
assert result["pass_count"] == 1
assert result["meta"] == {"periods": [{"path": "tender.enquiryPeriod", "result": True}]}
item_test_failed = {
"awards": [
{
"contractPeriod": {
"startDate": "2019-08-14T16:47:36+01:00",
"endDate": "2019-08-14T16:47:36+01:00",
"durationInDays": 1,
}
}
],
"contracts": [
{
"period": {
"startDate": "2019-12-31T16:47:36+01:00",
"endDate": "2020-01-01T16:47:36+01:00",
"maxExtentDate": "2020-01-01T16:47:36+01:00",
"durationInDays": 2,
}
},
{
"period": {
"startDate": "2019-01-01T16:47:36+01:00",
"maxExtentDate": "2019-02-01T16:47:36+01:00",
"durationInDays": 31,
}
},
],
}
def test_failed():
result = calculate(item_test_failed)
assert result["result"] is False
assert result["application_count"] == 3
assert result["pass_count"] == 1
assert result["meta"] == {
"periods": [
{"path": "awards[0].contractPeriod", "result": False},
{"path": "contracts[0].period", "result": False},
{"path": "contracts[1].period", "result": True},
]
}
|
# asyncio_wait_timeout_without_cancel_continue.py
import asyncio
async def phase(i):
print('now in phase {}'.format(i))
try:
await asyncio.sleep(0.1 * i)
except asyncio.CancelledError:
print('phase {} canceled'.format(i))
raise
else:
print('done with phase {}'.format(i))
return 'phase {} result'.format(i)
async def main(phase_count):
print('now in main')
phases = [phase(i) for i in range(phase_count)]
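    # Note: asyncio.wait() warns on bare coroutines since Python 3.8 and rejects them in 3.11+;
    # on newer interpreters wrap each one with asyncio.ensure_future().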
print('wait 0.1s for phases to complete')
completed, pending = await asyncio.wait(phases, timeout=0.1)
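    # Unlike asyncio.wait_for(), a wait() timeout does not cancel the pending tasks;
    # they keep running in the background.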
print('{} completed and {} pending'.format(len(completed), len(pending)))
print('exiting main')
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(main(3))
event_loop.run_until_complete(asyncio.sleep(3))
event_loop.close()
|
"""Management of configuration from environment variables."""
import dataclasses
import re
from json import JSONDecodeError
from pathlib import Path
from typing import TYPE_CHECKING, Any, Optional, cast
from pydantic import (
AnyHttpUrl,
AnyUrl,
BaseSettings,
Field,
FilePath,
PositiveInt,
SecretStr,
conint,
root_validator,
stricturl,
)
from pydantic.env_settings import SettingsError
from pydantic.error_wrappers import ValidationError
if TYPE_CHECKING:
OpcUrl = AnyUrl
PortField = int
else:
OpcUrl = stricturl(tld_required=False, allowed_schemes={"opc.tcp"})
PortField = conint(gt=0, le=2 ** 16)
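# Under TYPE_CHECKING the plain aliases keep static type checkers happy; at runtime the
# constrained pydantic types enforce the opc.tcp scheme and the 1-65536 port range.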
class ConfigError(ValueError):
"""Configuration error exception."""
    def __init__(self, field: Optional[str], error: str) -> None:
"""Initializes configuration error exception.
Args:
field: Configuration field the error is about.
error: A string describing the error.
"""
msg = f"{field.upper()} environment variable: " if field else ""
msg += error
super().__init__(msg)
self.field = field
self.error = error
class CentrifugoSettings(BaseSettings):
"""Centrifugo related configuration options."""
api_key: SecretStr = Field(..., help="Centrifugo API key")
api_url: AnyHttpUrl = Field(
"http://localhost:8000/api", help="URL of Centrifugo HTTP api"
)
proxy_port: PortField = Field(
8008, help="Port for Centrifugo proxy server to listen on"
)
class Config: # noqa: D106
env_prefix = "centrifugo_"
class InfluxSettings(BaseSettings):
"""InfluxDB related configuration options."""
org: str = Field(..., help="InfluxDB organization")
bucket: str = Field(..., help="InfluxDB bucket")
write_token: str = Field(..., help="InfluxDB auth token with write permission")
base_url: AnyHttpUrl = Field("http://localhost:8086/", help="Base InfluxDB URL")
class Config: # noqa: D106
env_prefix = "influxdb_"
class OPCSettings(BaseSettings):
"""OPC-UA related configuration options."""
server_url: OpcUrl = Field(
..., help="URL of the OPC-UA server, including username / password if needed"
)
monitor_nodes: list[str] = Field(
..., help="Array of node IDs to monitor without recording (JSON format)"
)
record_nodes: list[str] = Field(
..., help="Array of node IDs to monitor and record (JSON format)"
)
record_interval: PositiveInt = Field(
60, help="Delay in seconds between polling of recorded nodes"
)
retry_delay: PositiveInt = Field(
5, help="Delay in seconds to retry OPC-UA connection"
)
cert_file: FilePath = Field(None, help="Path of the OPC-UA client certificate")
private_key_file: FilePath = Field(
None, help="Path of the OPC-UA client private key"
)
@root_validator
def check_cert_and_key_set(
cls: "OPCSettings", # noqa: U100, N805
values: dict[str, Any],
) -> dict[str, Any]:
"""Validates that both or none of certificate and private key files parameters are set."""
cert_file: Optional[str] = values.get("cert_file")
private_key_file: Optional[str] = values.get("private_key_file")
if (cert_file is None) != (private_key_file is None):
raise ValueError("Missing one of OPC_CERT_FILE/OPC_PRIVATE_KEY_FILE")
return values
class Config: # noqa: D106
env_prefix = "opc_"
@staticmethod
def schema_extra(schema: dict[str, Any]) -> None:
"""Processes the generated schema."""
for prop in ("cert_file", "private_key_file"):
schema["properties"][prop]["default"] = "unset"
@dataclasses.dataclass
class Settings:
"""Globally manage environment variables configuration options."""
centrifugo: CentrifugoSettings
influx: InfluxSettings
opc: OPCSettings
def __init__(self, env_file: Optional[Path] = None) -> None:
"""Checks the validity of each configuration option.
Args:
env_file: Path to a file defining environment variables.
Raises:
ConfigError: A configuration option is not valid.
"""
try:
for field in dataclasses.fields(self):
setattr(self, field.name, field.type(env_file))
except ValidationError as err:
config_field = None
first_error = err.errors()[0]
loc: str = first_error["loc"][0]
if loc != "__root__":
settings_model = cast(BaseSettings, err.model)
config_field = settings_model.Config.env_prefix + loc
raise ConfigError(config_field, first_error["msg"])
except SettingsError as err:
config_field = "!-UNKNOWN-!"
error_msg = str(err)
cause = err.__cause__
if isinstance(cause, JSONDecodeError):
match = re.search(r'"(\w+)"$', error_msg)
# In this context (SettingsError from JSONDecodeError),
# match must not be None
assert match is not None # noqa: S101
config_field = match[1]
error_msg = f"JSON decoding error (`{cause}`)"
raise ConfigError(config_field, error_msg)
@classmethod
def help(cls) -> list[tuple[str, str]]:
"""Generate environment variables configuration options help text.
Returns:
A list of 2-tuples. Each tuple consists of the environment variable
and its descriptive text.
"""
env_vars_tuples: list[tuple[str, str]] = []
for field in dataclasses.fields(cls):
for props in field.type.schema()["properties"].values():
env_var = list(props["env_names"])[0].upper()
help_text = props["help"]
default_value = props.get("default")
default = f" (default: {default_value})" if default_value else ""
env_vars_tuples.append((env_var, f"{help_text}{default}"))
return env_vars_tuples
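# Illustrative usage (assumes the required CENTRIFUGO_*/INFLUXDB_*/OPC_* variables are set):
#
#     try:
#         settings = Settings()
#     except ConfigError as err:
#         print(f"Configuration problem: {err}")
#     for env_var, help_text in Settings.help():
#         print(f"{env_var}: {help_text}")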
|
#!/usr/bin/env python3
"""Dynamic dns to ip address firewall rule creator.
Resolve dynamic dns names into ips and automatically create/destroy
firewall rules. Script requires to run as super user in order to be able to
create the firewall rules.
Requirements:
The following packages should be installed and enabled before attempting to
run the script
Fedora based:
bind-utils, firewalld
Ubuntu based:
dnsutils , ufw
Example:
To manually run the script
$ python dns_to_ip_firewall-rules.py
Todo:
* Fedora based firewall rule creation.
* Debian firewall rule creation
* Add cron setup instructions to automate the running of the script.
Author:
Rodrigo Hissam
"""
import re
import os.path
import time
import sys
from subprocess import Popen, PIPE
from platform import linux_distribution
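# NOTE: platform.linux_distribution() was removed in Python 3.8; on newer interpreters
# the third-party 'distro' package is the usual replacement.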
from datetime import datetime
# Functions
def main():
"""Main entry point for the script."""
# Variables
#
# Config your domains their ports and the protocol needed per port. If you
# want both udp and tcp protocols for the port enter 'both' for protocol
# type.
# Example: domain with its ports and protocol:
# {
# 'name': 'example.com',
# 'ports': [
# (53, 'udp'),
# (22, 'both'),
# (80, 'tcp')
# ]
# },
# Example: Allow any port and protocol for domain
# {'name': 'theverge.com'},
dynamic_domains = [
{'name': 'theverge.com'},
{'name': 'arstechnica.com'},
{
'name': 'google.com',
'ports': [
(53, 'udp'),
(22, 'both'),
(80, 'tcp')
]
},
{
'name': 'example.com',
'ports': [
(53, 'udp'),
(22, 'both'),
(80, 'tcp')
]
},
{
'name': 'mangolassi.it',
'ports': [
(53, 'both'),
(443, 'tcp')
]
}
]
# Getting linux distro
distro = linux_distribution()[0]
# Script start
for domain in dynamic_domains:
current_ip = get_current_ip(domain['name'])
if os.path.isfile(domain['name']):
old_ip = get_logged_ip(domain['name'])
if not current_ip == old_ip:
if 'ports' in domain.keys():
delete_firewall_rule(distro, old_ip, domain['ports'])
create_firewall_rule(distro, current_ip, domain['ports'])
else:
delete_firewall_rule(distro, old_ip)
create_firewall_rule(distro, current_ip)
log_script_messages(domain['name'], current_ip, old_ip)
create_hostname_ip_log(domain['name'], current_ip)
else:
if 'ports' in domain.keys():
create_firewall_rule(distro, current_ip, domain['ports'])
else:
create_firewall_rule(distro, current_ip)
create_hostname_ip_log(domain['name'], current_ip)
log_script_messages(domain['name'], current_ip)
if 'Cent' in distro or 'Fedora' in distro or 'Red' in distro:
reload_fw = "firewall-cmd --reload"
Popen(reload_fw.split(' '), stdout=PIPE, stderr=PIPE)
def get_current_ip(domain):
"""Return the ip after resolving 'host hostame' in the shell."""
response = Popen(["host", domain], stdout=PIPE)
response = response.communicate()[0].decode("utf-8")
    response = re.search(r'^.+?(?=\n)', response)
    response = response.group(0)
    ip_address = re.search(r'\d+\.\d+\.\d+\.\d+$', response)
    return ip_address.group(0)
def create_hostname_ip_log(domain, ip):
"""Create a file with the ip of the resolved domain."""
with open(domain, "w") as f:
f.write("{0}".format(ip))
def get_logged_ip(domain):
"""Get logged ip for requested domain."""
with open(domain, "r") as f:
logged_ip = f.read()
return logged_ip
def log_script_messages(domain, current_ip, old_ip=None):
"""Log firewall rule creation and deletion."""
date_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
if old_ip:
with open("dns-to-ip-firewall.log", "a") as f:
f.write("{} - {} adding {} - removing {} from firewall\n"
.format(date_time, domain, current_ip, old_ip))
else:
with open("dns-to-ip-firewall.log", "a") as f:
f.write("{} - New domain {}/{} added to the firewall\n".format(
date_time, domain, current_ip))
def create_firewall_rule(distro, ip, ports=None):
"""Create firewall rule based on newest ip of dynamic domain."""
if 'Ubuntu' in distro:
if ports:
for port in ports:
if port[1] == 'both':
Popen(
["ufw", "allow", "from", ip, "to", "any", "port",
str(port[0])], stdout=PIPE, stderr=PIPE)
else:
Popen(
["ufw", "allow", "from", ip, "to", "any", "port",
str(port[0]), "proto", port[1]],
stdout=PIPE, stderr=PIPE)
# ufw freaks out when adding rules too fast
time.sleep(.5)
else:
Popen(["ufw", "allow", "from", ip], stdout=PIPE, stderr=PIPE)
elif 'Cent' in distro or 'Fedora' in distro or 'Red' in distro:
if ports:
for port in ports:
if port[1] == 'both':
rule_tcp = (
"firewall-cmd --permanent --add-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol=tcp accept'".format(ip, port[0]))
rule_udp = (
"firewall-cmd --permanent --add-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol=udp accept'".format(ip, port[0]))
Popen(rule_tcp, shell=True, stdout=PIPE, stderr=PIPE)
time.sleep(.5)
Popen(rule_udp, shell=True, stdout=PIPE, stderr=PIPE)
else:
rule = (
"firewall-cmd --permanent --add-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol={} accept'".format(ip, port[0], port[1]))
Popen(rule, shell=True, stdout=PIPE, stderr=PIPE)
time.sleep(.5)
else:
rule = (
"firewall-cmd --permanent --add-rich-rule='rule family=ipv4 "
"source address={}/32 accept'".format(ip))
Popen(rule, shell=True, stdout=PIPE, stderr=PIPE)
def delete_firewall_rule(distro, ip, ports=None):
"""Delete firewall rule in order to add new ip from dynamic domain."""
if 'Ubuntu' in distro:
if ports:
for port in ports:
if port[1] == 'both':
Popen(
["ufw", "delete", "allow", "from", ip, "to", "any",
"port", str(port[0])], stdout=PIPE, stderr=PIPE)
else:
Popen(
["ufw", "delete", "allow", "from", ip, "to", "any",
"port", str(port[0]), "proto", port[1]], stdout=PIPE,
stderr=PIPE)
# ufw freaks out when deleting rules too fast
time.sleep(.5)
else:
Popen(["ufw", "delete", "allow", "from", ip], stdout=PIPE,
stderr=PIPE)
elif 'Cent' in distro or 'Fed' in distro or 'Red' in distro:
if ports:
for port in ports:
if port[1] == 'both':
rule_tcp = (
"firewall-cmd --permanent --remove-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol=tcp accept'".format(ip, port[0]))
rule_udp = (
"firewall-cmd --permanent --remove-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol=udp accept'".format(ip, port[0]))
Popen(rule_tcp, shell=True, stdout=PIPE, stderr=PIPE)
time.sleep(.5)
Popen(rule_udp, shell=True, stdout=PIPE, stderr=PIPE)
else:
rule = (
"firewall-cmd --permanent --remove-rich-rule='rule "
"family=ipv4 source address={}/32 port port={} "
"protocol={} accept'".format(ip, port[0], port[1]))
Popen(rule, shell=True, stdout=PIPE, stderr=PIPE)
time.sleep(.5)
else:
rule = (
"firewall-cmd --permanent --remove-rich-rule='rule family=ipv4 "
"source address={}/32 accept'".format(ip))
Popen(rule, shell=True, stdout=PIPE, stderr=PIPE)
if __name__ == '__main__':
sys.exit(main())
|
from .index import IndexView
from .media import MediaView
from .tag_image import TagImageView
from .tag_images import TagImagesView
from .tags_list import TagsListView
from .similar_images import SimilarImagesView
|
"""File containing class that abstracts a collection of widgets.
It can be used to swap between collections of widgets (screens) in a py_cui
"""
# Author: Jakub Wlodek
# Created: 12-Aug-2019
# TODO: Should create an initial widget set in PyCUI class that widgets are added to by default.
import shutil
import py_cui.widgets as widgets
import py_cui.grid as grid
import py_cui.control_widgets.slider as slider
class WidgetSet:
"""Class that represents a collection of widgets.
Use PyCUI.apply_widget_set() to set a given widget set for display
Attributes
----------
grid : py_cui.grid.Grid
The main layout manager for the CUI
widgets : dict of str - py_cui.widgets.Widget
dict of widget in the grid
    keybindings : dict of py_cui.keys.KEY_* -> function
        dict mapping keypress codes to the callbacks checked in the main CUI loop
height, width : int
height of the terminal in characters, width of terminal in characters
"""
def __init__(self, num_rows, num_cols, logger, simulated_terminal=None):
"""Constructor for WidgetSet
"""
self._widgets = {}
self._keybindings = {}
self._simulated_terminal = simulated_terminal
if self._simulated_terminal is None:
term_size = shutil.get_terminal_size()
height = term_size.lines
width = term_size.columns
else:
height = self._simulated_terminal[0]
width = self._simulated_terminal[1]
self._height = height
self._width = width
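        # Reserve rows at the edges of the terminal, presumably for the title and status bars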
self._height = self._height - 4
self._grid = grid.Grid(num_rows, num_cols, self._height, self._width, logger)
self._selected_widget = None
self._logger = logger
def set_selected_widget(self, widget_id):
"""Function that sets the selected cell for the CUI
Parameters
----------
cell_title : str
the title of the cell
"""
if widget_id in self._widgets.keys():
self._selected_widget = widget_id
def get_widgets(self):
"""Function that gets current set of widgets
Returns
-------
widgets : dict of str -> widget
dictionary mapping widget IDs to object instances
"""
return self._widgets
def add_key_command(self, key, command):
"""Function that adds a keybinding to the CUI when in overview mode
Parameters
----------
key : py_cui.keys.KEY_*
The key bound to the command
command : Function
A no-arg or lambda function to fire on keypress
"""
self._keybindings[key] = command
def add_scroll_menu(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0):
"""Function that adds a new scroll menu to the CUI grid
Parameters
----------
title : str
The title of the scroll menu
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
Returns
-------
new_scroll_menu : ScrollMenu
A reference to the created scroll menu object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_scroll_menu = widgets.ScrollMenu(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger)
self._widgets[id] = new_scroll_menu
if self._selected_widget is None:
self.set_selected_widget(id)
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_scroll_menu))))
return new_scroll_menu
def add_checkbox_menu(self, title, row, column, row_span=1, column_span=1, padx=1, pady=0, checked_char='X'):
"""Function that adds a new checkbox menu to the CUI grid
Parameters
----------
title : str
The title of the checkbox
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
checked_char='X' : char
The character used to mark 'Checked' items
Returns
-------
new_checkbox_menu : CheckBoxMenu
A reference to the created checkbox object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_checkbox_menu = widgets.CheckBoxMenu(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger,
checked_char)
self._widgets[id] = new_checkbox_menu
if self._selected_widget is None:
self.set_selected_widget(id)
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_checkbox_menu))))
return new_checkbox_menu
def add_text_box(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0, initial_text = '', password = False):
"""Function that adds a new text box to the CUI grid
Parameters
----------
title : str
The title of the textbox
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
initial_text='' : str
Initial text for the textbox
password=False : bool
Toggle to show '*' instead of characters.
Returns
-------
new_text_box : TextBox
A reference to the created textbox object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_text_box = widgets.TextBox(id,
title,
self._grid,
row, column,
row_span,
column_span,
padx, pady,
self._logger,
initial_text,
password)
self._widgets[id] = new_text_box
if self._selected_widget is None:
self.set_selected_widget(id)
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_text_box))))
return new_text_box
def add_text_block(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0, initial_text = ''):
"""Function that adds a new text block to the CUI grid
Parameters
----------
title : str
The title of the text block
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
initial_text='' : str
Initial text for the text block
Returns
-------
new_text_block : ScrollTextBlock
A reference to the created textblock object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_text_block = widgets.ScrollTextBlock(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger,
initial_text)
self._widgets[id] = new_text_block
if self._selected_widget is None:
self.set_selected_widget(id)
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_text_block))))
return new_text_block
def add_label(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0):
"""Function that adds a new label to the CUI grid
Parameters
----------
title : str
The title of the label
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
Returns
-------
new_label : Label
A reference to the created label object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_label = widgets.Label(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger)
self._widgets[id] = new_label
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_label))))
return new_label
def add_block_label(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0, center=True):
"""Function that adds a new block label to the CUI grid
Parameters
----------
title : str
The title of the block label
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
center : bool
flag to tell label to be centered or left-aligned.
Returns
-------
new_label : BlockLabel
A reference to the created block label object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_label = widgets.BlockLabel(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
center,
self._logger)
self._widgets[id] = new_label
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_label))))
return new_label
def add_button(self, title, row, column, row_span = 1, column_span = 1, padx = 1, pady = 0, command=None):
"""Function that adds a new button to the CUI grid
Parameters
----------
title : str
The title of the button
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
command=None : Function
A no-argument or lambda function to fire on button press.
Returns
-------
new_button : Button
A reference to the created button object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_button = widgets.Button(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger,
command)
self._widgets[id] = new_button
if self._selected_widget is None:
self.set_selected_widget(id)
self._logger.info('Adding widget {} w/ ID {} of type {}'.format(title, id, str(type(new_button))))
return new_button
def add_slider(self, title, row, column, row_span=1,
column_span=1, padx=1, pady=0,
min_val=0, max_val=100, step=1, init_val=0):
"""Function that adds a new label to the CUI grid
Parameters
----------
title : str
The title of the label
row : int
The row value, from the top down
column : int
The column value from the top down
row_span=1 : int
            The number of rows to span across
        column_span=1 : int
            The number of columns to span across
padx=1 : int
number of padding characters in the x direction
pady=0 : int
number of padding characters in the y direction
        min_val=0 : int
            minimum value of the slider
        max_val=100 : int
            maximum value of the slider
        step=1 : int
            amount to increment or decrement per step
        init_val=0 : int
            initial value of the slider
Returns
-------
new_slider : Slider
A reference to the created slider object.
"""
id = 'Widget{}'.format(len(self._widgets.keys()))
new_slider = slider.SliderWidget(id,
title,
self._grid,
row,
column,
row_span,
column_span,
padx,
pady,
self._logger,
min_val,
max_val,
step,
init_val)
self._widgets[id] = new_slider
self._logger.info('Adding widget {} w/ ID {} of type {}'
.format(title, id, str(type(new_slider))))
return new_slider
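# Minimal usage sketch (illustrative, not part of this module). It assumes the
# standard top-level py_cui API, i.e. PyCUI.create_new_widget_set() and
# PyCUI.apply_widget_set(); adjust if the installed version differs.
if __name__ == '__main__':
    import py_cui
    root = py_cui.PyCUI(3, 3)
    screen = root.create_new_widget_set(3, 3)
    screen.add_label('Hello py_cui', 0, 0)
    screen.add_button('Quit', 2, 2, command=root.stop)
    root.apply_widget_set(screen)
    root.start()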
|
import pytest
from gamestonk_terminal.cryptocurrency.onchain import blockchain_view
@pytest.mark.vcr
@pytest.mark.record_stdout
def test_display_btc_circulating_supply(mocker):
# MOCK CHARTS
mocker.patch.object(target=blockchain_view.gtff, attribute="USE_ION", new=True)
mocker.patch(target="gamestonk_terminal.stocks.options.yfinance_view.plt.ion")
mocker.patch(target="gamestonk_terminal.stocks.options.yfinance_view.plt.show")
blockchain_view.display_btc_circulating_supply(1_601_596_800, 1_641_573_787, "")
@pytest.mark.vcr
@pytest.mark.record_stdout
def test_display_btc_confirmed_transactions(mocker):
# MOCK CHARTS
mocker.patch.object(target=blockchain_view.gtff, attribute="USE_ION", new=True)
mocker.patch(target="gamestonk_terminal.stocks.options.yfinance_view.plt.ion")
mocker.patch(target="gamestonk_terminal.stocks.options.yfinance_view.plt.show")
blockchain_view.display_btc_confirmed_transactions(1_601_596_800, 1_641_573_787, "")
|
# Example request: http://localhost:5000/tashkeel?unvoweled=ARABIC_WORD  (app.run() defaults to port 5000)
import mishkal.tashkeel
from flask import Flask, request
app = Flask(__name__)
voweler = mishkal.tashkeel.TashkeelClass()
def get_vowels(unvoweled):
voweled = voweler.tashkeel(unvoweled)
return voweled
@app.route('/tashkeel', methods=["GET"])
def set_vowels():
    unvoweled = request.args.get('unvoweled')
    if not unvoweled:
        return 'Missing "unvoweled" query parameter', 400
    voweled = get_vowels(unvoweled)
    return voweled
@app.route('/health', methods=["GET"])
def health_check():
unvoweled = "لحمد لله"
voweled = get_vowels(unvoweled)
return voweled
if __name__ == '__main__':
app.run()
|
'''
Write a program which can compute the factorial of a given number. Use recursion to find it.
Hint: Suppose the following input is supplied to the program:8
Then, the output should be:40320
'''
def fact(x):
    if x == 0:
        return 1
    else:
        return x * fact(x - 1)
n = input('Enter the number whose factorial should be found: ')
y = fact(int(n))
print('Factorial of number ' + str(n) + ' is: ' + str(y)) |
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2005-2010 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
def copyright():
return "mcni.neutron_coordinates_transformers module: Copyright (c) 2007 Jiao Lin";
from ._transformers import default
# version
__id__ = "$Id$"
# End of file
|
# -*- encoding: utf-8 -*-
"""
Copyright (c) 2019 - present GlastHeim.pe
"""
from django.urls import path, re_path
from apps.includes.sidebar.views import SidebarList
urlpatterns = [
path('sidebar', SidebarList.as_view(), name='sidebar.index'),
] |
from pathlib import Path
def get_files(root, extensions, recursive=True):
if isinstance(extensions, str):
extensions = [extensions]
root = Path(root)
all_files = root.rglob('*') if recursive else root.glob('*')
return [
x for x in all_files if any(x.name.endswith(y) for y in extensions)
]
IMAGE_EXTENSIONS = ('.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif',
'.tiff', '.webp', '.gif')
VIDEO_EXTENSIONS = ('.mp4', )
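# Example usage (sketch; './data' is a hypothetical directory):
#   images = get_files('./data', IMAGE_EXTENSIONS)
#   videos = get_files('./data', VIDEO_EXTENSIONS, recursive=False)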
|
import numpy as np
import time
import imutils
import cv2
avg = None
video = cv2.VideoCapture("1.mp4")
xvalues = list()
yvalues = list()
motion = list()
count1 = 0
count2 = 0
def find_majority(k):
myMap = {}
maximum = ('', 0) # (occurring element, occurrences)
for n in k:
if n in myMap: myMap[n] += 1
else: myMap[n] = 1
# Keep track of maximum on the go
if myMap[n] > maximum[1]: maximum = (n, myMap[n])
return maximum
face_cascade = cv2.CascadeClassifier('C:/Program Files/Python37/Lib/site-packages/cv2/data/haarcascade_frontalface_default.xml')
width = 500
while 1:
    ret, frame = video.read()
    if not ret:
        break  # end of the video stream
    flag = True
    text = ""
    frame = imutils.resize(frame, width=width)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (5, 5), 0)
if avg is None:
avg = gray.copy().astype("float")
continue
cv2.accumulateWeighted(gray, avg, 0.000001)
frameDelta = cv2.absdiff(gray, cv2.convertScaleAbs(avg))
thresh = cv2.threshold(frameDelta, 5, 255, cv2.THRESH_BINARY)[1]
thresh = cv2.dilate(thresh, None, iterations=2)
cnts = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0]
for c in cnts:
        # print(c)  # debug output
if cv2.contourArea(c) < 12000:
continue
(x, y, w, h) = cv2.boundingRect(c)
xvalues.append(x)
yvalues.append(y)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)
        rectangleCenterPoint = ((x + x + w) // 2, (y + y + h) // 2)
        cv2.circle(frame, rectangleCenterPoint, 1, (0, 0, 255), 5)
flag = False
    faces = face_cascade.detectMultiScale(frame, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))
no_x = len(xvalues)
no_y = len(yvalues)
if (no_y > 2):
difference = yvalues[no_y - 2] - yvalues[no_y - 1]
if(difference > 0):
motion.append(1)
else:
motion.append(0)
if flag is True:
if (no_y > 3):
val, times = find_majority(motion)
if val == 1 and times >= 3:
count1 += 1
else:
count2 += 1
yvalues = list()
motion = list()
cv2.line(frame, (0, 110), (width * 3 // 4, 110), (0, 255, 0), 2)
cv2.line(frame, (0, 160), (width * 3 // 4, 160), (0, 255, 0), 2)
cv2.putText(frame, "In: {}".format(count1), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
cv2.putText(frame, "Out: {}".format(count2), (10, 40), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
cv2.imshow("Frame", frame)
cv2.imshow("Gray", gray)
cv2.imshow("FrameDelta", frameDelta)
key = cv2.waitKey(1) & 0xFF
# time.sleep(0.03)
if key == ord('q'):
break
video.release()
cv2.destroyAllWindows()
|
#
# Copyright (c) 2020 Lucas Lehnert <lucas_lehnert@brown.edu>
#
# This source code is licensed under an MIT license found in the LICENSE file in the root directory of this project.
#
import numpy as np
from itertools import product
import rlutils as rl
import os
import os.path as osp
import multiprocessing as mp
from .utils import cluster_idx_to_phi_mat
from .enumerate_partitions import enumerate_n_partitions
from .evaluate import eval_total_reward
def sample_cycle_mdp(num_states, num_actions, action_seq_sampler=None,
self_loop_prob=0.):
if action_seq_sampler is None:
action_seq = np.random.choice(np.arange(num_actions), size=num_states)
else:
action_seq = action_seq_sampler()
t_mat = np.stack([np.eye(num_states) for a in range(num_actions)])
for s, a in enumerate(action_seq):
s_next = (s + 1) % num_states
t_mat[a, s, s] = self_loop_prob
t_mat[a, s, s_next] = 1. - self_loop_prob
r_mat = np.zeros([num_actions, num_states, num_states])
r_mat[action_seq[-1], num_states - 1, 0] = 1.
return t_mat, r_mat
def sample_inflated_cycle_mdp(state_partition, num_actions):
phi = cluster_idx_to_phi_mat(state_partition)
num_s, num_s_lat = np.shape(phi)
m_mat, w_mat = sample_cycle_mdp(num_s_lat, num_actions)
num_a = np.shape(m_mat)[0]
phi_mat_sum = np.sum(phi, axis=0, keepdims=True)
phi_inv = np.transpose(phi / phi_mat_sum)
phi_t = np.transpose(phi)
t_mat = [np.matmul(phi, np.matmul(m_mat[a], phi_inv)) for a in range(num_a)]
r_mat = [np.matmul(phi, np.matmul(w_mat[a], phi_t)) for a in range(num_a)]
t_mat = np.stack(t_mat)
r_mat = np.stack(r_mat)
mdp = rl.environment.TabularMDP(
t_mat=t_mat,
r_mat=r_mat,
idx_start_list=np.arange(num_s),
idx_goal_list=[]
)
return mdp
def sample_cycle_mdp_sequence(partition_list, partition_idx_seq, num_actions=3,
gamma=0.9, sample_factor=20):
task_dict = {}
for (i, p_task), (j, p_test) in product(
enumerate(partition_list[partition_idx_seq]),
enumerate(partition_list)):
if i not in task_dict.keys() and i != j:
num_task_samples = len(partition_idx_seq) * sample_factor
task_list = [sample_inflated_cycle_mdp(p_task, num_actions) for _ in
range(num_task_samples)]
total_rew_list = [
eval_total_reward(t, p_test, repeats=5, rollout_depth=10,
gamma=gamma) for t in task_list]
total_rew_list = np.sum(total_rew_list, axis=-1)
task_dict[i] = task_list[np.argmin(total_rew_list)]
task_list = [task_dict[i] for i in range(len(partition_idx_seq))]
return task_list
def sample_cycle_mdp_sequence_dataset(num_states=9,
num_latent_states=3,
num_actions=3,
gamma=0.9,
sequence_length=20,
num_sequences=100):
partition_list = enumerate_n_partitions(num_states, num_latent_states)
partition_list = partition_list[
np.random.choice(np.shape(partition_list)[0], size=2)]
partition_idx = [
np.random.choice([0, 1], p=[0.75, 0.25], size=sequence_length) for _ in
range(num_sequences)]
param_list = [(partition_list, p, num_actions, gamma) for p in
partition_idx]
with mp.Pool() as p:
task_seq_dataset = p.starmap(sample_cycle_mdp_sequence, param_list)
return task_seq_dataset, partition_idx, partition_list
def _task_seq_to_mat(task_seq):
t_mat_seq = []
r_mat_seq = []
for task in task_seq:
t_mat, r_mat = task.get_t_mat_r_mat()
t_mat_seq.append(t_mat)
r_mat_seq.append(r_mat)
return np.stack(t_mat_seq), np.stack(r_mat_seq)
def generate_cycle_mdp_dataset(base_dir='./data/CycleMDPDataset/'):
os.makedirs(base_dir, exist_ok=True)
task_seq_dataset, partition_idx_seq_dataset, partition_list = sample_cycle_mdp_sequence_dataset()
np.save(osp.join(base_dir, 'partition_list.npy'), partition_list)
t_mat_seq_list = []
r_mat_seq_list = []
for task_seq in task_seq_dataset:
t_mat_seq, r_mat_seq = _task_seq_to_mat(task_seq)
t_mat_seq_list.append(t_mat_seq)
r_mat_seq_list.append(r_mat_seq)
np.save(osp.join(base_dir, 't_mat_seq_list.npy'), t_mat_seq_list)
np.save(osp.join(base_dir, 'r_mat_seq_list.npy'), r_mat_seq_list)
np.save(osp.join(base_dir, 'partition_idx_seq_dataset.npy'),
partition_idx_seq_dataset)
def load_cycle_mdp_dataset(base_dir='./data/CycleMDPDataset/'):
t_mat_seq_list = np.load(osp.join(base_dir, 't_mat_seq_list.npy'))
r_mat_seq_list = np.load(osp.join(base_dir, 'r_mat_seq_list.npy'))
partition_idx_seq_list = np.load(
osp.join(base_dir, 'partition_idx_seq_dataset.npy'))
partition_list = np.load(osp.join(base_dir, 'partition_list.npy'))
mdp_dataset = []
start_list = np.arange(np.shape(t_mat_seq_list)[-1])
for t_mat_seq, r_mat_seq in zip(t_mat_seq_list, r_mat_seq_list):
mdp_dataset.append(
[rl.environment.TabularMDP(t, r, start_list, []) for t, r in
zip(t_mat_seq, r_mat_seq)])
return mdp_dataset, partition_idx_seq_list, partition_list
if __name__ == "__main__":
generate_cycle_mdp_dataset()
print('done.')
|
from brownie import accounts, web3, Wei, chain
from brownie.network.transaction import TransactionReceipt
from brownie.convert import to_address
import pytest
from brownie import Contract
from settings import *
######################################
# Deploy Contracts
######################################
@pytest.fixture(autouse=True)
def isolation(fn_isolation):
pass
@pytest.fixture(scope='module', autouse=True)
def btts_lib(BTTSLib):
btts_lib = BTTSLib.deploy({'from': accounts[0]})
return btts_lib
@pytest.fixture(scope='module', autouse=True)
def frame_token_template(DreamFramesToken, btts_lib):
frame_token_template = DreamFramesToken.deploy({'from': accounts[0]})
return frame_token_template
@pytest.fixture(scope='module', autouse=True)
def royalty_token_template(RoyaltyToken, btts_lib):
royalty_token_template = RoyaltyToken.deploy({'from': accounts[0]})
return royalty_token_template
@pytest.fixture(scope='module', autouse=True)
def white_list_template(WhiteList):
white_list_template = WhiteList.deploy({"from": accounts[0]})
return white_list_template
@pytest.fixture(scope='module', autouse=True)
def token_factory(TokenFactory, frame_token_template, royalty_token_template, white_list_template):
token_factory = TokenFactory.deploy(frame_token_template,royalty_token_template,white_list_template,0, {"from": accounts[0]})
return token_factory
@pytest.fixture(scope='module', autouse=True)
def frame_token(token_factory, DreamFramesToken):
name = 'Dream Frame Token'
symbol = 'DFT'
mintable = True
transferable = True
initial_supply = '1000 ether'
tx = token_factory.deployFrameToken(accounts[0],symbol, name,
initial_supply,mintable,transferable,{'from': accounts[0]})
frame_token = DreamFramesToken.at(tx.return_value)
return frame_token
@pytest.fixture(scope='module', autouse=True)
def royalty_token(RoyaltyToken,token_factory):
name = 'Royalty Token'
symbol = 'RFT'
mintable = True
transferable = True
initial_supply = '500 ether'
owner = accounts[0]
tx = token_factory.deployRoyaltyToken(owner,symbol, name,
initial_supply,mintable,transferable, {'from': accounts[0]})
royalty_token = RoyaltyToken.at(tx.return_value)
return royalty_token
@pytest.fixture(scope='module', autouse=True)
def price_simulator(MakerDAOETHUSDPriceFeedSimulator):
price_simulator = MakerDAOETHUSDPriceFeedSimulator.deploy('200 ether', True, {"from": accounts[0]})
return price_simulator
@pytest.fixture(scope='module', autouse=True)
def price_feed(MakerDAOPriceFeedAdaptor, price_simulator):
price_feed = MakerDAOPriceFeedAdaptor.deploy(price_simulator, {"from": accounts[0]})
return price_feed
# @pytest.fixture(scope='module', autouse=True)
# def price_feed(CompoundPriceFeedAdaptor):
# price_feed = CompoundPriceFeedAdaptor.deploy(COMPOUND_ORACLE, {"from": accounts[0]})
# return price_feed
@pytest.fixture(scope='module', autouse=True)
def white_list(token_factory, WhiteList):
tx = token_factory.deployWhiteList(accounts[0], [accounts[0]], {'from': accounts[0]})
white_list = WhiteList.at(tx.return_value)
return white_list
@pytest.fixture(scope='module', autouse=True)
def bonus_list(WhiteList):
bonus_list = WhiteList.deploy({"from": accounts[0]})
bonus_list.initWhiteList(accounts[0], {"from": accounts[0]})
return bonus_list
@pytest.fixture(scope='module', autouse=True)
def frames_crowdsale(DreamFramesCrowdsale, frame_token, price_feed, bonus_list):
wallet = accounts[9]
startDate = chain.time() +10
endDate = startDate + 50000
frames_crowdsale = DreamFramesCrowdsale.deploy({"from": accounts[0]})
frames_crowdsale.init(frame_token, price_feed
, wallet, startDate, endDate
, PRODUCER_PCT, FRAME_USD, BONUS,BONUS_ON_LIST, HARDCAP_USD, SOFTCAP_USD
, {"from": accounts[0]})
tx = frames_crowdsale.addOperator(accounts[1], {"from": accounts[0]})
assert 'OperatorAdded' in tx.events
tx = frame_token.setMinter(frames_crowdsale, {"from": accounts[0]})
tx = frames_crowdsale.setBonusList(bonus_list, {"from": accounts[0]})
chain.sleep(15)
return frames_crowdsale
@pytest.fixture(scope='module', autouse=True)
def token_converter(RoyaltyTokenConverter, frame_token, royalty_token):
token_converter = RoyaltyTokenConverter.deploy({"from": accounts[0]})
token_converter.initTokenConverter(frame_token,royalty_token, True, {"from": accounts[0]})
tx = royalty_token.setMinter(token_converter, {"from": accounts[0]})
return token_converter
@pytest.fixture(scope='module', autouse=True)
def dream_frames_nft(DreamChannelNFT):
dream_frames_nft = DreamChannelNFT.deploy({"from": accounts[0]})
return dream_frames_nft
################################
# Staking
################################
@pytest.fixture(scope='module', autouse=True)
def access_control(GazeAccessControls):
access_control = GazeAccessControls.deploy({"from":accounts[0]})
return access_control
@pytest.fixture(scope='module', autouse=True)
def btts_lib_gaze(BTTSLibGaze):
btts_lib_gaze = BTTSLibGaze.deploy({'from': accounts[0]})
return btts_lib_gaze
@pytest.fixture(scope='module', autouse=True)
def gaze_coin(BTTSToken, btts_lib_gaze):
name = "GazeCoin Metaverse Token"
symbol = "GZE"
owner = accounts[1]
initialSupply = 29000000 * 10 ** 18
gaze_coin = BTTSToken.deploy(owner,symbol, name, 18, initialSupply, False, True, {"from":owner})
return gaze_coin
@pytest.fixture(scope='module', autouse=True)
def weth_token(WETH9):
weth_token = WETH9.deploy({'from': accounts[0]})
return weth_token
@pytest.fixture(scope='module', autouse=True)
def lp_factory(UniswapV2Factory):
lp_factory = UniswapV2Factory.deploy(accounts[0], {'from': accounts[0]})
return lp_factory
@pytest.fixture(scope='module', autouse=True)
def lp_token(UniswapV2Pair, weth_token, gaze_coin,lp_factory):
lp_token_deployer = accounts[5]
# lp_token = UniswapV2Pair.deploy({"from":lp_token_deployer})
lp_token = lp_factory.createPair(weth_token, gaze_coin, {"from": lp_token_deployer}).return_value
lp_token = UniswapV2Pair.at(lp_token)
weth_token.deposit({'from': accounts[0], 'value': 1 * 10**18})
gaze_coin.transfer(lp_token, 100000 * 10**18, {'from':accounts[1]})
weth_token.transfer( lp_token,1 * 10**18,{'from':accounts[0]})
lp_token.mint(accounts[0],{'from':accounts[0]})
return lp_token
""" @pytest.fixture(scope='module', autouse=True)
def lp_token_from_fork(gaze_coin,weth_token,interface):
uniswap_factory = interface.IUniswapV2Factory(UNISWAP_FACTORY)
tx = uniswap_factory.createPair(gaze_coin, weth_token, {'from': accounts[0]})
assert 'PairCreated' in tx.events
lp_token_from_fork = interface.IUniswapV2Pair(web3.toChecksumAddress(tx.events['PairCreated']['pair']))
return lp_token_from_fork """
@pytest.fixture(scope='module', autouse=True)
def lp_staking(GazeLPStaking,gaze_coin,lp_token,weth_token,access_control):
lp_staking = GazeLPStaking.deploy({'from':accounts[0]})
lp_staking.initLPStaking(gaze_coin,lp_token,weth_token,access_control,{"from":accounts[0]})
lp_staking.setTokensClaimable(True)
return lp_staking
##############################################
# Rewards
##############################################
@pytest.fixture(scope='module', autouse=True)
def rewards_contract(GazeRewards,access_control,gaze_coin, lp_staking):
vault = accounts[1]
rewards_contract = GazeRewards.deploy(gaze_coin,
access_control,
lp_staking,
chain.time() +10,
0,
0,
{'from': accounts[0]})
return rewards_contract
@pytest.fixture(scope='module', autouse=True)
def staking_rewards(GazeLPStaking, GazeRewards, gaze_coin, lp_token, weth_token, access_control):
staking_rewards = GazeLPStaking.deploy({'from':accounts[0]})
staking_rewards.initLPStaking(gaze_coin,lp_token,weth_token,access_control,{"from":accounts[0]})
vault = accounts[1]
rewards_contract = GazeRewards.deploy(gaze_coin,
access_control,
staking_rewards,
chain.time() +10,
0,
0,
{'from': accounts[0]})
assert gaze_coin.balanceOf(vault) > 0
gaze_coin.approve(rewards_contract, ONE_MILLION, {"from":vault} )
rewards_contract.setVault(vault, {"from":accounts[0]})
staking_rewards.setRewardsContract(rewards_contract, {"from":accounts[0]})
staking_rewards.setTokensClaimable(True, {"from":accounts[0]})
weeks = [0,1,2,3,4,5]
rewards = [70000*TENPOW18,60000*TENPOW18,50000*TENPOW18,50000*TENPOW18,45000*TENPOW18,42000*TENPOW18]
rewards_contract.setRewards(weeks,rewards)
chain.sleep(20)
chain.mine()
return staking_rewards |
"""
Functions that interface with the TF Serving service go here
"""
import grpc
import os
import simplejson as json
import numpy as np
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, get_model_metadata_pb2, prediction_service_pb2_grpc
from tensorflow_serving.apis.model_pb2 import ModelSpec
from tensorflow import make_tensor_proto
from google.protobuf.json_format import MessageToJson
MESSAGE_OPTIONS = [('grpc.max_message_length', 200 * 1024 * 1024),
('grpc.max_receive_message_length', 200 * 1024 * 1024)]
TF_LITE_MODEL_FILE = "models/vgg-simclr.tflite"
interpreter_quant = tf.lite.Interpreter(model_path=str(TF_LITE_MODEL_FILE))
interpreter_quant.allocate_tensors()
input_index = interpreter_quant.get_input_details()[0]["index"]
output_index = interpreter_quant.get_output_details()[0]["index"]
def get_model_metadata(
model_uri: str,
model_name: str,
signature_name: str = "serving_default",
timeout: float = 2.0
):
"""
Returns Metadata of a model loaded into TensorFlow Serving
:param model_uri:
:param model_name:
:param signature_name:
:param timeout:
:return:
"""
channel = grpc.insecure_channel(model_uri)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
model_spec = ModelSpec(name=model_name, signature_name=signature_name)
request = get_model_metadata_pb2.GetModelMetadataRequest(model_spec=model_spec)
request.metadata_field.append("signature_def")
result = stub.GetModelMetadata(request, timeout)
metadata = json.loads(MessageToJson(result))
return metadata
def get_input_name(
model_uri: str,
model_name: str,
timeout: float = 2.0
):
"""
    Returns the name of the first input of a model in Tensorflow Serving
:param model_uri:
:param model_name:
:param timeout:
:return:
"""
model_metadata = get_model_metadata(model_uri, model_name, timeout=timeout)
signature_def = model_metadata['metadata']['signature_def']['signatureDef']
inputs = signature_def['serving_default']['inputs']
return list(inputs.keys())[0]
def get_input_shape(
model_uri: str,
model_name: str,
input_name: str = None,
signature_name: str = "serving_default",
timeout: float = 2.0
):
"""
Returns the input shape of a model in Tensorflow Serving
:param model_uri:
:param model_name:
:param input_name:
:param signature_name:
:param timeout:
:return:
"""
model_metadata = get_model_metadata(model_uri, model_name, signature_name, timeout)
if input_name is None:
input_name = get_input_name(model_uri, model_name, timeout)
signature_def = model_metadata['metadata']['signature_def']['signatureDef']
inputs = signature_def['serving_default']['inputs']
input_dims = inputs[input_name]['tensorShape']['dim'][1:]
return tuple([int(d['size']) for d in input_dims])
def get_output_name(
model_uri: str,
model_name: str,
timeout: float = 2.0
) -> str:
"""
Returns the name of the output of a model stored in the Tensorflow Serving service
:param model_uri:
:param model_name:
:param timeout:
:return:
"""
model_metadata = get_model_metadata(model_uri, model_name, timeout=timeout)
signature_def = model_metadata['metadata']['signature_def']['signatureDef']
outputs = signature_def['serving_default']['outputs']
return list(outputs.keys())[0]
def get_output_shape(
model_uri: str,
model_name: str,
output_name: str = None,
signature_name: str = "serving_default",
timeout: float = 2.0
):
"""
Returns the output shape of a model in the Tensorflow Serving container
:param model_uri:
:param model_name:
:param output_name:
:param signature_name:
:param timeout:
:return:
"""
model_metadata = get_model_metadata(model_uri, model_name, signature_name, timeout)
if output_name is None:
        output_name = get_output_name(model_uri, model_name, timeout)
    signature_def = model_metadata['metadata']['signature_def']['signatureDef']
    outputs = signature_def['serving_default']['outputs']
    output_dims = outputs[output_name]['tensorShape']['dim'][1:]
    return tuple([int(d['size']) for d in output_dims])
def get_serving_prediction_scores(
inputs: np.ndarray,
model_uri: str,
model_name: str,
signature_name: str = "serving_default",
input_name: str = None,
output_name: str = None,
timeout: float = 5.0
):
"""
Returns the raw scores outputted by the specified model in TF Serving.
:param inputs:
:param model_uri:
:param model_name:
:param signature_name:
:param input_name:
:param output_name:
:param timeout:
:return:
"""
channel = grpc.insecure_channel(model_uri, options=MESSAGE_OPTIONS)
stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
model_spec = ModelSpec(name=model_name, signature_name=signature_name)
request = predict_pb2.PredictRequest(model_spec=model_spec)
if input_name is None:
input_name = get_input_name(model_uri, model_name, timeout)
if output_name is None:
output_name = get_output_name(model_uri, model_name, timeout)
request.inputs[input_name].CopyFrom(make_tensor_proto(inputs.astype(np.float32), shape=inputs.shape))
result = stub.Predict(request, timeout)
channel.close()
output_shape = get_output_shape(model_uri, model_name, output_name, timeout=timeout)
    # Reshape the flat float_val list into (batch,) + output_shape; the
    # original element-wise loop assumed a single one-dimensional output and
    # broke for batched or multi-dimensional outputs.
    scores = np.array(result.outputs[output_name].float_val)
    scores = scores.reshape((-1,) + output_shape)
return scores
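# Usage sketch (hypothetical endpoint and model name; requires a reachable
# TF Serving instance over gRPC):
#   batch = np.random.rand(1, 224, 224, 3)
#   scores = get_serving_prediction_scores(batch, model_uri='localhost:8500',
#                                          model_name='my_model')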
|
import torch
from transformers import AdamW, get_linear_schedule_with_warmup
class BaseModel(torch.nn.Module):
def __init__(self, config):
super().__init__()
self.config = config
self.from_pretrained()
def save_pretrained(self, save_dir):
self.model.save_pretrained(save_dir)
for key in ["special_tokens_map_file", "tokenizer_file"]:
self.tokenizer.init_kwargs.pop(key, None)
self.tokenizer.save_pretrained(save_dir)
def from_pretrained(self):
raise NotImplementedError
def forward(self, inputs):
return self.model(**inputs)
def eval_step(self, outputs):
raise NotImplementedError
@staticmethod
def add_args(parser):
parser.add_argument(
"--model_name_or_path",
default=None,
type=str,
required=True,
)
parser.add_argument("--data_dir", default="cache", type=str)
parser.add_argument("--train_file", default=None, type=str)
parser.add_argument("--predict_file", default=None, type=str)
parser.add_argument("--do_lower_case", default=False, type=bool)
parser.add_argument("--max_seq_length", default=512, type=int)
parser.add_argument("--weight_decay", default=0.0, type=float, help="Weight decay if we apply some.")
parser.add_argument("--adam_epsilon", default=1e-8, type=float, help="Epsilon for Adam optimizer.")
parser.add_argument("--max_grad_norm", default=1.0, type=float)
parser.add_argument("--num_train_epochs", default=10, type=int)
parser.add_argument("--train_batch_size", default=8, type=int)
parser.add_argument("--eval_batch_size", default=16, type=int)
parser.add_argument("--learning_rate", default=3e-5, type=float)
parser.add_argument("--gradient_accumulation_steps", default=1, type=int)
parser.add_argument("--warmup_proportion", default=0.0, type=float)
return parser
def get_optimizer(self):
"""Prepare optimizer"""
no_decay = ["bias", "LayerNorm.weight"]
optimizer_grouped_parameters = [
{
"params": [p for n, p in self.model.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": self.config.weight_decay,
},
{
"params": [p for n, p in self.model.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": self.config.weight_decay,
},
]
optimizer = AdamW(optimizer_grouped_parameters, lr=self.config.learning_rate, eps=self.config.adam_epsilon)
return optimizer
def get_scheduler(self, batch_num, optimizer):
"""Prepare scheduler"""
if self.config.warmup_proportion == 0.0:
return None
t_total = batch_num // self.config.gradient_accumulation_steps * self.config.num_train_epochs
scheduler = get_linear_schedule_with_warmup(
optimizer,
num_warmup_steps=int(t_total * self.config.warmup_proportion),
num_training_steps=t_total,
)
return scheduler
def tensor_to_array(self, tensor):
return tensor.detach().cpu().numpy()
def tensor_to_list(self, tensor):
return self.tensor_to_array(tensor).tolist()
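# Minimal subclass sketch (illustrative, not part of the original module):
# implements from_pretrained() for a Hugging Face sequence-classification
# model using the config fields defined in add_args() above.
class ExampleClassificationModel(BaseModel):
    def from_pretrained(self):
        from transformers import AutoModelForSequenceClassification, AutoTokenizer
        self.model = AutoModelForSequenceClassification.from_pretrained(
            self.config.model_name_or_path)
        self.tokenizer = AutoTokenizer.from_pretrained(
            self.config.model_name_or_path,
            do_lower_case=self.config.do_lower_case)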
|
import unittest
import io
from remcall.schema import Schema, Enum, Record, Interface, Method, string, float32, uint32, uint16, void, date, datetime, Array
from remcall import SchemaReader, read_schema, SchemaWriter, schema_to_bytes
Status = Enum('Status', ['Registered', 'Activated', 'Locked'])
User = Interface('User', [])
Address = Record('Address', [(string, 'Street'), (uint16, 'Number')])
User.methods = [
Method('GetName', [], string),
Method('SetName', [(string, 'name')], void),
Method('GetBirthdate', [], date),
Method('GetLastLogin', [], datetime),
Method('GetFriends', [], Array(User)),
Method('AddFriend', [(User, 'user'), (float32, 'degree')], void),
Method('GetAge', [], uint32),
Method('GetStatus', [], Status),
Method('GetAddress', [], Address)
]
Main = Interface('Main', [Method('GetFirstUser', [], User)])
USER_SCHEMA = Schema('MySchema', [Main, Array(User), User, Address, Array(Status), Status, Interface('Test', [Method('DoNothing', [], void)])])
class TestSchema(unittest.TestCase):
def reserialize_and_check(self, schema):
serialized_schema = schema_to_bytes(schema)
with io.BytesIO(serialized_schema) as stream:
schema2 = read_schema(stream)
serialized_schema2 = schema_to_bytes(schema2)
self.assertEqual( serialized_schema, serialized_schema2 )
def setUp(self):
self.user_schema = USER_SCHEMA
def test_missing_main(self):
with self.assertRaises(AssertionError):
Schema('NoMainSchema', [Interface('SomeInterface', [Method('DoSomething', [], void)])])
def test_missing_methods(self):
with self.assertRaises(AssertionError):
Schema('NoMethodsInterfaceSchema', [Interface('Main', [])])
def test_bad_argument_definition(self):
with self.assertRaises(AssertionError):
Method('m', [(string, '')], void)
with self.assertRaises(AssertionError):
Method('m', [(string, '1a')], void)
with self.assertRaises(AssertionError):
Method('m', [('test', 'a')], void)
with self.assertRaises(AssertionError):
Record('r', [(string, '')])
with self.assertRaises(AssertionError):
Record('r', [(string, '1a')])
with self.assertRaises(AssertionError):
Record('r', [('test', 'a')])
def test_void_arguments_or_fields(self):
with self.assertRaises(AssertionError):
Method('m', [(void, 'arg')], void)
with self.assertRaises(AssertionError):
Record('r', [(void, 'field')])
def test_too_many_enum_values(self):
Enum('e', ['value{}'.format(i) for i in range(256)])
with self.assertRaises(AssertionError):
Enum('e', ['value{}'.format(i) for i in range(257)])
def test_user_reserialization(self):
self.reserialize_and_check(self.user_schema)
def test_user_serialization_to_file(self):
from tempfile import TemporaryFile
with TemporaryFile(mode='w+b') as f:
SchemaWriter(self.user_schema, f).write_schema()
f.seek(0)
schema_read = read_schema(f)
self.assertEqual(schema_to_bytes(self.user_schema), schema_to_bytes(schema_read))
#with open('schema.rmc', mode='wb') as f:
# import base64
# f.write(base64.encodebytes(schema_to_bytes(self.user_schema)))
def test_bad_stream_reading(self):
from remcall.error import WrongNumberOfBytesRead
reader = SchemaReader(io.BytesIO(b'123'))
with self.assertRaises(WrongNumberOfBytesRead):
reader.read_from_stream(4)
def test_name_reading(self):
namestream = io.BytesIO(b'\x00\x00\x00\x03Abc\x00\x00\x00\x03a_1\x00\x00\x00\x00\x00\x00\x00\x03123')
reader = SchemaReader(namestream)
self.assertEqual('Abc', reader.read_name())
with self.assertRaises(AssertionError):
reader.read_name() # bad char '_'
with self.assertRaises(AssertionError):
reader.read_name() # bad length 0
with self.assertRaises(AssertionError):
reader.read_name() # bad first char '1'
def test_name_writing(self):
namestream = io.BytesIO()
writer = SchemaWriter(self.user_schema, namestream)
writer.write_name('Abc')
namestream.seek(0)
self.assertEqual(b'\x00\x00\x00\x03Abc', namestream.read(7))
with self.assertRaises(AssertionError):
writer.write_name('a_1') # bad char '_'
with self.assertRaises(AssertionError):
writer.write_name('') # bad length 0
with self.assertRaises(AssertionError):
writer.write_name('123') # bad first char '1'
def test_int_type_writing(self):
import struct
stream = io.BytesIO()
writer = SchemaWriter(self.user_schema, stream)
writer.write_int8(2**7-1)
with self.assertRaises(struct.error):
writer.write_int8(2**7)
writer.write_uint8(2**8-1)
with self.assertRaises(struct.error):
writer.write_uint8(2**8)
writer.write_int16(2**15-1)
with self.assertRaises(struct.error):
writer.write_int16(2**15)
writer.write_uint16(2**16-1)
with self.assertRaises(struct.error):
writer.write_uint16(2**16)
writer.write_int32(2**31-1)
with self.assertRaises(struct.error):
writer.write_int32(2**31)
writer.write_uint32(2**32-1)
with self.assertRaises(struct.error):
writer.write_uint32(2**32)
writer.write_int64(2**63-1)
with self.assertRaises(struct.error):
writer.write_int64(2**63)
writer.write_uint64(2**64-1)
with self.assertRaises(struct.error):
writer.write_uint64(2**64)
if __name__ == '__main__':
unittest.main()
|
"""Problem 69
07 May 2004
Euler's Totient function, φ(n) [sometimes called the phi function], is
used to determine the number of numbers less than n which are
relatively prime to n. For example, as 1, 2, 4, 5, 7, and 8, are all
less than nine and relatively prime to nine, φ(9)=6.
n Relatively Prime φ(n) n/φ(n)
2 1 1 2
3 1,2 2 1.5
4 1,3 2 2
5 1,2,3,4 4 1.25
6 1,5 2 3
7 1,2,3,4,5,6 6 1.1666...
8 1,3,5,7 4 2
9 1,2,4,5,7,8 6 1.5
10 1,3,7,9 4 2.5
It can be seen that n=6 produces a maximum n/φ(n) for n ≤ 10.
Find the value of n ≤ 1,000,000 for which n/φ(n) is a maximum.
"""
from eulerlib import timedRun
from math import gcd  # fractions.gcd was removed in Python 3.9
def phi(num):
count = 0
for den in range(1, num):
if gcd(num, den) == 1:
count += 1
return count
def euler69():
"""Solves problem 69 from Project Euler."""
    # At first I tried to brute-force this. My algorithm was O(n²) and
    # was taking too long, so I decided to look at the data. I
    # noticed that every time my program reached a maximum n/phi(n),
# the value for 'n' was a product of consecutive primes. I decided
# to just calculate the phi for the product of the consecutive
# primes and found the answer quickly.
primes = [2,3,5,7,11,13,17,19,23]
maxim = 0
n = 1
for p in primes:
n *= p
if n > 1000000:
break
phi_n = phi(n)
ratio = n / phi_n
if ratio > maxim:
maxim = ratio
print(n, phi_n, ratio)
print("maxim =", maxim)
if __name__ == "__main__":
timedRun(euler69)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Adres',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('postcode', models.CharField(max_length=6)),
('huisnummer', models.IntegerField()),
('plaats', models.CharField(max_length=100)),
('straat', models.CharField(max_length=100)),
('toevoeging', models.CharField(max_length=20, null=True, blank=True)),
],
options={
'verbose_name_plural': 'Adressen',
},
),
migrations.CreateModel(
name='Eigenaar',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('initialen', models.CharField(max_length=6)),
('voornaam', models.CharField(max_length=20)),
('achternaam', models.CharField(max_length=40)),
('adres', models.ForeignKey(to='iom.Adres')),
],
options={
'verbose_name_plural': 'Eigenaren',
},
),
migrations.CreateModel(
name='Meetpunt',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('ident', models.CharField(max_length=20)),
('locatie', django.contrib.gis.db.models.fields.PointField(help_text=b'Locatie meetpunt in Rijksdriehoekstelsel coordinaten', srid=28992)),
('begin', models.DateTimeField(auto_now=True)),
('einde', models.DateTimeField(null=True, blank=True)),
('eigenaar', models.ForeignKey(to='iom.Eigenaar')),
],
options={
'verbose_name_plural': 'Meetpunten',
},
),
]
|
bl_info = {
"name": "Reload Addon",
"blender": (2, 83, 0),
"category": "Development",
}
import bpy
from bpy.props import StringProperty
import time
import os
class ReloadOperator(bpy.types.Operator):
bl_idname = "wm.reload_addon"
bl_label = "Reload Addon"
def execute(self, context):
return {'FINISHED'}
def invoke(self, context, event):
prefs = context.preferences.addons[__name__].preferences
if prefs.filepath == "" or prefs.name == "":
print("Cannot reload addon, filepath or name not set.")
return {'CANCELLED'}
now = int(time.time())
os.utime(bpy.path.abspath(prefs.filepath), (now, now))
bpy.ops.preferences.addon_disable(module=prefs.name)
bpy.ops.preferences.addon_enable(module=prefs.name)
        return {'FINISHED'}
class ReloadPreferences(bpy.types.AddonPreferences):
bl_idname = __name__
name = StringProperty(name="Addon name")
filepath = StringProperty(
name="__init__.py",
subtype='FILE_PATH',
)
def draw(self, context):
layout = self.layout
layout.prop(self, "name")
layout.prop(self, "filepath")
@bpy.app.handlers.persistent
def load_handler(dummy):
wm = bpy.context.window_manager
km = wm.keyconfigs.active.keymaps["Window"]
km.keymap_items.new('wm.reload_addon', 'F5', 'PRESS', repeat=False)
def register():
bpy.utils.register_class(ReloadOperator)
bpy.utils.register_class(ReloadPreferences)
wm = bpy.context.window_manager
km = wm.keyconfigs.active.keymaps["Window"]
km.keymap_items.new('wm.reload_addon', 'F5', 'PRESS', repeat=False)
bpy.app.handlers.load_post.append(load_handler)
def unregister():
bpy.utils.unregister_class(ReloadOperator)
bpy.utils.unregister_class(ReloadPreferences)
bpy.app.handlers.load_post.remove(load_handler)
|
from .base import * # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env('DJANGO_SECRET_KEY', default='iNqDNewHzwBVO8NOhPSyzIlb8vqXLh1ehgkevg8GKoi1ONm9F9cVqxbxfNhmphZZ')
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
ALLOWED_HOSTS = [
"localhost",
"0.0.0.0",
"127.0.0.1",
]
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# TEMPLATES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa F405
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = 'localhost'
# https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = 1025
# django-debug-toolbar
# ------------------------------------------------------------------------------
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
INSTALLED_APPS += ['debug_toolbar'] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware'] # noqa F405
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2']
# django-extensions
# ------------------------------------------------------------------------------
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
INSTALLED_APPS += ['django_extensions'] # noqa F405
# Your stuff...
# ------------------------------------------------------------------------------
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(message)s',
},
'simple': {
'format': '%(levelname)s %(asctime)s %(message)s',
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'root': {
'level': 'DEBUG',
'handlers': ['console']
},
'loggers': {
'django': {
            'handlers': ['console'],
'level': 'INFO'
},
'risk_management.users.views': {
'handlers': ['console'],
'level': 'DEBUG'
},
'risk_register.models': {
'handlers': ['console'],
'level': 'DEBUG'
},
'risk_register.views': {
'handlers': ['console'],
'level': 'DEBUG'
}
},
}
|
from discord.ext.commands import Cog, command, is_owner
from discord import Color, Embed, File
from sys import version_info
from asyncpg.exceptions import UniqueViolationError
from mcstatus import MinecraftServer
from socket import gethostbyname, timeout, gaierror
from matplotlib.pyplot import subplots, xlabel, ylabel, title
from matplotlib.dates import DateFormatter
from os import mkdir, remove
from re import sub as re_sub, IGNORECASE
from datetime import datetime, timedelta
from asyncio import sleep
class Commands(Cog):
"""Команды для бота пингера"""
def __init__(self, bot):
self.bot = bot
@command(name="пинг")
async def ping(self, ctx, ip):
"""Пинг сервера и показ его основной информации"""
print(f'{ctx.author.name} использовал команду "{ctx.message.content}"')
embed = Embed(
title=f"Пингую {ip}...", description="Подождите немного, я вас упомяну когда закончу", color=Color.orange()
)
await ctx.send(embed=embed)
ip_from_alias = await self.bot.db.get_ip_alias(ip)
if len(ip_from_alias) != 0:
ip = str(ip_from_alias[0]["numip"])[0:-3] + ":" + str(ip_from_alias[0]["port"])
server = MinecraftServer.lookup(ip)
try:
status = server.status()
online = True
except (timeout, ConnectionRefusedError, gaierror, OSError):
online, status = False, None
if online:
num_ip = gethostbyname(server.host)
embed = Embed(
title=f"Результаты пинга {ip}",
description=f"Цифровое айпи: {num_ip}:{str(server.port)}\n**Онлайн**",
color=Color.green(),
)
embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{server.host}:{str(server.port)}")
embed.add_field(name="Время ответа", value=str(status.latency) + "мс")
embed.add_field(name="Используемое ПО", value=status.version.name)
embed.add_field(name="Онлайн", value=f"{status.players.online}/{status.players.max}")
motd_clean = re_sub(r"[\xA7|&][0-9A-FK-OR]", "", status.description, flags=IGNORECASE)
embed.add_field(name="Мотд", value=motd_clean)
embed.set_footer(text=f'Для получения ссылки на редактирование МОТД, напишите "мотд {ip}"')
await ctx.send(ctx.author.mention, embed=embed)
else:
embed = Embed(title=f"Результаты пинга {ip}", description="\n\n**Офлайн**", color=Color.red())
embed.add_field(
name="Не удалось пингануть сервер", value="Возможно вы указали неверный айпи, или сервер сейчас выключен"
)
await ctx.send(ctx.author.mention, embed=embed)
@command(name="мотд")
async def motd(self, ctx, ip):
"""Показывает мотд и ссылку на редактирование сервера"""
print(f'{ctx.author.name} использовал команду "{ctx.message.content}"')
embed = Embed(
title=f"Получаю данные сервера {ip}...",
description="Подождите немного, я вас упомяну когда закончу",
color=Color.orange(),
)
await ctx.send(embed=embed)
ip_from_alias = await self.bot.db.get_ip_alias(ip)
if len(ip_from_alias) != 0:
ip = str(ip_from_alias[0]["numip"])[0:-3] + ":" + str(ip_from_alias[0]["port"])
server = MinecraftServer.lookup(ip)
try:
status = server.status()
online = True
except (timeout, ConnectionRefusedError, gaierror, OSError):
online, status = False, None
if online:
motd = str(status.raw["description"]["text"]).replace(" ", "+")
motd_url = motd.replace("\n", "%0A")
embed = Embed(
title=f"Подробное мотд сервера {ip}",
description="Эта команда дает возможность скопировать мотд и вставить на свой сервер",
color=Color.green(),
)
embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{server.host}:{str(server.port)}")
embed.add_field(name="Мотд", value=f"{status.description}")
embed.add_field(name="Ссылка на редактирование", value="https://mctools.org/motd-creator?text=" + motd_url)
await ctx.send(ctx.author.mention, embed=embed)
else:
embed = Embed(
title=f"Подробное мотд сервера {ip}",
description="Эта команда дает возможность скопировать мотд и вставить на свой сервер",
color=Color.red(),
)
embed.add_field(
name="Не удалось получить данные с сервера",
value="Возможно вы указали неверный айпи, или сервер сейчас выключен",
)
await ctx.send(ctx.author.mention, embed=embed)
@command(name="стата")
async def statistic(self, ctx, server): # noqa: C901
"""Статистика сервера"""
print(f'{ctx.author.name} использовал команду "{ctx.message.content}"')
embed = Embed(
title=f"Получаю данные сервера {server}...",
description="Подождите немного, я вас упомяну когда закончу",
color=Color.orange(),
)
await ctx.send(embed=embed)
ip_from_alias = await self.bot.db.get_ip_alias(server)
if len(ip_from_alias) != 0:
server = str(ip_from_alias[0]["numip"])[0:-3] + ":" + str(ip_from_alias[0]["port"])
mcserver = MinecraftServer.lookup(server)
try:
status = mcserver.status()
valid = True
except (timeout, ConnectionRefusedError):
valid, status = True, None
except gaierror:
valid, status = False, None
if valid:
num_ip = gethostbyname(mcserver.host)
database_server = await self.bot.db.get_server(num_ip, mcserver.port)
else:
database_server, num_ip = [], None
        if valid and status is not None and len(database_server) != 0:
if database_server[0]["alias"] is not None:
server = database_server[0]["alias"]
embed = Embed(
title=f"Статистика сервера {server}",
description=f"Цифровое айпи: {num_ip}:{str(mcserver.port)}\n**Онлайн**",
color=Color.green(),
)
yesterday_25h = datetime.now() - timedelta(hours=25)
yesterday_23h = datetime.now() - timedelta(hours=23)
pings = await self.bot.db.get_pings(num_ip, mcserver.port)
online_yest = None
            for ping in pings:  # look for pings within a two-hour window around this time yesterday
if (yesterday_23h > ping["time"] > yesterday_25h) and (
yesterday_23h.day >= ping["time"].day >= yesterday_25h.day
):
online_yest = ping["players"]
if online_yest is None:
online_yest = "Нету информации"
embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{mcserver.host}:{str(mcserver.port)}")
embed.add_field(name="Текущий онлайн", value=str(status.players.online) + "/" + str(status.players.max))
embed.add_field(name="Онлайн сутки назад в это же время", value=online_yest)
embed.add_field(name="Рекорд онлайна за всё время", value=str(database_server[0]["record"]))
embed.set_footer(text=f'Для большей информации о сервере напишите "пинг {server}"')
pings = await self.bot.db.get_pings(num_ip, mcserver.port)
if len(pings) <= 20:
return await ctx.send(ctx.author.mention + ", слишком мало информации для графика.", embed=embed)
fig, ax = subplots()
arr_online = []
arr_time = []
for ping in pings:
arr_online.append(int(ping["players"]))
arr_time.append(ping["time"])
my_fmt = DateFormatter("%H:%M")
ax.xaxis.set_major_formatter(my_fmt)
ax.plot(arr_time, arr_online)
xlabel("Время")
ylabel("Онлайн")
title("Статистика сервера " + server)
file_name = num_ip + "_" + str(mcserver.port) + ".png"
try:
mkdir("./plots/")
except FileExistsError:
pass
fig.savefig("./plots/" + file_name)
file = File("./plots/" + file_name, filename=file_name)
embed.set_image(url="attachment://" + file_name)
await ctx.send(ctx.author.mention, embed=embed, file=file)
await sleep(10)
try:
remove("./plots/" + file_name)
except (PermissionError, FileNotFoundError):
pass
else:
embed = Embed(title=f"Статистика сервера {server}", description="**Офлайн**", color=Color.red())
embed.add_field(
name="Не удалось получить статистику сервера",
value="Возможно вы указали неверный айпи/алиас, или сервер еще не добавлен",
)
await ctx.send(embed=embed)
@command(name="добавить")
@is_owner()
async def add_server(self, ctx, server):
"""Добавление сервера в бота"""
print(f'{ctx.author.name} использовал команду "{ctx.message.content}"')
embed = Embed(
title=f"Получаю данные сервера {server}...",
description="Подождите немного, я вас упомяну когда закончу",
color=Color.orange(),
)
await ctx.send(embed=embed)
mcserver = MinecraftServer.lookup(server)
try:
mcserver.status()
online = True
except (timeout, ConnectionRefusedError, gaierror, OSError):
online = False
if online:
num_ip = gethostbyname(mcserver.host)
try:
await self.bot.db.add_server(num_ip, ctx.author.id, mcserver.port)
            except UniqueViolationError:  # the server is already added
embed = Embed(title=f"Не удалось добавить сервер {server}", description="**Онлайн**", color=Color.red())
embed.add_field(name="Не удалось добавить сервер", value="Сервер уже добавлен")
return await ctx.send(ctx.author.mention, embed=embed)
embed = Embed(
title=f"Добавил сервер {server}",
description=f"Цифровое айпи: {num_ip}:{str(mcserver.port)}\n**Онлайн**",
color=Color.green(),
)
embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{mcserver.host}:{str(mcserver.port)}")
embed.add_field(
name="Сервер успешно добавлен", value='Напишите "помощь" для получения большей информации о серверах'
)
embed.set_footer(text=f'Теперь вы можете использовать "стата {server}" или "алиас (алиас) {server}"')
await ctx.send(ctx.author.mention, embed=embed)
else:
embed = Embed(title=f"Не удалось добавить сервер {server}", description="**Офлайн**", color=Color.red())
embed.add_field(
name="Не удалось добавить сервер", value="Возможно вы указали неверный айпи, или сервер сейчас выключен"
)
await ctx.send(ctx.author.mention, embed=embed)
@command(name="алиас")
async def alias(self, ctx, alias, server):
"""Добавление алиаса к серверу"""
print(f'{ctx.author.name} использовал команду "{ctx.message.content}"')
embed = Embed(
title=f"Получаю данные сервера {server}...",
description="Подождите немного, я вас упомяну когда закончу",
color=Color.orange(),
)
await ctx.send(embed=embed)
mcserver = MinecraftServer.lookup(server)
num_ip = gethostbyname(mcserver.host)
database_server = await self.bot.db.get_server(num_ip, mcserver.port)
if len(database_server) != 0:
if ctx.author.id != database_server[0]["owner"]:
embed = Embed(
title=f"Вы не владелец сервера {server}",
description=f"Только владелец может изменять алиас сервера {server}",
color=Color.red(),
)
embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{mcserver.host}:{str(mcserver.port)}")
embed.add_field(name="Ошибка", value="Вы не владелец")
embed.set_footer(text=f'Для большей информации о сервере напишите "стата {server}"')
return await ctx.send(ctx.author.mention, embed=embed)
await self.bot.db.add_alias(alias, num_ip, mcserver.port)
            embed = Embed(
                title=f"Saved alias {alias} for server {server}",
                description=f"You can now use the alias {alias} instead of {server}",
                color=Color.green(),
            )
            embed.set_thumbnail(url=f"https://api.mcsrvstat.us/icon/{mcserver.host}:{str(mcserver.port)}")
            embed.add_field(
                name="Data updated successfully", value='Type "помощь" for more information about servers'
            )
            embed.set_footer(text=f'For more information about the server, type "стата {alias}"')
await ctx.send(ctx.author.mention, embed=embed)
else:
            embed = Embed(
                title=f"Failed to add alias {alias} to server {server}", description="**Oops**", color=Color.red()
            )
            embed.add_field(name="Failed to add alias", value="You may have entered an invalid IP")
            await ctx.send(ctx.author.mention, embed=embed)
    # The code below is not strictly related to the pinger bot
@command(name="помощь", aliases=["help", "хэлп", "хєлп", "хелп"])
async def help(self, ctx):
"""Это команда помощи"""
commands = sorted([c for c in self.bot.commands if not c.hidden], key=lambda c: c.name)
        embed = Embed(
            title="Help command",
            description="I ping each server every 5 minutes and show its statistics! "
            "I am a fairly simple bot to use. My prefix is literally nothing: "
            "you do not need to put any prefix before commands."
            "\n\nHere is a short list of my commands:",
            color=Color.greyple(),
        )
        embed.set_footer(text="Note: No, I do not ping servers before you add me")
for cmd in commands:
embed.add_field(name=cmd.name, value=cmd.help, inline=False)
await ctx.send(embed=embed)
@command(name="инфо", aliases=["об", "about"])
async def about(self, ctx):
"""Немного базовой информации про меня"""
embed = Embed(
title=str(self.bot.user),
description=self.bot.app_info.description + f"\n\n**ID**: {self.bot.app_info.id}",
color=Color.greyple(),
)
embed.set_thumbnail(url=self.bot.app_info.icon_url)
embed.add_field(name="Владелец", value=self.bot.app_info.owner)
embed.add_field(name="Количество серверов", value=str(len(self.bot.guilds)))
embed.add_field(name="Количество пользователей", value=str(len(self.bot.users)))
embed.add_field(name="Язык программирования", value=f"Python {'.'.join(map(str, version_info[:-2]))}")
embed.add_field(name="Библиотека", value="[discord.py](https://github.com/Rapptz/discord.py)")
embed.add_field(
name="Лицензия", value="[Apache License 2.0]" "(https://github.com/PerchunPak/PingerBot/blob/main/LICENSE)"
)
embed.add_field(name="Открытый код", value="https://github.com/PerchunPak/PingerBot", inline=False)
embed.set_footer(text="Примечание: Оригинальный автор не Perchun_Pak#9236, а NinjaSnail1080#8581")
await ctx.send(embed=embed)
@command(name="пригласить", aliases=["invite", "приглос", "приг"])
async def invite(self, ctx):
"""Скидывает ссылку чтобы Вы могли пригласить бота на свой сервер"""
await ctx.send(
            "Here is my invite link so you can invite me to your server too:\n"
f"https://discordapp.com/oauth2/authorize?client_id={self.bot.app_info.id}"
"&scope=bot&permissions=8"
)
def setup(bot):
bot.add_cog(Commands(bot))
|
# -*- coding: utf-8 -*-
# @Author : Charlesxu
# @Email : charlesxu.ai@gmail.com
|
from requests import ConnectionError, Timeout
class AlephException(Exception):
def __init__(self, exc):
self.exc = exc
self.response = None
self.status = None
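        # Connection errors and timeouts are considered transient (retryable);
        # when an HTTP response is present, the status code decides instead.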
self.transient = isinstance(exc, (ConnectionError, Timeout))
self.message = str(exc)
if hasattr(exc, "response") and exc.response is not None:
self.response = exc.response
self.status = exc.response.status_code
self.transient = exc.response.status_code >= 500
            try:
                data = exc.response.json()
                # Fall back to str(exc) if the JSON body has no "message" key:
                self.message = data.get("message", self.message)
            except Exception:
                self.message = exc.response.text
def __str__(self):
return self.message
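# Minimal usage sketch (illustrative only; the URL below is a placeholder and
# the retry policy is just one reasonable way to act on `transient`):
if __name__ == "__main__":
    import requests
    try:
        response = requests.get("https://aleph.example.org/api/2/collections")
        response.raise_for_status()
    except (ConnectionError, Timeout, requests.HTTPError) as exc:
        err = AlephException(exc)
        # Transient failures (connection problems, timeouts, HTTP 5xx) are
        # usually safe to retry; anything else should surface immediately.
        print("transient=%s status=%s: %s" % (err.transient, err.status, err))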
|
from django import template
from metashare.repository.models import corpusInfoType_model, \
toolServiceInfoType_model, lexicalConceptualResourceInfoType_model, \
languageDescriptionInfoType_model
register = template.Library()
class ResourceLanguages(template.Node):
"""
    Template tag that displays the languages of a resource in the result page template.
"""
def __init__(self, context_var):
"""
Initialises this template tag.
"""
super(ResourceLanguages, self).__init__()
self.context_var = template.Variable(context_var)
def render(self, context):
"""
Renders languages.
"""
result = []
corpus_media = self.context_var.resolve(context)
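        # Gather languageName values from whichever media-type sections exist.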
if isinstance(corpus_media, corpusInfoType_model):
media_type = corpus_media.corpusMediaType
for corpus_info in media_type.corpustextinfotype_model_set.all():
result.extend([lang.languageName for lang in
corpus_info.languageinfotype_model_set.all()])
if media_type.corpusAudioInfo:
result.extend([lang.languageName for lang in
media_type.corpusAudioInfo.languageinfotype_model_set.all()])
for corpus_info in media_type.corpusvideoinfotype_model_set.all():
result.extend([lang.languageName for lang in
corpus_info.languageinfotype_model_set.all()])
if media_type.corpusTextNgramInfo:
result.extend([lang.languageName for lang in
media_type.corpusTextNgramInfo.languageinfotype_model_set.all()])
if media_type.corpusImageInfo:
result.extend([lang.languageName for lang in
media_type.corpusImageInfo.languageinfotype_model_set.all()])
elif isinstance(corpus_media, lexicalConceptualResourceInfoType_model):
lcr_media_type = corpus_media.lexicalConceptualResourceMediaType
if lcr_media_type.lexicalConceptualResourceAudioInfo:
result.extend([lang.languageName for lang in lcr_media_type \
.lexicalConceptualResourceAudioInfo.languageinfotype_model_set.all()])
if lcr_media_type.lexicalConceptualResourceTextInfo:
result.extend([lang.languageName for lang in lcr_media_type \
.lexicalConceptualResourceTextInfo.languageinfotype_model_set.all()])
if lcr_media_type.lexicalConceptualResourceVideoInfo:
result.extend([lang.languageName for lang in lcr_media_type \
.lexicalConceptualResourceVideoInfo.languageinfotype_model_set.all()])
if lcr_media_type.lexicalConceptualResourceImageInfo:
result.extend([lang.languageName for lang in lcr_media_type \
.lexicalConceptualResourceImageInfo.languageinfotype_model_set.all()])
elif isinstance(corpus_media, languageDescriptionInfoType_model):
ld_media_type = corpus_media.languageDescriptionMediaType
if ld_media_type.languageDescriptionTextInfo:
result.extend([lang.languageName for lang in ld_media_type \
.languageDescriptionTextInfo.languageinfotype_model_set.all()])
if ld_media_type.languageDescriptionVideoInfo:
result.extend([lang.languageName for lang in ld_media_type \
.languageDescriptionVideoInfo.languageinfotype_model_set.all()])
if ld_media_type.languageDescriptionImageInfo:
result.extend([lang.languageName for lang in ld_media_type \
.languageDescriptionImageInfo.languageinfotype_model_set.all()])
elif isinstance(corpus_media, toolServiceInfoType_model):
if corpus_media.inputInfo:
result.extend(corpus_media.inputInfo.languageName)
if corpus_media.outputInfo:
result.extend(corpus_media.outputInfo.languageName)
result = list(set(result))
result.sort()
return u"".join(u"<li>{}</li>".format(lang) for lang in result)
def resource_languages(parser, token):
"""
Use it like this: {% load_languages object.resourceComponentType.as_subclass %}
"""
tokens = token.contents.split()
if len(tokens) != 2:
_msg = "%r tag accepts exactly two arguments" % tokens[0]
raise template.TemplateSyntaxError(_msg)
return ResourceLanguages(tokens[1])
register.tag('resource_languages', resource_languages)
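# Usage sketch (illustrative; the {% load %} library name depends on where this
# module is installed, and `resource` stands for a template context variable):
#
#     {% load repository_tags %}
#     <ul>{% resource_languages resource.resourceComponentType.as_subclass %}</ul>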
|
"""Integration with I Done This."""
from __future__ import unicode_literals
import json
import logging
from django.utils.functional import cached_property
from django.utils.six.moves.urllib.error import HTTPError, URLError
from django.utils.six.moves.urllib.request import urlopen
from django.utils.translation import ugettext_lazy as _
from reviewboard.admin.server import build_server_url
from reviewboard.extensions.hooks import AccountPagesHook, SignalHook
from reviewboard.integrations import Integration
from reviewboard.reviews.models import BaseComment, ReviewRequest
from reviewboard.reviews.signals import (review_request_closed,
review_request_published,
review_request_reopened,
review_published,
reply_published)
from rbintegrations.idonethis import entries
from rbintegrations.idonethis.forms import IDoneThisIntegrationConfigForm
from rbintegrations.idonethis.pages import IDoneThisIntegrationAccountPage
from rbintegrations.idonethis.utils import (create_idonethis_request,
get_user_api_token,
get_user_team_ids)
class IDoneThisIntegration(Integration):
"""Integrates Review Board with I Done This.
This integration allows posting 'done' entries to I Done This teams on
behalf of users when they publish, change, or close a review request, and
when they publish reviews or replies.
I Done This entries are plain text, with #tags (used for group names)
and @mentions (unused; only I Done This admins can configure them).
URLs inside entries shown on the I Done This website are also
automatically linked with truncated URL text.
"""
name = _('I Done This')
description = _(
'Posts on behalf of users to I Done This teams when review requests '
'are created, updated, and reviewed.'
)
default_settings = {
'team_id': '',
}
config_form_cls = IDoneThisIntegrationConfigForm
def initialize(self):
"""Initialize the integration hooks."""
AccountPagesHook(self, [IDoneThisIntegrationAccountPage])
hooks = (
(review_request_closed, self._on_review_request_closed),
(review_request_published, self._on_review_request_published),
(review_request_reopened, self._on_review_request_reopened),
(review_published, self._on_review_published),
(reply_published, self._on_reply_published),
)
for signal, handler in hooks:
SignalHook(self, signal, handler)
@cached_property
def icon_static_urls(self):
"""The icons used for the integration."""
from rbintegrations.extension import RBIntegrationsExtension
extension = RBIntegrationsExtension.instance
return {
'1x': extension.get_static_url('images/idonethis/icon.png'),
'2x': extension.get_static_url('images/idonethis/icon@2x.png'),
}
def post_entry(self, entry_type, user, review_request, signal_name,
url=None, num_issues=0):
"""Post a 'done' entry to I Done This teams.
Posts the specified entry to any configured teams that the user
belongs to.
Args:
entry_type (unicode):
Entry type from :py:mod:`rbintegrations.idonethis.entries` to
post.
user (django.contrib.auth.models.User):
The user who updated the review request. Entries are only
posted if the user has specified an API token for I Done This,
and is a member of the configured teams.
review_request (reviewboard.reviews.models.review_request.
ReviewRequest):
The review request the notification is bound to.
signal_name (unicode):
The name of the signal triggering this notification.
url (unicode, optional):
URL to show in the entry instead of the review request URL.
num_issues (int, optional):
Number of issues opened in a review.
"""
if not (review_request.public and user and user.is_active):
return
api_token = get_user_api_token(user)
if not api_token:
return
user_team_ids = None
for config in self.get_configs(review_request.local_site):
if not config.match_conditions(form_cls=self.config_form_cls,
review_request=review_request):
continue
# Lazy load team IDs after the first matching configuration.
if user_team_ids is None:
user_team_ids = get_user_team_ids(user)
if not user_team_ids:
# We finished posting to all of the user's teams, the request
# to get the teams failed, or the user is not in any team.
return
team_id = config.get('team_id')
if not team_id or team_id not in user_team_ids:
continue
# Avoid posting duplicate entries to the same team from multiple
# matching configurations.
user_team_ids.remove(team_id)
template_string = entries.default_template_strings[entry_type]
entry_body = entries.format_template_string(
template_string=template_string,
num_issues=num_issues,
review_request=review_request,
url=url)
json_payload = json.dumps({
'body': entry_body,
'team_id': team_id,
'status': 'done',
# Optional 'occurred_on' is automatically set by I Done This.
})
request = create_idonethis_request(request_path='entries',
api_token=api_token,
json_payload=json_payload)
logging.debug('IDoneThis: Posting entry "%s" for signal "%s", '
'review_request ID %d, user "%s" to team "%s", '
'request "%s %s"',
entry_type,
signal_name,
review_request.pk,
user.username,
team_id,
request.get_method(),
request.get_full_url())
try:
urlopen(request)
except (HTTPError, URLError) as e:
# TODO: record failure in user settings and possibly notify the
# user on the account page so that problems can be noticed.
if isinstance(e, HTTPError):
error_info = '%s, error data: %s' % (e, e.read())
else:
error_info = e.reason
logging.error('IDoneThis: Failed to post entry for user "%s" '
'to team "%s", request "%s %s": %s',
user.username,
team_id,
request.get_method(),
request.get_full_url(),
error_info)
def _on_review_request_closed(self, user, review_request, close_type,
**kwargs):
"""Handler for when review requests are closed.
Posts an entry to any configured I Done This teams when a review
request is closed.
Args:
user (django.contrib.auth.models.User):
The user who closed the review request.
review_request (reviewboard.reviews.models.review_request.
ReviewRequest):
The review request that was closed.
close_type (unicode):
The close type. Must be either ReviewRequest.DISCARDED or
ReviewRequest.SUBMITTED.
**kwargs (dict):
Additional keyword arguments passed to the handler.
"""
if close_type == ReviewRequest.DISCARDED:
entry_type = entries.REVIEW_REQUEST_DISCARDED
else:
entry_type = entries.REVIEW_REQUEST_COMPLETED
self.post_entry(entry_type=entry_type,
user=user,
review_request=review_request,
signal_name='review_request_closed')
def _on_review_request_published(self, user, review_request, changedesc,
**kwargs):
"""Handler for when review requests are published.
Posts an entry to any configured I Done This teams when a review
request is published.
Args:
user (django.contrib.auth.models.User):
The user who published the review request.
review_request (reviewboard.reviews.models.review_request.
ReviewRequest):
The review request that was published.
changedesc (reviewboard.changedescs.models.ChangeDescription):
The change description for the update, if any.
**kwargs (dict):
Additional keyword arguments passed to the handler.
"""
if not changedesc:
entry_type = entries.REVIEW_REQUEST_PUBLISHED
elif ('status' in changedesc.fields_changed and
changedesc.fields_changed['status']['new'][0] ==
ReviewRequest.PENDING_REVIEW):
entry_type = entries.REVIEW_REQUEST_REOPENED
else:
entry_type = entries.REVIEW_REQUEST_UPDATED
self.post_entry(entry_type=entry_type,
user=user,
review_request=review_request,
signal_name='review_request_published')
def _on_review_request_reopened(self, user, review_request, **kwargs):
"""Handler for when review requests are reopened.
Posts an entry to any configured I Done This teams when a review
request is reopened.
Args:
user (django.contrib.auth.models.User):
The user who reopened the review request.
review_request (reviewboard.reviews.models.review_request.
ReviewRequest):
The review request that was reopened.
**kwargs (dict):
Additional keyword arguments passed to the handler.
"""
self.post_entry(entry_type=entries.REVIEW_REQUEST_REOPENED,
user=user,
review_request=review_request,
signal_name='review_request_reopened')
def _on_review_published(self, user, review, to_owner_only, **kwargs):
"""Handler for when a review is published.
Posts an entry to any configured I Done This teams when a review is
published.
Args:
user (django.contrib.auth.models.User):
The user who published the review.
review (reviewboard.reviews.models.review.Review):
The review that was published.
to_owner_only (boolean):
Whether the review should be sent only to the review request
owner.
**kwargs (dict):
Additional keyword arguments passed to the handler.
"""
if to_owner_only:
return
num_issues = 0
for comment in review.get_all_comments():
if (comment.issue_opened and
comment.issue_status == BaseComment.OPEN):
num_issues += 1
if review.ship_it:
if num_issues == 0:
entry_type = entries.REVIEW_PUBLISHED_SHIPIT
elif num_issues == 1:
entry_type = entries.REVIEW_PUBLISHED_SHIPIT_ISSUE
else:
entry_type = entries.REVIEW_PUBLISHED_SHIPIT_ISSUES
else:
if num_issues == 0:
entry_type = entries.REVIEW_PUBLISHED
elif num_issues == 1:
entry_type = entries.REVIEW_PUBLISHED_ISSUE
else:
entry_type = entries.REVIEW_PUBLISHED_ISSUES
self.post_entry(entry_type=entry_type,
user=user,
review_request=review.review_request,
signal_name='review_published',
url=build_server_url(review.get_absolute_url()),
num_issues=num_issues)
def _on_reply_published(self, user, reply, **kwargs):
"""Handler for when a reply to a review is published.
Posts an entry to any configured I Done This teams when a reply to a
review is published.
Args:
user (django.contrib.auth.models.User):
The user who published the reply.
reply (reviewboard.reviews.models.review.Review):
The reply that was published.
**kwargs (dict):
Additional keyword arguments passed to the handler.
"""
self.post_entry(entry_type=entries.REPLY_PUBLISHED,
user=user,
review_request=reply.review_request,
signal_name='reply_published',
url=build_server_url(reply.get_absolute_url()))
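# Illustrative sketch only -- NOT the actual helper in
# rbintegrations.idonethis.utils. create_idonethis_request() could plausibly
# build the authenticated urllib request along these lines (the API base URL
# is an assumption):
#
#     from django.utils.six.moves.urllib.request import Request
#
#     def create_idonethis_request(request_path, api_token, json_payload=None):
#         request = Request(
#             'https://beta.idonethis.com/api/v2/%s' % request_path,
#             data=json_payload.encode('utf-8') if json_payload else None)
#         request.add_header('Authorization', 'Token %s' % api_token)
#         request.add_header('Content-Type', 'application/json')
#         return request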
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from segm_benchmark.utils.encoding import scaled_l2, aggregate
class Encoding(nn.Module):
r"""
Encoding Layer: a learnable residual encoder.
Args:
        D: dimension of the features or feature channels
        K: number of codewords
Attributes:
codewords (Tensor): the learnable codewords of shape (:math:`K\times D`)
scale (Tensor): the learnable scale factor of visual centers
Reference:
        Hang Zhang, Kristin Dana, Jianping Shi, Zhongyue Zhang, Xiaogang Wang, Ambrish Tyagi,
        Amit Agrawal. "Context Encoding for Semantic Segmentation."
        *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2018*
"""
def __init__(self, D, K):
super(Encoding, self).__init__()
self.D, self.K = D, K
self.codewords = nn.Parameter(torch.Tensor(K, D), requires_grad=True)
self.scale = nn.Parameter(torch.Tensor(K), requires_grad=True)
self.reset_params()
def reset_params(self):
_std = 1.0 / ((self.K * self.D) ** (1.0 / 2))
self.codewords.data.uniform_(-_std, _std)
self.scale.data.uniform_(-1, 0)
def forward(self, X):
assert X.size(1) == self.D
B, D = X.size(0), self.D
if X.dim() == 3:
# B, D, N -> B, N, D
X = X.transpose(1, 2).contiguous()
elif X.dim() == 4:
# B,D,H,W -> B,HW, D
X = X.view(B, D, -1).transpose(1, 2).contiguous()
else:
raise RuntimeError("Encoding Layer unknown input dims!")
# assignment weights BxNxK
A = F.softmax(scaled_l2(X, self.codewords, self.scale), dim=2)
# aggregate
E = aggregate(A, X, self.codewords)
return E
def __repr__(self):
return self.__class__.__name__ + '(N x ' + str(self.D) + '=>' + str(self.K) + ' x ' + str(self.D) + ')'
class Mean(nn.Module):
def __init__(self, dim, keep_dim=False):
super(Mean, self).__init__()
self.dim = dim
self.keep_dim = keep_dim
def forward(self, input):
return input.mean(self.dim, self.keep_dim)
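# Minimal usage sketch (assumes segm_benchmark's scaled_l2/aggregate ops are
# importable; shapes follow the contract documented in forward()):
if __name__ == "__main__":
    x = torch.randn(2, 64, 16, 16)  # B x D x H x W feature map
    layer = Encoding(D=64, K=8)
    out = layer(x)                   # -> B x K x D residual encodings
    print(out.shape)                 # torch.Size([2, 8, 64])
    pooled = Mean(dim=1)(out)        # average over the K codewords -> B x D
    print(pooled.shape)              # torch.Size([2, 64])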
|
# Slixmpp: The Slick XMPP Library
# Copyright (C) 2015 Emmanuel Gil Peyrot
# This file is part of Slixmpp.
# See the file LICENSE for copying permission.
import asyncio
import logging
from uuid import uuid4
from slixmpp.plugins import BasePlugin, register_plugin
from slixmpp import future_wrapper, Iq, Message
from slixmpp.exceptions import XMPPError, IqError, IqTimeout
from slixmpp.jid import JID
from slixmpp.xmlstream import register_stanza_plugin
from slixmpp.xmlstream.handler import Callback
from slixmpp.xmlstream.matcher import StanzaPath
from slixmpp.plugins.xep_0070 import stanza, Confirm
log = logging.getLogger(__name__)
class XEP_0070(BasePlugin):
"""
XEP-0070 Verifying HTTP Requests via XMPP
"""
name = 'xep_0070'
description = 'XEP-0070: Verifying HTTP Requests via XMPP'
dependencies = {'xep_0030'}
stanza = stanza
def plugin_init(self):
register_stanza_plugin(Iq, Confirm)
register_stanza_plugin(Message, Confirm)
self.xmpp.register_handler(
Callback('Confirm',
StanzaPath('iq@type=get/confirm'),
self._handle_iq_confirm))
self.xmpp.register_handler(
Callback('Confirm',
StanzaPath('message/confirm'),
self._handle_message_confirm))
def plugin_end(self):
self.xmpp.remove_handler('Confirm')
self.xmpp['xep_0030'].del_feature(feature='http://jabber.org/protocol/http-auth')
def session_bind(self, jid):
self.xmpp['xep_0030'].add_feature('http://jabber.org/protocol/http-auth')
@future_wrapper
def ask_confirm(self, jid, id, url, method, *, ifrom=None, message=None):
jid = JID(jid)
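        # XEP-0070: use an Iq when we have a full JID (a specific resource);
        # otherwise fall back to a Message carrying a fresh thread ID.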
if jid.resource:
stanza = self.xmpp.Iq()
stanza['type'] = 'get'
else:
stanza = self.xmpp.Message()
stanza['thread'] = uuid4().hex
stanza['from'] = ifrom
stanza['to'] = jid
stanza['confirm']['id'] = id
stanza['confirm']['url'] = url
stanza['confirm']['method'] = method
if not jid.resource:
if message is not None:
stanza['body'] = message.format(id=id, url=url, method=method)
stanza.send()
return stanza
else:
return stanza.send()
def _handle_iq_confirm(self, iq):
self.xmpp.event('http_confirm_iq', iq)
self.xmpp.event('http_confirm', iq)
def _handle_message_confirm(self, message):
self.xmpp.event('http_confirm_message', message)
self.xmpp.event('http_confirm', message)
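# Minimal usage sketch (illustrative; assumes a connected ClientXMPP instance
# named `xmpp` -- the JID, confirmation id, and URL below are placeholders):
#
#     xmpp.register_plugin('xep_0070')
#     xmpp['xep_0070'].ask_confirm('juliet@capulet.lit/balcony',
#                                  id='a7374jnjlalasdf82',
#                                  url='https://files.capulet.lit/abc.txt',
#                                  method='GET',
#                                  ifrom=xmpp.boundjid)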
|
from classtoolz import Slotted, Typed, Immutable, Cached
class Person(Slotted, Typed, Immutable, Cached):
""" A Person class
Example for Slotted, Typed, Immutable and Cached mixins
>>> Alice = Person('Alice', 25)
>>> Alice
Person(name=Alice, age=25)
>>> Alice.age = 26
Traceback (most recent call last):
...
TypeError: Person class is immutable
>>> Bob = Person('Bob', 22.5)
Traceback (most recent call last):
...
TypeError: 22.5 should be of type int. Got type float
>>> Alice2 = Person('Alice', 25) # A duplicate
>>> Alice is Alice2
True
"""
__slots__ = ['name', 'age']
__types__ = [str, int]
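# Illustrative sketch only -- NOT classtoolz's actual implementation. A
# Typed-style mixin could validate positional arguments against __types__
# roughly like this:
#
#     class TypedSketch(object):
#         def __init__(self, *args):
#             for arg, typ in zip(args, self.__types__):
#                 if not isinstance(arg, typ):
#                     raise TypeError('%s should be of type %s. Got type %s'
#                                     % (arg, typ.__name__, type(arg).__name__))
#             super(TypedSketch, self).__init__(*args)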
|