content stringlengths 5 1.05M |
|---|
"""Exports the "troupe" decorator that can be applied to Actor
definitions. This decorator turns a regular Actor into a dynamic
Actor Troupe, where multiple Actors are spun up on-demand to handle
messages.
This pattern is especially useful for situations where multiple
requests are received and processing individual requests may take some
time to perform.
Usage:
from thespian.troupe import troupe
@troupe()
class MyActor(Actor):
...
The optional arguments to the troupe decorator are:
max_count -- the maximum number of actors in the troupe (default=10)
idle_count -- the number of actors in the troupe when idle (default=2).
As work is received, the number of actors will grow
up to the max_count, but when there is no more work,
the number of actors will shrink back down to this
number. Note that there may be fewer than this
number of actors present: actors are only created if
work is received and there are no idle actors to
handle that work.
The decorator usage above works very well for a simple worker actor
that can perform all of the necessary work utilizing only the message
sent to it; the actor can be turned into a troupe member with no
change other than adding the decorator.
However, an actor which must interact with other actors to process the
work requires additional modifications to allow the troupe manager to
know when the actor has finished performing the work. A troupe member
that has not fully performed the work and is exchanging messages with
other actors to complete the work (or awaiting WakeupMessages) must
set the "troupe_work_in_progress" attribute on self to True. Once the
work is completed by a subsequent message delivery, it should set this
attribute to False, which will cause the troupe manager to be notified
that the actor is ready for more work.
Failure to set the "troupe_work_in_progress" attribute to True on a
multi-step actor will result in either (a) the actor receiving more
work before it has completed the previous work, or (b) the actor will
be killed by the troupe manager before finishing the work because the
manager believes the actor is finished.
Failure to reset the "troupe_work_in_progress" attribute to False will
cause the troupe manager to never send any more work requests to the
troupe actor, even if the latter is idle. The troupe actor will also
never be killed until the troupe manager itself is killed.
"""
from thespian.actors import (ActorSystemMessage, ActorExitRequest,
ChildActorExited, WakeupMessage)
from datetime import timedelta
import inspect
# If at least some troupe members have been idle for this long and
# they are over the idle count, they can be dismissed (killed).
DISMISS_EXTRA_PERIOD = timedelta(seconds=2)
class UpdateTroupeSettings(object):
    """Request to adjust a troupe's worker limits at runtime.

    Send to a troupe leader with new max_count and/or idle_count values
    (leave a field None to keep it unchanged).  The leader replies with
    an UpdateTroupeSettings carrying the limits actually in effect.
    """
    def __init__(self, max_count=None, idle_count=None):
        # Provided (truthy) counts must be positive.
        for count in (max_count, idle_count):
            if count:
                assert count > 0
        self.max_count = max_count
        self.idle_count = idle_count
class _TroupeMemberReady(object):
def __init__(self, work_ident):
self.ident_done = work_ident
class _TroupeWork(object):
def __init__(self, message, orig_sender, troupe_mgr):
self.message = message
self.orig_sender = orig_sender
self.troupe_mgr = troupe_mgr
self.ident = None
def __str__(self):
return '_TroupeWork(from=%s, msg=%s)' % \
(self.orig_sender, self.message)
class _TroupeManager(object):
    """Internal bookkeeping for a troupe.

    Tracks worker actor addresses (troupers), which workers are idle or
    surplus ("extra", scheduled for dismissal), the queue of work not
    yet assigned, and the work items currently in flight.  Lives on the
    troupe leader actor and is driven by its receiveMessage wrapper.
    """
    def __init__(self, actorClass, mgr_addr, idle_count, max_count):
        # NOTE(review): actorClass is accepted but never stored or used
        # here -- presumably kept for interface symmetry; confirm.
        self.mgr_addr = mgr_addr
        self.idle_count = idle_count
        self.max_count = max_count
        self._troupers = []          # all live worker addresses
        self._idle_troupers = []     # workers awaiting work
        self._extra_troupers = []    # surplus idle workers awaiting dismissal
        self._pending_work = []      # _TroupeWork items with no free worker
        self._handling_work = {}     # work ident -> (worker addr, _TroupeWork)
        self._pending_dismissal = False  # True once a dismissal wakeup is scheduled
        self._work_ident = 0         # identifier for the next work item
    def is_ready(self, managerActor, ready_msg, troupe_member):
        """Handle a _TroupeMemberReady from a worker.

        Returns a list of (target, message) tuples to send: either new
        work for the now-free worker, or nothing (the worker is parked
        as idle, or marked extra when too many workers are idle).
        """
        if ready_msg.ident_done >= 0:
            # A non-negative ident means a tracked work item completed.
            if ready_msg.ident_done in self._handling_work:
                del self._handling_work[ready_msg.ident_done]
        if self._pending_work:
            w = self._pending_work.pop(0)
            self._handling_work[w.ident] = (troupe_member, w)
            return [(troupe_member, w)]
        if self.idle_count is not None and \
           len(self._troupers) > self.idle_count and \
           len(self._idle_troupers) >= self.idle_count:
            # Already enough idle workers: this one is surplus.
            self._trouper_is_extra(managerActor, troupe_member)
            return []
        self._idle_troupers.append(troupe_member)
        return []
    def _trouper_is_extra(self, managerActor, troupe_member):
        # Schedule a single wakeup so extras are dismissed in one batch.
        if not self._pending_dismissal:
            managerActor.wakeupAfter(DISMISS_EXTRA_PERIOD)
            self._pending_dismissal = True
        self._extra_troupers.append(troupe_member)
    def dismiss_extras(self, managerActor):
        """Kill all workers queued as extras (run on the wakeup)."""
        exitReq = ActorExitRequest()
        for each in self._extra_troupers:
            managerActor.send(each, exitReq)
        self._extra_troupers = []
        self._pending_dismissal = False
    def new_work(self, msg, sender):
        """Accept a work message and return [(worker, work)] send tuples.

        A (None, work) tuple tells the caller to create a new worker
        (allowed while under max_count).  Returns [] when the work had
        to be queued because all workers are busy.
        """
        if isinstance(msg, _TroupeWork):
            # Re-dispatch of work recovered from a dead worker.
            work = msg
        else:
            work = _TroupeWork(msg, sender, self.mgr_addr)
        work.ident = self._work_ident
        # limit of 0xffffffff is > max reasonable pending work items
        self._work_ident = (self._work_ident + 1) & 0xffffffff
        worker = self._idle_troupers.pop(0) if self._idle_troupers else \
                 (self._extra_troupers.pop(0) if self._extra_troupers else None)
        if worker:
            self._handling_work[work.ident] = (worker, work)
            return [(worker, work)]
        if len(self._troupers) < self.max_count:
            # Caller must create a fresh worker for this work item.
            return [(None, work)]
        self._pending_work.append(work)
        return []
    def add_trouper(self, trouper_addr, trouper_work):
        """Register a newly created worker and the work assigned to it."""
        if trouper_addr not in self._troupers:
            self._troupers.append(trouper_addr)
        self._handling_work[trouper_work.ident] = (trouper_addr, trouper_work)
    def worker_exited(self, trouper_addr):
        """Forget a dead worker and recover its in-flight work.

        Returns the _TroupeWork to re-dispatch immediately (when idle
        workers exist), or None (work was queued, or the dead worker
        had no work in flight).
        """
        try:
            self._troupers.remove(trouper_addr)
        except ValueError: pass
        try:
            self._idle_troupers.remove(trouper_addr)
        except ValueError: pass
        try:
            self._extra_troupers.remove(trouper_addr)
        except ValueError: pass
        wcheck = filter(lambda e: e[0] == trouper_addr, self._handling_work.values())
        # n.b. list(wcheck): removing from self._handling_work, which
        # will cause a RuntimeError of "dictionary changed size during
        # iteration" if done on the iterator.
        for (_,wfnd) in list(wcheck): # should be 0 or 1 entry
            del self._handling_work[wfnd.ident]
            if self._idle_troupers:
                # If idle, re-attempt this work immediately; if not
                # idle, place it on the pending queue to be handled by
                # an existing worker to avoid a fork bomb.
                return wfnd
            self._pending_work.append(wfnd)
        return None
    def status(self):
        """One-line human-readable summary (reply to 'troupe:status?')."""
        return 'Idle=%d, Max=%d, Troupers [%d, %d idle, %d extra]: %s, Pending=%d' % (
            self.idle_count, self.max_count,
            len(self._troupers), len(self._idle_troupers), len(self._extra_troupers),
            ['%s%s' % (('I:' if A in self._idle_troupers else
                        'E:' if A in self._extra_troupers else ''), str(A))
             for A in self._troupers],
            len(self._pending_work)
        )
def troupe(max_count=10, idle_count=2):
    """Class decorator that turns an Actor into a dynamic troupe.

    The decorated actor becomes the troupe leader: it spawns worker
    copies of itself (up to max_count) to handle incoming messages and
    shrinks back toward idle_count when the work dries up.

    :param max_count: maximum number of worker actors (default 10)
    :param idle_count: number of workers kept when idle (default 2)
    """
    def _troupe(actorClass):
        # Fully qualified class name passed to createActor() when
        # spawning workers.
        actorName = '.'.join((inspect.getmodule(actorClass).__name__,
                              actorClass.__name__))
        def manageTroupe(self, message, sender):
            # Replacement receiveMessage: the same code runs in both the
            # leader and its workers; role is decided on first message.
            isTroupeWork = isinstance(message, _TroupeWork)
            troupeWorker = getattr(self, '_is_a_troupe_worker', False)
            # If a worker, or this message indicates we are a
            # worker... or we haven't been decided yet but this is a
            # system message so we shouldn't create a troupe because
            # of it.
            if troupeWorker or isTroupeWork or \
               (not hasattr(self, '_troupe_mgr') and isinstance(message, ActorSystemMessage)):
                was_in_prog = getattr(self, 'troupe_work_in_progress', False)
                if isTroupeWork:
                    # First _TroupeWork delivery marks this actor as a
                    # worker and records who to notify on completion.
                    self._is_a_troupe_worker = message.troupe_mgr
                    self._work_ident = message.ident
                    r = self._orig_receiveMessage(message.message,
                                                  message.orig_sender)
                else:
                    r = self._orig_receiveMessage(message, sender)
                # Report readiness unless the actor flagged multi-step
                # work as still in progress (see module docstring).
                if (isTroupeWork or was_in_prog) and \
                   not getattr(self, 'troupe_work_in_progress', False):
                    self.send(self._is_a_troupe_worker,
                              _TroupeMemberReady(self._work_ident))
                    self._work_ident = -1
                return r
            # The following is only run for the primary/manager of the troupe
            if isinstance(message, ActorExitRequest):
                return
            if not hasattr(self, '_troupe_mgr'):
                self._troupe_mgr = _TroupeManager(
                    self.__class__, self.myAddress, idle_count, max_count)
            if isinstance(message, ChildActorExited):
                # Reclaim a dead worker's work; worker_exited may hand
                # back a _TroupeWork, re-dispatched via new_work below.
                message = self._troupe_mgr.worker_exited(message.childAddress)
            elif isinstance(message, _TroupeMemberReady):
                for sendargs in self._troupe_mgr.is_ready(self, message, sender):
                    self.send(*sendargs)
                return
            elif isinstance(message, UpdateTroupeSettings):
                if message.max_count:
                    self._troupe_mgr.max_count = message.max_count
                if message.idle_count:
                    self._troupe_mgr.idle_count = message.idle_count
                # Reply with the limits now in effect.
                # NOTE(review): this branch has no return, so the
                # UpdateTroupeSettings object falls through to the work
                # dispatch below -- confirm whether that is intended.
                self.send(sender, UpdateTroupeSettings(max_count=self._troupe_mgr.max_count,
                                                       idle_count=self._troupe_mgr.idle_count))
            elif isinstance(message, str):
                # Simple string-based admin protocol; non-matching
                # strings fall through and are treated as work.
                if message == 'troupe:status?':
                    self.send(sender, self._troupe_mgr.status())
                    return
                elif message.startswith('troupe:set_max_count='):
                    try:
                        new_max_count = int(message.split('=')[1])
                    except ValueError as ex:
                        self.send(sender, 'Error changing max_count'
                                  ' for troupe based on message "%s": %s' %
                                  (message, str(ex)))
                        return
                    self._troupe_mgr.max_count = new_max_count
                    self.send(sender,
                              'Set troupe max_count to %d' % new_max_count)
                    return
                elif message.startswith('troupe:set_idle_count='):
                    try:
                        new_idle_count = int(message.split('=')[1])
                    except ValueError as ex:
                        self.send(sender, 'Error changing idle_count'
                                  ' for troupe based on message "%s": %s' %
                                  (message, str(ex)))
                        return
                    self._troupe_mgr.idle_count = new_idle_count
                    self.send(sender,
                              'Set troupe idle_count to %d' % new_idle_count)
                    return
            elif isinstance(message, WakeupMessage):
                self._troupe_mgr.dismiss_extras(self)
                return
            if message:
                # Dispatch work; a None target means "create a new
                # worker actor to handle this work item".
                for sendargs in self._troupe_mgr.new_work(message, sender):
                    if sendargs[0] is None:
                        sendargs = (self.createActor(actorName), sendargs[1])
                        self._troupe_mgr.add_trouper(*sendargs)
                    self.send(*sendargs)
        actorClass._orig_receiveMessage = actorClass.receiveMessage
        actorClass.receiveMessage = manageTroupe
        return actorClass
    return _troupe
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Dict, Optional, Tuple, Sequence
import collections
import json
import logging
import pathlib
import random
import time
import pandas as pd
import psutil
import torch
import fairdiplomacy.selfplay.metrics
import fairdiplomacy.selfplay.remote_metric_logger
from fairdiplomacy import pydipcc
from fairdiplomacy.agents import build_agent_from_cfg
from fairdiplomacy.get_xpower_supports import compute_xpower_supports
from fairdiplomacy.env import OneSixPolicyProfile
from fairdiplomacy.models.consts import POWERS
from fairdiplomacy.selfplay.ckpt_syncer import build_cfr1p_agent_with_syncs
from fairdiplomacy.selfplay.search_rollout import ReSearchRolloutBatch, yield_game
from fairdiplomacy.selfplay.search_utils import unparse_device
from fairdiplomacy.utils.exception_handling_process import ExceptionHandlingProcess
from fairdiplomacy.utils.multiprocessing_spawn_context import get_multiprocessing_ctx
import heyhi
mp = get_multiprocessing_ctx()
# Do not dump a game on disk more often that this.
GAME_WRITE_TIMEOUT = 60
MIN_GAMES_FOR_STATS = 50
class H2HEvaler:
    """Runs continuous head-to-head evaluation games in subprocesses.

    Spawns num_procs eval workers, each playing one RL-controlled agent
    against six copies of a reference agent, plus one aggregator process
    that consumes finished games from a shared queue, computes metrics,
    and periodically dumps games to disk.
    """
    def __init__(
        self,
        *,
        log_dir,
        h2h_cfg,
        agent_one_cfg,
        device,
        ckpt_sync_path,
        num_procs,
        game_kwargs: Dict,
        cores: Optional[Tuple[int, ...]],
        game_json_paths: Optional[Sequence[str]],
    ):
        logging.info(f"Creating eval h2h {h2h_cfg.tag} rollout workers")
        self.queue = mp.Queue(maxsize=4000)
        self.procs = []
        for i in range(num_procs):
            log_path = log_dir / f"eval_h2h_{h2h_cfg.tag}_{i:03d}.log"
            kwargs = dict(
                queue=self.queue,
                device=device,
                ckpt_sync_path=ckpt_sync_path,
                agent_one_cfg=agent_one_cfg,
                agent_six_cfg=h2h_cfg.agent_six,
                game_json_paths=game_json_paths,
                game_kwargs=game_kwargs,
                seed=i,
                # Aim for at least MIN_GAMES_FOR_STATS zero-epoch games
                # across all workers combined.
                num_zero_epoch_evals=MIN_GAMES_FOR_STATS // num_procs + 5,
                use_trained_value=h2h_cfg.use_trained_value,
                use_trained_policy=h2h_cfg.use_trained_policy,
            )
            kwargs["log_path"] = log_path
            kwargs["log_level"] = logging.WARNING
            logging.info(
                f"H2H Rollout process {h2h_cfg.tag}/{i} will write logs to {log_path} at level %s",
                kwargs["log_level"],
            )
            self.procs.append(
                ExceptionHandlingProcess(target=self.eval_worker, kwargs=kwargs, daemon=True)
            )
        logging.info(f"Adding main h2h {h2h_cfg.tag} worker")
        self.procs.append(
            ExceptionHandlingProcess(
                target=self.aggregate_worker,
                kwargs=dict(queue=self.queue, tag=h2h_cfg.tag, save_every_secs=GAME_WRITE_TIMEOUT),
                daemon=True,
            )
        )
        logging.info(f"Starting h2h {h2h_cfg.tag} workers")
        for p in self.procs:
            p.start()
        if cores:
            # Pin every subprocess to the requested CPU cores.
            logging.info("Setting affinities")
            for p in self.procs:
                psutil.Process(p.pid).cpu_affinity(cores)
        logging.info("Done")
    @classmethod
    def eval_worker(
        cls,
        *,
        seed,
        queue: mp.Queue,
        device: str,
        ckpt_sync_path: str,
        log_path: pathlib.Path,
        log_level,
        agent_one_cfg,
        agent_six_cfg,
        game_json_paths,
        game_kwargs: Dict,
        num_zero_epoch_evals: int,
        use_trained_value: bool,
        use_trained_policy: bool,
    ):
        """Play evaluation games forever, pushing each finished game
        (JSON + metadata) onto `queue`.  Runs in a subprocess and
        re-syncs agent one's checkpoint between games.
        """
        # We collect this many games for the first ckpt before loading new
        # ckpt. This is to establish an accurate BL numbers where RL agent net
        # in equivalent to the blueprint it's initialized from.
        num_evals_without_reload_left = num_zero_epoch_evals
        heyhi.setup_logging(console_level=None, fpath=log_path, file_level=log_level)
        device_id = unparse_device(device)
        assert agent_one_cfg.cfr1p is not None, "Must be CFR1P agent"
        agent_one, do_sync_fn = build_cfr1p_agent_with_syncs(
            agent_one_cfg.cfr1p,
            ckpt_sync_path=ckpt_sync_path,
            use_trained_policy=use_trained_policy,
            use_trained_value=use_trained_value,
            device_id=device_id,
        )
        agent_six = build_agent_from_cfg(agent_six_cfg, device=device_id)
        random.seed(seed)
        torch.manual_seed(seed)
        # Hack: using whatever syncer is listed first to detect epoch.
        main_meta = next(iter(do_sync_fn().values()))
        if main_meta["epoch"] > 0:
            # First ckpt is not on zero epoch. Disabling.
            num_evals_without_reload_left = 0
        for game_id, game in yield_game(seed, game_json_paths, game_kwargs):
            start_phase = game.current_short_phase
            if num_evals_without_reload_left > 0:
                num_evals_without_reload_left -= 1
            else:
                # Pull the newest checkpoint before starting this game.
                main_meta = next(iter(do_sync_fn().values()))
            # Agent one must be alive at the start of the game.
            starting_sos = game.get_square_scores()
            agent_one_power = random.choice(
                [p for p, score in zip(POWERS, starting_sos) if score > 1e-3]
            )
            policy_profile = OneSixPolicyProfile(
                agent_one=agent_one,
                agent_six=agent_six,
                agent_one_power=agent_one_power,
                seed=random.randint(0, 100000),
            )
            # Play the game to completion.
            while not game.is_game_done:
                power_orders = policy_profile.get_all_power_orders(game)
                for power, orders in power_orders.items():
                    if not game.get_orderable_locations().get(power):
                        continue
                    game.set_orders(power, orders)
                game.process()
            queue.put(
                {
                    "last_ckpt_meta": main_meta,
                    "game_json": game.to_json(),
                    "agent_one_power": agent_one_power,
                    "game_id": game_id,
                    "start_phase": start_phase,
                }
            )
    @classmethod
    def aggregate_worker(cls, *, queue: mp.Queue, tag: str, save_every_secs: float):
        """Consume finished games, log per-epoch metrics, and dump games
        to disk at most once per save_every_secs.  Runs forever in a
        subprocess.
        """
        logger = fairdiplomacy.selfplay.remote_metric_logger.get_remote_logger(
            tag=f"eval_h2h_{tag}"
        )
        counters = collections.defaultdict(fairdiplomacy.selfplay.metrics.FractionCounter)
        max_seen_epoch = -1
        num_games = 0
        def process_metrics(epoch, game_json, power):
            # Accumulate per-game stats; flush to the metric logger once
            # a newer epoch appears and enough games were collected.
            nonlocal logger
            nonlocal counters
            nonlocal max_seen_epoch
            nonlocal num_games
            if max_seen_epoch < epoch:
                if num_games >= MIN_GAMES_FOR_STATS:
                    metrics = {
                        f"eval_h2h_{tag}/{key}": value.value() for key, value in counters.items()
                    }
                    metrics[f"eval_h2h_{tag}/num_games"] = num_games
                    logger.log_metrics(metrics, max_seen_epoch)
                    counters.clear()
                    num_games = 0
                max_seen_epoch = epoch
            num_games += 1
            game = pydipcc.Game.from_json(game_json)
            counters["episode_length"].update(len(game.get_phase_history()))
            scores = game.get_square_scores()
            # Outcome rates from agent one's perspective.
            counters["r_draw_all"].update(max(scores) < 0.99)
            counters["r_solo"].update(scores[POWERS.index(power)] > 0.99)
            counters["r_draw"].update(0.001 < scores[POWERS.index(power)] < 0.99)
            counters["r_dead"].update(scores[POWERS.index(power)] < 0.001)
            counters["r_square_score"].update(scores[POWERS.index(power)])
            # Cross-power support-order statistics.
            x_supports_power = compute_xpower_supports(game, only_power=power)
            counters["sup_to_all_share"].update(x_supports_power["s"], x_supports_power["o"])
            counters["sup_xpower_to_sup_share"].update(
                x_supports_power["x"], x_supports_power["s"]
            )
        last_save = 0
        game_dump_path = pathlib.Path(f"games_h2h_{tag}").absolute()
        game_dump_path.mkdir(exist_ok=True, parents=True)
        while True:
            data = queue.get()
            try:
                epoch = data["last_ckpt_meta"]["epoch"]
            except KeyError:
                logging.error("Bad Meta: %s", data["last_ckpt_meta"])
                raise
            process_metrics(epoch, data["game_json"], data["agent_one_power"])
            now = time.time()
            # Rate-limit game dumps to one per save_every_secs.
            if now - last_save > save_every_secs:
                save_game(
                    game_json=data["game_json"],
                    epoch=epoch,
                    dst_dir=game_dump_path,
                    game_id=data["game_id"],
                    start_phase=data["start_phase"],
                    agent_one_power=data["agent_one_power"],
                )
                last_save = now
    def terminate(self):
        """Kill all eval/aggregator subprocesses."""
        logging.info("Killing H2H processes")
        for proc in self.procs:
            proc.kill()
        self.procs = []
def _rollout_batch_to_dataframe(tensors: ReSearchRolloutBatch) -> pd.DataFrame:
    """Flatten per-power rollout tensors into a timestep-indexed DataFrame.

    Columns: reward_<POWER> per power, done, then is_explore_<POWER>.
    """
    columns = {}
    for idx, power in enumerate(POWERS):
        columns[f"reward_{power}"] = tensors.rewards[:, idx].numpy()
    columns["done"] = tensors.done.numpy()
    for idx, power in enumerate(POWERS):
        columns[f"is_explore_{power}"] = tensors.is_explore[:, idx].numpy()
    frame = pd.DataFrame(columns)
    frame.index.name = "timestamp"
    return frame
def save_game(
    *,
    game_json: str,
    epoch: int,
    dst_dir: pathlib.Path,
    game_id: str,
    start_phase: str,
    tensors: Optional[ReSearchRolloutBatch] = None,
    agent_one_power: Optional[str] = None,
):
    """Write a game's JSON (and optional rollout metadata CSV) into dst_dir.

    The file name encodes epoch, a collision counter, game id and,
    when given, agent one's power.  The counter is bumped until an
    unused .json name is found.
    """
    suffix = f"_{agent_one_power}" if agent_one_power else ""
    counter = 0
    while True:
        stem = f"game_{epoch:06d}_{counter:05d}_{game_id}{suffix}"
        path = dst_dir / f"{stem}.json"
        path_meta = dst_dir / f"{stem}.meta.csv"
        if not path.exists():
            break
        counter += 1
    payload = json.loads(game_json)
    # Record provenance so the visualizer can label the game.
    payload["viz"] = dict(game_id=game_id, start_phase=start_phase)
    with path.open("w") as stream:
        json.dump(payload, stream)
    if tensors is not None:
        _rollout_batch_to_dataframe(tensors).to_csv(path_meta)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Mar 17 09:44:13 2020
@author: u7075106
"""
def sum_odd_digits(num):
    """Return the sum of the odd decimal digits of a non-negative integer.

    Fixes: removed a stray debug print of every digit, and replaced the
    float-based ``int(x/10)`` step with exact ``divmod`` floor division
    (true division corrupts digits for num >= 2**53).
    """
    remaining = num
    total = 0
    while remaining >= 1:
        remaining, digit = divmod(remaining, 10)
        if digit % 2 == 1:
            total += digit
    return total
def sum_even_digits(num):
    """Return the sum of the even decimal digits of a non-negative integer.

    Fix: use exact integer floor division via ``divmod`` instead of
    ``int(x/10)``; true division goes through a float and corrupts
    digits for num >= 2**53.
    """
    remaining = num
    total = 0
    while remaining >= 1:
        remaining, digit = divmod(remaining, 10)
        if digit % 2 == 0:
            total += digit
    return total
def sum_all_digits(num):
    """Return the sum of all decimal digits of a non-negative integer.

    Fixes: the loop previously ran ``while x > 1``, which drops the
    final digit whenever the remaining value reaches 1 (e.g. the old
    code returned 0 for 1 and for 10); the bound is now ``>= 1``,
    consistent with the sibling digit-sum functions.  Also replaced the
    float-based ``int(x/10)`` with exact ``divmod`` floor division.
    """
    remaining = num
    total = 0
    while remaining >= 1:
        remaining, digit = divmod(remaining, 10)
        total += digit
    return total
import traceback
from datetime import datetime, timedelta
from functools import wraps
from typing import Union, Optional, Callable, Any
import jwt
from flask import g, request, has_request_context
from werkzeug.local import LocalProxy
from anubis.config import config
from anubis.models import User, TAForCourse, ProfessorForCourse
from anubis.utils.data import is_debug
from anubis.utils.exceptions import AuthenticationError, AssertError
from anubis.utils.services.logger import logger
def get_user(netid: Union[str, None]) -> Union[User, None]:
    """Fetch the User row matching the given netid.

    :param netid: netid of the wanted user (may be None)
    :return: the matching User, or None when netid is None or no row matches
    """
    if netid is None:
        return None
    # Single-row lookup keyed on the unique netid column.
    return User.query.filter_by(netid=netid).first()
def get_current_user() -> Union[User, None]:
    """
    Load current user based on the token.

    Resolution order: the request-context cache (flask.g), then the jwt
    taken from the request via get_token().  A successful lookup is
    cached on g for the rest of the request.

    :return: User or None (missing/invalid token, or no netid claim)
    """
    # Serve from the per-request cache when already resolved.
    if g.get("user", default=None) is not None:
        return g.user
    # Attempt to get the token from the request
    token = get_token()
    if token is None:
        return None
    # Try to decode the jwt; any failure (expired, bad signature, ...)
    # is treated as "not logged in".
    try:
        decoded = jwt.decode(token, config.SECRET_KEY, algorithms=["HS256"])
    except Exception as e:
        logger.error('AUTH decode error\n' + traceback.format_exc())
        return None
    # Make sure there is a netid in the jwt
    if "netid" not in decoded:
        return None
    # Get the user from the decoded jwt
    user = get_user(decoded["netid"])
    # Cache the user in the request context
    g.user = user
    return user
def _create_get_current_user_field(field: str) -> Callable:
def _func() -> Optional[Any]:
"""
Load current_user.id
:return:
"""
# Get current user
user = get_current_user()
# Make sure they exist
if user is None:
return None
# Return the user.id
return getattr(user, field)
return _func
def get_token() -> Union[str, None]:
    """Pull the auth token out of the current request.

    Checks the "token" header, cookie, and query argument, in that
    order, returning the first truthy value found.

    :return: token string, or None outside a request context / not found
    """
    if not has_request_context():
        return None
    token = None
    for source in (request.headers, request.cookies, request.args):
        token = source.get('token', default=None)
        if token:
            break
    return token
def create_token(netid: str, exp_kwargs=None, **extras) -> Union[str, None]:
    """
    Get token for user by netid. You can provide a dictionary
    to the exp_kwargs to set a different expire time for this token.
    By default it is 6 hours. If you wanted to do 6 days exp_kwargs={'days': 6}

    :param netid: netid of the user to mint a token for
    :param exp_kwargs: timedelta keyword args controlling expiry
        (default {'hours': 6})
    :param extras: additional claims merged into the jwt payload
    :return: token string or None (if user not found)
    """
    # Get user
    user: User = get_user(netid)
    if exp_kwargs is None:
        exp_kwargs = {'hours': 6}
    # Verify user exists
    if user is None:
        return None
    # Create new token signed with the application secret (HS256)
    return jwt.encode({
        "netid": user.netid,
        "exp": datetime.utcnow() + timedelta(**exp_kwargs),
        **extras,
    }, config.SECRET_KEY)
def require_user(unless_debug=False):
    """Decorator factory requiring an authenticated user.

    The wrapped view raises AuthenticationError (an unauthorized / 401
    response) when no user is present in the request context.

    :param unless_debug: skip the check when the API runs in debug mode
    :return: decorator
    """
    def decorator(view_func):
        @wraps(view_func)
        def guarded(*args, **kwargs):
            # Resolve (and cache) the user for this request first.
            user = get_current_user()
            # Debug-mode bypass, when requested.
            if unless_debug and is_debug():
                return view_func(*args, **kwargs)
            # Anonymous requests are rejected outright.
            if user is None:
                raise AuthenticationError()
            return view_func(*args, **kwargs)
        return guarded
    return decorator
def require_admin(unless_debug=False):
    """Decorator factory requiring an admin (superuser, TA, or professor).

    The wrapped view raises AuthenticationError (an unauthorized / 401
    response) for anonymous users and for users who are neither
    superusers, TAs, nor professors for some course.

    :param unless_debug: skip the check when the API runs in debug mode
    :return: decorator
    """
    def decorator(view_func):
        @wraps(view_func)
        def guarded(*args, **kwargs):
            # Resolve (and cache) the user for this request first.
            user = get_current_user()
            # Debug-mode bypass, when requested.
            if unless_debug and is_debug():
                return view_func(*args, **kwargs)
            if user is None:
                raise AuthenticationError('Request is anonymous')
            # Superusers are always admins.
            if user.is_superuser:
                return view_func(*args, **kwargs)
            # Otherwise the user must be a TA or professor somewhere.
            ta = TAForCourse.query.filter(
                TAForCourse.owner_id == user.id).first()
            prof = ProfessorForCourse.query.filter(
                ProfessorForCourse.owner_id == user.id).first()
            if ta is None and prof is None:
                raise AuthenticationError('User is not ta or professor')
            return view_func(*args, **kwargs)
        return guarded
    return decorator
def require_superuser(unless_debug=False):
    """Decorator factory requiring a logged-in superuser.

    Anonymous requests raise AuthenticationError (unauthorized / 401);
    authenticated non-superusers raise AssertError so the message shows
    up in a snackbar on the frontend.

    :param unless_debug: skip the check when the API runs in debug mode
    :return: decorator
    """
    def decorator(view_func):
        @wraps(view_func)
        def guarded(*args, **kwargs):
            # Resolve (and cache) the user for this request first.
            user = get_current_user()
            # Debug-mode bypass, when requested.
            if unless_debug and is_debug():
                return view_func(*args, **kwargs)
            if user is None:
                raise AuthenticationError()
            # Non-superusers get a displayable error rather than a 401.
            if user.is_superuser is False:
                raise AssertError("This requires superuser permissions", 200)
            return view_func(*args, **kwargs)
        return guarded
    return decorator
current_user: User = LocalProxy(get_current_user)
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The data created by recording CPU performance counters (Pmc) for each context switch looks something like this:
CSwitch, TimeStamp, New Process Name ( PID), New TID, NPri, NQnt, TmSinceLast, WaitTime, Old Process Name ( PID), Old TID, OPri, OQnt, OldState, Wait Reason, Swapable, InSwitchTime, CPU, IdealProc, OldRemQnt, NewPriDecr, PrevCState
Pmc, TimeStamp, ThreadID, BranchInstructions, BranchMispredictions
...
CSwitch, 64630, Idle ( 0), 0, 0, -1, 0, 0, tracelog.exe (5200), 5912, 9, -1, Standby, WrPreempted, NonSwap, 6, 6, 7, 14768128, 0, 0
CSwitch, 64631, tracelog.exe (5200), 5912, 9, -1, 1, 1, RuntimeBroker.exe (3896), 7720, 8, -1, Ready, WrDispatchInt, NonSwap, 6, 7, 7, 32640000, 0, 0
CSwitch, 64648, MsMpEng.exe (3016), 13212, 8, -1, 19604, 2, Idle ( 0), 0, 0, -1, Running, Executive, NonSwap, 1465, 0, 0, 0, 0, 1
Pmc, 64662, 7720, 41066, 6977
CSwitch, 64662, RuntimeBroker.exe (3896), 7720, 8, -1, 31, 0, MsMpEng.exe (3016), 13212, 8, -1, Waiting, WrQueue, Swapable, 14, 0, 4, 68564992, 0, 0
Pmc, 64690, 0, 6723, 1485
CSwitch, 64690, Idle ( 0), 0, 0, -1, 0, 0, tracelog.exe (5200), 5912, 9, -1, Waiting, Executive, NonSwap, 59, 7, 2, 14640128, 0, 0
Pmc, 64693, 7904, 34481, 3028
CSwitch, 64693, conhost.exe (8148), 7904, 11, -1, 4243, 1, Idle ( 0), 0, 0, -1, Running, Executive, NonSwap, 1407, 2, 2, 0, 2, 1
Pmc, 64704, 0, 36020, 3267
CSwitch, 64704, Idle ( 0), 0, 0, -1, 0, 0, conhost.exe (8148), 7904, 11, -1, Waiting, UserRequest, Swapable, 12, 2, 6, 202464256, 0, 0
Pmc, 64710, 5912, 7077, 1518
CSwitch, 64710, tracelog.exe (5200), 5912, 9, -1, 19, 0, Idle ( 0), 0, 0, -1, Running, Executive, NonSwap, 19, 7, 7, 0, 0, 1
A few things can be observed about the data.
The Pmc data takes a while to get going - there can be thousands of CSwitch events
before the first Pmc event. Awesome.
The Pmc events are cumulative and per-processor. This explains why they increase over the
duration of the trace, but not monotonically. They only increase monotonically if you look
at them on a particular CPU.
The Pmc events are associated with the following event. This can be seen in the CSwitch at TimeStamp
64704. This CSwitch is on CPU 2 and the following Pmc has a BranchInstructions count of 7077, which
is incompatible with the previous CSwitch which is also on CPU 2.
The CSwitch events are when a thread *starts* executing. So, you don't know what counts to associate
with a timeslice until the *next* context switch on that CPU. So...
When a Pmc event is seen, look for a CSwitch event on the next line. If this is not the first Pmc/CSwitch
pair for this CPU (see column 16) then calculate the deltas for all of the Pmc counters and add those
deltas to the process listed in the Old Process Name ( PID) column (column 8).
Sometimes there will be an Error: message inbetween the Pmc and CSwitch lines. Ignore those, but don't
be too forgiving about what you parse or else you may end up calculating garbage results.
Example:
Pmc, 2428274, 84, 45813769, 2146039
Error: Description for thread state (9) could not be found. Thread state array out of date!!
CSwitch, 2428274, System ( 4), 84, 23, -1, 220, 0, csrss.exe ( 628), 732, 14, -1, <out of range>, WrProcessInSwap, Swapable, 19, 2, 4, 68552704, 0, 0
"""
import sys

# NOTE: this is a Python 2 script (print statements, dict.has_key).
# Command line: first arg is the "xperf -i trace.etl" output file; the
# optional second arg filters the report to matching process names.
if len(sys.argv) <= 1:
    print 'Usage: %s xperfoutput [processname]' % sys.argv[0]
    print 'The first parameter is the name of a file containing the results'
    print 'of "xperf -i trace.etl". The second (optional) parameter is a'
    print 'process name substring filter used to restrict which results are'
    print 'shown - only processes that match are displayed.'
    sys.exit(0)

xperfoutputfilename = sys.argv[1]
# The whole trace is read into memory for line-pair lookahead.
l = open(xperfoutputfilename).readlines()

# Scan through the counter data looking for Pmc and CSwitch records.
# If adjacent records are found that contain Pmc and CSwitch data then
# combine the data. This gives us some counters that we can assign to
# a particular CPU. If we have already seen counters for that CPU then
# we can subtract the previous counters to get a delta.
# That delta can then be applied to the process that was *previously*
# assigned to that CPU.
lastLineByCPU = {}             # CPU -> line index of the last Pmc/CSwitch pair
countersByCPU = {}             # CPU -> last cumulative counter values
lastCSwitchTimeByCPU = {}      # CPU -> timestamp of the last context switch
countersByProcess = {}         # process -> summed counter deltas
contextSwitchesByProcess = {}  # process -> number of context switches seen
cpuTimeByProcess = {}          # process -> accumulated CPU time
processByCPU = {} # Which process has been switched in to a particular CPU
description = None             # header line of the first Pmc record

for x in range(len(l) - 1):
    if l[x].startswith(" Pmc,"):
        pmc_parts = l[x].split(",")
        if not description:
            # Grab the description of the Pmc counter records, see how many counters
            # there are, and print the description.
            num_counters = len(pmc_parts) - 3
            description = l[x].strip()
            print description
            continue
        counters = map(int, pmc_parts[3:])
        # Look for a CSwitch line. Ideally it will be next, but sometimes an Error: line
        # might be in-between.
        cswitch_line = ""
        if l[x+1].startswith(" CSwitch,"):
            cswitch_line = l[x+1]
        elif l[x+1].startswith("Error: ") and l[x+2].startswith(" CSwitch,"):
            cswitch_line = l[x+2]
        if cswitch_line:
            cswitch_parts = cswitch_line.split(",")
            CPU = int(cswitch_parts[16].strip())        # column 16: CPU number
            process = cswitch_parts[2].strip()          # column 2: new process name (PID)
            timeStamp = int(cswitch_parts[1])
            # See if we've got previous Pmc records for this CPU:
            if countersByCPU.has_key(CPU):
                # Counter deltas since the previous Pmc on this CPU belong to
                # the process that was running up to this context switch.
                diffs = map(lambda a,b : a - b, counters, countersByCPU[CPU])
                old_process = cswitch_parts[8].strip()  # column 8: old process name (PID)
                # Sanity checking...
                if old_process != processByCPU[CPU]:
                    print "Old process mismatch at line %d, %s versus %s" % (x, old_process, processByCPU[CPU])
                    sys.exit(0)
                if old_process != "Idle ( 0)":
                    countersByProcess[old_process] = map(lambda x, y: x + y, countersByProcess.get(old_process, num_counters * [0]), diffs)
                    contextSwitchesByProcess[old_process] = contextSwitchesByProcess.get(old_process, 0) + 1
                    cpuTimeByProcess[old_process] = cpuTimeByProcess.get(old_process, 0) + (timeStamp - lastCSwitchTimeByCPU[CPU])
            lastCSwitchTimeByCPU[CPU] = timeStamp
            processByCPU[CPU] = process
            countersByCPU[CPU] = counters
            lastLineByCPU[CPU] = x
        else:
            print "Missing cswitch line at line %d" % x
            sys.exit(0)

# Report processes with significant activity; the first counter is used
# as an arbitrary significance threshold.
print "%43s: counter1/counter2, counters" % "Process name"
for process in countersByProcess.keys():
    totals = countersByProcess[process]
    if totals[0] > 100000: # Arbitrary filtering
        # Filter to the specific process substring if requested.
        if len(sys.argv) == 2 or process.lower().count(sys.argv[2].lower()) > 0:
            print "%43s: %5.2f%%, %s, %d context switches, time: %d" % (process, totals[0] * 100.0 / totals[1], totals, contextSwitchesByProcess[process], cpuTimeByProcess[process])
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from os import path
WINDOW_TITLE = 'Interface' # Título da janela.
SCREEN_SIZE = (800, 600) # Dimensões da janela.
FONT_SIZE = 14 # Tamanho da fonte a ser utilizada.
DOT_RADIUS = 4 # Raio dos pontos do mapa.
LINE_WIDTH = 2 # Espessura das linhas de rota.
FONT_FAMILY = 'Consolas' # Nome da fonte.
ARQ_MAPA = 'mapa_vale.png' # Nome do arquivo do mapa.
ARQ_DISTANCIAS = 'distancias.csv' # Nome do arquivo de distâncias.
ARQ_MUNICIPIOS = 'municipios.csv' # Nome do arquivo de municípios.
METHOD_NAMES = ['Amplitude',
'Profundidade',
'Profundidade Limitada',
'Profundidade Interativa',
'Bi-direcional',
'Custo Uniforme']
# Diretório raiz do projeto:
root_dir = path.dirname(path.abspath(__file__))
# Diretório da imagem do mapa:
map_path = path.join(root_dir, 'res/' + ARQ_MAPA)
# Diretório do arquivo de distâncias:
distances_path = path.join(root_dir, 'res/' + ARQ_DISTANCIAS)
# Diretório do arquivo de posições:
positions_path = path.join(root_dir, 'res/' + ARQ_MUNICIPIOS)
|
import argparse
import html
import json
import os
import string
import sys
from copy import deepcopy
import dbus
from dbus.mainloop.glib import DBusGMainLoop, threads_init
from gi.repository import Gio, GLib
# Distribution metadata.
__version__ = '1.2.0'
__author__ = 'un.def <me@undef.im>'
class Formatter(string.Formatter):
    """string.Formatter that supports ``{field:filter}`` post-processing
    filters and optional Pango-markup escaping of substituted values.
    """

    # Filter name -> callable, or the name of a `_format_func__*` method
    # to dispatch to (string values are indirect method references).
    _FORMAT_FUNCS = {
        'upper': str.upper,
        'lower': str.lower,
        'capitalize': str.capitalize,
        'icon': 'status_icon',
    }

    def __init__(self, format_string, status_icons=None, markup_escape=True):
        self._format_string = format_string
        self._status_icons = dict(status_icons) if status_icons is not None else {}
        self._markup_escape = markup_escape

    def __call__(self, *args, **kwargs):
        """Render the stored format string with the given fields."""
        return self.format(self._format_string, *args, **kwargs)

    def format_field(self, value, format_spec):
        """Apply the named filter (if any), then escape for markup if enabled."""
        if format_spec:
            func = self._FORMAT_FUNCS[format_spec]
            if isinstance(func, str):
                # Indirect filter: resolve to the `_format_func__<name>` method.
                func = getattr(self, '_format_func__' + func)
            value = func(value)
        return html.escape(value) if self._markup_escape else value

    def _format_func__status_icon(self, status):
        # Unknown playback statuses fall back to a question mark.
        return self._status_icons.get(status, '?')
class SpotifyBlocklet:
    """i3blocks-style blocklet: listens to Spotify's MPRIS D-Bus interface
    and prints formatted now-playing info to stdout whenever it changes.
    """

    DEFAULT_CONFIG = {
        # Format: {field} or {field:filter}
        # Fields: status, artist, title
        # Filters: icon (from status only), upper, lower, capitalize
        'format': '{status}: {artist} – {title}',
        # Escape special characters (such as `<>&`) for Pango markup
        'markup_escape': False,
        # MPRIS `PlaybackStatus` property to icon mapping
        # (private-use glyphs -- presumably an icon font such as
        # Font Awesome play/pause/stop; confirm against the deployed font).
        'status_icons': {
            'Playing': '\uf04b',
            'Paused': '\uf04c',
            'Stopped': '\uf04d',
        },
        # X11 mouse button number to MPRIS method mapping
        'mouse_buttons': {
            '1': 'PlayPause',
        },
        # Do not print the same info multiple times if True
        'dedupe': True,
    }

    BUS_NAME = 'org.mpris.MediaPlayer2.spotify'
    OBJECT_PATH = '/org/mpris/MediaPlayer2'
    PLAYER_INTERFACE = 'org.mpris.MediaPlayer2.Player'
    PROPERTIES_INTERFACE = 'org.freedesktop.DBus.Properties'

    # Runtime state, populated by run()/init_bus()/init_spotify().
    _loop = None
    _stdin_stream = None
    _bus = None
    _spotify = None

    def __init__(self, config=None):
        """Merge *config* over DEFAULT_CONFIG and prepare the formatter."""
        _config = deepcopy(self.DEFAULT_CONFIG)
        if config:
            for key, value in config.items():
                # Dict-valued settings (status_icons, mouse_buttons) are
                # merged key-by-key instead of replaced wholesale.
                if isinstance(value, dict):
                    _config[key].update(value)
                else:
                    _config[key] = value
        self._formatter = Formatter(
            format_string=_config['format'],
            status_icons=_config['status_icons'],
            markup_escape=_config['markup_escape'],
        )
        self._mouse_buttons = _config['mouse_buttons']
        self._dedupe = _config['dedupe']
        self._prev_info = None

    @classmethod
    def create_loop(cls):
        """Create a GLib main loop with D-Bus threading initialized."""
        loop = GLib.MainLoop()
        # See: https://dbus.freedesktop.org/doc/dbus-python/
        # dbus.mainloop.html?highlight=thread#dbus.mainloop.glib.threads_init
        threads_init()
        DBusGMainLoop(set_as_default=True)
        return loop

    def init_bus(self):
        """Connect to the D-Bus session bus."""
        self._bus = dbus.SessionBus()

    def run(self, *, loop=None, read_stdin=True, nowait=False):
        """Run the blocklet main loop.

        loop -- external GLib loop to reuse (a new one is created if None)
        read_stdin -- forward mouse-button numbers read from stdin to MPRIS
        nowait -- if Spotify is not running, return instead of waiting
        """
        if loop is None:
            self._loop = self.create_loop()
        else:
            self._loop = loop
        self.init_bus()
        try:
            self.init_spotify()
        except dbus.exceptions.DBusException:
            # Spotify is not running (yet); either bail out or keep the
            # loop alive and wait for its NameOwnerChanged signal.
            if nowait:
                return
        if read_stdin:
            self.start_stdin_read_loop()
        self.connect_to_name_owner_changed_signal()
        try:
            self._loop.run()
        except KeyboardInterrupt:
            pass
        finally:
            if read_stdin:
                self.stop_stdin_read_loop()

    def start_stdin_read_loop(self):
        """Begin asynchronous line reads from stdin (mouse-button events)."""
        self._stdin_stream = Gio.DataInputStream.new(
            Gio.UnixInputStream.new(sys.stdin.fileno(), False))
        self._stdin_stream.set_close_base_stream(True)
        self._read_stdin_once()

    def stop_stdin_read_loop(self):
        """Close the stdin stream; spin the loop until the close completes."""
        self._stdin_stream.close_async(
            io_priority=GLib.PRIORITY_DEFAULT,
            callback=lambda *args: self._loop.quit(),
        )
        # Run the loop once more so the async close callback can fire.
        self._loop.run()
        self._stdin_stream = None

    def _read_stdin_once(self):
        # Schedule a single async read; _on_stdin_line re-arms it.
        self._stdin_stream.read_line_async(
            io_priority=GLib.PRIORITY_DEFAULT, callback=self._on_stdin_line)

    def _on_stdin_line(self, stream, task):
        """Dispatch a mouse-button line from stdin to the mapped MPRIS method."""
        try:
            result = stream.read_line_finish(task)
        except GLib.Error:
            # Stream is closing/closed; stop re-arming reads.
            return
        try:
            button = result[0].decode()
        except ValueError:
            # NOTE(review): on EOF result[0] is None, which would raise
            # AttributeError rather than ValueError -- confirm intent.
            button = None
        if button and self._spotify:
            method_name = self._mouse_buttons.get(button)
            if method_name:
                getattr(self._spotify, method_name)(
                    dbus_interface=self.PLAYER_INTERFACE)
        self._read_stdin_once()

    def init_spotify(self):
        """Resolve the Spotify MPRIS object and show the current track."""
        self._spotify = self._bus.get_object(
            bus_name=self.BUS_NAME,
            object_path=self.OBJECT_PATH,
            follow_name_owner_changes=True,
        )
        self.connect_to_properties_changed_signal()
        self.show_initial_info()

    def connect_to_properties_changed_signal(self):
        """Subscribe to MPRIS property updates (status/track changes)."""
        self._spotify.connect_to_signal(
            signal_name='PropertiesChanged',
            handler_function=self._on_properties_changed,
            dbus_interface=self.PROPERTIES_INTERFACE,
        )

    def _on_properties_changed(self, interface_name, changed_properties, _):
        """Show updated info when playback status or track is changed"""
        # NOTE(review): assumes every notification carries both
        # PlaybackStatus and Metadata -- a partial update would KeyError.
        self.show_info(
            status=changed_properties['PlaybackStatus'],
            metadata=changed_properties['Metadata'],
            only_if_changed=self._dedupe,
        )

    def connect_to_name_owner_changed_signal(self):
        """Watch the session bus for Spotify appearing or disappearing."""
        self._bus.get_object(
            bus_name='org.freedesktop.DBus',
            object_path='/org/freedesktop/DBus',
        ).connect_to_signal(
            signal_name='NameOwnerChanged',
            handler_function=self._on_name_owner_changed,
            dbus_interface='org.freedesktop.DBus',
            arg0=self.BUS_NAME,
        )

    def _on_name_owner_changed(self, name, old_owner, new_owner):
        """
        Get Spotify object when Spotify is started or clear info when
        Spotify is closed
        """
        if not old_owner and new_owner and not self._spotify:
            self.init_spotify()
        elif old_owner and not new_owner:
            # Print an empty line to blank the blocklet output.
            print(flush=True)
            self._prev_info = None

    def get_property(self, property_name):
        """Read one property from the MPRIS player interface."""
        return self._spotify.Get(
            self.PLAYER_INTERFACE, property_name,
            dbus_interface=self.PROPERTIES_INTERFACE,
        )

    def show_initial_info(self):
        """Print the currently playing track once at startup."""
        self.show_info(
            status=self.get_property('PlaybackStatus'),
            metadata=self.get_property('Metadata'),
        )

    def show_info(self, status, metadata, only_if_changed=False):
        """Format and print track info; optionally skip duplicate output."""
        artist = ', '.join(metadata['xesam:artist'])
        # For podcasts the artist field is always blank and the
        # name of the podcast is placed in the album field.
        if artist == '':
            artist = metadata['xesam:album']
        title = metadata['xesam:title']
        info = self._formatter(
            status=status,
            artist=artist,
            title=title,
        )
        if not only_if_changed or self._prev_info != info:
            print(info, flush=True)
            self._prev_info = info
def _parse_args():
    """Parse the blocklet's command-line options."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config')
    parser.add_argument('-f', '--format')
    # Paired --foo / --no-foo switches; a default of None means
    # "not specified on the command line" so config-file values survive.
    for option in ('markup-escape', 'dedupe'):
        dest = option.replace('-', '_')
        group = parser.add_mutually_exclusive_group()
        group.add_argument(
            '--' + option,
            action='store_true', default=None, dest=dest,
        )
        group.add_argument(
            '--no-' + option,
            action='store_false', default=None, dest=dest,
        )
    parser.add_argument('--version', action='version', version=__version__)
    return parser.parse_args()
def _main():
    """CLI entry point: load the config file, apply CLI overrides, run."""
    args = _parse_args()
    config = {}
    if args.config:
        with open(os.path.abspath(args.config)) as fp:
            config = json.load(fp)
    # Explicit command-line flags take precedence over file values.
    for key in ('format', 'markup_escape', 'dedupe'):
        value = getattr(args, key)
        if value is not None:
            config[key] = value
    SpotifyBlocklet(config=config).run()


if __name__ == '__main__':
    _main()
|
from max_profit import max_profit
def test_simple():
    """Monotonically rising prices: buy at the start, sell at the end."""
    assert max_profit([1, 2, 3, 4]) == [1, 4]
def test_empty():
    """No prices means no trade."""
    assert max_profit([]) == []
def test_one_elt():
    """A single price: buy and sell at the same value."""
    assert max_profit([1]) == [1, 1]
def test_medium():
    """Best trade is the early 5 -> 10 rise, not the later dip."""
    assert max_profit([5, 10, 2, 5]) == [5, 10]
def test_medium2():
    """A later, larger rise (2 -> 14) beats the earlier 5 -> 10 trade."""
    assert max_profit([5, 10, 2, 5, 14]) == [2, 14]
class StringIdentifierException(Exception):
    """Raised when a string identifier (e.g. an observation "name") fails
    our constraints. See the utilities method for those expectations.
    """
class AttributeValueError(Exception):
    """Raised by attribute subclasses on invalid values, e.g. creating an
    IntegerAttribute from a string.
    """
class InvalidAttributeKeywords(Exception):
    """Raised when invalid keyword arguments are passed to the constructor
    of an Attribute subclass.
    """
class InputMappingException(Exception):
    """Raised when mapping a user's inputs to job inputs fails while an
    ExecutedOperation is being requested.
    """
class OperationResourceFileException(Exception):
    """Raised during ingestion of an Operation when a user-independent file
    associated with the Operation cannot be found or read.
    """
class NoResourceFoundException(Exception):
    """General-purpose exception for a Resource that cannot be found."""
class InactiveResourceException(Exception):
    """Raised when a resource exists but is inactive; often used as a marker
    that the underlying resource must not be modified.
    """
class OwnershipException(Exception):
    """Raised on an owner/requester conflict for a database resource, e.g.
    to reject a resource/file request from anyone but its owner.
    """
class NonIterableContentsException(Exception):
    """Raised when resource contents are requested but the data cannot be
    iterated. Typical for JSON-based structures: an array can be iterated
    (and hence paginated), but a dict-shaped document cannot.
    """
class OutputConversionException(Exception):
    """Raised when an `ExecutedOperation` output has a problem: the payload
    could not be formatted, or an output file failed validation (e.g. it is
    not the integer matrix we expect).
    """
class StorageException(Exception):
    """Raised by storage operations for anticipated failures (not a generic
    catch-all for unexpected errors).
    """
#$Id$
class BankRule:
    """This class is used to create object for bank rules."""

    def __init__(self):
        """Initialize parameters for Bank rules."""
        self.rule_id = ''
        self.rule_name = ''
        self.rule_order = 0
        # Fix: `apply_to` was initialized twice in the original; once is enough.
        self.apply_to = ''
        self.target_account_id = ''
        self.criteria_type = ''
        self.criterion = []
        self.record_as = ''
        self.account_id = ''
        self.account_name = ''
        self.tax_id = ''
        self.reference_number = ''
        self.customer_id = ''
        self.customer_name = ''

    def set_rule_id(self, rule_id):
        """Set rule id.

        Args:
            rule_id(str): Rule id.
        """
        self.rule_id = rule_id

    def get_rule_id(self):
        """Get rule id.

        Returns:
            str: Rule id.
        """
        return self.rule_id

    def set_rule_name(self, rule_name):
        """Set rule name.

        Args:
            rule_name(str): Rule name.
        """
        self.rule_name = rule_name

    def get_rule_name(self):
        """Get rule name.

        Returns:
            str: Rule name.
        """
        return self.rule_name

    def set_rule_order(self, rule_order):
        """Set rule order.

        Args:
            rule_order(int): Rule order.
        """
        self.rule_order = rule_order

    def get_rule_order(self):
        """Get rule order.

        Returns:
            int: Rule order.
        """
        return self.rule_order

    def set_apply_to(self, apply_to):
        """Set apply to.

        Args:
            apply_to(str): Apply to.
        """
        self.apply_to = apply_to

    def get_apply_to(self):
        """Get apply to.

        Returns:
            str: Apply to.
        """
        return self.apply_to

    def set_criteria_type(self, criteria_type):
        """Set criteria type.

        Args:
            criteria_type(str): Criteria type.
        """
        self.criteria_type = criteria_type

    def get_criteria_type(self):
        """Get criteria type.

        Returns:
            str: Criteria type.
        """
        return self.criteria_type

    def set_target_account_id(self, target_account_id):
        """Set target account id.

        Args:
            target_account_id(str): Target account id.
        """
        self.target_account_id = target_account_id

    def get_target_account_id(self):
        """Get target account id.

        Returns:
            str: Target account id.
        """
        return self.target_account_id

    def set_criterion(self, criteria):
        """Append a criteria object to the rule's criterion list.

        Args:
            criteria(instance): Criteria object.
        """
        self.criterion.append(criteria)

    def get_criterion(self):
        """Get criterion.

        Returns:
            list of instance: List of criteria object.
        """
        return self.criterion

    def set_record_as(self, record_as):
        """Set record as.

        Args:
            record_as(str): Record as.
        """
        self.record_as = record_as

    def get_record_as(self):
        """Get record as.

        Returns:
            str: Record as.
        """
        return self.record_as

    def set_account_id(self, account_id):
        """Set account id.

        Args:
            account_id(str): Account id.
        """
        self.account_id = account_id

    def get_account_id(self):
        """Get account id.

        Returns:
            str: Account id.
        """
        return self.account_id

    def set_account_name(self, account_name):
        """Set account name.

        Args:
            account_name(str): Account name.
        """
        self.account_name = account_name

    def get_account_name(self):
        """Get account name.

        Returns:
            str: Account name.
        """
        return self.account_name

    def set_tax_id(self, tax_id):
        """Set tax id.

        Args:
            tax_id(str): Tax id.
        """
        self.tax_id = tax_id

    def get_tax_id(self):
        """Get tax id.

        Returns:
            str: Tax id.
        """
        return self.tax_id

    def set_reference_number(self, reference_number):
        """Set reference number.

        Args:
            reference_number(str): Reference number.
        """
        self.reference_number = reference_number

    def get_reference_number(self):
        """Get reference number.

        Returns:
            str: Reference number.
        """
        return self.reference_number

    def set_customer_id(self, customer_id):
        """Set customer id.

        Args:
            customer_id(str): Customer id.
        """
        self.customer_id = customer_id

    def get_customer_id(self):
        """Get customer id.

        Returns:
            str: Customer id.
        """
        return self.customer_id

    def set_customer_name(self, customer_name):
        """Set customer name.

        Args:
            customer_name(str): Customer name.
        """
        # Fix: the original discarded the argument and assigned ''.
        self.customer_name = customer_name

    def get_customer_name(self):
        """Get customer name.

        Returns:
            str: Customer name.
        """
        return self.customer_name

    def to_json(self):
        """Convert the bank rule to a JSON-serializable dict.

        Only fields with non-empty values are included.

        Returns:
            dict: Dictionary containing json object for bank rules.
        """
        data = {}
        if self.rule_name != '':
            data['rule_name'] = self.rule_name
        if self.target_account_id != '':
            data['target_account_id'] = self.target_account_id
        if self.apply_to != '':
            data['apply_to'] = self.apply_to
        if self.criteria_type != '':
            data['criteria_type'] = self.criteria_type
        if self.criterion:
            data['criterion'] = []
            for value in self.criterion:
                criteria = value.to_json()
                data['criterion'].append(criteria)
        if self.record_as != '':
            data['record_as'] = self.record_as
        if self.account_id != '':
            data['account_id'] = self.account_id
        if self.tax_id != '':
            data['tax_id'] = self.tax_id
        if self.reference_number != '':
            data['reference_number'] = self.reference_number
        if self.customer_id != '':
            data['customer_id'] = self.customer_id
        return data
|
from ..database import Base
from ..database import autocommit_engine
from ..tenants import models as t_models
from ..policies import models as p_models
import pytest
@pytest.fixture()
def test_db():
    """Create all tables on the autocommit engine before the test and
    drop them afterwards so each test gets a clean schema."""
    Base.metadata.create_all(bind=autocommit_engine)
    yield
    Base.metadata.drop_all(bind=autocommit_engine)
|
import numba
from numba.types import float64
import numpy as np
from privates import NamedStruct
# Error message reported when Numba is not available.
FAIL_MSG = "Numba not found"
class Point(NamedStruct):
    """2-D point struct with numba float64 fields.

    NOTE(review): field layout/ordering semantics come from the external
    `privates.NamedStruct` base -- confirm against that package.
    """
    x: float64
    y: float64

    def distance_from_origin(self):
        """Return the Euclidean distance of (x, y) from the origin."""
        return np.sqrt(self.x**2 + self.y**2)
class Rectangle(Point):
    """Point extended with width/height fields; inherits distance_from_origin."""
    width: float64
    height: float64

    def __init__(self, x, y, width, height):
        # Plain attribute assignment; Rectangle.create(...) is the
        # struct-style constructor exercised by the tests below.
        self.x = x
        self.y = y
        self.width = width
        self.height = height

    def area(self):
        """Return width * height."""
        return self.width * self.height
def test_create_api():
    """Attribute, index and method access agree for both construction paths."""
    rect = Rectangle(x=0, y=1, height=5, width=6)
    assert rect.x == 0
    assert rect[0] == 0
    assert rect.y == 1
    assert rect[1] == 1
    assert rect.height == 5
    # assert rect[2] == 5  # TODO: need Struct.init
    assert rect.width == 6
    # assert rect[3] == 6
    assert rect.distance_from_origin() == 1
    assert rect.area() == 30

    created = Rectangle.create(x=0, y=1, height=5, width=6)
    assert created.x == 0
    assert created.y == 1
    assert created.height == 5
    assert created.width == 6
    assert created.area() == 30
    assert created.distance_from_origin() == 1.0

    taller = Rectangle.create(x=0, y=1, height=6, width=7)
    assert taller.area() == 42
    assert taller.distance_from_origin() == 1.0

    offset = Rectangle.create(x=3.0, y=4.0, height=3, width=4)
    assert offset.area() == 12
    assert offset.distance_from_origin() == 5.0
|
import sys
import os
import argparse
import logging
import time
import numpy as np
from skimage import filters
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../../')
# Command-line interface. Fix: the --level help text claimed "default 6"
# while the actual default is 5; the positional helps now also reflect
# that run() treats both paths as directories, not single files.
parser = argparse.ArgumentParser(description='Generate predicted coordinates'
                                 ' from probability map of tumor patch'
                                 ' predictions, using non-maximal suppression')
parser.add_argument('probs_map_path', default=None, metavar='PROBS_MAP_PATH',
                    type=str, help='Path to the input directory of probs_map'
                    ' numpy files')
parser.add_argument('coord_path', default=None, metavar='COORD_PATH',
                    type=str, help='Path to the output directory for the'
                    ' coordinates csv files')
parser.add_argument('--level', default=5, type=int, help='at which WSI level'
                    ' the probability map was generated, default 5,'
                    ' i.e. inference stride = 32')
parser.add_argument('--radius', default=12, type=int, help='radius for nms,'
                    ' default 12 (6 used in Google paper at level 7,'
                    ' i.e. inference stride = 128)')
parser.add_argument('--prob_thred', default=0.5, type=float,
                    help='probability threshold for stopping, default 0.5')
parser.add_argument('--sigma', default=0.0, type=float,
                    help='sigma for Gaussian filter smoothing, default 0.0,'
                    ' which means disabled')
def run(args):
    """Batch mode: apply greedy non-maximal suppression to every matching
    probability-map .npy file in args.probs_map_path, writing one CSV of
    (probability, x_wsi, y_wsi) rows per map.
    """
    count = 0
    time_now = time.time()
    files = os.listdir(args.probs_map_path)
    for eachfile in files:
        if '-3-pmap' in eachfile:  # .npy naming produced on Camelyon16
            # NOTE(review): assumes probs_map_path ends with a path
            # separator -- os.path.join would be safer; confirm callers.
            probs_map = np.load(args.probs_map_path+eachfile)
            probs_map = probs_map.transpose()  # Camelyon16 .npy maps are stored transposed
            X, Y = probs_map.shape
            # One probability-map pixel spans 2**level level-0 WSI pixels.
            resolution = pow(2, args.level)
            if args.sigma > 0:
                probs_map = filters.gaussian(probs_map, sigma=args.sigma)
            outfile = open(args.coord_path+eachfile[:-4]+'.csv', 'w')
            # Greedy NMS: repeatedly take the global maximum, emit it, and
            # zero a (2*radius)-sided square neighbourhood around it.
            while np.max(probs_map) > args.prob_thred:
                prob_max = probs_map.max()
                max_idx = np.where(probs_map == prob_max)
                x_mask, y_mask = max_idx[0][0], max_idx[1][0]
                # +0.5 centers the coordinate inside the map pixel before
                # scaling up to level-0 WSI coordinates.
                x_wsi = int((x_mask + 0.5) * resolution)
                y_wsi = int((y_mask + 0.5) * resolution)
                outfile.write('{:0.5f},{},{}'.format(prob_max, x_wsi, y_wsi) + '\n')
                x_min = x_mask - args.radius if x_mask - args.radius > 0 else 0
                x_max = x_mask + args.radius if x_mask + args.radius <= X else X
                y_min = y_mask - args.radius if y_mask - args.radius > 0 else 0
                y_max = y_mask + args.radius if y_mask + args.radius <= Y else Y
                for x in range(x_min, x_max):
                    for y in range(y_min, y_max):
                        probs_map[x, y] = 0
            outfile.close()
            count += 1
            if count%1==0:  # always true: progress is logged after every file
                time_spent = time.time() - time_now
                time_now = time.time()
                logging.info(
                    '{}, Processing : {}/{}, Run Time : {:.2f}'
                    .format(
                        time.strftime("%Y-%m-%d %H:%M:%S"), count, len(files), time_spent))
    '''Single-tif prediction (kept for reference, disabled):'''
    # probs_map = np.load(args.probs_map_path)
    # X, Y = probs_map.shape
    # resolution = pow(2, args.level)
    #
    # if args.sigma > 0:
    #     probs_map = filters.gaussian(probs_map, sigma=args.sigma)
    #
    # outfile = open(args.coord_path, 'w')
    # while np.max(probs_map) > args.prob_thred:
    #     prob_max = probs_map.max()
    #     max_idx = np.where(probs_map == prob_max)
    #     x_mask, y_mask = max_idx[0][0], max_idx[1][0]
    #     x_wsi = int((x_mask + 0.5) * resolution)
    #     y_wsi = int((y_mask + 0.5) * resolution)
    #     outfile.write('{:0.5f},{},{}'.format(prob_max, x_wsi, y_wsi) + '\n') #prob(>0.5),x,y
    #
    #     x_min = x_mask - args.radius if x_mask - args.radius > 0 else 0
    #     x_max = x_mask + args.radius if x_mask + args.radius <= X else X
    #     y_min = y_mask - args.radius if y_mask - args.radius > 0 else 0
    #     y_max = y_mask + args.radius if y_mask + args.radius <= Y else Y
    #
    #     for x in range(x_min, x_max):
    #         for y in range(y_min, y_max):
    #             probs_map[x, y] = 0
    #
    # outfile.close()
def main():
    """Entry point: configure INFO-level logging, then run NMS."""
    logging.basicConfig(level=logging.INFO)
    run(parser.parse_args())


if __name__ == '__main__':
    main()
|
# Package version string, PEP 440 compliant.
# https://www.python.org/dev/peps/pep-0440/
__version__ = '1.13.3'
|
# For each test case, read a list of integers and print a mapping from
# each value to the (0-based) positions where it occurs.
# Fix: the original `final.update({line[c]: final[line[c]].append(c)})`
# raised KeyError on a value's first occurrence and, even when the key
# existed, stored None (list.append returns None). Unused accumulator
# variables (counter, saver, saver2) were removed.
t = int(input())
answers = []
for a in range(t):
    n = int(input())
    line = [int(b) for b in input().split()]
    final = {}
    for idx, value in enumerate(line):
        final.setdefault(value, []).append(idx)
    print(final)
for bl in answers:
    print(bl)
|
from enum import auto
from ...util.misc import StringEnum
from ...util.colormaps import AVAILABLE_COLORMAPS
class Interpolation(StringEnum):
    """INTERPOLATION: Vispy interpolation mode.

    The spatial filters used for interpolation are from vispy's
    spatial filters. The filters are built in the file below:
    https://github.com/vispy/vispy/blob/master/vispy/glsl/build-spatial-filters.py
    """
    # Member values are derived by the project's StringEnum auto() --
    # presumably the lowercased member name; confirm in util.misc.
    BESSEL = auto()
    BICUBIC = auto()
    BILINEAR = auto()
    BLACKMAN = auto()
    CATROM = auto()
    GAUSSIAN = auto()
    HAMMING = auto()
    HANNING = auto()
    HERMITE = auto()
    KAISER = auto()
    LANCZOS = auto()
    MITCHELL = auto()
    NEAREST = auto()
    SPLINE16 = auto()
    SPLINE36 = auto()
|
from timeseers.linear_trend import LinearTrend
from timeseers.timeseries_model import TimeSeriesModel
from timeseers.fourier_seasonality import FourierSeasonality
from timeseers.logistic_growth import LogisticGrowth
from timeseers.indicator import Indicator
from timeseers.constant import Constant
from timeseers.regressor import Regressor
# Public API of the timeseers package.
__all__ = ["LinearTrend", "TimeSeriesModel", "FourierSeasonality", "Indicator",
           "Constant", "Regressor", "LogisticGrowth"]
|
from read_wordcount_corpus import read_wordcount_corpus
import MeCab
# Shared MeCab tagger (neologd dictionary); used by mecab_parse.
m = MeCab.Tagger ('-d /usr/local/lib/mecab/dic/mecab-ipadic-neologd')
# Build the "mecab format" token list from raw tagger output.
def mecab_parse(text):
    """Parse *text* with MeCab and return [[surface, [features...]], ...]."""
    mecabFormat = []
    for txt in m.parse(text).split('\n'):
        # Token lines look like "surface<TAB>feature-CSV";
        # EOS and empty lines split into a single field and are skipped.
        t = txt.split('\t')
        if len(t)==2:
            xt = t[1].split(',')
            mecabFormat.append([t[0],xt])
    return mecabFormat
class ExtractTopicWord():
    """Extract a "topic word" -- the rarest eligible noun in a text --
    using word counts loaded from a corpus via read_wordcount_corpus.
    """

    # Load the corpus word counts.
    def __init__(self,corpus_choice='dialogue',conditional_noun='all',format='binaryfile'):
        # Record which corpus was chosen.
        self.corpus_choice = corpus_choice
        # Extraction condition ('all', 'name' or 'sahen').
        self.conditional_noun = conditional_noun
        # Load the word counter from the chosen corpus.
        self.wordCounter = read_wordcount_corpus(corpus=corpus_choice,format=format)

    # Eligibility check (returns True or False).
    def judge_condition(self,word_format):
        conditional_noun = self.conditional_noun
        # Words not present in wordCounter are never extracted.
        # (The POS strings below are MeCab feature values and must stay
        # in Japanese: 名詞 = noun, 一般 = common, 固有名詞 = proper noun,
        # サ変接続 = suru-verb stem.)
        if word_format[0] in self.wordCounter.keys():
            # 'all': any noun qualifies.
            if conditional_noun == 'all':
                if word_format[1][0] == '名詞':
                    return True
            # 'name': only common or proper nouns qualify.
            if conditional_noun == 'name':
                if (word_format[1][0] == '名詞') and (word_format[1][1] in ['一般','固有名詞']):
                    return True
            # 'sahen': common nouns, proper nouns or suru-verb stems qualify.
            if conditional_noun == 'sahen':
                if (word_format[1][0] == '名詞') and (word_format[1][1] in ['一般','固有名詞','サ変接続']):
                    return True
        # Condition not met.
        return False

    # Extract the topic word from a text.
    def getTopicWord(self,text):
        """Return the rarest eligible noun in *text*, or None if there is none."""
        nounList = []
        mecabFormat = mecab_parse(text)
        # Consider only nouns in the text that also appear in wordCounter.
        for word_format in mecabFormat:
            if self.judge_condition(word_format):
                nounList.append(word_format[0])
        # No candidate nouns: return None.
        if nounList == []:
            return None
        # Sort by ascending corpus frequency and return the rarest word.
        rank = list(zip(nounList,map(self.wordCounter.get,nounList)))
        rank.sort(key=lambda x:x[1])
        return rank[0][0]
        # For testing:
        # return rank
|
from setuptools import setup, find_packages
# Packaging metadata for the cross_dataset_common distribution.
setup(
    name='cross_dataset_common',
    version='0.1.5',
    description='Utilities for preparing processed data to be loaded into database',
    url='https://github.com/hubmapconsortium/cross-dataset-common',
    author='Sean Donahue',
    author_email='seandona@andrew.cmu.edu',
    license='MIT',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
    ],
    packages=find_packages(),
    install_requires=[
        'anndata>=0.7.3',
        'requests>=2.22.0',
        'pyyaml>=5.3',
        'scanpy>=1.4.4',
    ],
    python_requires='>=3.6',
)
|
# -*- encoding: utf-8 -*-
# Copyright (c) 2020 Stephen Bunn <stephen@bunn.io>
# ISC License <https://choosealicense.com/licenses/isc>
"""Predefined background (bg) chalk colors."""
from ..chalk import Chalk
from ..color import Color
# One Chalk instance per predefined background color.
black = Chalk(background=Color.BLACK)
red = Chalk(background=Color.RED)
green = Chalk(background=Color.GREEN)
yellow = Chalk(background=Color.YELLOW)
blue = Chalk(background=Color.BLUE)
magenta = Chalk(background=Color.MAGENTA)
cyan = Chalk(background=Color.CYAN)
white = Chalk(background=Color.WHITE)
bright_black = Chalk(background=Color.BRIGHT_BLACK)
bright_red = Chalk(background=Color.BRIGHT_RED)
bright_green = Chalk(background=Color.BRIGHT_GREEN)
bright_yellow = Chalk(background=Color.BRIGHT_YELLOW)
bright_blue = Chalk(background=Color.BRIGHT_BLUE)
# Fix: was Color.MAGENTA (copy-paste bug); every other bright_* uses BRIGHT_*.
bright_magenta = Chalk(background=Color.BRIGHT_MAGENTA)
bright_cyan = Chalk(background=Color.BRIGHT_CYAN)
bright_white = Chalk(background=Color.BRIGHT_WHITE)
|
#!/usr/bin/env python3
# psw_factorial, an implementation of the prime swing factorial.
# The claim is that this is the fastest known algorithm for
# computing the factorial. It is based on the prime factorization of n!.
# See Peter Luschny, https://oeis.org/A000142/a000142.pdf
# Also http://www.luschny.de/math/factorial/SwingFactorialPy.html
# http://luschny.de/math/factorial/FastFactorialFunctions.htm
from bisect import bisect_left as bisectleft
import math
def prime_pi_upper_bound(n):
    """Return an upper bound on pi(n), the count of primes not exceeding n."""
    log_n = math.log(n)
    denominator = log_n - 1 - (154 / 125) / log_n
    return int(n / denominator)
def Primes(n):
    """
    Return the primes in the interval 1..n (n inclusive).
    """
    # --- Sieve the primes à la Eratosthenes.
    # Mod-6 wheel sieve: `composite` only tracks candidates coprime to 2
    # and 3 (values 5, 7, 11, 13, ... -- two candidates per wheel turn).
    # p1/p2 follow the current candidate pair, s1 tracks the index of the
    # current candidate's square, and d1/d2 are the alternating gaps
    # between consecutive candidate squares.
    lim, tog = n // 3, False
    composite = [False]*lim
    d1 = 8; d2 = 8; p1 = 3; p2 = 7; s1 = 7; s2 = 3; m = -1
    while s1 < lim: # -- scan the sieve
        m += 1 # -- if a prime is found
        if not composite[m]: # -- cancel its multiples
            inc = p1 + p2
            # Two arithmetic progressions cover both residue classes of
            # the prime's multiples within the wheel.
            for k in range(s1, lim, inc): composite[k] = True
            for k in range(s1 + s2, lim, inc): composite[k] = True
        # Advance the wheel for every candidate (prime or not).
        tog = not tog
        if tog: s1 += d2; d1 += 16; p1 += 2; p2 += 2; s2 = p2
        else: s1 += d1; d2 += 8; p1 += 2; p2 += 6; s2 = p1
    # --- Collect the primes.
    primes = [0]*prime_pi_upper_bound(n)
    primes[0] = 2; primes[1] = 3
    m, k, p, tog = 1, 0, 5, False
    while p <= n:
        if not composite[k]:
            m += 1
            primes[m] = p
        k += 1
        tog = not tog
        # Candidates alternate gaps of 2 and 4 (5, 7, 11, 13, ...).
        p += 2 if tog else 4
    return primes[0:m+1]
def isqrt(x):
    """
    Return the integer square root of x.

    Raises:
        ValueError: if x is negative.
    """
    if x < 0:
        raise ValueError('square root not def. for negative numbers')
    # math.isqrt (Python 3.8+) is exact for arbitrary-precision ints and
    # replaces the hand-rolled Newton iteration previously used here.
    return math.isqrt(int(x))
def product(A):
    """
    Return the accumulated product of an array.

    Uses balanced recursion so that big-integer multiplications stay
    between operands of similar size (much faster than a linear fold).
    """
    def _prod(lo, hi):
        # Product over the inclusive index range [lo, hi].
        if hi - lo < 24:
            acc = 1
            for idx in range(lo, hi + 1):
                acc *= A[idx]
            return acc
        mid = (lo + hi) // 2
        return _prod(lo, mid) * _prod(mid + 1, hi)
    return _prod(0, len(A) - 1)
def psw_factorial(n):
    """
    Return the factorial of n (using the prime-swing algorithm).

    n! is computed as odd_factorial(n) shifted left by the exponent of 2
    in n! (which equals n minus the number of set bits of n).
    """
    # Precomputed swinging factorials for m < 33.
    small_swing = [1, 1, 1, 3, 3, 15, 5, 35, 35, 315, 63, 693, 231, 3003,
        429, 6435, 6435, 109395, 12155, 230945, 46189, 969969, 88179,
        2028117, 676039, 16900975, 1300075, 35102025, 5014575, 145422675,
        9694845, 300540195, 300540195]

    def swing(m, primes):
        # Swinging factorial of m, assembled from its odd prime factors.
        if m < 33: return small_swing[m]
        s = bisectleft(primes, 1 + isqrt(m))
        d = bisectleft(primes, 1 + m // 3)
        e = bisectleft(primes, 1 + m // 2)
        g = bisectleft(primes, 1 + m)
        # Primes in (m/2, m] appear exactly once.
        factors = primes[e:g]
        # Primes in (sqrt(m), m/3] appear iff floor(m/p) is odd.
        factors += filter(lambda x: (m // x) & 1 == 1, primes[s:d])
        # Small odd primes (<= sqrt(m)): collect the product of powers
        # whose quotients floor(m/p^k) are odd.
        for prime in primes[1:s]:
            p, q = 1, m
            while True:
                q //= prime
                if q == 0: break
                if q & 1 == 1:
                    p *= prime
            if p > 1: factors.append(p)
        return product(factors)

    def odd_factorial(n, primes):
        # Odd part of n!, via the recursion oddf(n) = oddf(n/2)^2 * swing(n).
        if n < 2: return 1
        tmp = odd_factorial(n // 2, primes)
        return (tmp * tmp) * swing(n, primes)

    def eval(n):
        # NOTE: shadows the builtin eval, but only locally to this function.
        if n < 0:
            raise ValueError('factorial not def. for negative numbers')
        if n == 0: return 1
        # Small n: a direct product is fastest.
        if n < 20: return product(range(2, n + 1))
        # bits = n - popcount(n) = exponent of 2 in n!.
        N, bits = n, n
        while N != 0:
            bits -= N & 1
            N >>= 1
        primes = Primes(n)
        return odd_factorial(n, primes) << bits

    return eval(n)
import time
def main():
    """
    Test and benchmark:
    We are faster than math.factorial!
    """
    # Correctness check against the standard library for small n.
    for n in range(1000):
        mf = math.factorial(n)
        psw = psw_factorial(n)
        if mf != psw: print("Error at", n)
    # Benchmark: grow n geometrically and report per-step timings.
    n = 1000; elapsed_last = 0
    while n < 10000000:
        print("Test n = {}".format(n), end='', flush=True)
        start = time.time()
        psw_factorial(n)
        # math.factorial(n)
        end = time.time()
        elapsed = end - start
        # Ratio versus the previous (4x smaller) n; 0 on the first pass.
        q = elapsed/elapsed_last if elapsed_last > 0 else 0
        print(", elapsed={:1.3f}s, quot={:1.1f}".format(elapsed, q))
        elapsed_last = elapsed
        n *= 4
        # Very roughly: if n is increased by a factor of 4
        # then the elapsed time increases by a factor of 10.

if __name__ == '__main__':
    main()
|
import unittest
import mock
from pyhpecw7.features.vxlan import Vxlan, Tunnel, L2EthService
from .base_feature_test import BaseFeatureCase
# Fixture constants shared by all test cases below.
INTERFACE = 'FortyGigE1/0/2'
INSTANCE = '100'
VSI = 'VSI_VXLAN_100'
TUNNEL = '20'
class VxlanTestCase(BaseFeatureCase):
    @mock.patch('pyhpecw7.comware.HPCOM7')
    def setUp(self, mock_device):
        """Build L2EthService, Vxlan and Tunnel features on a mocked device."""
        self.device = mock_device
        self.l2eth = L2EthService(self.device, INTERFACE, INSTANCE, VSI)
        self.vxlan = Vxlan(self.device, INSTANCE, vsi=VSI)
        self.tunnel = Tunnel(self.device, TUNNEL)
    def test_tunnel_get_config(self):
        """Existing tunnel CLI output parses into src/dest/mode dict."""
        self.device.cli_display.return_value = """<HP1>display current-configuration interface Tunnel 20
#
interface Tunnel20 mode vxlan
source 10.1.1.1
destination 10.1.1.2
#
return
"""
        expected = {'dest': '10.1.1.2', 'src': '10.1.1.1', 'mode': 'vxlan'}
        result = self.tunnel.get_config()
        self.assertEqual(result, expected)
    def test_tunnel_get_config_no_tunnel(self):
        """A CLI parse error (tunnel absent) yields an empty config dict."""
        self.device.cli_display.return_value = """ ^
% Wrong parameter found at '^' position.
"""
        expected = {}
        result = self.tunnel.get_config()
        self.assertEqual(result, expected)
    def test_tunnel_get_global_source(self):
        """The global tunnel source address is extracted from CLI output."""
        self.device.cli_display.return_value = """<HP1>display current-configuration | inc "tunnel global source"
tunnel global source-address 10.10.10.10
"""
        expected = '10.10.10.10'
        result = self.tunnel.get_global_source()
        self.assertEqual(result, expected)
    def test_tunnel_build_config(self):
        """'present' state issues the full tunnel CLI command sequence."""
        self.tunnel._build_config('present', src='1.1.1.1', dest='2.2.2.2', global_src='1.1.1.2')
        expected_call = ['tunnel global source-address 1.1.1.2', 'interface tunnel 20 mode vxlan', 'source 1.1.1.1', 'destination 2.2.2.2']
        self.device.cli_config.assert_called_with(expected_call)
    def test_tunnel_build_config_stage(self):
        """stage=True routes the same commands through stage_config."""
        self.tunnel._build_config('present', src='1.1.1.1', dest='2.2.2.2', global_src='1.1.1.2', stage=True)
        expected_call = ['tunnel global source-address 1.1.1.2', 'interface tunnel 20 mode vxlan', 'source 1.1.1.1', 'destination 2.2.2.2']
        self.device.stage_config.assert_called_with(expected_call, 'cli_config')
    def test_tunnel_build_config_remove(self):
        """'absent' state issues only the undo interface command."""
        self.tunnel._build_config('absent', src='1.1.1.1', dest='2.2.2.2', global_src='1.1.1.2')
        expected_call = ['undo interface tunnel 20']
        self.device.cli_config.assert_called_with(expected_call)
    @mock.patch.object(Tunnel, '_build_config')
    def test_tunnel_remove_stage(self, mock_build_config):
        """remove(stage=True) delegates to _build_config with state='absent'."""
        self.tunnel.remove(stage=True)
        mock_build_config.assert_called_with(state='absent', stage=True)
    @mock.patch.object(Tunnel, '_build_config')
    def test_tunnel_remove(self, mock_build_config):
        """remove() defaults to stage=False."""
        self.tunnel.remove()
        mock_build_config.assert_called_with(state='absent', stage=False)
    @mock.patch.object(Tunnel, '_build_config')
    def test_tunnel_build_stage(self, mock_build_config):
        """build(stage=True) delegates with state='present'."""
        self.tunnel.build(stage=True)
        mock_build_config.assert_called_with(state='present', stage=True)
    @mock.patch.object(Tunnel, '_build_config')
    def test_tunnel_build(self, mock_build_config):
        """build() defaults to stage=False."""
        self.tunnel.build()
        mock_build_config.assert_called_with(state='present', stage=False)
    @mock.patch.object(Vxlan, 'get_tunnels')
    def test_get_config(self, mock_get_tunnels):
        """NETCONF get reply maps into the vsi/tunnels/vxlan dict."""
        mock_get_tunnels.return_value = ['20']
        expected_get, get_reply = self.xml_get_and_reply('vxlan')
        self.device.get.return_value = get_reply
        expected = {'vsi': 'VSI_VXLAN_100', 'tunnels': ['20'], 'vxlan': '100'}
        result = self.vxlan.get_config()
        self.assertEqual(result, expected)
        self.assert_get_request(expected_get)
    def test_build_vsi(self):
        """_build_vsi('merge') produces the expected VSI XML."""
        result = self.vxlan._build_vsi('merge')
        expected = self.read_config_xml('vxlan_vsi')
        self.assert_elements_equal(result, expected)
    def test_build_vxlan(self):
        """_build_vxlan('merge') produces the expected VXLAN XML."""
        result = self.vxlan._build_vxlan('merge')
        expected = self.read_config_xml('vxlan')
        self.assert_elements_equal(result, expected)
def test_build_tunnels(self):
result = self.vxlan._build_tunnels('merge', ['20', '3'])
expected = self.read_config_xml('vxlan_tunnels')
self.assert_elements_equal(result, expected)
def test_build(self):
result = self.vxlan.build(tunnels_to_add=['20', '3'])
expected_add_call = self.read_config_xml('vxlan_tunnels')
self.assert_config_request(expected_add_call)
result = self.vxlan.build(tunnels_to_remove=['4', '5'], stage=True)
expected_rmv_call = self.read_config_xml('vxlan_tunnels_delete')
self.assert_stage_request(expected_rmv_call, 'edit_config')
self.assertEqual(result, True)
def test_create(self):
    # create() edits immediately; create(stage=True) only stages.
    expected_vsi_call = self.read_config_xml('vxlan_vsi')
    expected_vxlan_call = self.read_config_xml('vxlan')
    result = self.vxlan.create()
    self.assert_config_request(expected_vxlan_call)
    result = self.vxlan.create(stage=True)
    self.assert_stage_request(expected_vxlan_call, 'edit_config')

def test_remove_vsi(self):
    # remove_vsi() edits immediately and returns truthy; stage=True stages.
    expected_vsi_call = self.read_config_xml('vxlan_vsi_delete')
    result = self.vxlan.remove_vsi()
    self.assert_config_request(expected_vsi_call)
    self.assertTrue(result)
    result = self.vxlan.remove_vsi(stage=True)
    self.assert_stage_request(expected_vsi_call, 'edit_config')

def test_remove_vxlan(self):
    # remove_vxlan() mirrors remove_vsi() with the vxlan delete fragment.
    expected_call = self.read_config_xml('vxlan_delete')
    result = self.vxlan.remove_vxlan()
    self.assert_config_request(expected_call)
    self.assertTrue(result)
    result = self.vxlan.remove_vxlan(stage=True)
    self.assert_stage_request(expected_call, 'edit_config')

def test_get_tunnels(self):
    # get_tunnels() parses tunnel IDs out of the NETCONF reply.
    expected_get, get_reply = self.xml_get_and_reply('vxlan_tunnels')
    self.device.get.return_value = get_reply
    expected = ['20']
    result = self.vxlan.get_tunnels()
    self.assertEqual(result, expected)
    self.assert_get_request(expected_get)

def test_l2eth_vsi_exist(self):
    # vsi_exist() returns the VSI name found on the device.
    expected_get, get_reply = self.xml_get_and_reply('l2eth_vsi_exist')
    self.device.get.return_value = get_reply
    expected = {'vsi': 'VSI_VXLAN_100'}
    result = self.l2eth.vsi_exist()
    self.assertEqual(result, expected)
    self.assert_get_request(expected_get)

@mock.patch.object(L2EthService, '_index_from_interface')
def test_l2eth_get_vsi_map(self, mock_index_from_interface):
    # get_vsi_map() combines the reply with the mocked interface index.
    mock_index_from_interface.return_value = '125'
    expected_get, get_reply = self.xml_get_and_reply('l2eth_vsi_map')
    self.device.get.return_value = get_reply
    expected = {'vsi': 'VSI_VXLAN_100', 'access_mode': 'ethernet', 'index': '125', 'instance': '100'}
    result = self.l2eth.get_vsi_map()
    self.assertEqual(result, expected)
    self.assert_get_request(expected_get)

@mock.patch.object(L2EthService, '_index_from_interface')
@mock.patch.object(L2EthService, '_get_interface_from_index')
def test_l2eth_get_vsi_encap(self, mock_iface_from_index, mock_index_from_interface):
    # get_vsi_encap() resolves both the index and the interface name
    # (bottom decorator binds to the first mock argument).
    mock_iface_from_index.return_value = 'FortyGigE1/0/32'
    mock_index_from_interface.return_value = '125'
    expected_get, get_reply = self.xml_get_and_reply('l2eth_vsi_encap')
    self.device.get.return_value = get_reply
    expected = {'interface': 'FortyGigE1/0/32', 'index': '125', 'encap': 'tagged', 'instance': '100'}
    result = self.l2eth.get_vsi_encap()
    self.assertEqual(result, expected)
    self.assert_get_request(expected_get)
@mock.patch.object(L2EthService, '_index_from_interface')
def test_l2eth_build_encap_default(self, mock_index_from_interface):
    # encap='default': immediate edit, then staged edit, same XML payload.
    mock_index_from_interface.return_value = '125'
    expected_call = self.read_config_xml('l2eth_build_encap_default')
    self.l2eth._build_encap('merge', encap='default')
    self.assert_config_request(expected_call)
    self.l2eth._build_encap('merge', stage=True, encap='default')
    self.assert_stage_request(expected_call, 'edit_config')

@mock.patch.object(L2EthService, '_index_from_interface')
def test_l2eth_build_encap_tagged(self, mock_index_from_interface):
    mock_index_from_interface.return_value = '125'
    expected_call = self.read_config_xml('l2eth_build_encap_tagged')
    self.l2eth._build_encap('merge', encap='tagged')
    self.assert_config_request(expected_call)
    self.l2eth._build_encap('merge', stage=True, encap='tagged')
    self.assert_stage_request(expected_call, 'edit_config')

@mock.patch.object(L2EthService, '_index_from_interface')
def test_l2eth_build_encap_only_tagged(self, mock_index_from_interface):
    # 'only-tagged' additionally requires a vlanid.
    mock_index_from_interface.return_value = '125'
    expected_call = self.read_config_xml('l2eth_build_encap_only_tagged')
    self.l2eth._build_encap('merge', encap='only-tagged', vlanid='10')
    self.assert_config_request(expected_call)
    self.l2eth._build_encap('merge', stage=True, encap='only-tagged', vlanid='10')
    self.assert_stage_request(expected_call, 'edit_config')

@mock.patch.object(L2EthService, '_index_from_interface')
def test_l2eth_build_encap_only_delete(self, mock_index_from_interface):
    # 'delete' operation needs no encap kwargs.
    mock_index_from_interface.return_value = '125'
    expected_call = self.read_config_xml('l2eth_build_encap_delete')
    self.l2eth._build_encap('delete')
    self.assert_config_request(expected_call)
    self.l2eth._build_encap('delete', stage=True)
    self.assert_stage_request(expected_call, 'edit_config')

def test_l2eth_build_xconnect(self):
    # _build_xconnect takes the service index explicitly.
    expected_call = self.read_config_xml('l2eth_build_xconnect')
    self.l2eth._build_xconnect('merge', '125', access_mode='ethernet')
    self.assert_config_request(expected_call)
    self.l2eth._build_xconnect('merge', '125', stage=True, access_mode='ethernet')
    self.assert_stage_request(expected_call, 'edit_config')

@mock.patch.object(L2EthService, '_build_encap')
@mock.patch.object(L2EthService, '_build_xconnect')
def test_l2eth_build_config(self, mock_xconnect, mock_encap):
    # 'present' builds both the encap and the xconnect with self.jindex.
    self.l2eth.jindex = '999'
    self.l2eth._build_config('present', encap='tagged', access_mode='vlan')
    mock_encap.assert_called_with('merge', access_mode='vlan', encap='tagged', stage=False)
    mock_xconnect.assert_called_with('merge', '999', access_mode='vlan', encap='tagged', stage=False)

@mock.patch.object(L2EthService, '_build_encap')
@mock.patch.object(L2EthService, '_build_xconnect')
def test_l2eth_build_config_absent(self, mock_xconnect, mock_encap):
    # 'absent' deletes the encap only; no xconnect call is made.
    self.l2eth.jindex = '999'
    self.l2eth._build_config('absent', encap='tagged', access_mode='vlan')
    mock_encap.assert_called_with('delete', access_mode='vlan', encap='tagged', stage=False)
    mock_xconnect.assert_not_called()

@mock.patch.object(L2EthService, '_build_config')
def test_l2eth_build(self, mock_build_config):
    # build() forwards every kwarg plus state/stage to _build_config.
    self.l2eth.build(vsi='abc', encap='tagged', access_mode='ethernet')
    mock_build_config.assert_called_with(access_mode='ethernet', encap='tagged', stage=False, state='present', vsi='abc')

@mock.patch.object(L2EthService, '_build_config')
def test_l2eth_remove(self, mock_build_config):
    self.l2eth.remove()
    mock_build_config.assert_called_with(stage=False, state='absent')

def test_l2eth_get_interface_from_index(self):
    # _get_interface_from_index maps a service index to an interface name.
    expected_get, get_reply = self.xml_get_and_reply('l2eth_get_interface_from_index')
    self.device.get.return_value = get_reply
    expected = 'FortyGigE1/0/32'
    result = self.l2eth._get_interface_from_index('125')
    self.assertEqual(result, expected)
    self.assert_get_request(expected_get)

if __name__ == '__main__':
    unittest.main()
|
import re
import xml.etree.ElementTree as ET

# Extract every text fragment of KashfAlZunun.txt whose first word is
# "علم" ("science") and write the matches to kashfOnlyUlum.xml.
# 'with' guarantees the file handle is closed even if parsing fails
# (the original left it open on error).
with open('KashfAlZunun.txt', mode='r', encoding='utf-8') as kashf:
    text = kashf.read()

# Collapse runs of two or more dots (layout noise), then split into
# fragments on the remaining single dots.
denoised_text = re.sub(r'\.{2,}', '', text)
elementsKashf = denoised_text.split(".")

kashfalzunun = ET.Element("kashf")
y = 1  # running entry number: counts matching fragments only
# The last split fragment is the trailing remainder, so it is skipped.
for x in range(0, len(elementsKashf) - 1):
    WordsElement = re.findall(r'\w+', elementsKashf[x])
    numberWordsElement = len(WordsElement)
    # BUG FIX: guard the empty case — the original indexed WordsElement[0]
    # unconditionally and raised IndexError on fragments with no words.
    if WordsElement and WordsElement[0] == "علم":
        entryNew = ET.SubElement(kashfalzunun, "entry")
        entryNumber = ET.SubElement(entryNew, "Number")
        entryNumber.text = str(y)
        entryID = ET.SubElement(entryNew, "ID")
        entryID.text = str(x)  # fragment index in the source text
        entryLength = ET.SubElement(entryNew, "Length")
        entryLength.text = str(numberWordsElement)
        entryText = ET.SubElement(entryNew, "Text")
        entryText.text = elementsKashf[x]
        y = y + 1
ET.ElementTree(kashfalzunun).write("kashfOnlyUlum.xml")
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for Popnn operators."""
from tensorflow.compiler.plugin.poplar.ops import gen_popnn_ops
from tensorflow.compiler.plugin.poplar.ops import gen_functional_ops
from tensorflow.python.framework import func_graph as func_graph_module
from tensorflow.python.framework import ops
from tensorflow.python.ipu import functional_ops
from tensorflow.python.ipu import functional_ops_grad
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_util_v2 as util
from tensorflow.python.ops.nn_grad import _BroadcastMul
@ops.RegisterGradient("IpuGelu")
def _ipu_gelu_grad(op, grad):
    """Gradients for the IpuGelu op.

    Delegates to the generated ipu_gelu_grad op, passing the incoming
    gradient and the forward-pass input.
    """
    x = op.inputs[0]  # forward-pass input tensor
    return [gen_popnn_ops.ipu_gelu_grad(grad, x)]

@ops.RegisterGradient("IpuHardSigmoid")
def _ipu_hard_sigmoid_grad(op, grad):
    """Gradients for the IpuHardSigmoid op (delegates to ipu_hard_sigmoid_grad)."""
    x = op.inputs[0]  # forward-pass input tensor
    return [gen_popnn_ops.ipu_hard_sigmoid_grad(grad, x)]

@ops.RegisterGradient("IpuSwish")
def _ipu_swish_grad(op, grad):
    """Gradients for the IpuSwish op (delegates to ipu_swish_grad)."""
    x = op.inputs[0]  # forward-pass input tensor
    return [gen_popnn_ops.ipu_swish_grad(grad, x)]
@ops.RegisterGradient("MultiConv")
def _multi_conv_grad(op, *grads):
    """The gradient of a MultiConv op.

    Builds a gradient function graph for the op's wrapped function, then
    executes it via a new MultiConv op so the backward pass reuses the
    forward op's option_flags.
    """
    # Construct the gradient graph, its inputs, and any constant outputs
    # (returned separately so they can be re-inserted after the call).
    func_grad_graph, func_grad_inputs, constant_outputs = \
      functional_ops_grad._get_gradients_for_function(op, *grads)  # pylint: disable=protected-access
    outputs = gen_functional_ops.multi_conv(
        func_grad_inputs,
        to_apply=util.create_new_tf_function(func_grad_graph),
        Tout=func_grad_graph.output_types,
        output_shapes=func_grad_graph.output_shapes,
        option_flags=op.get_attr("option_flags"))
    # Splice the constant outputs back in, then restore the gradient
    # graph's output structure.
    outputs = functional_ops._replace_outputs(outputs, constant_outputs)  # pylint: disable=protected-access
    return functional_ops._pack_sequence_as(  # pylint: disable=protected-access
        func_grad_graph.structured_outputs, outputs)
@ops.RegisterGradient("PopnnCTCLossWithLogits")
@ops.RegisterGradient("PopnnCTCLossWithLogProbs")
def _ctc_loss_grad(op, loss_grad, _):
    """The gradient of PopnnCTCLossWithLogits and PopnnCTCLossWithLogProbs ops.

    op.outputs[1] carries the gradient computed by the forward op; it is
    broadcast-multiplied with the incoming loss gradient. The trailing None
    entries are for the remaining op inputs — presumably labels/lengths,
    which have no gradient (confirm against the op definition).
    """
    # Block second-order differentiation, which the kernel does not support.
    op_grad = array_ops.prevent_gradient(
        op.outputs[1],
        message="Second order derivative is not currently available for CTC Loss."
    )
    return [_BroadcastMul(loss_grad, op_grad), None, None, None]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# CDS-ILS is free software; you can redistribute it and/or modify it under
# the terms of the MIT License; see LICENSE file for more details.
"""CDS-ILS MARCXML to JSON fields values mapping."""
from __future__ import unicode_literals
from cds_ils.importer.errors import UnexpectedValue
# Normalized document type -> accepted legacy MARC values/tag codes.
DOCUMENT_TYPE = {
    "PROCEEDINGS": ["PROCEEDINGS", "42", "43"],
    "BOOK": ["BOOK", "21"],
    "STANDARD": ["STANDARD"],
}
# Normalized collection name -> accepted legacy spellings.
COLLECTION = {
    "BOOK SUGGESTION": ["BOOKSUGGESTION"],
    "LEGSERLIB": ["LEGSERLIB"],
    "YELLOW REPORT": ["YELLOW REPORT", "YELLOWREPORT"],
    "CERN": ["CERN"],
    "DESIGN REPORT": ["DESIGN REPORT", "DESIGNREPORT"],
    "BOOKSHOP": ["BOOKSHOP"],
    "LEGSERLIBINTLAW": ["LEGSERLIBINTLAW"],
    "LEGSERLIBCIVLAW": ["LEGSERLIBCIVLAW"],
    "LEGSERLIBLEGRES": ["LEGSERLIBLEGRES"],
}
# Acquisition source -> accepted legacy codes.
ACQUISITION_METHOD = {
    # possible user types (created_by.type/created_by.value)
    "user": ["H", "R"],
    "batchuploader": ["N", "M"],
    "migration": ["migration"],
}
MEDIUM_TYPES = [
"ELECTRONIC VERSION",
"PRINT VERSION",
"PRINT VERSION, HARDBACK",
"PRINT VERSION, PAPERBACK",
"PRINT VERSION, SPIRAL-BOUND",
"CD-ROM",
"AUDIOBOOK",
"DVD",
]
ARXIV_CATEGORIES = [
"astro-ph",
"astro-ph.CO",
"astro-ph.EP",
"astro-ph.GA",
"astro-ph.HE",
"astro-ph.IM",
"astro-ph.SR",
"cond-mat",
"cond-mat.dis-nn",
"cond-mat.mes-hall",
"cond-mat.mtrl-sci",
"cond-mat.other",
"cond-mat.quant-gas",
"cond-mat.soft",
"cond-mat.stat-mech",
"cond-mat.str-el",
"cond-mat.supr-con",
"cs",
"cs.AI",
"cs.AR",
"cs.CC",
"cs.CE",
"cs.CG",
"cs.CL",
"cs.CR",
"cs.CV",
"cs.CY",
"cs.DB",
"cs.DC",
"cs.DL",
"cs.DM",
"cs.DS",
"cs.ET",
"cs.FL",
"cs.GL",
"cs.GR",
"cs.GT",
"cs.HC",
"cs.IR",
"cs.IT",
"cs.LG",
"cs.LO",
"cs.MA",
"cs.MM",
"cs.MS",
"cs.NA",
"cs.NE",
"cs.NI",
"cs.OH",
"cs.OS",
"cs.PF",
"cs.PL",
"cs.RO",
"cs.SC",
"cs.SD",
"cs.SE",
"cs.SI",
"cs.SY",
"econ",
"econ.EM",
"eess",
"eess.AS",
"eess.IV",
"eess.SP",
"gr-qc",
"hep-ex",
"hep-lat",
"hep-ph",
"hep-th",
"math",
"math-ph",
"math.AC",
"math.AG",
"math.AP",
"math.AT",
"math.CA",
"math.CO",
"math.CT",
"math.CV",
"math.DG",
"math.DS",
"math.FA",
"math.GM",
"math.GN",
"math.GR",
"math.GT",
"math.HO",
"math.IT",
"math.KT",
"math.LO",
"math.MG",
"math.MP",
"math.NA",
"math.NT",
"math.OA",
"math.OC",
"math.PR",
"math.QA",
"math.RA",
"math.RT",
"math.SG",
"math.SP",
"math.ST",
"nlin",
"nlin.AO",
"nlin.CD",
"nlin.CG",
"nlin.PS",
"nlin.SI",
"nucl-ex",
"nucl-th",
"physics",
"physics.acc-ph",
"physics.ao-ph",
"physics.app-ph",
"physics.atm-clus",
"physics.atom-ph",
"physics.bio-ph",
"physics.chem-ph",
"physics.class-ph",
"physics.comp-ph",
"physics.data-an",
"physics.ed-ph",
"physics.flu-dyn",
"physics.gen-ph",
"physics.geo-ph",
"physics.hist-ph",
"physics.ins-det",
"physics.med-ph",
"physics.optics",
"physics.plasm-ph",
"physics.pop-ph",
"physics.soc-ph",
"physics.space-ph",
"q-bio",
"q-bio.BM",
"q-bio.CB",
"q-bio.GN",
"q-bio.MN",
"q-bio.NC",
"q-bio.OT",
"q-bio.PE",
"q-bio.QM",
"q-bio.SC",
"q-bio.TO",
"q-fin",
"q-fin.CP",
"q-fin.EC",
"q-fin.GN",
"q-fin.MF",
"q-fin.PM",
"q-fin.PR",
"q-fin.RM",
"q-fin.ST",
"q-fin.TR",
"quant-ph",
"stat",
"stat.AP",
"stat.CO",
"stat.ME",
"stat.ML",
"stat.OT",
"stat.TH",
]
MATERIALS = [
"addendum",
"additional material",
"data",
"e-proceedings",
"ebook",
"editorial note",
"erratum",
"preprint",
"publication",
"reprint",
"software",
"translation",
]
SUBJECT_CLASSIFICATION_EXCEPTIONS = [
"PACS",
"CERN LIBRARY",
"CERN YELLOW REPORT",
]
EXTERNAL_SYSTEM_IDENTIFIERS = [
"DCL",
"DESY",
"DOE",
"EBL",
"FIZ",
"HAL",
"IEECONF",
"INDICO.CERN.CH",
"INIS",
"INSPIRE",
"KEK",
"LHCLHC",
"SAFARI",
"SCEM",
"UDCCERN",
"WAI01",
]
EXTERNAL_SYSTEM_IDENTIFIERS_TO_IGNORE = [
"ARXIV",
"CERN ANNUAL REPORT",
"HTTP://INSPIREHEP.NET/OAI2D",
"SLAC",
"SLACCONF",
"SPIRES",
]
def mapping(field_map, val, raise_exception=False, default_val=None):
    """Translate a raw field value through *field_map*.

    Map values must be uppercase so the case-insensitive dict lookup can
    match every spelling found in the field.

    :param field_map: either a dict of {new_value: [legacy variants]} or a
        list of allowed values.
    :param val: incoming value; surrounding whitespace is stripped first.
    :param raise_exception: raise UnexpectedValue when nothing matches.
    :param default_val: fallback value. NOTE(review): it is only consulted
        when *field_map* is neither a dict nor a list — quirk preserved
        from the original implementation.
    :raises UnexpectedValue: when no match is found and *raise_exception*.
    :return: the mapped value, or None when nothing matches silently.
    """
    if isinstance(val, str):
        val = val.strip()
    if val:
        if isinstance(field_map, dict):
            needle = val.upper()
            match = next(
                (new for new, variants in field_map.items() if needle in variants),
                None,
            )
            if match is not None:
                return match
        elif isinstance(field_map, list):
            if val in field_map:
                return val
        elif default_val:
            return default_val
    if raise_exception:
        raise UnexpectedValue
|
from __future__ import print_function
from tractor.lsqr_optimizer import LsqrOptimizer
import numpy as np
logverb = print
logmsg = print
class ConstrainedOptimizer(LsqrOptimizer):
    """LSQR optimizer that clips each parameter step to the tractor's
    per-parameter lower/upper bounds.

    ``self.hitLimit`` is set True whenever a proposed step had to be
    shortened (or abandoned) because it would cross a bound.
    """

    def optimize_loop(self, tractor, dchisq=0., steps=50, **kwargs):
        """Call optimize() up to *steps* times, stopping once the log-prob
        gain drops to *dchisq* or below.

        Returns a dict with 'steps' (last step index) and 'hit_limit'.
        """
        R = {}
        self.hitLimit = False
        for step in range(steps):
            dlnp, _, _ = self.optimize(tractor, **kwargs)
            if dlnp <= dchisq:
                break
        # NOTE(review): if steps == 0 the loop never runs and 'step' is
        # unbound here (NameError) — confirm callers always pass steps >= 1.
        R.update(steps=step)
        R.update(hit_limit=self.hitLimit)
        return R

    def tryUpdates(self, tractor, X, alphas=None):
        """Line-search along update direction *X*, respecting bounds.

        Each step size in *alphas* (ascending) is tried; a step that would
        push any parameter past a bound is shrunk so it lands exactly on
        the bound, and the search stops growing alpha after that.

        Returns (delta_logprob, best_alpha), or (0, 0.) when no step
        improved the log-prob (parameters restored to their entry values).
        """
        if alphas is None:
            # 1/1024 to 1 in factors of 2, + sqrt(2.) + 2.
            alphas = np.append(2.**np.arange(-10, 1), [np.sqrt(2.), 2.])
        pBefore = tractor.getLogProb()
        #logverb(' log-prob before:', pBefore)
        pBest = pBefore
        alphaBest = None
        p0 = tractor.getParams()
        lowers = tractor.getLowerBounds()
        uppers = tractor.getUpperBounds()
        # print('Parameters:', tractor.getParamNames())
        # print(' lower bounds:', lowers)
        # print(' upper bounds:', uppers)
        for alpha in alphas:
            #logverb(' Stepping with alpha =', alpha)
            pa = [p + alpha * d for p, d in zip(p0, X)]
            # Check parameter limits
            maxalpha = alpha
            bailout = False
            # NOTE(review): the divisions by X[i] below assume X[i] != 0 for
            # any parameter that violates its bound; that holds as long as
            # p0 starts within bounds (a zero step component cannot move a
            # parameter across a bound) — confirm callers guarantee this.
            for i, (l, u, px) in enumerate(zip(lowers, uppers, pa)):
                if l is not None and px < l:
                    # This parameter hits the limit; compute the step size
                    # to just hit the limit.
                    a = (l - p0[i]) / X[i]
                    # print('Parameter', i, 'with initial value', p0[i],
                    #       'and update', X[i], 'would hit lower limit', l,
                    #       'with alpha', alpha, '; max alpha', a)
                    maxalpha = min(maxalpha, a)
                if u is not None and px > u:
                    # This parameter hits the limit; compute the step size
                    # to just hit the limit.
                    a = (u - p0[i]) / X[i]
                    # print('Parameter', i, 'with initial value', p0[i],
                    #       'and update', X[i], 'would hit upper limit', u,
                    #       'with alpha', alpha, '; max alpha', a)
                    maxalpha = min(maxalpha, a)
            if maxalpha < 1e-8:
                # Even a tiny step crosses a bound: abandon the search.
                # print('Tiny maxalpha; bailing out without parameter update')
                self.hitLimit = True
                break
            if maxalpha < alpha:
                alpha = maxalpha
                bailout = True
                # Here, we "want" to hit the limit, but we won't necessarily
                # accept the update that hits the limit. Still want this flag
                # set, or wait to check whether it improves the log-prob?
                self.hitLimit = True
            # We could just multiply by alpha, but in case of numerical
            # instability, clip values right to limits.
            pa = []
            for p, d, l, u in zip(p0, X, lowers, uppers):
                x = p + alpha * d
                if l is not None and x < l:
                    x = l
                if u is not None and x > u:
                    x = u
                pa.append(x)
            # print('Clipping parameter update to', pa)
            # tractor.setParams(pa)
            # lp = tractor.getLogPrior()
            # print('Log prior:', lp)
            tractor.setParams(pa)
            pAfter = tractor.getLogProb()
            #logverb(' Log-prob after:', pAfter)
            #logverb(' delta log-prob:', pAfter - pBefore)
            #print('Step', alpha, 'p', pAfter, 'dlnp', pAfter-pBefore)
            if not np.isfinite(pAfter):
                logmsg(' Got bad log-prob', pAfter)
                break
            # Log-prob got significantly worse: stop searching.
            if pAfter < (pBest - 1.):
                break
            if pAfter > pBest:
                alphaBest = alpha
                pBest = pAfter
            if bailout:
                break
        # if alphaBest is None or alphaBest == 0:
        #     print "Warning: optimization is borking"
        #     print "Parameter direction =",X
        #     print "Parameters, step sizes, updates:"
        #     for n,p,s,x in zip(tractor.getParamNames(), tractor.getParams(), tractor.getStepSizes(), X):
        #         print n, '=', p, ' step', s, 'update', x
        if alphaBest is None:
            # No improving step found: restore the entry parameters.
            tractor.setParams(p0)
            return 0, 0.
        #logverb(' Stepping by', alphaBest,
        #        'for delta-logprob', pBest - pBefore)
        pa = [p + alphaBest * d for p, d in zip(p0, X)]
        tractor.setParams(pa)
        return pBest - pBefore, alphaBest
|
from . import utils
from .clip import CLIPForImageText
from .huggingface_text import HFAutoModelForTextPrediction
from .timm_image import TimmAutoModelForImagePrediction
from .numerical_mlp import NumericalMLP
from .categorical_mlp import CategoricalMLP
from .numerical_transformer import NumericalTransformer
from .categorical_transformer import CategoricalTransformer
from .fusion import MultimodalFusionMLP, MultimodalFusionTransformer
|
#/bin/env python
# -*-coding:utf=8 -*-
# NOTE(review): Python 2 code (urllib2, print statement below); the first
# line is also missing the '!' to be a valid shebang.
import os,time,subprocess,shlex
import urllib2

def upload_yeelink(image_name, log_file):
    """POST the image at *image_name* to the Yeelink photo-sensor API and
    append the server's reply to *log_file*.

    NOTE(review): the API key is hard-coded — move it to configuration.
    """
    url = 'http://api.yeelink.net/v1.0/device/719/sensor/8613/photos'
    length = os.path.getsize(image_name)
    # NOTE(review): this handle is never closed; urllib2 streams the file
    # as the request body.
    image_data = open(image_name, 'rb')
    request = urllib2.Request(url, data=image_data)
    request.add_header('U-ApiKey', '14765d9cc6axxx057880398486d08f9c')
    request.add_header('Content-Length', '%d' % length)
    res = urllib2.urlopen(request).read().strip()
    log_file.write(res + '\n')
if __name__ == '__main__':
    # Capture a 320x240 frame from /dev/video0 roughly every 11 seconds,
    # banner it with the timestamp, and upload each frame to Yeelink.
    images_path = os.path.join(os.getcwd(), 'image')
    log = open(os.path.join(os.getcwd(), 'output.log'),'w+')
    if not os.path.exists(images_path):
        os.makedirs(images_path)
    # fswebcam command template: title text, output directory, file stem.
    com_line = 'fswebcam -d /dev/video0 -r 320x240 --bottom-banner --title "%s" --no-timestamp %s/%s.jpg'
    while True:
        time_now = time.strftime('%Y-%m-%d-%H-%M-%S')
        com_line_now = com_line % (time_now, images_path, time_now)
        # fswebcam output (stdout+stderr) goes to the shared log file.
        subprocess.call(shlex.split(com_line_now), stdout=log, stderr=log)
        upload_yeelink('%s/%s.jpg' % (images_path, time_now), log)
        print com_line_now  # Python 2 print statement
        time.sleep(11)
|
import collections, copy
# Part one
class Image:
    """Sparse 'infinite image' for the image-enhancement puzzle.

    Explicitly-set pixels live in ``points``; every other pixel implicitly
    has value ``fill``, which models the infinite background and may flip
    between 0 and 1 on each enhancement step (see enhance()).
    """
    def __init__(self, enhancement_algorithm, fill=0):
        # Accept either the raw '.'/'#' string or a pre-parsed 0/1 list.
        if isinstance(enhancement_algorithm, str):
            enhancement_algorithm = list(map('.#'.index, enhancement_algorithm))
        self.enhancement_algorithm = enhancement_algorithm
        self.fill = fill
        self.points = collections.defaultdict(lambda: self.fill)
        inf = float('inf')
        # Bounding box of pixels explicitly set to 1 (kept by __setitem__).
        self.extreme = {
            'x_min': +inf,
            'x_max': -inf,
            'y_min': +inf,
            'y_max': -inf,
        }

    def __getitem__(self, key):
        # Unset pixels fall back to the defaultdict's fill value.
        return self.points[key]

    def __setitem__(self, key, value):
        # Accept '.'/'#' characters as well as 0/1 values.
        if isinstance(value, str):
            value = '.#'.index(value)
        self.points[key] = value
        if not value:
            return
        # Grow the bounding box for lit pixels only.
        for dim, k in zip('xy', key):
            self.extreme[f'{dim}_min'] = min(self.extreme[f'{dim}_min'], k)
            self.extreme[f'{dim}_max'] = max(self.extreme[f'{dim}_max'], k)

    def __repr__(self):
        # Render the lit-pixel bounding box as '.'/'#' rows.
        s = []
        x_min, x_max = self.extreme['x_min'], self.extreme['x_max']
        y_min, y_max = self.extreme['y_min'], self.extreme['y_max']
        for y in range(y_min, y_max + 1):
            for x in range(x_min, x_max + 1):
                s.append('.#'[self[x, y]])
            s.append('\n')
        return ''.join(s).strip()

    def copy(self):
        """Return a deep copy (points, algorithm, fill, and extents)."""
        return copy.deepcopy(self)

    def enhance(self, n=1):
        """Apply the enhancement algorithm *n* times (in place, recursively)."""
        if n == 0:
            return
        # Build new, enhanced image
        win_size = 3
        x_min, x_max = self.extreme['x_min'], self.extreme['x_max']
        y_min, y_max = self.extreme['y_min'], self.extreme['y_max']
        new_image = type(self)(self.enhancement_algorithm, self.fill)
        # Each output pixel indexes the algorithm with its 3x3 input window
        # read as a 9-bit binary number; the canvas grows one pixel per side.
        for y in range(y_min - win_size//2, y_max + 1 + win_size//2):
            for x in range(x_min - win_size//2, x_max + 1 + win_size//2):
                win = []
                for y_win in range(y - win_size//2, y + 1 + win_size//2):
                    for x_win in range(x - win_size//2, x + 1 + win_size//2):
                        win.append(self[x_win, y_win])
                num = int(''.join(map(str, win)), base=2)
                new_image[x, y] = self.enhancement_algorithm[num]
        # Flip the infinite sea if applicable
        if new_image.fill == 0 and new_image.enhancement_algorithm[0] == 1:
            new_image.fill = 1
        elif new_image.fill == 1 and new_image.enhancement_algorithm[-1] == 0:
            new_image.fill = 0
        # Assign enhanced attributes to original instance
        for attr, value in vars(new_image).items():
            setattr(self, attr, value)
        # Call recursively
        self.enhance(n - 1)

    @property
    def size(self):
        """Number of lit pixels; infinite when the background itself is lit."""
        if self.fill == 1:
            return float('inf')
        return sum(1 for value in self.points.values() if value == 1)
with open('input.txt') as f:
    # First line is the enhancement algorithm, then a blank separator,
    # then the pixel grid.
    image_ini = Image(f.readline().strip())
    f.readline() # blank
    for y, line in enumerate(f): # y grows downwards
        for x, c in enumerate(line.strip()):
            image_ini[x, y] = c

# Part one: two enhancement rounds.
image = image_ini.copy()
image.enhance(2)
print('part one:', image.size)

# Part two
# Fifty enhancement rounds, starting again from the pristine input.
image = image_ini.copy()
image.enhance(50)
print('part two:', image.size)
|
import sqlite3

northwind_conn = sqlite3.connect('northwind_small.sqlite3')
northwind_cursor1 = northwind_conn.cursor()

# Ten most expensive products, by unit price.
most_expensive_query = (
    'SELECT ProductName FROM ' +
    '(SELECT * FROM Product ' +
    'Order by UnitPrice DESC LIMIT 10);'
)
most_expensive = northwind_cursor1.execute(most_expensive_query).fetchall()

# BUG FIX: the original concatenated in a loop and then indexed
# most_expensive[9], crashing with IndexError whenever the table holds
# fewer than 10 products; join handles any row count.
most_expensive_string = ', '.join(row[0] for row in most_expensive)

"""10 MostExpensive Items"""
print(f'The 10 most expensive items are: {most_expensive_string}.\n')

# Raw 'YYYY-MM-DD' date rows, consumed by split_dates() below.
birth_dates = northwind_cursor1.execute(
    'SELECT BirthDate FROM Employee;'
).fetchall()
hire_dates = northwind_cursor1.execute(
    'SELECT HireDate FROM Employee;'
).fetchall()
def split_dates(dates):
    """Split ISO 'YYYY-MM-DD' date rows into parallel int lists.

    :param dates: sequence of 1-tuples as returned by cursor.fetchall()
    :return: (years, months, days) — three lists of ints, one entry per row
    """
    years, months, days = [], [], []
    for (date_string,) in dates:
        year, month, day = (int(part) for part in date_string.split('-'))
        years.append(year)
        months.append(month)
        days.append(day)
    return years, months, days
def get_average(values):
    """Return the truncated integer mean of *values*.

    FIX: the original parameter was named ``list``, shadowing the builtin;
    callers in this file pass the argument positionally, so renaming is safe.
    """
    return int(sum(values) / len(values))
# FIX: renamed the misspelled locals 'brith_years' and 'city_agess'.
birth_years, birth_months, birth_days = split_dates(birth_dates)
hire_years, hire_months, hire_days = split_dates(hire_dates)

# Age at hire: year difference, minus one if the birthday had not yet
# occurred in the hire year.
age = []
for i in range(len(birth_years)):
    years_old = hire_years[i] - birth_years[i]
    if hire_months[i] < birth_months[i]:
        years_old -= 1
    elif hire_months[i] == birth_months[i] and hire_days[i] < birth_days[i]:
        years_old -= 1
    age.append(years_old)

"""Average age of employees at hire"""
# FIX: the original message was missing the space before the number
# ("...hire is25.").
print(f'The Average Age of Employees at the time of hire is '
      f'{get_average(age)}.\n')

employee_cities = northwind_cursor1.execute(
    'SELECT City FROM Employee;'
).fetchall()

# Group hire ages by city, preserving first-seen city order.
cities = []
city_ages = []
for i in range(len(employee_cities)):
    city = employee_cities[i][0]
    if city not in cities:
        cities.append(city)
        city_ages.append([age[i]])
    else:
        city_ages[cities.index(city)].append(age[i])

"""Average age of employees at hire per city"""
# FIX: the original message ran the city name into 'at' ("Londonat the...").
for i in range(len(cities)):
    print(f'The Average Age of Employees from {cities[i]} '
          f'at the time of hire is {get_average(city_ages[i])}.\n\n')
northwind_cursor1.close()
northwind_cursor2 = northwind_conn.cursor()

# Top-10 products by unit price, joined with their supplier names.
product_supplier_price_query = ('SELECT Product.ProductName, ' +
                                'Supplier.CompanyName FROM Product ' +
                                'INNER JOIN Supplier ON '
                                'Product.SupplierId = Supplier.id ' +
                                'ORDER BY Product.UnitPrice DESC ' +
                                'LIMIT 10;')
"""10 MostExpensive Items with their Suppliers"""
print(str(northwind_cursor2.execute(product_supplier_price_query).fetchall()) + '\n\n')

# Category containing the largest number of distinct product names.
# BUG FIX: the original query read 'NNER JOIN' (missing 'I'), which is a
# SQL syntax error and makes execute() raise sqlite3.OperationalError.
largest_category_query = ('SELECT Category.CategoryName ' +
                          'FROM Product ' +
                          'INNER JOIN Category ' +
                          'ON CategoryId = Category.id ' +
                          'GROUP BY Category.CategoryName ' +
                          'ORDER BY count(DISTINCT ProductName) DESC '
                          'LIMIT 1;')
largest_category = northwind_cursor2.execute(largest_category_query).fetchone()[0]
"""Largest Category"""
print(f'The Largest Category is {largest_category}')
northwind_cursor2.close()
northwind_conn.close()
|
import logging
import os
from django.conf import settings
from django.db import models
from django.utils.timezone import now
from django.utils.functional import cached_property
from django import template
import apps.web as web
from apps.web.utils import PublicStorage, BundleStorage
from datetime import datetime, timedelta
register = template.Library()
User = settings.AUTH_USER_MODEL
logger = logging.getLogger(__name__)
def get_competition_teams(competition):
    """Return every approved team of *competition*."""
    approved = TeamStatus.objects.get(codename="approved")
    return Team.objects.filter(competition=competition, status=approved).all()

def get_competition_pending_teams(competition):
    """Return every team of *competition* still awaiting approval."""
    pending = TeamStatus.objects.get(codename="pending")
    return Team.objects.filter(
        competition=competition,
        status=pending,
    ).select_related("status").all()

def get_team_pending_membership(team):
    """Return the pending membership requests addressed to *team*."""
    pending = TeamMembershipStatus.objects.get(codename="pending")
    return TeamMembership.objects.filter(
        team=team,
        is_request=True,
        status=pending,
    ).select_related("user").all()

def get_competition_deleted_teams(competition):
    """Return every deleted team of *competition*."""
    deleted = TeamStatus.objects.get(codename="deleted")
    return Team.objects.filter(competition=competition, status=deleted).all()
def get_competition_user_teams(competition, user):
    """Return the first approved team created by *user* in *competition*,
    or None when there is none."""
    created_teams = Team.objects.filter(
        competition=competition,
        status=TeamStatus.objects.get(codename="approved"),
        creator=user.user,
    ).all()
    return created_teams[0] if len(created_teams) else None

def get_competition_user_pending_teams(competition, user):
    """Return the first pending team created by *user* in *competition*,
    or None when there is none."""
    created_teams = Team.objects.filter(
        competition=competition,
        status=TeamStatus.objects.get(codename="pending"),
        creator=user.user,
    ).all()
    return created_teams[0] if len(created_teams) else None

def get_user_requests(user, competition):
    """Return *user*'s membership rows for any approved team of *competition*."""
    approved_teams = get_competition_teams(competition)
    return TeamMembership.objects.filter(
        user=user.user,
        team__in=approved_teams,
    ).all()
def get_allowed_teams(user,competition):
    """Return the teams *user* may ask to join in *competition*.

    Currently returns every approved team.
    """
    # TODO: Remove teams where user already have a request
    return get_competition_teams(competition)

# def get_user_team(user, competition):
def get_user_team(participant, competition):
    """Return the team *participant* belongs to in *competition*, or None.

    An approved team created by the participant wins over a team joined
    via an approved membership.
    """
    # This function just gets user's created teams that are approved
    # and returns the first one or None
    user_created_teams = Team.objects.filter(competition=competition, creator=participant.user, status__codename='approved').select_related('status')
    # If we have user created teams
    if user_created_teams is not None and len(user_created_teams) > 0:
        return user_created_teams[0]
    else:
        # Else if no user created teams
        user_approved_team_memberships = list(participant.user.team_memberships.filter(status__codename='approved').select_related('team', 'status'))
        # Keep only memberships whose is_active flag is set.
        user_approved_active_teams = [membership for membership in user_approved_team_memberships if membership.is_active]
        if user_approved_active_teams is not None and len(user_approved_active_teams) > 0:
            return user_approved_active_teams[0].team
    return None
def get_team_submissions(team, phase=None):
    """Return *team*'s submissions, optionally restricted to *phase*.

    BUG FIX: the original branches were inverted — with no phase given it
    filtered on ``phase=None`` (matching nothing meaningful), and with a
    phase given it ignored the phase entirely.
    """
    if phase is None:
        return web.models.CompetitionSubmission.objects.filter(team=team)
    return web.models.CompetitionSubmission.objects.filter(phase=phase, team=team)
def get_last_team_submissions(team, days=1):
    """Return *team*'s submissions from the last *days* days.

    FIX: uses the timezone-aware ``now()`` already imported from
    django.utils.timezone (consistent with Team.save in this module) —
    naive ``datetime.now()`` breaks datetime comparisons when USE_TZ is
    enabled. ``timedelta(days=days)`` is also made explicit.
    """
    cutoff = now() - timedelta(days=days)
    return web.models.CompetitionSubmission.objects.filter(
        team=team, submitted_at__gte=cutoff)
def get_team_submissions_inf(team, phase):
    """Return display dicts for *team*'s submissions in *phase*, newest first."""
    submissions = web.models.CompetitionSubmission.objects.filter(
        team=team,
        phase=phase
    ).select_related('status').order_by('-submitted_at')
    # find which submission is in the leaderboard, if any and only if phase allows seeing results.
    id_of_submission_in_leaderboard = -1
    if phase and not phase.is_blind:
        leaderboard_entry = web.models.PhaseLeaderBoardEntry.objects.filter(
            board__phase=phase,
            result__team=team
        ).select_related('result', 'result__participant')
        if leaderboard_entry:
            id_of_submission_in_leaderboard = leaderboard_entry[0].result.pk
    # Flatten every submission into the dict shape the templates consume.
    submission_info_list = []
    for submission in submissions:
        submission_info = {
            'id': submission.id,
            'number': submission.submission_number,
            'username': submission.participant.user.username,
            'filename': submission.get_filename(), # left as call for legacy update of readable_filename on subs.
            'submitted_at': submission.submitted_at,
            'status_name': submission.status.name,
            'is_finished': submission.status.codename == 'finished',
            'is_in_leaderboard': submission.id == id_of_submission_in_leaderboard,
            'exception_details': submission.exception_details,
            'description': submission.description,
            'team_name': submission.team_name,
            'method_name': submission.method_name,
            'method_description': submission.method_description,
            'project_url': submission.project_url,
            'publication_url': submission.publication_url,
            'bibtex': submission.bibtex,
            'organization_or_affiliation': submission.organization_or_affiliation,
            'is_public': submission.is_public,
            'score': submission.get_default_score(),
        }
        submission_info_list.append(submission_info)
    return submission_info_list

def get_available_participants(competition):
    """Placeholder: participants available to join a team (always empty)."""
    return []
class TeamStatus(models.Model):
    """Lookup table for the lifecycle status of a Team."""
    # Codename constants used throughout this module.
    UNKNOWN = 'unknown'
    DENIED = 'denied'
    APPROVED = 'approved'
    PENDING = 'pending'
    DELETED = 'deleted'
    name = models.CharField(max_length=30)
    # Machine-readable key (matched against the constants above).
    codename = models.CharField(max_length=30,unique=True)
    description = models.CharField(max_length=50)
    def __unicode__(self):
        return self.name
class Team(models.Model):
    """ This is the base team. """
    class Meta:
        # A team name is unique within one competition.
        unique_together = (('name', 'competition'),)
    def __unicode__(self):
        return "[%s] %s - %s" % (self.status.codename, self.competition.title, self.name)
    name = models.CharField(max_length=100, null=False, blank=False)
    competition = models.ForeignKey('web.Competition')
    description = models.TextField(null=True, blank=True)
    image = models.FileField(upload_to='team_logo', storage=PublicStorage, null=True, blank=True, verbose_name="Logo")
    # Storage base URL captured at save() time; joined with image.name in image_url.
    image_url_base = models.CharField(max_length=255)
    allow_requests = models.BooleanField(default=True, verbose_name="Allow requests")
    creator = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='team_creator')
    members = models.ManyToManyField(settings.AUTH_USER_MODEL, through='TeamMembership', blank=True, null=True, related_name='teams')
    created_at = models.DateTimeField(null=True, auto_now_add=True)
    last_modified = models.DateTimeField(auto_now_add=True)
    status = models.ForeignKey(TeamStatus, null=True)
    reason = models.CharField(max_length=100,null=True,blank=True)
    def save(self, *args, **kwargs):
        """Normalize derived fields, default status to 'pending', then save."""
        # Make sure the image_url_base is set from the actual storage implementation
        self.image_url_base = self.image.storage.url('')
        self.last_modified=now()
        if self.status is None:
            self.status = TeamStatus.objects.get(codename=TeamStatus.PENDING)
        # Do the real save
        return super(Team,self).save(*args,**kwargs)
    @cached_property
    def image_url(self):
        # Return the transformed image_url
        if self.image:
            return os.path.join(self.image_url_base, self.image.name)
        return None
    @cached_property
    def active_members(self):
        # Approved, active memberships (see get_members).
        return self.get_members("approved")
    @cached_property
    def active_members_count(self):
        # +1 presumably accounts for the creator, who has no membership
        # row — TODO confirm.
        return len(self.get_members("approved")) + 1
    @cached_property
    def active_requests(self):
        # Pending, active membership requests.
        return self.get_members("pending")
    def has_applied(self, user):
        """True if *user* has a pending membership request to this team."""
        for member in self.get_members("pending"):
            if member.user == user:
                return True
        return False
    def is_member(self, user):
        """True if *user* is an approved active member of this team."""
        for member in self.get_members("approved"):
            if member.user == user:
                return True
        return False
    @property
    def is_admin(self, user):
        # NOTE(review): a @property getter cannot take an extra argument;
        # accessing team.is_admin raises TypeError because fget is invoked
        # with only self. Likely intended as a plain method — verify callers.
        return self.creator==user
    def get_members(self, status):
        """Return active TeamMembership requests with the given status codename."""
        requests = TeamMembership.objects.filter(
            team=self,
            is_request=True,
            status=TeamMembershipStatus.objects.get(codename=status),
        ).select_related("user").all()
        members=[]
        for member in requests:
            # Keep only memberships whose is_active flag is set.
            if member.is_active:
                members.append(member)
        return members
class TeamMembershipStatus(models.Model):
    """Lookup table describing the workflow state of a TeamMembership.

    The class constants below are the canonical ``codename`` values.
    """
    UNKNOWN = 'unknown'
    REJECTED = 'rejected'
    APPROVED = 'approved'
    PENDING = 'pending'
    CANCELED = 'canceled'
    name = models.CharField(max_length=30)
    codename = models.CharField(max_length=30,unique=True)
    description = models.CharField(max_length=50)
    def __unicode__(self):
        return self.name
class TeamMembership(models.Model):
    """Through-model linking a user to a Team, covering both invitations
    (team -> user) and requests (user -> team)."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='team_memberships')
    team = models.ForeignKey(Team)
    is_invitation = models.BooleanField(default=False)
    is_request = models.BooleanField(default=False)
    start_date = models.DateTimeField(null=True, blank=True, verbose_name="Start Date (UTC)")
    end_date = models.DateTimeField(null=True, blank=True, verbose_name="End Date (UTC)")
    message = models.TextField(null=True, blank=True)
    status = models.ForeignKey(TeamMembershipStatus, null=True)
    reason = models.CharField(max_length=100,null=True,blank=True)
    def __unicode__(self):
        return "%s - %s" % (self.team_id, self.user_id)
    def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
        """Save this membership, enforcing one team per user per competition."""
        other_memberships = TeamMembership.objects.filter(user=self.user, team__competition=self.team.competition).exclude(pk=self.pk)
        # Perf fix: use QuerySet.exists() instead of len(...), which fetched
        # every matching row just to test for emptiness.
        if other_memberships.exists():
            print("Removing user: {0} from other memberships in competition: {1}".format(self.user, self.team.competition))
            other_memberships.delete()
        super(TeamMembership, self).save(
            force_insert=force_insert,
            force_update=force_update,
            using=using,
            update_fields=update_fields
        )
    @property
    def is_active(self):
        """True when now() falls inside the optional start/end date window."""
        if self.start_date is not None and now() < self.start_date:
            return False
        if self.end_date is not None and now() > self.end_date:
            return False
        return True
|
import math
import itertools
# Base class for exceptions raised by this module.
class Error(Exception):
    """Base class for exceptions in this module."""
    pass
class Bunny_Console_Error(Error):
    """Raised for invalid bunny/console configurations.

    Attributes:
        prev: previous (offending) value.
        next: next (expected) value.
        msg: human-readable explanation.
    """
    def __init__(self, prev, nex, msg):
        # Bug fix: ``msg`` was previously accepted but silently discarded and
        # the Exception base class was never initialised, so str(exc) was empty.
        super(Bunny_Console_Error, self).__init__(msg)
        self.prev = prev
        self.next = nex
        self.msg = msg
def nCr(n, r):
    """Return the binomial coefficient C(n, r) as an exact integer.

    Bug fix: the original used true division (``/``), which returns a float
    and silently loses precision for large ``n``. Floor division is exact
    here because the quotient is always an integer.
    """
    return math.factorial(n) // (math.factorial(r) * math.factorial(n - r))
def solution(bunnies, keys_required):
    """Distribute key copies among ``bunnies`` so that any ``keys_required``
    of them together hold every distinct key.

    Returns one (sorted) list of key numbers per bunny.
    """
    answer = [[] for _ in range(bunnies)]
    if bunnies < keys_required:
        # Under-staffed configuration: fall through and let the generic
        # cases below produce whatever they produce.
        pass
    if keys_required == 0:
        # No keys needed at all: a single empty key ring.
        return [[]]
    if bunnies == 1:
        # The lone bunny carries every key.
        answer[0].extend(range(keys_required))
    elif keys_required == 1:
        # A single key: every bunny carries a copy of key 0.
        for ring in answer:
            ring.append(0)
    elif bunnies == keys_required:
        # All bunnies required: each holds exactly one distinct key.
        for idx, ring in enumerate(answer):
            ring.append(idx)
    else:
        # General case: one distinct key per (keys_required)-sized subset
        # of bunnies, handed to every bunny in that subset.
        combos = itertools.combinations(range(bunnies), keys_required)
        for key, combo in enumerate(combos):
            for bunny in combo:
                answer[bunny].append(key)
    return answer
# Exercise solution() over a grid of bunny counts and key requirements,
# printing each resulting key distribution.
for num_buns in range(1,10):
    for num_required in range(10):
        key_dist = solution(num_buns,num_required)
        print("-" * 60)
        print("Answer for {0:d} bunnies, requiring {1:d}".format(num_buns,num_required))
        # Small answers fit on one line; larger ones print one bunny per line.
        if ( len(key_dist[0]) * len(key_dist) ) < 25:
            print(key_dist)
        else:
            for bun in key_dist:
                print(bun)
'''
key_dist = solution(3,0)
print(key_dist)
key_dist = solution(3,1)
print(key_dist)
key_dist = solution(2,2)
print(key_dist)
key_dist = solution(3,2)
print(key_dist)
key_dist = solution(2,1)
print(key_dist)
key_dist = solution(4,4)
print(key_dist)
key_dist = solution(5,3)
print(key_dist)
'''
import itertools
def solution(bunnies, keys_required):
    """Distribute key copies among ``bunnies`` so that any ``keys_required``
    of them together hold every distinct key.

    :param int bunnies: number of bunny workers.
    :param int keys_required: how many bunnies must cooperate to open.
    :return: one list of key numbers per bunny.
    :rtype: list of lists
    """
    answer = []
    for i in range(bunnies):
        answer.append([])
    if keys_required > bunnies:
        # Bug fix: the original iterated over the *integer* itself
        # (``for key in keys_required``), which raised TypeError whenever
        # this branch was reached. Give every key to the first bunny,
        # mirroring the single-bunny behaviour.
        for key in range(keys_required):
            answer[0].append(key)
    elif keys_required == 1:
        # A single key: every bunny carries a copy of key 0.
        key = 0
        for group in range(bunnies):
            answer[group].append(key)
    elif bunnies == keys_required:
        # All bunnies required: each holds exactly one distinct key.
        key = 0
        for group in range(bunnies):
            answer[group].append(key)
            key += 1
    else:
        # General case: one distinct key per (keys_required)-sized subset
        # of bunnies, handed to every bunny in that subset.
        key = 0
        for item in itertools.combinations(range(bunnies), keys_required):
            for group in item:
                answer[group].append(key)
            key += 1
    return answer
|
import requests
from functools import partialmethod
def raise_for_status_hook(response: requests.Response, *args, **kwargs):
    """requests response hook: raise HTTPError for any 4xx/5xx response."""
    response.raise_for_status()
class ApigeeClient:
    """Minimal client for the Apigee Edge management and portal APIs.

    Authenticate either with a ready-made OAuth ``access_token`` or with a
    ``username``/``password`` pair, which is exchanged for a token at init
    time. Every response passes through ``raise_for_status_hook``, so HTTP
    errors surface as ``requests.HTTPError``.
    """

    def __init__(
        self,
        apigee_org: str,
        username: str = None,
        password: str = None,
        access_token: str = None,
        session: requests.Session = None,
    ):
        self.apigee_org = apigee_org
        # Bug fix: ``session`` used to default to a single Session object
        # created at import time (mutable default argument), so all clients
        # shared one session and its hooks. Create one per instance instead.
        self._session = session if session is not None else requests.Session()
        self._session.hooks = {"response": raise_for_status_hook}
        if access_token:
            self.access_token = access_token
        elif username and password:
            # Bug fix: the token exchange sends a request through
            # ``self._session``, so the session must be fully initialised
            # *before* this call; previously it was assigned afterwards.
            self.access_token = self._get_access_token(username, password)

    def _request(self, method: str, url: str, **kwargs):
        """Send *method* *url* with the bearer-auth header merged in.

        Caller-supplied ``headers`` are merged on top of (and may override)
        the authorization header.
        """
        headers = self._auth_headers
        headers.update(kwargs.pop("headers", {}))
        return self._session.request(method, url, headers=headers, **kwargs)

    # HTTP verb shortcuts bound to _request.
    get = partialmethod(_request, "GET")
    post = partialmethod(_request, "POST")
    put = partialmethod(_request, "PUT")
    delete = partialmethod(_request, "DELETE")
    patch = partialmethod(_request, "PATCH")
    head = partialmethod(_request, "HEAD")
    options = partialmethod(_request, "OPTIONS")

    def list_proxies(self):
        """List the organization's API proxies."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apis"
        )
        return response.json()

    def list_env_proxy_deployments(self, env: str):
        """List proxy deployments in the given environment."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/environments/{env}/deployments"
        )
        return response.json()

    def get_proxy(self, proxy):
        """Fetch metadata for a single API proxy."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apis/{proxy}"
        )
        return response.json()

    def get_proxy_revision(self, proxy: str, revision: str):
        """Fetch metadata for one revision of an API proxy."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apis/{proxy}/revisions/{revision}"
        )
        return response.json()

    def undeploy_proxy_revision(self, env: str, proxy: str, revision: str):
        """Undeploy one proxy revision from an environment."""
        response = self.delete(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/environments/{env}/apis/{proxy}/revisions/{revision}/deployments"
        )
        return response.json()

    def delete_proxy(self, proxy: str):
        """Delete an API proxy and all of its revisions."""
        response = self.delete(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apis/{proxy}"
        )
        return response.json()

    def list_products(self):
        """List the organization's API products."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apiproducts"
        )
        return response.json()

    def delete_product(self, product: str):
        """Delete an API product."""
        # TODO implement cascade behaviour
        response = self.delete(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/apiproducts/{product}"
        )
        return response.json()

    def list_specs(self):
        """List the specs in the organization's home folder (portal API)."""
        response = self.get(
            f"https://apigee.com/dapi/api/organizations/{self.apigee_org}/specs/folder/home"
        )
        return response.json()

    def create_spec(self, name: str, folder: str):
        """Create an (empty) spec doc in *folder*; returns the raw response."""
        response = self.post(
            f"https://apigee.com/dapi/api/organizations/{self.apigee_org}/specs/doc",
            json={"folder": folder, "name": name, "kind": "Doc"},
        )
        return response

    def update_spec(self, spec_id: str, content: str):
        """Replace a spec doc's content with *content* (UTF-8 plain text)."""
        response = self.put(
            f"https://apigee.com/dapi/api/organizations/{self.apigee_org}/specs/doc/{spec_id}/content",
            headers=dict(**{"Content-Type": "text/plain"}, **self._auth_headers),
            data=content.encode("utf-8"),
        )
        return response

    def delete_spec(self, spec_id: str):
        """Delete a spec doc; returns the raw response."""
        response = self.delete(
            f"https://apigee.com/dapi/api/organizations/{self.apigee_org}/specs/doc/{spec_id}",
        )
        return response

    def get_portals(self):
        """List the organization's developer portals; returns the raw response."""
        response = self.get(
            f"https://apigee.com/portals/api/sites?orgname={self.apigee_org}",
        )
        return response

    def get_apidocs(self, portal_id: str):
        """List a portal's published API docs; returns the raw response."""
        response = self.get(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs",
        )
        return response

    def create_portal_api(
        self,
        friendly_name: str,
        spec_name: str,
        spec_id: str,
        portal_id: str,
        visible: bool = True,
        requireCallbackUrl: bool = False,
    ):
        """Publish a spec as an API doc on a portal; returns the raw response."""
        response = self.post(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs",
            json={
                "anonAllowed": True,
                "description": "",
                "edgeAPIProductName": spec_name,
                "requireCallbackUrl": requireCallbackUrl,
                "specContent": spec_id,
                "specId": spec_name,
                "title": friendly_name,
                "visibility": visible,
            },
        )
        return response

    def update_portal_api(
        self,
        apidoc_id: str,
        friendly_name: str,
        spec_name: str,
        spec_id: str,
        portal_id: str,
        visible: bool = True,
        requireCallbackUrl: bool = False,
    ):
        """Update an existing portal API doc; returns the raw response."""
        response = self.put(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs/{apidoc_id}",
            json={
                "anonAllowed": True,
                "description": "",
                "edgeAPIProductName": spec_name,
                "requireCallbackUrl": requireCallbackUrl,
                "specContent": spec_id,
                "specId": spec_name,
                "title": friendly_name,
                "visibility": visible,
            },
        )
        return response

    def get_apidoc(self, portal_id: str, apidoc_id: str):
        """Fetch a single portal API doc; returns the raw response."""
        response = self.get(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs/{apidoc_id}",
        )
        return response

    def update_spec_snapshot(self, portal_id: str, apidoc_id: str):
        """Refresh a portal API doc's spec snapshot; returns the raw response."""
        apidoc = self.get_apidoc(portal_id, apidoc_id).json()["data"]
        # Re-PUT the doc first; presumably required to mark it dirty before
        # the snapshot refresh takes effect — TODO confirm against the API.
        self.put(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs/{apidoc_id}",
            json={
                "anonAllowed": True,
                "description": apidoc["description"],
                "specId": apidoc["specId"],
                "visibility": True,
            },
        )
        return self.put(
            f"https://apigee.com/portals/api/sites/{portal_id}/apidocs/{apidoc_id}/snapshot",
        )

    def list_keystores(self, environment: str):
        """List an environment's keystores."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/environments/{environment}/keystores",
        )
        return response.json()

    def get_keystore(self, environment: str, keystore_name: str):
        """Fetch a single keystore's metadata."""
        response = self.get(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/environments/{environment}/keystores/{keystore_name}",
        )
        return response.json()

    def create_keystore(self, environment: str, keystore_name: str):
        """
        Create a return a keystore.

        Is idempotent, if keystore already exists will just retrieve.
        """
        # Assumes list_keystores() returns a sequence of keystore names —
        # TODO confirm against the Edge API response shape.
        if keystore_name in self.list_keystores(environment):
            return self.get_keystore(environment, keystore_name)
        response = self.post(
            f"https://api.enterprise.apigee.com/v1/organizations/{self.apigee_org}/environments/{environment}/keystores",
            data={"name": keystore_name},
        )
        return response.json()

    @property
    def _auth_headers(self):
        """Bearer-token authorization header for management API calls."""
        return {"Authorization": f"Bearer {self.access_token}"}

    def _get_access_token(self, username: str, password: str):
        """Exchange username/password for an OAuth access token.

        Bug fix: this previously went through ``self.post``, whose header
        construction reads ``self.access_token`` — which does not exist yet
        at this point, so the username/password path always raised
        AttributeError. The request is now issued on the session directly.
        """
        response = self._session.request(
            "POST",
            "https://login.apigee.com/oauth/token",
            data={"username": username, "password": password, "grant_type": "password"},
            headers={
                "Content-Type": "application/x-www-form-urlencoded",
                # Fixed, public basic-auth credentials of the "edgecli" client.
                "Authorization": "Basic ZWRnZWNsaTplZGdlY2xpc2VjcmV0",
            },
        )
        return response.json()["access_token"]
|
"""
Copyright (C) 2021 Patrick Maloney
"""
import requests
from typing import Tuple
from dataclasses import dataclass
from geopy.geocoders import Nominatim
# TODO: Documentation
@dataclass()
class Properties:
    """Top-level forecast metadata pulled from the api.weather.gov response."""
    updated: str       # 'updated' timestamp string
    generated_at: str  # 'generatedAt' timestamp string
    update_time: str   # 'updateTime' timestamp string
    valid_times: str   # 'validTimes' interval string
    elevation: float   # elevation 'value' (units per API response — TODO confirm)
@dataclass()
class Period:
    """One forecast period (a day/night slot, or one hour in hourly mode)."""
    number: int
    name: str  # unused in hourly
    start_time: str
    end_time: str
    is_daytime: bool
    temperature: int
    temp_unit: str
    wind_speed: str
    wind_direction: str
    icon: str
    short_forecast: str
    long_forecast: str  # unused in hourly
@dataclass()
class Forecast:
    """Container pairing forecast metadata with its list of periods."""
    properties: Properties
    periods: list  # list of dataclass Period
class Forecaster(object):
    """Fetches weather forecasts from api.weather.gov for a US postal code."""

    def __init__(self, user_agent: str = '') -> None:
        if len(user_agent) == 0:
            raise TypeError('User Agent is required.')  # maybe change error or make new one
        self.__user_agent = user_agent
        # api.weather.gov requires a User-Agent identifying the caller.
        self.__request_headers = {"User-Agent": f"({user_agent})"}
        self.__geolocator = Nominatim(user_agent=self.__user_agent)

    def __get_coords(self, postal_code: int) -> Tuple[float, float]:
        """Geocode a 5-digit US postal code to (latitude, longitude)."""
        # Idiom fix: isinstance instead of a direct type() comparison.
        # bool is rejected explicitly because it is a subclass of int,
        # preserving the original strict behaviour.
        if (not isinstance(postal_code, int) or isinstance(postal_code, bool)
                or len(str(postal_code)) != 5):
            raise TypeError('postal_code must be 5 digit postal code as an integer.')
        location = self.__geolocator.geocode(str(postal_code))
        return location.latitude, location.longitude

    def __get_gridpoints(self, coords: Tuple[float, float]) -> dict:
        """Resolve coordinates to NWS gridpoint metadata (parsed JSON)."""
        url = f'https://api.weather.gov/points/{coords[0]},{coords[1]}'
        response = requests.get(url, headers=self.__request_headers)
        # Equivalent to the original status_code != 200 guard:
        # raise_for_status() is a no-op for non-error statuses.
        response.raise_for_status()
        return response.json()

    def get_forecast(self, postal_code: int, hourly: bool = False) -> Forecast:
        """Fetch the forecast (hourly or twice-daily) for *postal_code*.

        :raises TypeError: for a malformed postal code.
        :raises requests.HTTPError: when the weather API returns an error.
        """
        coords = self.__get_coords(postal_code)
        # Hoisted: the gridpoint lookup is needed once, whichever URL we pick.
        props = self.__get_gridpoints(coords)['properties']
        forecast_url = props['forecastHourly'] if hourly else props['forecast']
        response = requests.get(forecast_url, headers=self.__request_headers)
        response.raise_for_status()
        r_json = response.json()
        properties = Properties(r_json['properties']['updated'],
                                r_json['properties']['generatedAt'],
                                r_json['properties']['updateTime'],
                                r_json['properties']['validTimes'],
                                r_json['properties']['elevation']['value'])
        periods = []
        for period in r_json['properties']['periods']:
            periods.append(Period(period['number'],
                                  period['name'],
                                  period['startTime'],
                                  period['endTime'],
                                  period['isDaytime'],
                                  period['temperature'],
                                  period['temperatureUnit'],
                                  period['windSpeed'],
                                  period['windDirection'],
                                  period['icon'],
                                  period['shortForecast'],
                                  period['detailedForecast'],))
        return Forecast(properties, periods)

    def get_hourly_forecast(self, postal_code: int):
        """Convenience wrapper: the hourly variant of get_forecast()."""
        return self.get_forecast(postal_code, hourly=True)
|
#!/usr/bin/env python
"""The setup script."""
import os
from setuptools import setup, find_namespace_packages
# The PyPI long description comes straight from the README.
with open("README.rst") as readme_file:
    readme = readme_file.read()
# The requirements section should be kept in sync with the environment.yml file
requirements = [
    # fmt: off
    "chalice>=1.13",
    "click>=7.0",
    "click-plugins",
    "click-log",
    "entrypoints",
    "environs",
    "jinja2>=2.9,<3",
    "octokitpy>=0.13.0,<0.14",
    "PyPubSub>=4.0.3,<5",
    "python-dateutil",
    # fmt: on
]
# Build/packaging-time requirements.
setup_requirements = [
    # fmt: off
    "pytest-runner",
    "setuptools_scm",
    "wheel",
    # fmt: on
]
# Test-only requirements (also exposed below via the "test" extra).
test_requirements = [
    # fmt: off
    "pytest>=3",
    "pytest-chalice",
    "pytest-cov",
    "requests-mock",
    # fmt: on
]
# PyPA-name -> conda-name mapping, attached to the setup() kwargs only
# during conda builds (see the CONDA_BUILD_STATE block below).
conda_rosetta_stone = {
    # fmt: off
    "pypa-requirement": "conda-dependency"
    # fmt: on
}
setup_kwargs = dict(
    author="Patrick Sodré",
    author_email="psodre@gmail.com",
    # The version is derived from git tags by setuptools_scm.
    use_scm_version={"write_to": "zeroae/goblet/_version.py"},
    python_requires=">=3.6",
    classifiers=[
        "Development Status :: 2 - Pre-Alpha",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
    ],
    description="A Chalice blueprint for creating GitHub Apps",
    # fmt: off
    entry_points={
        "console_scripts": [
            "smee=zeroae.smee.cli:smee"
        ],
        "zeroae.cli": [
            "goblet=zeroae.goblet.cli:goblet",
        ],
    },
    # fmt: on
    install_requires=requirements,
    license="Apache",
    long_description=readme,
    long_description_content_type="text/x-rst",
    include_package_data=True,
    keywords="goblet zeroae",
    name="zeroae-goblet",
    packages=find_namespace_packages(include=["zeroae.*"]),
    setup_requires=setup_requirements,
    test_suite="tests",
    tests_require=test_requirements,
    extras_require={
        # fmt: off
        "test": test_requirements
        # fmt: on
    },
    url="https://github.com/zeroae/zeroae-goblet",
    zip_safe=False,
)
# Under conda-build, resolve the version eagerly with setuptools_scm and
# pass it explicitly instead of use_scm_version.
if "CONDA_BUILD_STATE" in os.environ:
    try:
        from setuptools_scm import get_version
        setup_kwargs["version"] = get_version(**setup_kwargs["use_scm_version"])
        del setup_kwargs["use_scm_version"]
    except ModuleNotFoundError:
        print(
            "Error: zeroae-goblet requires that setuptools_scm be installed with conda-build!"  # noqa: E501
        )
        raise
    setup_kwargs["conda_rosetta_stone"] = conda_rosetta_stone
setup(**setup_kwargs)
|
"""Fix for pyaudio (or rather portaudio) debug messages
These get very annoying and break npyscreen interface, even though everything
works properly.
Stolen from http://stackoverflow.com/q/7088672
"""
from ctypes import (
CFUNCTYPE,
c_char_p,
c_int,
cdll
)
def _py_error_handler(filename, line, function, err, fmt):
    """No-op error handler: swallow every ALSA error message."""
    pass
# ctypes prototype for the handler passed to snd_lib_error_set_handler:
# void handler(const char *file, int line, const char *function,
#              int err, const char *fmt, ...)
_ERROR_HANDLER_FUNC = CFUNCTYPE(
    None,
    c_char_p,
    c_int,
    c_char_p,
    c_int,
    c_char_p,
)
# Module-level reference that keeps the ctypes callback alive once installed.
_c_error_handler = None
def fix_pyaudio():
    """Silence ALSA's debug output by installing a no-op error handler.

    NOTE(review): raises OSError if libasound.so cannot be loaded — confirm
    callers only invoke this on systems with ALSA present.
    """
    global _c_error_handler  # required to trick the garbage collector
    _c_error_handler = _ERROR_HANDLER_FUNC(_py_error_handler)
    _asound = cdll.LoadLibrary('libasound.so')
    _asound.snd_lib_error_set_handler(_c_error_handler)
__all__ = [
'fix_pyaudio',
]
|
# Generated by Django 3.2.5 on 2021-07-08 13:09
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the news app: Journalist and Article tables.

    Journalist is created first so Article's FK can reference it within
    this same migration.
    """
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Journalist',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=60)),
                ('last_name', models.CharField(max_length=60)),
                ('emotion', models.CharField(choices=[('New', 'new'), ('Happy', 'happy'), ('Sad', 'sad'), ('Calm', 'calm'), ('Nervous', 'nervous'), ('Etc', 'etc')], default='New', max_length=10)),
            ],
        ),
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=120)),
                ('description', models.CharField(max_length=120)),
                ('body', models.TextField(max_length=500)),
                ('location', models.CharField(max_length=50)),
                ('publication_date', models.DateField()),
                ('active', models.BooleanField(default=False)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='news.journalist')),
            ],
        ),
    ]
|
# import third party libs
from flask import Blueprint
api = Blueprint("api", __name__, url_prefix="/api")
# View modules are imported *after* the blueprint is created so they can
# import ``api`` without a circular-import error.
from . import vrf
from . import vrfs
|
from django.db import models
# Internal value/text pair used when rendering survey options.
class SurveyQuestion(object):
    """One selectable survey option: its numeric value and display text."""

    def __init__(self, value, text):
        self.value, self.text = value, text
class SurveyAnswerValues(object):
    """One tallied survey option: text, absolute votes, and vote percentage."""

    def __init__(self, option, votes, votespercent):
        self.option, self.votes, self.votespercent = option, votes, votespercent
class Survey(models.Model):
    """A community poll with up to eight options; at most one survey is
    'current' at any time (enforced in save())."""
    question = models.CharField(max_length=500, null=False, blank=False)
    opt1 = models.CharField(max_length=500, null=False, blank=False)
    opt2 = models.CharField(max_length=500, null=False, blank=False)
    opt3 = models.CharField(max_length=500, null=False, blank=True)
    opt4 = models.CharField(max_length=500, null=False, blank=True)
    opt5 = models.CharField(max_length=500, null=False, blank=True)
    opt6 = models.CharField(max_length=500, null=False, blank=True)
    opt7 = models.CharField(max_length=500, null=False, blank=True)
    opt8 = models.CharField(max_length=500, null=False, blank=True)
    posted = models.DateTimeField(null=False, auto_now_add=True)
    current = models.BooleanField(null=False, default=False)
    # Presumably consumed by cache-purge machinery — verify against its users.
    purge_urls = ('/community/survey', '/community/$')
    def __str__(self):
        return self.question
    @property
    def questions(self):
        """Yield a SurveyQuestion per non-empty option, in order.

        Stops at the first empty option, so options must be filled contiguously.
        """
        for i in range(1, 9):
            v = getattr(self, "opt%s" % i)
            if not v:
                break
            yield SurveyQuestion(i, v)
    @property
    def answers(self):
        """Lazily fetch (or create) the SurveyAnswer tally row for this survey."""
        if not hasattr(self, "_answers"):
            self._answers = SurveyAnswer.objects.get_or_create(survey=self)[0]
        return self._answers
    @property
    def completeanswers(self):
        """Yield SurveyAnswerValues (text, votes, percentage) per option."""
        for a in self._get_complete_answers():
            yield SurveyAnswerValues(a[0], a[1], self.totalvotes > 0 and (100 * a[1] / self.totalvotes) or 0)
    @property
    def totalvotes(self):
        """Total votes across all options (computed once per instance)."""
        if not hasattr(self, "_totalvotes"):
            self._totalvotes = 0
            for a in self._get_complete_answers():
                self._totalvotes = self._totalvotes + a[1]
        return self._totalvotes
    def _get_complete_answers(self):
        # Pair each non-empty option text with its tally from SurveyAnswer.
        for i in range(1, 9):
            q = getattr(self, "opt%s" % i)
            if not q:
                break
            n = getattr(self.answers, "tot%s" % i)
            yield (q, n)
    def save(self, *args, **kwargs):
        """Save, first demoting any other survey marked 'current'.

        Bug fix: the override previously accepted no arguments, breaking
        Django callers that pass e.g. ``update_fields=...``; all arguments
        are now forwarded to the parent implementation.
        """
        # Make sure only one survey at a time can be the current one
        # (there may be some small race conditions here, but the likelihood
        # that two admins are editing the surveys at the same time...)
        if self.current:
            previous = Survey.objects.filter(current=True)
            for p in previous:
                if not p == self:
                    p.current = False
                    p.save()  # primary key check avoids recursion
        # Now that we've made any previously current ones non-current, we are
        # free to save this one.
        super(Survey, self).save(*args, **kwargs)
class SurveyAnswer(models.Model):
    """Per-survey vote tallies: ``totN`` holds the vote count for ``optN``."""
    survey = models.OneToOneField(Survey, null=False, blank=False, primary_key=True, on_delete=models.CASCADE)
    tot1 = models.IntegerField(null=False, default=0)
    tot2 = models.IntegerField(null=False, default=0)
    tot3 = models.IntegerField(null=False, default=0)
    tot4 = models.IntegerField(null=False, default=0)
    tot5 = models.IntegerField(null=False, default=0)
    tot6 = models.IntegerField(null=False, default=0)
    tot7 = models.IntegerField(null=False, default=0)
    tot8 = models.IntegerField(null=False, default=0)
    # Presumably consumed by cache-purge machinery — verify against its users.
    purge_urls = ('/community/survey', )
class SurveyLock(models.Model):
    """One row per voting IP address — presumably used to throttle repeat
    votes; verify against the voting view."""
    ipaddr = models.GenericIPAddressField(null=False, blank=False)
    time = models.DateTimeField(null=False, auto_now_add=True)
|
# container-service-extension
# Copyright (c) 2017 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: BSD-2-Clause
import functools
import hashlib
import os
import pathlib
import stat
import sys
import threading
import click
import requests
# chunk size in bytes for file reading
BUF_SIZE = 65536
# chunk size for downloading files
SIZE_1MB = 1024 * 1024
# Human-readable names for value types, used in the error messages of
# check_keys_and_value_types().
_type_to_string = {
    str: 'string',
    int: 'number',
    bool: 'true/false',
    dict: 'mapping',
    list: 'sequence',
}
class ConsoleMessagePrinter:
    """Callback object that prints colour-coded messages on the console."""

    def general_no_color(self, msg):
        """Print *msg* without any colouring."""
        click.secho(msg)

    def general(self, msg):
        """Print a success/progress message in green."""
        click.secho(msg, fg='green')

    def info(self, msg):
        """Print an informational message in yellow."""
        click.secho(msg, fg='yellow')

    def error(self, msg):
        """Print an error message in red."""
        click.secho(msg, fg='red')
def prompt_text(text, color='black', hide_input=False):
    """Prompt the user for a string, rendering *text* in the given color.

    :param text: prompt text (coerced to str).
    :param str color: click color name for the prompt.
    :param bool hide_input: suppress echo (e.g. for passwords).
    :return: the string entered by the user.
    """
    click_text = click.style(str(text), fg=color)
    return click.prompt(click_text, hide_input=hide_input, type=str)
def get_server_runtime_config():
    """Return the running Service's configuration.

    The import is local, presumably to avoid a circular import — verify.
    """
    from container_service_extension.service import Service
    return Service().get_service_config()
def get_pks_cache():
    """Return the running Service's PKS cache.

    The import is local, presumably to avoid a circular import — verify.
    """
    from container_service_extension.service import Service
    return Service().get_pks_cache()
def is_pks_enabled():
    """Return whether PKS support is enabled on the running Service.

    The import is local, presumably to avoid a circular import — verify.
    """
    from container_service_extension.service import Service
    return Service().is_pks_enabled()
def get_duplicate_items_in_list(items):
    """Find duplicate entries in a list.

    :param list items: list of items with possible duplicates (may be None).

    :return: the items that occur more than once in the input list; each
        duplicated item is mentioned only once in the returned list.

    :rtype: list
    """
    seen, repeated = set(), set()
    for entry in (items or []):
        # First sighting goes to ``seen``; any re-sighting is a duplicate.
        (repeated if entry in seen else seen).add(entry)
    return list(repeated)
def check_keys_and_value_types(dikt, ref_dict, location='dictionary',
                               excluded_keys=None, msg_update_callback=None):
    """Compare a dictionary with a reference dictionary.

    The method ensures that all keys and value types are the same in the
    dictionaries.

    :param dict dikt: the dictionary to check for validity
    :param dict ref_dict: the dictionary to check against
    :param str location: where this check is taking place, so error messages
        can be more descriptive.
    :param list excluded_keys: list of str, representing the list of key which
        if missing won't raise an exception. Defaults to no exclusions.
    :param utils.ConsoleMessagePrinter msg_update_callback: Callback
        object that writes messages onto console.

    :raises KeyError: if @dikt has missing or invalid keys
    :raises TypeError: if the value of a property in @dikt does not match with
        the value of the same property in @ref_dict
    """
    # Bug fix: ``excluded_keys`` was a mutable default argument ([]), a
    # Python anti-pattern; use None as the sentinel instead.
    if excluded_keys is None:
        excluded_keys = []
    ref_keys = set(ref_dict.keys())
    keys = set(dikt.keys())
    missing_keys = ref_keys - keys - set(excluded_keys)
    if missing_keys and msg_update_callback:
        msg_update_callback.error(
            f"Missing keys in {location}: {missing_keys}")
    bad_value = False
    for k in ref_keys:
        if k not in keys:
            continue
        value_type = type(ref_dict[k])
        if not isinstance(dikt[k], value_type):
            if msg_update_callback:
                msg_update_callback.error(
                    f"{location} key '{k}': value type should be "
                    f"'{_type_to_string[value_type]}'")
            bad_value = True
    if missing_keys:
        raise KeyError(f"Missing and/or invalid key in {location}")
    if bad_value:
        raise TypeError(f"Incorrect type for property value(s) in {location}")
def check_python_version(msg_update_callback=None):
    """Ensure that user's Python version >= 3.7.3.

    (Doc fix: the docstring previously said 3.6, but the code below
    enforces 3.7.3.)

    :param utils.ConsoleMessagePrinter msg_update_callback: Callback object
        that writes messages onto console.

    :raises Exception: if user's Python version < 3.7.3.
    """
    if msg_update_callback:
        msg_update_callback.general_no_color(
            "Required Python version: >= 3.7.3\n"
            f"Installed Python version: {sys.version}")
    if sys.version_info < (3, 7, 3):
        raise Exception("Python version should be 3.7.3 or greater")
def str_to_bool(s):
    """Convert a string boolean value to bool, case-insensitively.

    :param s: input value; coerced to str before comparison.
    :return: True when ``s`` equals 'true' in any case, otherwise False.
    """
    return 'true' == str(s).lower()
def get_sha256(filepath, chunk_size=None):
    """Get sha256 hash of file as a string.

    Generalized: the read size is now a parameter; the default preserves
    the original module-level BUF_SIZE behaviour.

    :param str filepath: path to file.
    :param int chunk_size: bytes read per iteration (defaults to BUF_SIZE).

    :return: sha256 string for the file.

    :rtype: str
    """
    if chunk_size is None:
        chunk_size = BUF_SIZE
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        # iter() with a b'' sentinel stops cleanly at end-of-file.
        for data in iter(lambda: f.read(chunk_size), b''):
            sha256.update(data)
    return sha256.hexdigest()
def check_file_permissions(filename, msg_update_callback=None):
    """Ensure that the file has correct permissions.

    Unix based system:
        Owner - r/w permission
        Other - No access
    Windows:
        No check

    Bug fix: the error messages were f-strings with no placeholder and a
    literal "(unknown)" where the file name was clearly intended; the
    offending ``filename`` is now interpolated.

    :param str filename: path to file.
    :param utils.ConsoleMessagePrinter msg_update_callback: Callback
        object that writes messages onto console.

    :raises Exception: if file has 'x' permissions for Owner or 'rwx'
        permissions for 'Others' or 'Group'.
    """
    if os.name == 'nt':
        return
    err_msgs = []
    file_mode = os.stat(filename).st_mode
    if file_mode & stat.S_IXUSR:
        msg = f"Remove execute permission of the Owner for the file {filename}"
        if msg_update_callback:
            msg_update_callback.error(msg)
        err_msgs.append(msg)
    if file_mode & stat.S_IROTH or file_mode & stat.S_IWOTH \
            or file_mode & stat.S_IXOTH:
        msg = f"Remove read, write and execute permissions of Others for " \
              f"the file {filename}"
        if msg_update_callback:
            msg_update_callback.error(msg)
        err_msgs.append(msg)
    if file_mode & stat.S_IRGRP or file_mode & stat.S_IWGRP \
            or file_mode & stat.S_IXGRP:
        msg = f"Remove read, write and execute permissions of Group for the " \
              f"file {filename}"
        if msg_update_callback:
            msg_update_callback.error(msg)
        err_msgs.append(msg)
    if err_msgs:
        raise IOError(err_msgs)
def download_file(url, filepath, sha256=None, force_overwrite=False,
                  logger=None, msg_update_callback=None):
    """Download a file from a url to local filepath.

    Will not overwrite files unless @sha256 is given.
    Recursively creates specified directories in @filepath.

    :param str url: source url.
    :param str filepath: destination filepath.
    :param str sha256: without this argument, if a file already exists at
        @filepath, download will be skipped. If @sha256 matches the file's
        sha256, download will be skipped.
    :param bool force_overwrite: if True, will download the file even if it
        already exists or its SHA hasn't changed.
    :param logging.Logger logger: optional logger to log with.
    :param utils.ConsoleMessagePrinter msg_update_callback: Callback
        object that writes messages onto console.
    """
    path = pathlib.Path(filepath)

    def notify(text, style):
        # Route one status message to the optional logger and console callback.
        if logger:
            logger.info(text)
        if msg_update_callback:
            getattr(msg_update_callback, style)(text)

    already_good = (not force_overwrite and path.is_file()
                    and (sha256 is None or get_sha256(filepath) == sha256))
    if already_good:
        notify(f"Skipping download to '{filepath}' (file already exists)",
               'general')
        return
    path.parent.mkdir(parents=True, exist_ok=True)
    notify(f"Downloading file from '{url}' to '{filepath}'...", 'info')
    response = requests.get(url, stream=True)
    with path.open(mode='wb') as f:
        for chunk in response.iter_content(chunk_size=SIZE_1MB):
            f.write(chunk)
    notify("Download complete", 'general')
def read_data_file(filepath, logger=None, msg_update_callback=None):
    """Retrieve file content from local disk as a string.

    :param str filepath: absolute filepath of the file, whose content we want
        to read.
    :param logging.Logger logger: optional logger to log with.
    :param utils.ConsoleMessagePrinter msg_update_callback: Callback
        object that writes messages onto console.

    :return: the contents of the file.

    :rtype: str

    :raises FileNotFoundError: if requested data file cannot be
        found.
    """
    path = pathlib.Path(filepath)
    if not path.is_file():
        msg = f"Requested data file at '{filepath}' not found"
        if msg_update_callback:
            msg_update_callback.error(msg)
        if logger:
            # Bug fix: exc_info=True was passed here, but no exception is
            # active at this point, so it only logged a useless
            # "NoneType: None" traceback.
            logger.error(msg)
        raise FileNotFoundError(msg)
    msg = f"Found data file: {path}"
    if msg_update_callback:
        msg_update_callback.general(msg)
    if logger:
        logger.info(msg)
    return path.read_text()
def run_async(func):
    """Decorator: run the wrapped callable on a daemon thread.

    The wrapper starts the thread immediately and returns the
    ``threading.Thread`` so callers may join() it.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        worker = threading.Thread(
            target=func, args=args, kwargs=kwargs, daemon=True)
        worker.start()
        return worker
    return wrapper
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import logging
import emission.storage.decorations.stats_queries as esds
import emission.net.usercache.formatters.common as enufc
def store_server_api_time(user_id, call, ts, reading):
    """Thin wrapper: delegate an API-call timing stat to the storage layer."""
    esds.store_server_api_time(user_id, call, ts, reading)
def store_server_api_error(user_id, call, ts, reading):
    """Thin wrapper: delegate an API-call error stat to the storage layer."""
    esds.store_server_api_error(user_id, call, ts, reading)
# Backward compat to store old-style client stats until the phone upgrade
# has been pushed out to all phones
def setClientMeasurements(user_id, reportedVals):
    """Store a batch of old-style client stats readings for a user.

    ``reportedVals`` carries a 'Metadata' dict and a 'Readings' dict that
    maps stat keys to lists of (ts, reading) pairs.
    """
    # Idiom fix: pass logging arguments lazily (%-args) instead of eagerly
    # formatting with the % operator, so formatting only happens when the
    # record is actually emitted.
    logging.info("Received %d client keys and %d client readings for user_id %s",
                 len(reportedVals['Readings']),
                 getClientMeasurementCount(reportedVals['Readings']), user_id)
    logging.debug("reportedVals = %s", reportedVals)
    metadata = reportedVals['Metadata']
    stats = reportedVals['Readings']
    for key, values in stats.items():
        for value in values:
            storeClientEntry(user_id, key, value[0], value[1], metadata)
def getClientMeasurementCount(readings):
    """Return the total number of readings across all keys of *readings*."""
    return sum(len(reading_list) for reading_list in readings.values())
def storeClientEntry(user_id, key, ts, reading, metadata):
    """Convert one old-style client reading into an entry and persist it."""
    logging.debug("storing client entry for user_id %s, key %s at timestamp %s" % (user_id, key, ts))
    entry = createEntry(user_id, key, ts, reading)
    # Fold the submission-level metadata (client versions, etc.) into the
    # entry before handing it to the timeseries layer.
    entry.update(metadata)
    save_to_timeseries(entry)
def save_to_timeseries(old_style_data):
    """Convert an old-style entry and insert it into the user's timeseries."""
    import emission.storage.timeseries.abstract_timeseries as esta
    # Capture the user id before conversion: old2new mutates its argument
    # and deletes the "user" key.
    uid = old_style_data["user"]
    converted = old2new(old_style_data)
    return esta.TimeSeries.get_time_series(uid).insert(converted)
def old2new(old_style_data):
    """Convert an old-style (phone-reported) stat dict into a new-style
    timeseries entry.

    NOTE: mutates ``old_style_data`` in place (removes the "user" key).
    Battery readings become "background/battery" entries; everything else
    becomes a Statsevent under the key returned by stat2key().
    """
    import emission.core.wrapper.entry as ecwe
    import emission.core.wrapper.statsevent as ecws
    import emission.core.wrapper.battery as ecwb
    # Old entries stored the literal string 'none' for missing readings.
    none2None = lambda s: None if s == 'none' else s
    float_with_none = lambda s: float(s) if s is not None else None
    # String timestamps are divided by 1000; non-strings are passed through
    # float() unchanged.
    # NOTE(review): `unicode` only exists on Python 2 — on Python 3 a
    # non-str input reaching the second type check would raise NameError;
    # confirm the intended interpreter / future-package setup.
    ms_to_sec_with_none = lambda s: (float(s))/1000 if type(s) == str or type(s) == unicode else float(s)
    user_id = old_style_data["user"]
    del old_style_data["user"]
    if old_style_data["stat"] == "battery_level":
        new_style_data = ecwb.Battery({
            "battery_level_pct" : float_with_none(none2None(old_style_data["reading"])),
            "ts": ms_to_sec_with_none(old_style_data["ts"])
        })
        new_key = "background/battery"
    else:
        new_style_data = ecws.Statsevent()
        new_style_data.name = old_style_data["stat"]
        new_style_data.ts = ms_to_sec_with_none(old_style_data["ts"])
        new_style_data.reading = float_with_none(none2None(old_style_data["reading"]))
        new_style_data.client_app_version = old_style_data["client_app_version"]
        new_style_data.client_os_version = old_style_data["client_os_version"]
        new_key = stat2key(old_style_data["stat"])
    new_entry = ecwe.Entry.create_entry(user_id, new_key, new_style_data)
    # For legacy entries, make sure that the write_ts doesn't become the conversion
    # time or the server arrival time
    new_entry["metadata"]["write_ts"] = float_with_none(old_style_data["reported_ts"])
    del new_entry["metadata"]["write_local_dt"]
    del new_entry["metadata"]["write_fmt_time"]
    enufc.expand_metadata_times(new_entry["metadata"])
    return new_entry
def stat2key(stat_name):
    """Map an old-style client stat name onto its new-style timeseries key.

    Raises KeyError for stat names that were deliberately never converted.
    """
    stat_name_mapping = {
        "app_launched": "stats/client_nav_event",
        "push_stats_duration": "stats/client_time",
        "sync_duration": "stats/client_time",
        "sync_launched": "stats/client_nav_event",
        "button_sync_forced": "stats/client_nav_event",
        "sync_pull_list_size": "stats/client_time",
        "sync_push_list_size": "stats/client_time",
        "confirmlist_ucs_size": "stats/client_time",
        "confirmlist_resume": "stats/client_nav_event",
        "result_display_duration": "stats/client_time",
        "button_confirm_all": "stats/client_nav_event",
        "button_confirm_all_skipped": "stats/client_nav_event",
        "button_moves_linked": "stats/client_nav_event",
        "confirmlist_auth_not_done": "stats/client_nav_event",
        "button_account_changed": "stats/client_nav_event",
        "result_display_failed": "stats/client_error",
        "pull_duration": "stats/client_time"
    }
    # Old-style stats never stored server_api_error, and result stats are
    # intentionally not converted:
    # https://github.com/e-mission/e-mission-server/commit/7487c82578e8933f4da8f9d3fa3522c102906c81#diff-a6a7bc47405d23c166d7b6f86bea4d2eR588
    return stat_name_mapping[stat_name]
def createEntry(user, stat, ts, reading):
    """Build an old-style stats dict; ``ts`` is coerced to float."""
    entry = {}
    entry['user'] = user
    entry['stat'] = stat
    entry['ts'] = float(ts)
    entry['reading'] = reading
    return entry
|
from ..lang import H
from ..lang import sig
from ..lang import L
@sig(H/ str >> [str])
def lines(string):
    """
    lines :: String -> [String]

    lines breaks a string up into a list of strings at newline characters.
    The resulting strings do not contain newlines.
    """
    if not string:
        return L[[]]
    return L[string.split("\n")]
@sig(H/ str >> [str])
def words(string):
    """
    words :: String -> [String]

    words breaks a string up into a list of words, which were delimited by
    white space.
    """
    if string == "":
        return L[[]]
    return L[string.split(" ")]
@sig(H/ [str] >> str)
def unlines(strings):
    """
    unlines :: [String] -> String

    unlines is an inverse operation to lines. It joins lines with newline
    characters.

    NOTE(review): Haskell's unlines appends a terminating newline to every
    line ("a\\nb\\n"); this implementation joins with "\\n" and omits the
    final newline, which makes it an exact inverse of this module's
    ``lines`` (whose split() keeps a trailing empty string otherwise).
    """
    return "\n".join(strings)
@sig(H/ [str] >> str)
def unwords(strings):
    """
    unwords :: [String] -> String

    unwords is an inverse operation to words. It joins words with a single
    separating space between each pair.
    """
    return str.join(" ", strings)
|
# Inherit the shared model settings; values below override/extend the base.
_base_ = '../model.py'
model = dict(
    type='ImageClassifier',
    task='classification',
    pretrained=None,
    # Backbone left empty here — presumably supplied by the base config or
    # a downstream override; TODO confirm.
    backbone=dict(),
    neck=dict(type='GlobalAveragePooling'),
    head=dict(
        # -1 appears to be a sentinel for the head's input width —
        # presumably inferred from the backbone output; TODO confirm.
        in_channels=-1,
        loss=dict(type='CrossEntropyLoss', loss_weight=1.0),
        topk=(1, 5)
    )
)
checkpoint_config = dict(
    type='CheckpointHookWithValResults'
)
|
class Solution(object):
    def minMeetingRooms(self, intervals):
        """Return the minimum number of rooms needed to host all meetings.

        :type intervals: List[List[int]]  (each item is a [start, end) pair)
        :rtype: int

        Sort start and end times independently, then sweep over the starts:
        a meeting that begins before the earliest unmatched end overlaps
        every meeting still in progress and needs a new room; otherwise it
        reuses the room freed by that earliest end. O(n log n) time.

        The previous implementation used a ``while ... else`` whose ``else``
        always executed (the inner loop never breaks) — correct, but
        misleading; this is the equivalent canonical sweep.
        """
        starts = sorted(interval[0] for interval in intervals)
        ends = sorted(interval[1] for interval in intervals)

        rooms = 0
        freed = 0  # index of the earliest end time not yet matched to a start
        for begin in starts:
            if begin < ends[freed]:
                # Overlaps all meetings still running: allocate a new room.
                rooms += 1
            else:
                # The meeting ending at ends[freed] is over: reuse its room.
                freed += 1
        return rooms
|
#!/usr/bin/env python3
#
# format.py
# Copyright (c) 2017 Dylan Brown. All rights reserved.
#
# NOTES
# Use Python 3. Run from the `build/` directory.
import os
import sys
import json
import subprocess

CLANG_FORMAT_EXE = "/usr/local/opt/llvm/bin/clang-format"
# Ensure we don't silently fail by running Python 2.
assert sys.version_info[0] >= 3, "This script requires Python 3.x"
# Ensure we're in the correct directory.
if os.getcwd().split("/")[-1] != "build" or "compile_commands.json" not in os.listdir():
    raise OSError("Run this from the build/ directory containing the file: compile_commands.json")
# compile_commands.json lists every translation unit the build knows about.
with open("compile_commands.json") as db_file:
    database = json.load(db_file)
files = [entry['file'] for entry in database]
# Make formatting changes in-place to all files.
for file in files:
    # subprocess.run with an argument list avoids shell interpretation:
    # the previous os.system(" ".join(...)) broke on paths containing
    # spaces or shell metacharacters.
    subprocess.run([CLANG_FORMAT_EXE, "-i", "-style=llvm", file], check=False)
|
#!/usr/bin/env python
# system
import os
import copy
import numpy as np
import threading
import time
import math
# drake
import lcm
from drake import lcmt_iiwa_command, lcmt_iiwa_status ,lcmt_robot_state
import pydrake
from pydrake.solvers import ik
from robotlocomotion import robot_plan_t
from bot_core import robot_state_t
# ROS
import rospy
# ROS custom packages
from robot_msgs.msg import *
from robot_msgs.srv import *
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
class TrajectoryServer:
    """ROS service that forwards joint trajectories to Drake over LCM.

    Exposes /robot_control/SendJointTrajectory; each request is converted
    from a ROS JointTrajectory into an LCM robot_plan_t and published on
    the COMMITTED_ROBOT_PLAN channel.
    """
    def __init__(self):
        # 10 Hz spin rate used by the main loop in __main__.
        self.rate = rospy.Rate(10)
        self.lc = lcm.LCM()
        self.publishChannel = "COMMITTED_ROBOT_PLAN"
    def handle_send_trajectory(self, req):
        """Service callback: publish the plan, then block for its duration.

        Sleeps for the trajectory length plus a small epsilon before
        responding; always reports success (no execution feedback is read).
        """
        resp = SendJointTrajectoryResponse()
        plan = TrajectoryServer.ROSJointTrajectoryToLCMRobotPlan(req.trajectory)
        planDuration = req.trajectory.points[-1].time_from_start
        self.lc.publish(self.publishChannel, plan.encode())
        eps = 0.2
        rospy.sleep(planDuration + rospy.Duration.from_sec(eps))
        resp.success = True
        return resp
    def spinOnce(self):
        # Process any pending inbound LCM messages.
        self.lc.handle()
    def advertiseService(self):
        # Register the trajectory service with the ROS master.
        rospy.Service('/robot_control/SendJointTrajectory', SendJointTrajectory, self.handle_send_trajectory)
    @staticmethod
    def ROSJointTrajectoryToLCMRobotPlan(jointTrajectoryRos):
        """Convert a ROS JointTrajectory message into an LCM robot_plan_t."""
        plan = robot_plan_t()
        plan.utime = 0
        plan.robot_name = ""
        plan.num_states = len(jointTrajectoryRos.points)
        plan.plan_info = [1]*plan.num_states
        jointNames = jointTrajectoryRos.joint_names
        for idx, jointTrajectoryPoint in enumerate(jointTrajectoryRos.points):
            robotState = TrajectoryServer.ROSJointTrajectoryPointToLCMRobotState(jointNames, jointTrajectoryPoint)
            plan.plan.append(robotState)
        return plan
    @staticmethod
    def ROSJointTrajectoryPointToLCMRobotState(jointNames, jointTrajectoryPoint):
        """Convert one ROS JointTrajectoryPoint into an LCM robot_state_t."""
        robotState = robot_state_t()
        robotState.num_joints = len(jointTrajectoryPoint.positions)
        robotState.joint_name = jointNames
        # print "duration = ", jointTrajectoryPoint.time_from_start.to_sec()
        # print "typeof(duration) ", typeof(JointTrajectoryPoint.time_from_start)
        robotState.utime = TrajectoryServer.ROSdurationToUtime(jointTrajectoryPoint.time_from_start)
        robotState.joint_position = jointTrajectoryPoint.positions
        robotState.joint_velocity = jointTrajectoryPoint.velocities
        robotState.joint_effort = jointTrajectoryPoint.effort
        return robotState
    @staticmethod
    def ROSdurationToUtime(duration):
        # ROS durations are float seconds; LCM utime is integer microseconds.
        return int(duration.to_sec()*1e6)
if __name__ == '__main__':
    # Note: this script is Python 2 (print statement below).
    rospy.init_node('TrajectoryServer')
    fs = TrajectoryServer()
    print "Starting TrajectoryServer"
    fs.advertiseService()
    # Pump LCM messages at the configured rate until ROS shuts down.
    while not rospy.is_shutdown():
        fs.spinOnce()
        fs.rate.sleep()
|
def list_check(lst):
    """Are all items in lst a list?

    >>> list_check([[1], [2, 3]])
    True
    >>> list_check([[1], "nope"])
    False
    >>> list_check([])
    True
    """
    # all() short-circuits on the first non-list and is True for an empty
    # sequence, matching the previous count-based implementation.
    return all(isinstance(x, list) for x in lst)
from rest_framework import serializers
from rdmo.core.serializers import TranslationSerializerMixin
from ..models import Task
class TaskExportSerializer(TranslationSerializerMixin, serializers.ModelSerializer):
    """Serializer used when exporting Task objects.

    Related attributes are flattened to their URI strings, and the task's
    conditions are exported as a list of condition URIs.
    """
    # default=None keeps the export working when the relation is unset.
    start_attribute = serializers.CharField(source='start_attribute.uri', default=None, read_only=True)
    end_attribute = serializers.CharField(source='end_attribute.uri', default=None, read_only=True)
    conditions = serializers.SerializerMethodField()
    class Meta:
        model = Task
        fields = (
            'uri',
            'uri_prefix',
            'key',
            'comment',
            'start_attribute',
            'end_attribute',
            'days_before',
            'days_after',
            'conditions'
        )
        # Fields serialized via TranslationSerializerMixin.
        trans_fields = (
            'title',
            'text'
        )
    def get_conditions(self, obj):
        """Return the URIs of all conditions attached to this task."""
        return [condition.uri for condition in obj.conditions.all()]
|
from django import forms
from .models import Car, Owner
class AddCarForm(forms.ModelForm):
    """ModelForm for creating a Car from its basic descriptive fields."""
    class Meta:
        model = Car
        fields = [
            "brand",
            "model",
            "color",
            "plate_number"
        ]
|
import argparse
import logging
import time
from api.ingest import IngestAPI
from ena.ena_api import EnaApi
from ena.util import write_xml
# Surface debug output from the ENA API wrapper while submitting.
logging.getLogger('ena.ena_api').setLevel(logging.DEBUG)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Submits sequencing run entities to ENA')
    parser.add_argument('ingest_submission_uuid', type=str, help='Ingest submission UUID')
    parser.add_argument('md5_file', type=str, help='Filename containing md5 of files for the Ingest submission')
    parser.add_argument('token', type=str, help='Ingest API Token without bearer prefix')
    parser.add_argument('--ftp_dir', type=str, required=False, help='Directory of files in the FTP upload area.')
    parser.add_argument('--action', type=str, required=False, default='ADD', help='ADD or MODIFY')
    args = parser.parse_args()
    ingest_api = IngestAPI()
    ingest_api.set_token('Bearer ' + args.token)
    ena_api = EnaApi(ingest_api)
    # Collect the manifests for the submission, then build and submit the
    # run XML files to ENA.
    manifest_ids = ingest_api.get_manifest_ids_from_submission(args.ingest_submission_uuid)
    # NOTE(review): the return value is never used — consider logging
    # `files` or dropping the assignment.
    files = ena_api.submit_run_xml_files(manifest_ids, args.md5_file, args.ftp_dir, args.action.upper())
|
import asyncio
import logging
import os
import uvloop
from functools import partial
from inspect import isawaitable
from multiprocessing import Process
from ssl import create_default_context, Purpose
from signal import (
SIGTERM, SIGINT,
signal as signal_func,
Signals
)
from socket import (
socket,
SOL_SOCKET,
SO_REUSEADDR,
)
from mach9.http import HttpProtocol
from mach9.signal import Signal
from mach9.timer import update_current_time
class Server:
    """Runs a mach9 application on one or more uvloop-based HTTP workers.

    `run` is the public entry point; it builds a settings dict via
    `get_server_setting` and dispatches to `serve` (single process) or
    `serve_multiple` (forked workers sharing one listening socket).
    """
    def __init__(self, app):
        # Cache the app plus the settings and lifecycle hooks the serve
        # loop needs.
        self.app = app
        self.request_handler = app
        self.signal = Signal()
        self.log = app.log
        self.log_config = app.log_config
        self.listeners = app.listeners
        self.debug = app.debug
        self.netlog = app.netlog
        self.request_timeout = app.request_timeout
        self.request_max_size = app.request_max_size
        self.keep_alive = app.keep_alive
    def get_server_setting(self, protocol, host='127.0.0.1', port=8000,
                           debug=False, ssl=None, sock=None, workers=1,
                           loop=None, backlog=100, has_log=True):
        '''Helper function used by `run`.

        Builds the kwargs dict consumed by `serve`/`serve_multiple`:
        normalizes `ssl` (a dict of cert/key paths becomes an SSLContext)
        and attaches the app's lifecycle listeners, partially applied with
        the app instance.
        '''
        if isinstance(ssl, dict):
            # try common aliases
            cert = ssl.get('cert') or ssl.get('certificate')
            key = ssl.get('key') or ssl.get('keyfile')
            if cert is None or key is None:
                raise ValueError('SSLContext or certificate and key required.')
            context = create_default_context(purpose=Purpose.CLIENT_AUTH)
            context.load_cert_chain(cert, keyfile=key)
            ssl = context
        server_settings = {
            'protocol': protocol,
            'request_handler': self.request_handler,
            'log': self.log,
            'netlog': self.netlog,
            'host': host,
            'port': port,
            'sock': sock,
            'ssl': ssl,
            'signal': self.signal,
            'debug': debug,
            'request_timeout': self.request_timeout,
            'request_max_size': self.request_max_size,
            'keep_alive': self.keep_alive,
            'loop': loop,
            'backlog': backlog,
            'has_log': has_log
        }
        # Stop listeners run in reverse registration order so teardown
        # mirrors setup.
        for event_name, settings_name, reverse in (
                ('before_server_start', 'before_start', False),
                ('after_server_start', 'after_start', False),
                ('before_server_stop', 'before_stop', True),
                ('after_server_stop', 'after_stop', True),
        ):
            listeners = self.listeners[event_name].copy()
            if reverse:
                listeners.reverse()
            # Prepend mach9 to the arguments when listeners are triggered
            listeners = [partial(listener, self.app) for listener in listeners]
            server_settings[settings_name] = listeners
        if debug:
            self.log.setLevel(logging.DEBUG)
        # Serve
        if host and port:
            proto = 'http'
            if ssl is not None:
                proto = 'https'
            self.log.info('Goin\' Fast @ {}://{}:{}'.format(proto, host, port))
        return server_settings
    def run(self, host='127.0.0.1', port=8000, ssl=None,
            sock=None, workers=1, backlog=100, protocol=None):
        """Build the server settings and serve; blocks until stopped.

        Exceptions raised while serving are logged rather than propagated.
        """
        protocol = protocol or HttpProtocol
        server_settings = self.get_server_setting(
            protocol, host=host, port=port, debug=self.debug, ssl=ssl,
            sock=sock, workers=workers, backlog=backlog,
            has_log=self.log_config is not None)
        try:
            if workers == 1:
                self.serve(**server_settings)
            else:
                self.serve_multiple(server_settings, workers)
        # NOTE(review): bare except also swallows KeyboardInterrupt and
        # SystemExit; `except Exception:` would be safer here.
        except:
            self.log.exception(
                'Experienced exception while trying to serve')
        self.log.info('Server Stopped')
    def trigger_events(self, events, loop):
        """Trigger event callbacks (functions or async)
        :param events: one or more sync or async functions to execute
        :param loop: event loop
        """
        for event in events:
            result = event(loop)
            # Await coroutine listeners before moving to the next one.
            if isawaitable(result):
                loop.run_until_complete(result)
    def serve(self, host, port, request_handler,
              before_start=None,
              after_start=None, before_stop=None, after_stop=None, debug=False,
              request_timeout=60, ssl=None, sock=None, request_max_size=None,
              reuse_port=False, loop=None, protocol=None, backlog=100,
              connections=None, signal=None, has_log=True, keep_alive=True,
              log=None, netlog=None):
        """Start a single-process HTTP server and block until stopped.

        Creates a fresh uvloop event loop, fires the before/after lifecycle
        listeners, installs SIGINT/SIGTERM handlers, and on shutdown drains
        open connections before closing the loop.
        """
        self.loop = loop = uvloop.new_event_loop()
        asyncio.set_event_loop(loop)
        if debug:
            loop.set_debug(debug)
        self.trigger_events(before_start, loop)
        connections = connections if connections is not None else set()
        # Factory producing one protocol instance per accepted connection.
        server = partial(
            protocol,
            loop=loop,
            connections=connections,
            signal=signal,
            request_handler=request_handler,
            request_timeout=request_timeout,
            request_max_size=request_max_size,
            has_log=has_log,
            keep_alive=keep_alive,
            log=log,
            netlog=netlog
        )
        server_coroutine = loop.create_server(
            server,
            host,
            port,
            ssl=ssl,
            reuse_port=reuse_port,
            sock=sock,
            backlog=backlog
        )
        # Instead of pulling time at the end of every request,
        # pull it once per minute
        loop.call_soon(partial(update_current_time, loop))
        try:
            http_server = loop.run_until_complete(server_coroutine)
        # NOTE(review): bare except — `except Exception:` would be safer.
        except:
            log.exception("Unable to start server")
            return
        self.trigger_events(after_start, loop)
        # Register signals for graceful termination
        for _signal in (SIGINT, SIGTERM):
            try:
                loop.add_signal_handler(_signal, loop.stop)
            except NotImplementedError:
                # NOTE(review): log.warn is a deprecated alias of
                # log.warning.
                log.warn('Mach9 tried to use loop.add_signal_handler but it is'
                         ' not implemented on this platform.')
        pid = os.getpid()
        try:
            log.info('Starting worker [{}]'.format(pid))
            loop.run_forever()
        finally:
            log.info("Stopping worker [{}]".format(pid))
            # Run the on_stop function if provided
            self.trigger_events(before_stop, loop)
            # Wait for event loop to finish and all connections to drain
            http_server.close()
            loop.run_until_complete(http_server.wait_closed())
            # Complete all tasks on the loop
            signal.stopped = True
            for connection in connections:
                connection.close_if_idle()
            while connections:
                loop.run_until_complete(asyncio.sleep(0.1))
            self.trigger_events(after_stop, loop)
            loop.close()
    def serve_multiple(self, server_settings, workers):
        """Fork `workers` processes that all accept on one shared socket.

        :param server_settings: kwargs dict for `serve`
        :param workers: number of worker processes to start
        """
        server_settings['reuse_port'] = True
        # Handling when custom socket is not provided.
        if server_settings.get('sock') is None:
            sock = socket()
            sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
            sock.bind((server_settings['host'], server_settings['port']))
            # Children must inherit the listening socket across fork/exec.
            sock.set_inheritable(True)
            server_settings['sock'] = sock
            server_settings['host'] = None
            server_settings['port'] = None
        log = server_settings['log']
        def sig_handler(signal, frame):
            # Forward the shutdown request to every worker process.
            log.info("Received signal {}. Shutting down.".format(
                Signals(signal).name))
            for process in processes:
                os.kill(process.pid, SIGINT)
        signal_func(SIGINT, lambda s, f: sig_handler(s, f))
        signal_func(SIGTERM, lambda s, f: sig_handler(s, f))
        processes = []
        for _ in range(workers):
            process = Process(target=self.serve, kwargs=server_settings)
            process.daemon = True
            process.start()
            processes.append(process)
        for process in processes:
            process.join()
        # the above processes will block this until they're stopped
        for process in processes:
            process.terminate()
        server_settings.get('sock').close()
    def stop(self):
        # Stop the event loop of the single-process server.
        self.loop.stop()
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.certificate_orders_operations import CertificateOrdersOperations
from .operations.certificates_operations import CertificatesOperations
from .operations.classic_mobile_services_operations import ClassicMobileServicesOperations
from .operations.domains_operations import DomainsOperations
from .operations.global_model_operations import GlobalModelOperations
from .operations.global_certificate_order_operations import GlobalCertificateOrderOperations
from .operations.global_domain_registration_operations import GlobalDomainRegistrationOperations
from .operations.global_resource_groups_operations import GlobalResourceGroupsOperations
from .operations.hosting_environments_operations import HostingEnvironmentsOperations
from .operations.managed_hosting_environments_operations import ManagedHostingEnvironmentsOperations
from .operations.provider_operations import ProviderOperations
from .operations.recommendations_operations import RecommendationsOperations
from .operations.server_farms_operations import ServerFarmsOperations
from .operations.sites_operations import SitesOperations
from .operations.top_level_domains_operations import TopLevelDomainsOperations
from .operations.usage_operations import UsageOperations
from . import models
class WebSiteManagementClientConfiguration(AzureConfiguration):
    """Configuration for WebSiteManagementClient
    Note that all parameters used to create this instance are saved as instance
    attributes.
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Subscription Id
    :type subscription_id: str
    :param api_version: API Version
    :type api_version: str
    :param accept_language: Gets or sets the preferred language for the
     response.
    :type accept_language: str
    :param long_running_operation_retry_timeout: Gets or sets the retry
     timeout in seconds for Long Running Operations. Default value is 30.
    :type long_running_operation_retry_timeout: int
    :param generate_client_request_id: When set to true a unique
     x-ms-client-request-id value is generated and included in each request.
     Default is true.
    :type generate_client_request_id: bool
    :param str base_url: Service URL
    :param str filepath: Existing config
    """
    def __init__(
            self, credentials, subscription_id, api_version='2015-08-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):
        # Autogenerated validation: fail fast on missing/mistyped params.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        if not isinstance(subscription_id, str):
            raise TypeError("Parameter 'subscription_id' must be str.")
        if api_version is not None and not isinstance(api_version, str):
            raise TypeError("Optional parameter 'api_version' must be str.")
        if accept_language is not None and not isinstance(accept_language, str):
            raise TypeError("Optional parameter 'accept_language' must be str.")
        if not base_url:
            # Default to the public Azure Resource Manager endpoint.
            base_url = 'https://management.azure.com'
        super(WebSiteManagementClientConfiguration, self).__init__(base_url, filepath)
        # Identify this SDK in the User-Agent header of every request.
        self.add_user_agent('websitemanagementclient/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')
        self.credentials = credentials
        self.subscription_id = subscription_id
        self.api_version = api_version
        self.accept_language = accept_language
        self.long_running_operation_retry_timeout = long_running_operation_retry_timeout
        self.generate_client_request_id = generate_client_request_id
class WebSiteManagementClient(object):
    """Use these APIs to manage Azure Websites resources through the Azure Resource Manager. All task operations conform to the HTTP/1.1 protocol specification and each operation returns an x-ms-request-id header that can be used to obtain information about the request. You must make sure that requests made to these resources are secure. For more information, see https://msdn.microsoft.com/en-us/library/azure/dn790557.aspx.
    :ivar config: Configuration for client.
    :vartype config: WebSiteManagementClientConfiguration
    :ivar certificate_orders: CertificateOrders operations
    :vartype certificate_orders: .operations.CertificateOrdersOperations
    :ivar certificates: Certificates operations
    :vartype certificates: .operations.CertificatesOperations
    :ivar classic_mobile_services: ClassicMobileServices operations
    :vartype classic_mobile_services: .operations.ClassicMobileServicesOperations
    :ivar domains: Domains operations
    :vartype domains: .operations.DomainsOperations
    :ivar global_model: GlobalModel operations
    :vartype global_model: .operations.GlobalModelOperations
    :ivar global_certificate_order: GlobalCertificateOrder operations
    :vartype global_certificate_order: .operations.GlobalCertificateOrderOperations
    :ivar global_domain_registration: GlobalDomainRegistration operations
    :vartype global_domain_registration: .operations.GlobalDomainRegistrationOperations
    :ivar global_resource_groups: GlobalResourceGroups operations
    :vartype global_resource_groups: .operations.GlobalResourceGroupsOperations
    :ivar hosting_environments: HostingEnvironments operations
    :vartype hosting_environments: .operations.HostingEnvironmentsOperations
    :ivar managed_hosting_environments: ManagedHostingEnvironments operations
    :vartype managed_hosting_environments: .operations.ManagedHostingEnvironmentsOperations
    :ivar provider: Provider operations
    :vartype provider: .operations.ProviderOperations
    :ivar recommendations: Recommendations operations
    :vartype recommendations: .operations.RecommendationsOperations
    :ivar server_farms: ServerFarms operations
    :vartype server_farms: .operations.ServerFarmsOperations
    :ivar sites: Sites operations
    :vartype sites: .operations.SitesOperations
    :ivar top_level_domains: TopLevelDomains operations
    :vartype top_level_domains: .operations.TopLevelDomainsOperations
    :ivar usage: Usage operations
    :vartype usage: .operations.UsageOperations
    :param credentials: Credentials needed for the client to connect to Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: Subscription Id
    :type subscription_id: str
    :param api_version: API Version
    :type api_version: str
    :param accept_language: Gets or sets the preferred language for the
     response.
    :type accept_language: str
    :param long_running_operation_retry_timeout: Gets or sets the retry
     timeout in seconds for Long Running Operations. Default value is 30.
    :type long_running_operation_retry_timeout: int
    :param generate_client_request_id: When set to true a unique
     x-ms-client-request-id value is generated and included in each request.
     Default is true.
    :type generate_client_request_id: bool
    :param str base_url: Service URL
    :param str filepath: Existing config
    """
    def __init__(
            self, credentials, subscription_id, api_version='2015-08-01', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None):
        self.config = WebSiteManagementClientConfiguration(credentials, subscription_id, api_version, accept_language, long_running_operation_retry_timeout, generate_client_request_id, base_url, filepath)
        self._client = ServiceClient(self.config.credentials, self.config)
        # Build the (de)serializers over every model class exported by
        # the generated `models` module.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)
        # One operation-group facade per resource area; all share the same
        # HTTP pipeline, configuration, and (de)serializers.
        self.certificate_orders = CertificateOrdersOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.certificates = CertificatesOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.classic_mobile_services = ClassicMobileServicesOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.domains = DomainsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.global_model = GlobalModelOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.global_certificate_order = GlobalCertificateOrderOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.global_domain_registration = GlobalDomainRegistrationOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.global_resource_groups = GlobalResourceGroupsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.hosting_environments = HostingEnvironmentsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.managed_hosting_environments = ManagedHostingEnvironmentsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.provider = ProviderOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.recommendations = RecommendationsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.server_farms = ServerFarmsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.sites = SitesOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.top_level_domains = TopLevelDomainsOperations(
            self._client, self.config, self._serialize, self._deserialize)
        self.usage = UsageOperations(
            self._client, self.config, self._serialize, self._deserialize)
|
import time
from lib.rotation import Rotation
# Smoke-test the Rotation driver: set up, hold for two seconds, clean up.
# NOTE(review): arguments are presumably (pin, min_angle, max_angle) —
# confirm against lib.rotation.Rotation.
rot = Rotation(18, 0, 180)
rot.setup()
time.sleep(2)
rot.cleanup()
|
# -*- coding: utf-8 -*-
"""
Example using the functions in TopomapArray to plot a set of 10:20 EEG sensors
onto a single subplot.
@author: KensingtonSka (Rhys Hobbs)
"""
import numpy as np
import matplotlib.pyplot as plt
from TopomapArray import project_onto_zplane, gen_grid_size, project_onto_grid
# %% Hypothetical sensor positions:
# 10-20 EEG montage channel labels (19 channels).
ch_names = ['Fp1', 'Fp2', 'Fz', 'F3', 'F4', 'F7', 'F8',
            'Cz', 'C3', 'C4', 'T3', 'T4',
            'Pz', 'P3', 'P4', 'T5', 'T6', 'O1', 'O2']
# One 3D head-space coordinate per channel, in the same order as ch_names.
# NOTE(review): units are not stated here — presumably millimetres; confirm
# against the coordinate source.
xyz = [np.array([-21.5, 70.2, -0.1]),
       np.array([28.4, 69.1, -0.4]),
       np.array([0.6, 40.9, 53.9]),
       np.array([-35.5, 49.4, 32.4]),
       np.array([40.2, 47.6, 32.1]),
       np.array([-54.8, 33.9, -3.5]),
       np.array([56.6, 30.8, -4.1]),
       np.array([0.8, -14.7, 73.9]),
       np.array([-52.2, -16.4, 57.8]),
       np.array([54.1, -18.0, 57.5]),
       np.array([-70.2, -21.3, -10.7]),
       np.array([71.9, -25.2, -8.2]),
       np.array([0.2, -62.1, 64.5]),
       np.array([-39.5, -76.3, 47.4]),
       np.array([36.8, -74.9, 49.2]),
       np.array([-61.5, -65.3, 1.1]),
       np.array([59.3, -67.6, 3.8]),
       np.array([-26.8, -100.2, 12.8]),
       np.array([24.1, -100.5, 14.1])]
xyz = np.array(xyz)
##Triangle example (uncomment to use):
#ch_names = ['U', 'L', 'R']
#length = 1
#xyz = [np.array([0, length, 0.5]),
#       np.array([-length*np.cos(np.pi/6), -length*np.sin(np.pi/6), 0.5]),
#       np.array([length*np.cos(np.pi/6), -length*np.sin(np.pi/6), 0.5])]
#xyz = np.array(xyz)
##Circle example (uncomment to use):
#radius = 1
#n_points = 10
#angle = np.linspace(0, 2*np.pi*(1-(1/n_points)), n_points)
#xyz = radius*np.array([np.cos(angle), np.sin(angle), angle*0]).T
# Generate data to plot:
# 100 random samples per channel, roughly centred on zero.
X = np.arange(0, 100, 1)
Y = (np.random.rand(len(xyz), 100)*8) - (4 + np.random.rand()*0.2)
# %% Generate subplot positions:
# Project the 3D sensor positions onto the z-plane, then map them onto a
# rectangular grid of subplot coordinates.
xy = project_onto_zplane(xyz, projection='z',
                         scale_seperation_distance=2, ch_names=ch_names)
grid_size = gen_grid_size(xy, sbp=2)
grid_pos = project_onto_grid(xy, grid_size, rotation_matrix=-90)
# %% PLOT:
# %% Using grid_size & grid_pos to plot the data onto the subplots:
# I explicitly don't use gridspec because I have observed poor performance
# when working with very large grids (~400 squares).
plt.style.use('dark_background')
fig, ax = plt.subplots(grid_size[0], grid_size[1], figsize=(12, 8))
[axi.set_axis_off() for axi in ax.ravel()] #Turn off axes
[axi.tick_params(axis='both', labelsize=8) for axi in ax.ravel()] #Set axis text size
# Generating equal y-limits:
ylim = round(max([abs(Y.max()), abs(Y.min())]))
plt.setp(ax, xlim=[X[0], X[-1]],
         ylim=[-ylim, ylim])
# Draw each channel's trace into its assigned grid cell, titled with the
# channel name; ticks are hidden so cells stay compact.
for i, pos in enumerate(grid_pos):
    ax[pos[0], pos[1]].plot(X, Y[i,:], '-', linewidth=0.5)
    #General settings:
    ax[pos[0], pos[1]].set_title(ch_names[i], size=8)
    # Ticks and border settings:
    ax[pos[0], pos[1]].axis('on')
    ax[pos[0], pos[1]].spines['right'].set_visible(False)
    ax[pos[0], pos[1]].spines['top'].set_visible(False)
    ax[pos[0], pos[1]].set_xticklabels([])
    ax[pos[0], pos[1]].set_yticklabels([])
#Make the top-left a reference:
# The [0, 0] axes keeps its axis labels so the figure has one annotated
# example of the shared scales.
ax[0, 0].axis('on')
ax[0, 0].spines['right'].set_visible(False)
ax[0, 0].spines['top'].set_visible(False)
ax[0, 0].set_xlabel('time (s)')
ax[0, 0].set_ylabel('voltage (V)')
|
from datetime import datetime, timedelta
from nose.tools import eq_
from mock import Mock, patch
import amo
import amo.tests
from addons.models import Addon
from users.models import UserProfile
from devhub.models import ActivityLog
from mkt.developers.models import (AddonPaymentAccount, CantCancel,
PaymentAccount, SolitudeSeller)
from mkt.developers.providers import get_provider
from mkt.site.fixtures import fixture
from test_providers import Patcher
class TestActivityLogCount(amo.tests.TestCase):
    """Exercise the monthly/total review aggregations on ActivityLog."""
    fixtures = ['base/addon_3615']
    def setUp(self):
        now = datetime.now()
        # First day of the current month; self.lm is the last day of the
        # previous month, used to backdate log entries out of this month.
        bom = datetime(now.year, now.month, 1)
        self.lm = bom - timedelta(days=1)
        self.user = UserProfile.objects.filter()[0]
        amo.set_user(self.user)
    def test_not_review_count(self):
        # Non-approval events must not count as reviews.
        amo.log(amo.LOG['EDIT_VERSION'], Addon.objects.get())
        eq_(len(ActivityLog.objects.monthly_reviews()), 0)
    def test_review_count(self):
        amo.log(amo.LOG['APPROVE_VERSION'], Addon.objects.get())
        result = ActivityLog.objects.monthly_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 1)
        eq_(result[0]['user'], self.user.pk)
    def test_review_count_few(self):
        # Multiple approvals by one user aggregate into a single row.
        for x in range(0, 5):
            amo.log(amo.LOG['APPROVE_VERSION'], Addon.objects.get())
        result = ActivityLog.objects.monthly_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 5)
    def test_review_last_month(self):
        # Backdated approvals fall outside the monthly window.
        log = amo.log(amo.LOG['APPROVE_VERSION'], Addon.objects.get())
        log.update(created=self.lm)
        eq_(len(ActivityLog.objects.monthly_reviews()), 0)
    def test_not_total(self):
        amo.log(amo.LOG['EDIT_VERSION'], Addon.objects.get())
        eq_(len(ActivityLog.objects.total_reviews()), 0)
    def test_total_few(self):
        for x in range(0, 5):
            amo.log(amo.LOG['APPROVE_VERSION'], Addon.objects.get())
        result = ActivityLog.objects.total_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 5)
    def test_total_last_month(self):
        # Unlike monthly_reviews, total_reviews includes backdated entries.
        log = amo.log(amo.LOG['APPROVE_VERSION'], Addon.objects.get())
        log.update(created=self.lm)
        result = ActivityLog.objects.total_reviews()
        eq_(len(result), 1)
        eq_(result[0]['approval_count'], 1)
        eq_(result[0]['user'], self.user.pk)
    def test_log_admin(self):
        amo.log(amo.LOG['OBJECT_EDITED'], Addon.objects.get())
        eq_(len(ActivityLog.objects.admin_events()), 1)
        eq_(len(ActivityLog.objects.for_developer()), 0)
    def test_log_not_admin(self):
        amo.log(amo.LOG['EDIT_VERSION'], Addon.objects.get())
        eq_(len(ActivityLog.objects.admin_events()), 0)
        eq_(len(ActivityLog.objects.for_developer()), 1)
class TestPaymentAccount(Patcher, amo.tests.TestCase):
    """Tests PaymentAccount creation, cancellation, and Bango detail
    round-trips, with Solitude/Bango network calls patched out."""
    fixtures = fixture('webapp_337141', 'user_999')
    def setUp(self):
        self.user = UserProfile.objects.filter()[0]
        # Patch SolitudeSeller.create so no Solitude API call is made, but
        # return a real SolitudeSeller row so FK relations still work.
        solsel_patcher = patch('mkt.developers.models.SolitudeSeller.create')
        self.solsel = solsel_patcher.start()
        self.solsel.return_value = self.seller = (
            SolitudeSeller.objects.create(
                resource_uri='selleruri', user=self.user))
        self.solsel.patcher = solsel_patcher
        super(TestPaymentAccount, self).setUp()
    def tearDown(self):
        # Undo the SolitudeSeller.create patch started in setUp.
        self.solsel.patcher.stop()
        super(TestPaymentAccount, self).tearDown()
    def test_create_bango(self):
        # Return a seller object without hitting Bango.
        self.bango_patcher.package.post.return_value = {
            'resource_uri': 'zipzap',
            'package_id': 123,
        }
        res = get_provider().account_create(
            self.user, {'account_name': 'Test Account'})
        eq_(res.name, 'Test Account')
        eq_(res.user, self.user)
        eq_(res.seller_uri, 'selleruri')
        eq_(res.account_id, 123)
        eq_(res.uri, 'zipzap')
        self.bango_patcher.package.post.assert_called_with(
            data={'paypalEmailAddress': 'nobody@example.com',
                  'seller': 'selleruri'})
        self.bango_patcher.bank.post.assert_called_with(
            data={'seller_bango': 'zipzap'})
    def test_cancel(self):
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo',
            solitude_seller=self.seller)
        addon = Addon.objects.get()
        AddonPaymentAccount.objects.create(
            addon=addon, account_uri='foo',
            payment_account=res, product_uri='bpruri')
        res.cancel()
        # Cancelling flags the account inactive and removes its
        # addon <-> payment-account links.
        assert res.inactive
        assert not AddonPaymentAccount.objects.exists()
    def test_cancel_shared(self):
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo',
            solitude_seller=self.seller, shared=True)
        addon = Addon.objects.get()
        AddonPaymentAccount.objects.create(
            addon=addon, account_uri='foo',
            payment_account=res, product_uri='bpruri')
        # Shared accounts can be in use by other apps and must refuse to cancel.
        with self.assertRaises(CantCancel):
            res.cancel()
    def test_get_details(self):
        package = Mock()
        package.get.return_value = {'full': {'vendorName': 'a',
                                             'some_other_value': 'b'}}
        self.bango_patcher.package.return_value = package
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='/foo/bar/123',
            solitude_seller=self.seller)
        deets = res.get_provider().account_retrieve(res)
        eq_(deets['account_name'], res.name)
        eq_(deets['vendorName'], 'a')
        # Only recognised keys should be exposed to the caller.
        assert 'some_other_value' not in deets
        # The Bango package id is parsed from the last segment of the uri.
        self.bango_patcher.package.assert_called_with('123')
        package.get.assert_called_with(data={'full': True})
    def test_update_account_details(self):
        res = PaymentAccount.objects.create(
            name='asdf', user=self.user, uri='foo',
            solitude_seller=self.seller)
        res.get_provider().account_update(res, {
            'account_name': 'new name',
            'vendorName': 'new vendor name',
            'something_other_value': 'not a package key'
        })
        eq_(res.name, 'new name')
        # Only recognised Bango package keys get forwarded in the PATCH.
        self.bango_patcher.api.by_url(res.uri).patch.assert_called_with(
            data={'vendorName': 'new vendor name'})
|
def find_higher(first, window):
    """Return (index, value) of the first element of *window* strictly
    greater than *first*, or None when no such element exists."""
    for idx, candidate in enumerate(window):
        if candidate > first:
            return (idx, candidate)
    return None
# Fixes: `file` shadowed the builtin; find_higher() was evaluated twice per
# step; `map(...)` was indexed/sliced (py3-incompatible) and `print` used the
# py2 statement form.  This version behaves identically on py2 and also runs
# on py3.
with open('B-large-practice.in') as infile:
    T = int(infile.readline())
    for case in range(1, T + 1):
        (emax, regain, n) = map(int, infile.readline().split())
        # Number of upcoming slots within which a full recharge completes:
        # ceil(emax / regain) - 1.
        window = (emax + regain - 1) // regain - 1
        values = list(map(int, infile.readline().split()))
        gain = 0
        energy = emax
        for i in range(n):
            upcoming = values[i + 1:i + window + 1]
            higher = find_higher(values[i], upcoming)
            if higher:
                (j, _v) = higher
                # use up enough that we will be recharged by then
                ideal_energy_now = min(max(0, emax - (j + 1) * regain), energy)
                gain += values[i] * (energy - ideal_energy_now)
                energy = ideal_energy_now
            else:
                # use up all energy
                gain += energy * values[i]
                energy = 0
            energy = min(energy + regain, emax)
        print('Case #{}: {}'.format(case, gain))
|
import ast
import atexit
import json
import os
import time
from datetime import datetime, timedelta
from random import randrange
import git
from apscheduler.schedulers.background import BackgroundScheduler
from dotenv import load_dotenv
from flask import Flask, redirect
from newsapi import NewsApiClient
# Country codes accepted by NewsAPI's top-headlines endpoint, each mapped to
# the list of languages to query.  [None] means "use the API default
# language for that country".
COUNTRIES_LANGUAGES = {
    "ar": [None], "gr": [None], "nl": [None], "za": [None], "au": [None],
    "hk": [None], "nz": [None], "kr": [None], "at": [None], "hu": [None],
    "ng": [None], "se": [None], "be": [None], "in": [None], "no": [None],
    "ch": [None], "br": [None], "id": [None], "ph": [None], "tw": [None],
    "bg": [None], "ie": [None], "pl": [None], "th": [None], "ca": [None],
    "il": [None], "pt": [None], "tr": [None], "cn": [None], "it": [None],
    "ro": [None], "ae": [None], "co": [None], "jp": [None], "ru": [None],
    "ua": [None], "cu": [None], "lv": [None], "sa": [None], "gb": [None],
    "cz": [None], "lt": [None], "rs": [None], "us": [None], "eg": [None],
    "my": [None], "sg": [None], "ve": [None], "fr": [None], "mx": [None],
    "sk": [None], "de": [None], "ma": [None], "si": [None],
}
# Category names accepted by the top-headlines endpoint.
CATEGORIES = ["business", "entertainment", "general", "health", "science", "sports", "technology"]
# Sources queried by the "everything" endpoint, with the language to request.
SOURCES_LANGUAGE = {
    "abc-news": "en", "bbc-news": "en", "cnn": "en", "fox-news": "en", "google-news": "en",
}
app = Flask(__name__)
# Pull NEWS_API_KEYS (and any other config) from a local .env file.
load_dotenv()
# NOTE(review): os.getenv returns None when NEWS_API_KEYS is unset, which
# makes literal_eval raise -- confirm the env var is always provided.
API_KEYS = ast.literal_eval(os.getenv("NEWS_API_KEYS"))
# Start at a random key so restarts don't always exhaust the same key first.
LAST_KEY_INDEX = randrange(0, len(API_KEYS))
repo = git.Repo.init(path='.')
BRANCH_MASTER_NAME = "main"
BRANCH_DATA_NAME = "data"
remote_origin = repo.remote()
def get_key():
    """Return the next NewsAPI key, rotating round-robin through API_KEYS."""
    global LAST_KEY_INDEX
    next_index = (LAST_KEY_INDEX + 1) % len(API_KEYS)
    LAST_KEY_INDEX = next_index
    return API_KEYS[next_index]
def git_prepare():
    """Reset the working tree, sync the master branch, and start a fresh
    local data branch (any previous local data branch is deleted)."""
    # Discard local modifications so the checkout/pull below cannot conflict.
    repo.index.checkout(force=True)
    repo.git.checkout(BRANCH_MASTER_NAME)
    remote_origin.pull()
    if repo.git.branch("--list", BRANCH_DATA_NAME):
        git.Head.delete(repo, BRANCH_DATA_NAME, force=True)
    repo.create_head(BRANCH_DATA_NAME).checkout()
def git_done():
    """Commit and force-push the data branch; refuse (with a log line) if we
    somehow ended up on a different branch, e.g. after a failed prepare."""
    if repo.active_branch.name == BRANCH_DATA_NAME:
        commit_and_push(BRANCH_DATA_NAME, "update data")
    else:
        print("Branch[{0}] wrong while commit the data!".format(repo.active_branch.name))
def commit_and_push(branch: str = 'master', message: str = 'Auto commit'):
    """Stage every untracked and modified file, commit if anything was
    staged, then force-push the given branch to origin."""
    staged_anything = False
    for path in repo.untracked_files:
        print(f'Added untracked file: {path}')
        repo.git.add(path)
        staged_anything = True
    if repo.is_dirty():
        for path in repo.git.diff(None, name_only=True).split('\n'):
            if not path:
                continue
            print(f'Added file: {path}')
            repo.git.add(path)
            staged_anything = True
    if staged_anything:
        repo.git.commit('-m', message)
    # Push unconditionally so the remote branch always mirrors local state.
    repo.git.push('origin', branch, force=True)
@app.route('/')
def index():
    """Root endpoint: redirect visitors to the project README."""
    return redirect("https://github.com/PythonKnife/NewsAPI/raw/master/README.md")
def write_file(path, file_name, content, mode='a'):
    """Write *content* to *path*/*file_name*, creating the directory first.

    :param path: directory to write into (created if missing)
    :param file_name: file name inside *path*
    :param content: text to write
    :param mode: open() mode; defaults to 'a' (append)
    """
    # exist_ok avoids the check-then-create race the old
    # `if not os.path.exists(...)` guard had.
    os.makedirs(path, exist_ok=True)
    with open(os.path.join(path, file_name), mode) as f:
        f.write(content)
def update_top_headline():
    """Fetch top headlines for every category/country/language combination
    and cache each JSON response under top-headlines/category/."""
    for category in CATEGORIES:
        for (country, language) in COUNTRIES_LANGUAGES.items():
            print("Started category:{0} country:{1} language:{2} at :{3}".format(category, country, language,
                                                                                 time.strftime(
                                                                                     "%A, %d. %B %Y %I:%M:%S %p")))
            for lan in language:
                # A fresh client per request rotates through the API keys.
                newsapi = NewsApiClient(api_key=get_key())
                top_headlines = newsapi.get_top_headlines(category=category, country=country,
                                                          language=lan, page_size=100)
                if lan is None:
                    # No explicit language: file the result under the country code.
                    lan = country
                write_file("top-headlines/category/{0}/{1}/".format(category, country), "{0}.json".format(lan),
                           json.dumps(top_headlines))
def update_everything():
    """Fetch roughly the last day's articles for each configured source and
    cache each JSON response under everything/."""
    for (source, language) in SOURCES_LANGUAGE.items():
        print("Started source:{0} : {1}".format(source, time.strftime("%A, %d. %B %Y %I:%M:%S %p")))
        # A fresh client per request rotates through the API keys.
        newsapi = NewsApiClient(api_key=get_key())
        all_articles = newsapi.get_everything(sources=source,
                                              # Look back 1 day 5.5 hours so consecutive runs overlap.
                                              from_param=(datetime.now() - timedelta(days=1, hours=5,
                                                                                     minutes=30)).date().isoformat(),
                                              language=language,
                                              sort_by='publishedAt',
                                              page_size=100)
        write_file("everything/", "{0}.json".format(source), json.dumps(all_articles))
def update_data():
    """Scheduled job: refresh all cached API responses and push them to the
    data branch."""
    git_prepare()
    update_top_headline()
    update_everything()
    git_done()
scheduler = BackgroundScheduler()
# Refresh interval: 6 hours, expressed in minutes for the trigger below.
INTERVAL = 6 * 60
scheduler.add_job(func=update_data, trigger="interval", minutes=INTERVAL)
if not scheduler.running:
    scheduler.start()
# Shut down the scheduler when exiting the app
atexit.register(lambda: scheduler.shutdown())
|
"""
Copyright: Wenyi Tang 2017-2018
Author: Wenyi Tang
Email: wenyi.tang@intel.com
Created Date: Oct 15th 2018
Improved train/benchmark/infer script
Type --helpfull to get full doc.
"""
# Import models in development
try:
from Exp import *
except ImportError as ex:
pass
import tensorflow as tf
from importlib import import_module
from VSR.Tools import Run
from VSR.Tools import EvalModelCheckpoint, EvalDataDirectory
# --mode selects the tool: 'run' for train/benchmark/infer, 'eval' for evaluation.
tf.flags.DEFINE_enum("mode", 'run', ('run', 'eval'), "tools to use.")
FLAGS = tf.flags.FLAGS
def main(*args, **kwargs):
    """Dispatch to the Run tool or the eval tools depending on --mode.

    Functions named in --add_custom_callbacks are looked up in a local
    custom_api.py module and forwarded as extra keyword callbacks.
    """
    additional_functions = {}
    if FLAGS.add_custom_callbacks:
        # NOTE(review): --add_custom_callbacks is not defined in this file;
        # presumably declared inside VSR.Tools -- confirm.
        m = import_module('custom_api')
        for fn_name in FLAGS.add_custom_callbacks:
            try:
                # "name#comment" entries: anything after '#' is ignored.
                if '#' in fn_name:
                    fn_name = fn_name.split('#')[0]
                additional_functions[fn_name] = m.__dict__[fn_name]
            except KeyError:
                raise KeyError("Function [{}] couldn't be found in 'custom_api.py'".format(fn_name))
    if FLAGS.mode == 'run':
        # args[0] is argv; drop the program name.
        return Run.run(*args[0][1:], **additional_functions)
    if FLAGS.mode == 'eval':
        if FLAGS.checkpoint_dir:
            return EvalModelCheckpoint.evaluate()
        elif FLAGS.input_dir:
            return EvalDataDirectory.evaluate()
        print(("In mode 'eval', parse either '--checkpoint_dir' with '--model'"
               " or '--input_dir' to evaluate models, see details --helpfull"))
if __name__ == '__main__':
    # tf.app.run parses flags, then calls main(argv).
    tf.app.run(main)
|
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# ZTE.ZXA10.get_inventory
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Python modules
import re
# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetinventory import IGetInventory
class Script(BaseScript):
    """ZTE.ZXA10 inventory collector: reports the chassis plus every
    recognised card found via "show card"."""
    name = "ZTE.ZXA10.get_inventory"
    interface = IGetInventory
    # Maps the device's Real-Type card names onto NOC inventory types.
    # NOTE(review): an unknown Real-Type raises KeyError in execute_cli --
    # confirm this list covers all deployed card models.
    type = {"PRWGS": "PWR", "SCXN": "MAINBOARD", "GUSQ": "LINECARD", "VDWVD": "LINECARD"}
    # Chassis platform name, e.g. from get_version output.
    rx_platform = re.compile(r"^\d+\s+(?P<platform>\S+)MBRack\s+.+\n", re.MULTILINE)
    # Card type and (possibly empty) serial number from "show card" output.
    rx_card = re.compile(
        r"^Real-Type\s+:\s+(?P<type>\S+)\s+Serial-Number\s+:(?P<serial>.*)\n", re.MULTILINE
    )
    # Hardware revision from the card detail section.
    rx_detail = re.compile(
        r"^M-CODE\s+:\s+\S+\s+Hardware-VER\s+:\s+(?P<hardware>\S+)\s*\n", re.MULTILINE
    )
    def execute_cli(self):
        # Chassis entry first; platform name comes from get_version.
        v = self.scripts.get_version()
        r = [{"type": "CHASSIS", "vendor": "ZTE", "part_no": [v["platform"]]}]
        ports = self.profile.fill_ports(self)
        for p in ports:
            v = self.cli("show card shelfno %s slotno %s" % (p["shelf"], p["slot"]))
            match = self.rx_card.search(v)
            if not match:
                # Empty slot or unparseable output: skip it.
                continue
            i = {
                "type": self.type[match.group("type")],
                "number": p["slot"],
                "vendor": "ZTE",
                "part_no": [match.group("type")],
            }
            # Serial is optional; only record a non-blank value.
            if match.group("serial").strip():
                i["serial"] = match.group("serial").strip()
            match = self.rx_detail.search(v)
            if match:
                i["revision"] = match.group("hardware")
            r += [i]
        return r
|
from __future__ import absolute_import, print_function
import sys
if sys.version_info < (3, 0):
import __builtin__ as builtins
else:
import builtins
from collections import namedtuple
# TODO : use basemsg and develop a generic way of converting python type to custom type
# Version-specific conversion tables: which python builtin type names a given
# source type may be converted to, plus the sets of accepted primitive and
# container types.
if sys.version_info < (3, 0):
    # type map specifying conversion
    # basic types should be python builtin types
    # if not in there : assumed to be a custom type, convertible to a dict.
    type_map = {
        "bool": ["bool"],
        "int": ["int", "float"],  # can convert int to float
        "float": ["float"],
        "str": ["str", "unicode"],  # convert str to unicode to follow python 3 default behavior
        "unicode": ["unicode"],
        "list": ["list"],
        "tuple": ["tuple", "list"],  # can convert tuple to list
    }
    # Starting with whatever ROS needs, but we could extend this
    primitive_types = [bool, int, long, float, str, unicode]
    composed_types = [list, tuple]
else:  # python3
    # type map specifying conversion
    # basic types should be python builtin types
    # if not in there : assumed to be a custom type, convertible to a dict.
    type_map = {
        "bool": ["bool"],
        "int": ["int", "float"],  # can convert int to float
        "float": ["float"],
        "bytes": ["bytes", "str"],  # bytes can be converted to str
        "str": ["str"],
        "list": ["list"],
        "tuple": ["tuple", "list"],  # can convert tuple to list
    }
    primitive_types = [bool, int, float, bytes, str]
    composed_types = [list, tuple]
# defining Mock message types using namedtuple to keep things small
StatusMsg = namedtuple("StatusMsg", "error code message")
# defining Exceptions
class NonexistentFieldException(Exception):
    """Raised when a dict's keys don't match the target message type's fields."""
    def __init__(self, oridict, basetype, message):
        detail = "Trying to convert {0!s} to Message type {1!s} triggered {2!s}".format(
            oridict, basetype, message)
        super(NonexistentFieldException, self).__init__(detail)
class FieldTypeMismatchException(Exception):
    """Raised when a value's type cannot be converted to the expected type."""
    def __init__(self, roottype, fields, expected_type, found_type):
        if roottype == expected_type:
            detail = "Expected a Python object for type {0!s} but received a {1!s}".format(
                roottype, found_type)
        else:
            detail = "{0!s} message requires a {1!s} for field {2!s}, but got a {3!s}".format(
                roottype, expected_type, '.'.join(fields), found_type)
        super(FieldTypeMismatchException, self).__init__(detail)
def extract_values(inst):
    """
    :param inst: the instance
    :return: python values extracted from the instance

    In this plain-python mock implementation the instance is already built
    from python values, so it is returned unchanged.
    """
    # inst should already be python
    return inst
def populate_instance(msg, inst):
    """
    :param msg: contains the values to use to populate inst.
    :param inst: message class instance to populate.
    :return: an instance of the provided message class, with its fields
        populated according to the values in msg
    """
    target_type = type(inst).__name__
    return _to_inst(msg, target_type, target_type, inst)
def _to_inst(msg, ptype, roottype, inst=None, stack=None):
    """Recursively convert *msg* into an instance of the type named *ptype*.

    :param msg: value to convert (primitive, list/tuple, or dict for custom
        namedtuple-style message types)
    :param ptype: name of the target type at this level of recursion
    :param roottype: name of the top-level target type (for error messages)
    :param inst: optional pre-existing instance; only returned for custom types
    :param stack: field-path context passed through for error reporting
    :raises FieldTypeMismatchException: when msg's type can't be converted
    :raises NonexistentFieldException: when a dict's keys don't match the
        target type's constructor fields
    """
    # Check to see whether this is a primitive type
    if stack is None:
        stack = []
    if ptype in builtins.__dict__ and (
        builtins.__dict__[ptype] in primitive_types or
        builtins.__dict__[ptype] in composed_types
    ):
        # Typecheck the msg
        msgtype = type(msg)
        if msgtype in primitive_types and ptype in type_map[msgtype.__name__]:
            # Allowed primitive conversion (e.g. int -> float).
            return builtins.__dict__[ptype](msg)
        elif msgtype in composed_types and ptype in type_map[msgtype.__name__]:
            # Call to _to_inst for every element of the list/tuple
            def recurse_iter(msg):
                for e in msg:  # we do this with yield to get an iteratable and build the tuple/list at once
                    yield _to_inst(e, type(e).__name__, roottype, None, stack)
            return builtins.__dict__[ptype](recurse_iter(msg))
        raise FieldTypeMismatchException(roottype, stack, ptype, msgtype)
    # Otherwise, the type has to be a custom type, so msg must be a dict
    if type(msg) is not dict:
        raise FieldTypeMismatchException(roottype, stack, ptype, type(msg))
    # and ptype should be able to build with same fields.
    # modifying dict with dict comprehension
    instmsg = dict((k, _to_inst(v, type(v).__name__, roottype, None, stack)) for k, v in msg.items())
    # using ** to get dict content as named args for the namedtuple
    try:
        # The custom type is looked up in this module's globals (e.g. StatusMsg).
        inst = globals()[ptype](**instmsg)
    except TypeError as e:
        raise NonexistentFieldException(msg, ptype, e)
    return inst
|
# coding: utf-8
from fabric.api import *
# from fab_deploy.db import mysql
from fab_deploy import pip, utils, system, crontab, vcs, db
|
import numpy as np
import sympy as sp
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
def gauss(x, y):
    """Bivariate normal density at (x, y), built from the module-level
    height/weight moments (mean_h, var_h, std_h, mean_w, var_w, std_w,
    corr_hw)."""
    one_minus_r2 = 1 - corr_hw**2
    quad = ((x - mean_h)**2 / var_h
            + (y - mean_w)**2 / var_w
            - 2 * corr_hw * (x - mean_h) * (y - mean_w) / (std_h * std_w))
    exponent = -1.0 / (2 * one_minus_r2) * quad
    norm = 2 * np.pi * std_h * std_w * np.sqrt(one_minus_r2)
    return 1.0 / norm * np.exp(exponent)
# Fixes: np.float (an alias of the builtin, removed in NumPy 1.24) and
# mlab.bivariate_normal (removed from matplotlib 3.1); the previously
# unused local gauss() computes the identical bivariate normal density.
# Importing dataset
## Dataset path
dataset_path = 'data/whData.dat'
## Column names
dt = np.dtype([('w', float), ('h', float), ('g', np.str_, 1)])
## Loading the dataset
dataset = np.loadtxt(dataset_path, dtype=dt, comments='#', delimiter=None)
## Loading dataset without outliers
dataset_without_outliers = dataset[np.where(dataset['w'] > 0)]
## Loading dataset outliers (w < 0)
dataset_outliers = dataset[np.where(dataset['w'] < 0)]
# Heights from the training data
train_data = dataset_without_outliers['h']
# Weights from the training data
train_labels = dataset_without_outliers['w']
# Heights to predict their corresponding weights
test_data = dataset_outliers['h']
# Calculate the Covariance matrix
covariance_matrix = np.cov(np.array([train_data, train_labels]))
## Heights mean
mean_h = np.mean(train_data)
## Heights variance
var_h = covariance_matrix[0, 0]
## Heights standard deviation
std_h = np.sqrt(var_h)
## Weight mean
mean_w = np.mean(train_labels)
## Weight variance
var_w = covariance_matrix[1, 1]
## Weights standard deviation
std_w = np.sqrt(var_w)
## Correlation coefficient between heights and weights
corr_hw = covariance_matrix[0, 1] / (std_h * std_w)
# Calculate (predict) the corresponding weight for given height
# (linear regression line derived from the bivariate normal model).
f = lambda x: mean_w + corr_hw * std_w / std_h * (x - mean_h)
# Plot data
plt.plot(train_data, train_labels, 'k.', label='Data')
# Plot predicted value
plt.plot(test_data, f(test_data), 'r.', label='Predictions')
# Plot the model
## Grid XY points to build contour
x = np.linspace(np.amin(train_data)-5, np.amax(train_data)+5, 1000)
y = np.linspace(np.amin(train_labels)-5, np.amax(train_labels)+5, 1000)
X, Y = np.meshgrid(x, y)
## gauss() evaluates the same density mlab.bivariate_normal used to return.
zi = gauss(X, Y)
## Contour the gridded data
plt.contour(x, y, zi)
plt.xlim(np.amin(train_data)-5, np.amax(train_data)+5)
plt.ylim(np.amin(train_labels)-5, np.amax(train_labels)+5)
plt.legend(loc='upper left')
plt.savefig("out/task22/bivariate_gaussian.png", bbox_inches="tight", pad_inches=0)
plt.show()
|
from ._base import PatternBase
from ._next import Next
from ._inspect import is_iterable
class Chord(PatternBase):
    """
    Simply put the input into a list: each iteration step yields the whole
    collection at once rather than one element at a time.
    """
    def __init__(self, values):
        # values: any iterable of pattern values.
        assert is_iterable(values)
        # Placeholder only; presumably PatternBase.__init__ assigns the
        # `values` kwarg onto self -- TODO confirm, otherwise iterate()
        # would fail on None.
        self.values = None
        super().__init__(values=values)
    def iterate(self):
        # Yield a fresh list so consumers cannot mutate self.values.
        yield list(self.values)
|
"""
MINIMUM CloudBolt version required: v9.2
This Plug-in configures servers for RKE, deploys a Kubernetes cluster with
`rke up`, and creates the required CloudBolt objects to manage the deployed
cluster from CloudBolt.
"""
import os
import time
import yaml
from common.methods import set_progress
from containerorchestrators.models import ContainerOrchestratorTechnology
from containerorchestrators.kuberneteshandler.models import Kubernetes
from infrastructure.models import CustomField, Environment, Namespace
from resources.models import Resource
from utilities.exceptions import CommandExecutionException
from utilities.logger import ThreadLogger
from utilities import run_command
import settings
# set this if `rke` is not in your PATH
PATH_TO_RKE_EXECUTABLE = '/var/opt/cloudbolt/kubernetes/bin/rke'
# Module-level logger shared by every helper in this plug-in.
logger = ThreadLogger(__name__)
def create_ssh_keypair(size=2048):
    """
    Make a new ssh keypair of a given size
    :param: size (optional, defaults to 2048). How many bits large should the key be?
    :return: UTF-8 strings representing the public key and private key in that order
    """
    # Imported lazily; only this helper needs the cryptography package.
    from cryptography.hazmat.primitives import serialization as crypto_serialization
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.hazmat.backends import default_backend as crypto_default_backend
    # 65537 is the conventional RSA public exponent.
    key = rsa.generate_private_key(
        backend=crypto_default_backend(),
        public_exponent=65537,
        key_size=size,
    )
    # Private half: PEM-encoded PKCS#8, unencrypted (the caller writes it to
    # disk and embeds it in cluster.yml).
    private_key = key.private_bytes(
        crypto_serialization.Encoding.PEM,
        crypto_serialization.PrivateFormat.PKCS8,
        crypto_serialization.NoEncryption())
    # Public half in OpenSSH authorized_keys format.
    public_key = key.public_key().public_bytes(
        crypto_serialization.Encoding.OpenSSH,
        crypto_serialization.PublicFormat.OpenSSH
    )
    return public_key.decode('utf-8'), private_key.decode('utf-8')
def generate_rke_yaml(ips, user, ssh_private_key):
    """
    Make the YAML file that RKE is going to use to create the kubernetes cluster
    :param ips: What IP addresses are we working with? Array of strings
    :param user: What user should we create?
    :param ssh_private_key: Private key text (utf-8) to include in yaml
    :return: string with the formatted yaml in it
    """
    # One node entry per IP; roles get filled in below.
    nodes = []
    for ip in ips:
        nodes.append(
            {
                'address': ip,
                'user': user,
                'ssh_key': ssh_private_key,
                'role': [],
            },
        )
    # Assign controlplane and etcd round-robin across the first nodes
    # (a single-node cluster gets both on node 0).
    for i, role in enumerate(['controlplane', 'etcd']):
        node_idx = i % len(nodes)
        nodes[node_idx]['role'].append(role)
    # Every node still without a role becomes a worker.
    for node in nodes:
        if len(node['role']) == 0:
            node['role'].append('worker')
    worker_count = 0
    for node in nodes:
        if 'worker' in node['role']:
            worker_count += 1
    # Guarantee at least one worker: prefer non-controlplane nodes, fall
    # back to doubling up roles on the first node.
    if worker_count == 0:
        for node in nodes:
            if 'controlplane' not in node['role']:
                node['role'].append('worker')
                worker_count += 1
        if worker_count == 0:
            nodes[0]['role'].append('worker')
    etcd_count = 0
    for node in nodes:
        if 'etcd' in node['role']:
            etcd_count += 1
    # etcd wants a quorum of 3; add members from plain workers until we have
    # 3 or run out of candidates.
    if etcd_count < 3:
        for node in nodes:
            if 'etcd' not in node['role'] and 'controlplane' not in node['role']:
                node['role'].append('etcd')
                etcd_count += 1
                if etcd_count >= 3:
                    break
    # Pin the component images RKE should deploy.
    services = {
        'etcd': {'image': 'quay.io/coreos/etcd:latest'},
        'kube-api': {'image': 'rancher/k8s:v1.11.6-rancher2'},
        'kube-controller': {'image': 'rancher/k8s:v1.11.6-rancher2'},
        'scheduler': {'image': 'rancher/k8s:v1.11.6-rancher2'},
        'kubelet': {'image': 'rancher/k8s:v1.11.6-rancher2'},
        'kubeproxy': {'image': 'rancher/k8s:v1.11.6-rancher2'},
    }
    # Addons applied after the cluster comes up: a cloudbolt-admin service
    # account bound to the cluster-admin role, which CloudBolt uses to
    # manage the cluster.
    addons = [
        {
            "apiVersion": "v1",
            "kind": "ServiceAccount",
            "metadata": {
                "name": "cloudbolt-admin",
                "namespace": "kube-system"
            }
        },
        {
            "apiVersion": "rbac.authorization.k8s.io/v1",
            "kind": "ClusterRoleBinding",
            "metadata": {
                "name": "cloudbolt-admin"
            },
            "roleRef": {
                "apiGroup": "rbac.authorization.k8s.io",
                "kind": "ClusterRole",
                "name": "cluster-admin"
            },
            "subjects": [
                {
                    "kind": "ServiceAccount",
                    "name": "cloudbolt-admin",
                    "namespace": "kube-system"
                }
            ]
        },
    ]
    document = {
        'nodes': nodes,
        'services': services,
        # RKE expects the addons value to be a YAML multi-document string.
        'addons': '---\n' + yaml.dump_all(addons),
    }
    return yaml.dump(document)
def find_all_server_ips(blueprint_context):
    """
    Given the blueprint context, collect the IP address of every server.
    :param blueprint_context:
    :return: list of ip addresses represented as strings
    """
    # Skip servers with no `ip` attribute, a None ip, or an empty string.
    return [
        server.ip
        for server in find_all_servers(blueprint_context)
        if getattr(server, 'ip', None) not in (None, '')
    ]
def find_all_servers(blueprint_context):
    """
    Given the blueprint context, find all servers
    :param blueprint_context:
    :return: iterable (yield) over Server objects
    """
    # Each tier in the context is a dict that may carry a 'servers' queryset.
    for entry in blueprint_context.values():
        if isinstance(entry, dict) and 'servers' in entry:
            for server in entry['servers']:
                yield server
def prepare_server_hosts(user, blueprint_context, ssh_public_key):
    """Install docker/firewalld on every server, create the RKE user with
    key-based ssh access, open the ports Kubernetes needs, then reboot the
    servers and wait for them to come back online."""
    # Host-prep shell script: each step aborts the script (`|| exit 1`) on
    # failure; IPv6 forwarding is required by kubernetes networking.
    docker_script = 'yum -y install docker firewalld || exit 1;\n' + \
                    'systemctl enable docker || exit 1;\n' + \
                    'useradd {};\n'.format(user) + \
                    'groupadd docker\n' + \
                    'usermod -aG docker {} || exit 1\n'.format(user) + \
                    'mkdir -p /home/{}/.ssh || exit 1;\n'.format(user) + \
                    'echo \'{}\' >> /home/{}/.ssh/authorized_keys || exit 1;\n'.format(ssh_public_key, user) + \
                    'chown -R {} /home/{}/.ssh || exit 1;\n'.format(user, user) + \
                    'chmod 755 /home/{}/.ssh || exit 1;\n'.format(user) + \
                    'chmod 644 /home/{}/.ssh/authorized_keys || exit 1;\n'.format(user) + \
                    'echo \'net.ipv6.conf.all.forwarding=1\' >> /etc/sysctl.conf || exit 1\n' + \
                    'sysctl -p /etc/sysctl.conf || exit 1'
    # See https://github.com/coreos/coreos-kubernetes/blob/master/Documentation/kubernetes-networking.md
    # for official documentation on kubernetes networking and port usage.
    for tcp_port_num in [80, 443, 10250, 2379, 2380, 6443]:
        docker_script += '\nfirewall-offline-cmd --add-port={}/tcp || exit 1;'.format(tcp_port_num)
    for udp_port_num in [8285, 8472]:
        docker_script += '\nfirewall-offline-cmd --add-port={}/udp || exit 1;'.format(udp_port_num)
    docker_script += '\nsystemctl restart firewalld;\n'
    logger.info(f"docker script:\n{docker_script}")
    for server in find_all_servers(blueprint_context=blueprint_context):
        set_progress("Starting script execution on server {}".format(server.ip))
        try:
            server.execute_script(script_contents=docker_script, timeout=700)
        except CommandExecutionException:
            # Some images need elevated privileges for yum/systemctl.
            set_progress("Failed to run command. Trying again with `sudo`.")
            server.execute_script(script_contents=docker_script, timeout=700, run_with_sudo=True)
        server.reboot()
    set_progress("Waiting for server(s) to begin reboot.")
    time.sleep(10)
    # Block until every host's OS is reachable again post-reboot.
    for server in find_all_servers(blueprint_context=blueprint_context):
        server.wait_for_os_readiness()
def kubernetes_up(cluster_path):
    """Run `rke up` against <cluster_path>/cluster.yml to deploy the cluster.

    Falls back to a bare `rke` (resolved via PATH) when no explicit
    executable path is configured.
    """
    rke = PATH_TO_RKE_EXECUTABLE or "rke"
    cmd = f"{rke} up --config={cluster_path}/cluster.yml"
    run_command.execute_command(cmd, timeout=900, stream_title="Running rke up")
def create_cb_objects(resource_id):
    """
    Create the corresponding ContainerOrchestrator and Environment for the
    newly deployed cluster.
    We read in the certificates returned by RKE and store them on the
    ContainerOrchestrator to auth future requests to Kubernetes.
    """
    ip = None
    protocol = None
    port = None
    ca_cert = None
    cert_data = None
    key_data = None
    # kube_config_cluster.yml is written by `rke up` next to cluster.yml.
    kube_config_yml = os.path.join(settings.VARDIR, "opt", "cloudbolt", "kubernetes", f"resource-{resource_id}", "kube_config_cluster.yml")
    with open(kube_config_yml) as file:
        documents = yaml.full_load(file)
        for item, doc in documents.items():
            if item == "clusters":
                # API endpoint, e.g. "https://1.2.3.4:6443".
                control_plane = doc[0]["cluster"]
                server = control_plane["server"]
                protocol, address = server.split('://')
                ip, port = address.split(':')
                ca_cert = control_plane["certificate-authority-data"]
            elif item == "users":
                # Client certificate pair used to authenticate to the API.
                user = doc[0]["user"]
                cert_data = user["client-certificate-data"]
                key_data = user["client-key-data"]
    corch_technology = ContainerOrchestratorTechnology.objects.get(name="Kubernetes")
    kubernetes_data = {
        "name": "Cluster-{}".format(resource_id),
        "ip": ip,
        "protocol": protocol,
        "port": port,
        "auth_type": "CERTIFICATE",
        "cert_file_contents": cert_data,
        "key_file_contents": key_data,
        "ca_file_contents": ca_cert,
        "container_technology": corch_technology,
    }
    container_orchestrator = Kubernetes.objects.create(**kubernetes_data)
    # A dedicated Environment lets users deploy onto the new cluster.
    Environment.objects.create(name="Resource-{} Environment".format(resource_id), container_orchestrator=container_orchestrator)
    # Link the provisioned resource back to its orchestrator (see
    # create_required_parameters for the backing CustomField).
    resource = Resource.objects.get(id=resource_id)
    resource.container_orchestrator_id = container_orchestrator.id
    resource.save()
def create_required_parameters():
    """
    Ensure the container_orchestrator_id CustomField exists.

    We create the containerorchestrator namespace, to keep this CF from
    adding noise to the Parameters list page.
    """
    # `created` flag is not needed; `_` marks it deliberately unused.
    namespace, _ = Namespace.objects.get_or_create(name='containerorchestrators')
    CustomField.objects.get_or_create(
        name='container_orchestrator_id',
        defaults=dict(
            label="Container Orchestrator ID",
            # Bug fix: the implicit string concatenation previously produced
            # "...resourceto the..." (missing space).
            description=("Used by the Multi-Node Kubernetes Blueprint. Maps the provisioned CloudBolt resource "
                         "to the Container Orchestrator used to manage the Kubernetes cluster."),
            type="INT",
            namespace=namespace,
        )
    )
def run(job, *_args, **kwargs):
    """
    main entry point for the plugin
    """
    create_required_parameters()
    user = 'cbrke'
    blueprint_context = kwargs.get('blueprint_context', {})
    # Generate a fresh keypair; the public half is installed on each host
    # and the private half goes into cluster.yml for RKE's ssh access.
    ssh_public_key, ssh_private_key = create_ssh_keypair()
    prepare_server_hosts(user, blueprint_context, ssh_public_key)
    resource_id = job.parent_job.resource_set.first().id
    cluster_path = os.path.join(settings.VARDIR, "opt", "cloudbolt", "kubernetes", f"resource-{resource_id}")
    # NOTE(review): makedirs without exist_ok raises FileExistsError if this
    # resource id was deployed before -- confirm whether re-runs are expected.
    os.makedirs(cluster_path)
    with open(f"{cluster_path}/rke_private_key.pem", 'w') as fl:
        fl.write(ssh_private_key)
    with open(f"{cluster_path}/rke_public_key.pem", 'w') as fl:
        fl.write(ssh_public_key)
    ips = find_all_server_ips(kwargs.get('blueprint_context', {}))
    rke_yaml_text = generate_rke_yaml(ips, user, ssh_private_key)
    cluster_yml_name = "cluster.yml"
    with open(f"{cluster_path}/{cluster_yml_name}", 'w') as fl:
        fl.write(rke_yaml_text)
    set_progress(f"Your ssh public and private keys have been generated. Please find them in {cluster_path}")
    set_progress(f"Your RKE cluster.yml file has been generated. Please find it in {cluster_path}/{cluster_yml_name}")
    set_progress(f"Running RKE Config...")
    kubernetes_up(cluster_path)
    set_progress(f"Creating CB objects...")
    create_cb_objects(resource_id)
    return "SUCCESS", f"./kubernetes up --config={cluster_path}/cluster.yml", ""
import pickle
class ResultWrapper:
    """Persists a task's result on disk as a pickle at `<path>.pkl`."""

    def __init__(self, parent_task, path):
        self.parent_task = parent_task
        self.result_path = path

    def set(self, result):
        # Todo Save numpy, pytorch and pandas using custom serialization
        with open(self.result_path + '.pkl', 'wb') as handle:
            pickle.dump(result, handle)

    def get(self):
        # Handle when not on disk (task run on another node)
        with open(self.result_path + '.pkl', 'rb') as handle:
            return pickle.load(handle)

    @property
    def sge_job_id(self):
        """SGE job id of the task that produced this result."""
        return self.parent_task.sge_job_id
class LocalResult(ResultWrapper):
    """In-memory result for tasks run locally (no SGE job, nothing on disk)."""

    def __init__(self, result):
        # Deliberately skips ResultWrapper.__init__: a local result has no
        # parent task or on-disk path.
        self.result = result

    def get(self):
        return self.result

    def set(self, result):
        self.result = result

    @property
    def sge_job_id(self):
        # Bug fix: `raise NotImplemented(...)` raised a TypeError because
        # NotImplemented is a sentinel value, not an exception type;
        # NotImplementedError is the correct exception.
        raise NotImplementedError("Local results have no SGE job id.")
|
#!/usr/bin/env python
# encoding: utf-8
'''Functions for feature transformations'''
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import sklearn as skl
from common.functions import vec, row
from pylab import *
def get_one_hot_encoding(x):
    '''
    INPUT:
        x: pandas series
    OUTPUT:
        pandas dataframe df with one-hot encoded values of x
    COMMENTS:
        df preserves index of x.
    EXAMPLE:
        A=pd.Series(['new','old','new','old','moderate','moderate'],index=[11,12,13,14,15,16],name='status')
        TRANSFORMS TO:
            status=moderate  status=new  status=old
        11                0           1           0
        12                0           0           1
        13                0           1           0
        14                0           0           1
        15                1           0           0
        16                1           0           0
    Author: Victor Kitov (v.v.kitov@yandex.ru), 03.2016.
    '''
    assert x.name is not None, 'Series should contain name!'
    # Fix: the old implementation used sklearn's OneHotEncoder and its
    # `active_features_` attribute, which modern sklearn removed, so the
    # function crashed.  pd.get_dummies produces the same encoding: columns
    # named "<name>=<value>" ordered by sorted unique value (matching the
    # previous np.unique ordering), index preserved.
    encoded = pd.get_dummies(x, prefix=x.name, prefix_sep='=')
    return encoded.astype(np.int8)
def get_probability_encoding(feature,output,fill_nans=True, missing_value=None):
    '''
    Input:
        feature - feature (pandas series)
        output - class output (pandas series)
        fill_nans - fill nan rows (undefined probabilities) with general class priors
    Output:
        pandas dataframe of probabilities of all classes given each value of feature
    example1:
        get_probability_encoding(f=[10,10,20,20,30,30,nan,nan,10,20,30],Y=[0,0,1,1,0,1,0,1,nan,nan,nan], fill_nans=False)
        returns:
            (array([[ 1. ,  0. ],
                    [ 1. ,  0. ],
                    [ 0. ,  1. ],
                    [ 0. ,  1. ],
                    [ 0.5,  0.5],
                    [ 0.5,  0.5],
                    [ nan,  nan],
                    [ nan,  nan],
                    [ 1. ,  0. ],
                    [ 0. ,  1. ],
                    [ 0.5,  0.5]]), array([ 0.,  1.]))
    example2:
        get_probability_encoding(f=['a','a','b','b','c','c','?','?','a','b','c'],Y=[0,0,1,1,0,1,0,1,nan,nan,nan], fill_nans=False, missing_value='?')
        returns:
            (array([[ 1. ,  0. ],
                    [ 1. ,  0. ],
                    [ 0. ,  1. ],
                    [ 0. ,  1. ],
                    [ 0.5,  0.5],
                    [ 0.5,  0.5],
                    [ nan,  nan],
                    [ nan,  nan],
                    [ 1. ,  0. ],
                    [ 0. ,  1. ],
                    [ 0.5,  0.5]]), array([ 0.,  1.]))
    Author: Victor Kitov (v.v.kitov@yandex.ru), 03.2016.'''
    f = feature.values
    Y = output.values
    assert len(f)==len(Y), 'length on features and length of outputs should be the same.'
    assert all(feature.index==output.index),'feature.index should be equal to output.index'
    assert hasattr(feature,'name'),'feature should be pandas Series with name property.'
    # NOTE(review): ones/unique/isnan/nan/is_numlike come from the
    # `from pylab import *` at module top; matplotlib removed is_numlike in
    # 3.3 -- confirm the pinned matplotlib version still provides it.
    if (missing_value==None): # no account for missing values
        valid_f_sels = ones(len(f),dtype=bool)
    elif is_numlike(missing_value) and isnan(missing_value):
        valid_f_sels = ~isnan(f)
    else:
        valid_f_sels = (f!=missing_value)
    feature_vals = unique(f[valid_f_sels])
    Y_vals,counts = np.unique(Y[~isnan(Y)], return_counts=True) # Y_vals correspond to columns of output matrix X
    # Class priors, used to fill undefined probabilities when fill_nans=True.
    y_probs = counts/np.sum(counts)
    X = np.zeros( (len(Y),len(Y_vals)) )
    # Rows with a missing feature value get undefined probabilities.
    X[~valid_f_sels,:]=nan
    f_values_y_not_defined = set(f[isnan(Y)])-set(f[~isnan(Y)]) # a set of feature values that appear only when Y is nan
    for f_val in f_values_y_not_defined:
        X[f==f_val,:]=nan
    # Conditional class distribution for each observed feature value.
    for feature_val in feature_vals:
        (Y_cond_vals,Y_cond_counts) = unique(Y[~isnan(Y) & (f==feature_val)], return_counts=True)
        Y_cond_probs = Y_cond_counts/sum(Y_cond_counts)
        for Y_cond_val,Y_cond_prob in zip(Y_cond_vals,Y_cond_probs):
            X[f==feature_val, Y_vals==Y_cond_val] = Y_cond_prob
    if fill_nans:
        # Replace undefined probabilities with the global class priors.
        for i in range(X.shape[1]):
            X[np.isnan(X[:,i]),i]=y_probs[i]
    return pd.DataFrame(X,index=feature.index, columns=['P(y=%s|%s)'%(y_val, feature.name) for y_val in Y_vals])
|
# User-facing error/status messages for DataSetInfo lookup and profiling.
ERR_MSG_DATASET_INFO_NOT_FOUND = 'DataSetInfo object not found'
ERR_MSG_INVALID_DATASET_INFO_OBJECT_ID = 'Invalid DataSetInfo object id.'
ERR_MSG_DATASET_INFO_NOT_FOUND_CURRENT_USER = ('DataSetInfo object not found'
                                               ' for the current user.')
ERR_MSG_FAILED_TO_READ_DATASET = 'Failed to read the dataset.'
ERR_MSG_DATASET_POINTER_NOT_SET = 'In order to profile the data, the "dataset_pointer" must be set.'
ERR_MSG_DATASET_LOCKED_BY_ANOTHER_USER = 'This Dataverse file is locked by another user.'
# Placeholder shown when an optional value was not provided.
MSG_VAL_NOT_SPECIFIED = '(not specified)'
from django.contrib.auth.decorators import login_required
from django.urls import path, include
from .views import *
# URL routes: allauth handles everything under auth/; the settings page
# requires login; profile pages are public and keyed by username.
urlpatterns = [
    path('auth/', include('allauth.urls')),
    path('settings', login_required(SettingsView.as_view()), name="user.settings"),
    path('user/<username>', ProfileView.as_view(), name="user.profile"),
]
|
from eth2spec.test.context import spec_test, with_phases
from eth2spec.test.helpers.deposits import (
prepare_genesis_deposits,
)
def create_valid_beacon_state(spec):
    """Build a genesis beacon state that satisfies the spec's validity checks.

    Uses exactly MIN_GENESIS_ACTIVE_VALIDATOR_COUNT signed deposits at
    MAX_EFFECTIVE_BALANCE, a fixed eth1 block hash, and MIN_GENESIS_TIME.
    """
    num_deposits = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT
    genesis_deposits, _ = prepare_genesis_deposits(
        spec, num_deposits, spec.MAX_EFFECTIVE_BALANCE, signed=True)
    block_hash = b'\x12' * 32
    return spec.initialize_beacon_state_from_eth1(
        block_hash, spec.MIN_GENESIS_TIME, genesis_deposits)
def run_is_valid_genesis_state(spec, state, valid=True):
    """
    Run ``is_valid_genesis_state``, yielding:
    - genesis ('state')
    - is_valid ('is_valid')

    Asserts that the spec's verdict matches the expected ``valid`` flag.
    """
    yield 'genesis', state
    is_valid = spec.is_valid_genesis_state(state)
    yield 'is_valid', is_valid
    assert is_valid == valid
@with_phases(['phase0'])
@spec_test
def test_is_valid_genesis_state_true(spec):
    """A correctly constructed genesis state is valid."""
    state = create_valid_beacon_state(spec)
    yield from run_is_valid_genesis_state(spec, state, valid=True)
@with_phases(['phase0'])
@spec_test
def test_is_valid_genesis_state_false_invalid_timestamp(spec):
    """Genesis time below MIN_GENESIS_TIME invalidates the state."""
    state = create_valid_beacon_state(spec)
    state.genesis_time = spec.MIN_GENESIS_TIME - 1
    yield from run_is_valid_genesis_state(spec, state, valid=False)
@with_phases(['phase0'])
@spec_test
def test_is_valid_genesis_state_true_more_balance(spec):
    """Extra balance on one validator does not invalidate the state."""
    state = create_valid_beacon_state(spec)
    state.validators[0].effective_balance = spec.MAX_EFFECTIVE_BALANCE + 1
    yield from run_is_valid_genesis_state(spec, state, valid=True)
# TODO: not part of the genesis function yet. Erroneously merged.
# @with_phases(['phase0'])
# @spec_test
# def test_is_valid_genesis_state_false_not_enough_balance(spec):
#     state = create_valid_beacon_state(spec)
#     state.validators[0].effective_balance = spec.MAX_EFFECTIVE_BALANCE - 1
#
#     yield from run_is_valid_genesis_state(spec, state, valid=False)
@with_phases(['phase0'])
@spec_test
def test_is_valid_genesis_state_true_one_more_validator(spec):
    """One validator above the minimum count still yields a valid state."""
    deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT + 1
    deposits, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True)
    eth1_block_hash = b'\x12' * 32
    eth1_timestamp = spec.MIN_GENESIS_TIME
    state = spec.initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits)
    yield from run_is_valid_genesis_state(spec, state, valid=True)
@with_phases(['phase0'])
@spec_test
def test_is_valid_genesis_state_false_not_enough_validator(spec):
    """One validator below the minimum count yields an invalid state."""
    deposit_count = spec.MIN_GENESIS_ACTIVE_VALIDATOR_COUNT - 1
    deposits, _ = prepare_genesis_deposits(spec, deposit_count, spec.MAX_EFFECTIVE_BALANCE, signed=True)
    eth1_block_hash = b'\x12' * 32
    eth1_timestamp = spec.MIN_GENESIS_TIME
    state = spec.initialize_beacon_state_from_eth1(eth1_block_hash, eth1_timestamp, deposits)
    yield from run_is_valid_genesis_state(spec, state, valid=False)
|
"""
log utils
"""
from ytdlmusic.params import is_verbose
def print_debug(message):
    """Print ``message`` prefixed with "[debug] ", only when --verbose is set."""
    if not is_verbose():
        return
    print("[debug] " + message)
|
# Ivan Carvalho
# Solution to https://www.urionlinejudge.com.br/judge/problems/view/1087
#!/usr/bin/env python2.7
# encoding : utf-8
# Reads point pairs (a,b) -> (c,d) and prints 0 when they are equal, 1 when
# they share a row/column or lie on a common diagonal, and 2 otherwise.
# NOTE: Python 2 syntax (raw_input / print statement) -- not runnable on Python 3.
while True:
    a,b,c,d = [int(i) for i in raw_input().split(" ")]
    diferencax = abs(a-c)
    diferencay = abs(b-d)
    # Sentinel line "0 0 0 0" terminates input.
    if a == 0 and b == 0 and c== 0 and d == 0:
        break
    else:
        if a == c and b == d:
            print 0
        elif a==c or b == d:
            print 1
        elif diferencax == diferencay :
            print 1
        else:
            print 2
|
#
# Copyright 2020-2021 Hewlett Packard Enterprise Development LP
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# (MIT License)
import logging
import sys
from time import sleep
from bos.reporter.client import requests_retry_session
from bos.reporter.node_identity import read_identity
from bos.reporter.components.state import report_state, BOSComponentException, UnknownComponent
from bos.reporter.proc_cmdline import get_value_from_proc_cmdline
# Configure Project Level Logging options when invoked through __main__;
# This allows the whole project to log from their source when invoked through
# __main__, but does not populate standard out streaming when the code
# is imported by other tooling.
# Log level comes from the kernel command line ('bos_log_level'); fall back to
# WARN when the parameter is absent or does not name a logging level.
try:
    LOG_LEVEL = get_value_from_proc_cmdline('bos_log_level')
    LOG_LEVEL = getattr(logging, LOG_LEVEL.upper(), logging.WARN)
except KeyError:
    LOG_LEVEL = logging.WARN
PROJECT_LOGGER = logging.getLogger('BOS')
LOGGER = logging.getLogger('bos.reporter.status_reporter')
LOGGER.setLevel(LOG_LEVEL)
# Stream all records from the project-level 'BOS' logger to stdout.
_stream_handler = logging.StreamHandler(sys.stdout)
_stream_handler.setLevel(LOG_LEVEL)
PROJECT_LOGGER.addHandler(_stream_handler)
PROJECT_LOGGER.setLevel(LOG_LEVEL)
def report_state_until_success(component):
    """
    Loop until BOS component information has been registered;
    tells BOS the component's (_this_ node) state.

    Retries forever with a linear backoff capped at ``backoff_ceiling``
    seconds; returns only after one successful report.
    """
    backoff_ceiling = 30  # max seconds to wait between attempts
    backoff_scalar = 2    # seconds added per failed attempt
    attempt = 0
    while True:
        # Each iteration, wait a bit longer before patching BOS component
        # state until the ceiling is reached.
        time_to_wait = backoff_scalar * attempt
        time_to_wait = min([backoff_ceiling, time_to_wait])
        sleep(time_to_wait)
        attempt += 1
        LOGGER.info("Attempt %s of contacting BOS..." % (attempt))
        session = requests_retry_session()
        try:
            boot_artifact_id = get_value_from_proc_cmdline('boot_artifact_id')
            # NOTE(review): the inner key is the literal string
            # 'boot_artifact_id' -- confirm this matches the BOS
            # bootArtifacts schema rather than being a placeholder.
            state = {'bootArtifacts': {'boot_artifact_id': boot_artifact_id}}
            report_state(component, state, session)
        except UnknownComponent:
            LOGGER.warning("BOS has no record of component '%s'; nothing to report." % (component))
            LOGGER.warning("Will re-attempt patch operation as necessary.")
            continue
        except BOSComponentException as cce:
            LOGGER.warning("Unable to contact BOS to report component status: %s" % (cce))
            continue
        except OSError as exc:
            LOGGER.error("BOS client encountered an %s" % (exc))
            continue
        LOGGER.info("Updated the actualState record for BOS component '%s'." % (component))
        return
STATE_UPDATE_FREQUENCY = 86400  # Number of seconds between state updates
def main():
    """
    Read the Boot Artifact ID from the /proc/cmdline and report it to the BOS
    API. This reports the booted 'state' of the node to BOS, repeating every
    ``sleep_time`` seconds for as long as the process runs.
    """
    component = read_identity()
    try:
        # NOTE(review): get_value_from_proc_cmdline presumably returns a
        # string, but sleep() below needs a number -- confirm a numeric
        # conversion happens (or is required) here.
        sleep_time = get_value_from_proc_cmdline('bos_update_frequency')
    except KeyError:
        sleep_time = STATE_UPDATE_FREQUENCY
    while True:
        LOGGER.info("Attempting to report status for '%s'" % (component))
        try:
            report_state_until_success(component)
        except Exception as exp:
            # Broad catch keeps the reporter daemon alive across unexpected errors.
            LOGGER.error("An error occurred: {}".format(exp))
        sleep(sleep_time)
if __name__ == '__main__':
    main()
|
import glob
import zlib
import numpy
import pytest
import rasterio
from rasterio.windows import Window
from nodata.scripts.alpha import (
all_valid, init_worker, finalize_worker, compute_window_mask,
NodataPoolMan)
def test_all_valid():
    """all_valid returns an all-255 (fully valid) mask regardless of input data."""
    assert (
        all_valid(numpy.empty((2, 2), dtype='uint8'), 0) == 255).all()
@pytest.fixture(
    scope='function', params=glob.glob('tests/fixtures/alpha/*.tif'))
def worker(request):
    """This provides the global `src` for compute_window_mask"""
    init_worker(request.param, all_valid)
    def fin():
        # Tear down the module-global dataset opened by init_worker.
        finalize_worker()
    request.addfinalizer(fin)
def test_compute_window_mask(worker):
    """Get an all-valid mask for one window.

    The mask comes back zlib-compressed; decompress and reshape it to the
    window's shape before checking every pixel is 255 (valid).
    """
    in_window = Window.from_slices((0, 100), (0, 100))
    out_window, data = compute_window_mask((in_window, 0, {}))
    assert in_window == out_window
    # numpy.fromstring is deprecated for binary data (and removed in newer
    # NumPy releases); frombuffer is the supported equivalent for raw bytes.
    expected_shape = [int(c) for c in rasterio.windows.shape(out_window)]
    mask = numpy.frombuffer(zlib.decompress(data), 'uint8').reshape(expected_shape)
    assert (mask == 255).all()
@pytest.mark.parametrize(
    "input_path", glob.glob('tests/fixtures/alpha/*.tif'))
def test_pool_man_mask(input_path):
    """NodataPoolMan initializes and computes mask of a file"""
    manager = NodataPoolMan(input_path, all_valid, 0)
    assert manager.input_path == input_path
    assert manager.nodata == 0
    result = manager.mask(windows=[Window.from_slices((0, 100), (0, 100))])
    window, arr = next(result)
    assert window == Window.from_slices((0, 100), (0, 100))
    assert (arr == 255).all()
    # The generator must be exhausted after the single requested window.
    with pytest.raises(StopIteration):
        next(result)
@pytest.mark.parametrize("keywords", [
    {'padding': 0}])
def test_pool_man_mask_keywords(keywords):
    """NodataPoolMan initializes and computes mask of a file"""
    # Keyword arguments (e.g. padding) are forwarded to mask() unchanged.
    manager = NodataPoolMan(
        'tests/fixtures/alpha/lossy-curved-edges.tif', all_valid, 0)
    result = manager.mask(windows=[Window.from_slices((0, 100), (0, 100))], **keywords)
    window, arr = next(result)
    assert window == Window.from_slices((0, 100), (0, 100))
    assert (arr == 255).all()
    with pytest.raises(StopIteration):
        next(result)
|
from parlai.agents.programr.parser.exceptions import ParserException
from parlai.agents.programr.parser.pattern.matcher import EqualsMatch
from parlai.agents.programr.parser.pattern.nodes.base import PatternNode
#from parlai.agents.programr.nlp.semantic.semantic_similarity import SemanticSimilarity
DEBUG = False
class PatternConceptNode(PatternNode):
    """Pattern node that matches a sentence against semantic concepts.

    The concept name encodes concepts as "MAIN|OTHER1|OTHER2|..."; a sentence
    matches when the semantic-similarity service scores it closer to MAIN
    than to any of the OTHER concepts.
    """
    def __init__(self, attribs, text, userid='*'):
        PatternNode.__init__(self, userid)
        # Concept name comes from the 'name' attribute, else the node text.
        if 'name' in attribs:
            self._concept_name = attribs['name'].upper()
        elif text:
            self._concept_name = text.upper()
        else:
            raise ParserException("Invalid concept node, no name specified as attribute or text")
    @property
    def concept_name(self):
        # Upper-cased "MAIN|OTHER|..." string set in __init__.
        return self._concept_name
    def is_set(self):
        # Concept nodes are not <set>-style nodes.
        return False
    def to_xml(self, client_context, include_user=False):
        #todo needs to be implemented
        raise Exception("Needs to be implemented")
    def equals(self, brain, words, word_no):
        """Decide whether the sentence (the words before the "__TOPIC__"
        marker) is semantically closest to this node's main concept.

        Returns an EqualsMatch; on a match, word_no is advanced past the
        sentence. NOTE(review): words.words.index("__TOPIC__") raises
        ValueError if the marker is absent, and max() on an empty
        other-concepts list (single-concept name) would also raise --
        confirm both are impossible upstream.
        """
        word = words.word(word_no)
        # User-specific nodes only match the owning user.
        if self.userid != '*':
            if self.userid != brain.userid:
                return EqualsMatch(False, word_no)
        last_index = words.words.index("__TOPIC__")
        sentence = words.words[:last_index]
        if DEBUG:
            print("Sentence: {}".format(sentence))
        if DEBUG:
            print("self._concept_name: {}".format(self._concept_name))
        if DEBUG:
            print("words.words: {}".format(words.words))
        if sentence:
            concepts = self._concept_name.split("|")
            this_concept = concepts[0] # this is the main concept we have to compare others to
            # if DEBUG: print("this_concept: {}".format(type(this_concept)))
            other_concepts = concepts[1:] # this is other concepts we have to compare this_concept to
            # if DEBUG: print("other_concepts: {}".format(type(other_concepts)))
            sentence_text = " ".join(sentence)
            try:
                similarity_service = brain.services.get_service("semanticsimilarity")
                similarity_with_this_concept = similarity_service.similarity_with_concept(sentence_text, this_concept)
                similarity_with_other_concepts = similarity_service.similarity_with_concepts(sentence_text, other_concepts)
                if DEBUG:
                    print("Score between \'{}\' and \'{}\': {}".format(sentence_text, this_concept, similarity_with_this_concept))
                if DEBUG:
                    print("Score between \'{}\' and \'{}\': {}".format(sentence_text, other_concepts, similarity_with_other_concepts))
            except Exception as ex:
                # Service failure is treated as a non-match rather than an error.
                print("Error getting similarity {}".format(ex))
                result = False
                return EqualsMatch(result, word_no, word)
            if similarity_with_this_concept > max(similarity_with_other_concepts):
                if DEBUG:
                    print("similarity_with_this_concept > max(similarity_with_other_concepts), setting result to true")
                result = True
            else:
                if DEBUG:
                    print("similarity_with_this_concept < max(similarity_with_other_concepts), setting result to false")
                result = False
            # Consume the matched sentence: advance the cursor past it.
            word_no = word_no + len(sentence) - 1
        else:
            # Empty sentence before __TOPIC__: treat as a trivial match.
            result = True
        return EqualsMatch(result, word_no, word)
|
import os.path
from cStringIO import StringIO
from django.test import TestCase
from storages.backends.overwrite import OverwriteStorage
from localshop.apps.packages import models
from localshop.apps.packages import utils
from localshop.apps.packages.tests import factories
from localshop.utils import TemporaryMediaRootMixin
class TestReleaseFile(TemporaryMediaRootMixin, TestCase):
    """Tests for ReleaseFile distribution-file storage (save and delete)."""
    def setUp(self):
        super(TestReleaseFile, self).setUp()
        # Swap the 'distribution' FileField's storage for OverwriteStorage so
        # repeated saves reuse the same path instead of suffixing filenames.
        field = [field for field in models.ReleaseFile._meta.fields
                 if field.name == 'distribution'][0]
        field.storage = OverwriteStorage()
    def test_save_contents(self):
        """Saving content stores the file under the expected distribution path."""
        release_file = factories.ReleaseFileFactory()
        dummy_fh = StringIO("release-file-contents")
        release_file.save_filecontent('dummy.txt', dummy_fh)
        self.assertEqual(
            release_file.distribution.name, '2.7/t/test-package/dummy.txt')
        self.assertTrue(os.path.exists(release_file.distribution.path))
    def test_delete_file(self):
        """delete_files removes the file when no other release references it."""
        release_file = factories.ReleaseFileFactory()
        dummy_fh = StringIO("release-file-contents")
        release_file.save_filecontent('dummy.txt', dummy_fh)
        self.assertTrue(os.path.exists(release_file.distribution.path))
        utils.delete_files(models.ReleaseFile, instance=release_file)
        self.assertFalse(os.path.exists(release_file.distribution.path))
    def test_delete_file_twice_referenced(self):
        """delete_files keeps the file while another ReleaseFile references it."""
        release_file = factories.ReleaseFileFactory()
        dummy_fh = StringIO("release-file-contents")
        release_file.save_filecontent('dummy.txt', dummy_fh)
        # Second release file of a different type sharing the same path.
        release_file = factories.ReleaseFileFactory(
            release=release_file.release, filetype='bdist_egg')
        release_file.save_filecontent('dummy.txt', dummy_fh)
        self.assertTrue(os.path.exists(release_file.distribution.path))
        utils.delete_files(models.ReleaseFile, instance=release_file)
        # File should still exist
        self.assertTrue(os.path.exists(release_file.distribution.path))
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .pix3d_evaluation import Pix3DEvaluator, transform_meshes_to_camera_coord_system
|
from tkinter import *
# Minimal tkinter demo of an OptionMenu (drop-down) widget.
root = Tk()
root.title("sliders")
root.iconbitmap(None)
root.geometry("400x400")
# Drop Down Boxes
def show():
    # Create a label showing the current drop-down selection.
    # NOTE(review): Label(...).pack() returns None, so myLabel is always
    # None -- harmless here, but the widget reference is lost.
    myLabel = Label(root, text=clicked.get()).pack()
options = [
    "Monday",
    "Tuesday",
    "Wednesday",
    "Thursday",
    "Friday",
    "Saturday"
]
# StringVar holds the current selection; default to the first option.
clicked = StringVar()
clicked.set(options[0])
# OptionMenu takes the options as positional args, so unpack the list with *.
drop = OptionMenu(root, clicked, *options)
drop.pack()
myButton = Button(root, text="Show Selection", command=show).pack()
root.mainloop()
|
# -*- coding: utf-8 -*-
# This file is part of Kaleidoscope.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
# pylint: disable=wrong-import-position, unused-argument, unused-import
"""Kaleidoscope"""
import os
# This is needed because version info is only generated
# at setup. This should only fall back when not using
# setup.py lint or style to check.
# Version is generated at setup time; fall back to a sentinel when running
# from a source tree without setup.py having been executed.
try:
    from .version import version as __version__
except ImportError:
    __version__ = '0.0.0'
from kaleidoscope.interactive import *
# Qiskit is an optional dependency: record its availability rather than
# failing at import time.
try:
    from qiskit import QuantumCircuit
    from qiskit.providers.aer import Aer
    from qiskit.providers.ibmq import IBMQ
except ImportError:
    HAS_QISKIT = False
else:
    HAS_QISKIT = True
|
import dns.resolver
import subprocess
# query the server and parse the response
# Query the ClamAV DNS TXT record and split the response into fields.
server_answer = str(dns.resolver.resolve('current.cvd.clamav.net', 'TXT').response.answer[0]).replace('"', ':').split(':')
# Save list entries in specific variables.
server_clamav_version = server_answer[1]
server_daily_database_version = server_answer[3]
server_main_database_version = server_answer[2]
server_bytecode_database_version = server_answer[8]
# Run `clamscan --version` on the host and split the response into fields.
local_answer = str(subprocess.check_output('clamscan --version', shell=True)).replace(' ', '/').split('/')
# Save list entries in specific variables.
local_clamav_version = local_answer[1]
local_daily_database_version = local_answer[2]
# Read the main.cvd database header (first line) to extract its version.
# Context managers guarantee the files are closed even if parsing raises;
# the originals closed them manually and would leak on error.
with open('/var/lib/clamav/main.cvd', 'r', errors='replace') as main_database_file:
    local_main_database_version = main_database_file.readline().split(':')[2]
# Read the bytecode.cvd database header (first line) to extract its version.
with open('/var/lib/clamav/bytecode.cvd', 'r', errors='replace') as bytecode_database_file:
    local_bytecode_database_version = bytecode_database_file.readline().split(':')[2]
# Print the version numbers in Prometheus exposition (text) format.
print(
    "# HELP server_clamav_version Version of ClamAV from ClamAV server.\n" +
    "# TYPE server_clamav_version gauge\n" +
    "server_clamav_version " + server_clamav_version + "\n" +
    "# HELP server_daily_database_version Version of daily database from ClamAV server.\n" +
    "# TYPE server_daily_database_version gauge\n" +
    "server_daily_database_version " + server_daily_database_version + "\n" +
    "# HELP server_main_database_version Version of main database from ClamAV server.\n" +
    "# TYPE server_main_database_version gauge\n" +
    "server_main_database_version " + server_main_database_version + "\n" +
    "# HELP server_bytecode_database_version Version of bytecode database from ClamAV server.\n" +
    "# TYPE server_bytecode_database_version gauge\n" +
    "server_bytecode_database_version " + server_bytecode_database_version + "\n" +
    "# HELP local_clamav_version Local ClamAV version.\n" +
    "# TYPE local_clamav_version gauge\n" +
    "local_clamav_version " + local_clamav_version + "\n" +
    "# HELP local_daily_database_version Local daily database version.\n" +
    "# TYPE local_daily_database_version gauge\n" +
    "local_daily_database_version " + local_daily_database_version + "\n" +
    "# HELP local_main_database_version Local main database version.\n" +
    "# TYPE local_main_database_version gauge\n" +
    "local_main_database_version " + local_main_database_version + "\n" +
    "# HELP local_bytecode_database_version Local bytecode database version.\n" +
    "# TYPE local_bytecode_database_version gauge\n" +
    "local_bytecode_database_version " + local_bytecode_database_version
)
|
from flask import Flask, render_template #importando o framework do Flask
app = Flask(__name__)
@app.route('/')  # route for the site's landing page
def home():
    """Render the landing page template."""
    return render_template(
        'index.html'
    )
if __name__ == '__main__':
    # Debug mode: auto-reload and interactive traceback (development only).
    app.run(debug=True)
#!/usr/bin/env python3
import logging
import os
import pathlib
import socket
import subprocess
import sys
import tempfile
from configparser import ConfigParser
from typing import Generator, cast
import boto3
from botocore.exceptions import ClientError
from glacier_backup.db import GlacierDB
from glacier_backup.uploader import Uploader
# Library-style logging: a NullHandler here, real handlers are attached by
# setup_logging() when run as a program.
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
# Config and state live in ~/.config/glacier_backup; HOME is assumed set.
CONFDIR = os.path.join(cast(str, os.environ.get('HOME')), '.config', 'glacier_backup')
class OngoingUploadException(Exception):
    """There is an active upload (another process holds the singleton lock)."""
class Backup(object):
    """Drives Glacier uploads for all configured paths.

    Holds a process-wide singleton lock, a local sqlite DB of completed
    uploads, and a boto3 Glacier uploader for the configured vault.
    """
    def __init__(self, config: ConfigParser, dryrun: bool = False):
        self.config = config
        self.dryrun = dryrun
        self._lock()
        account_id = self.config.get('main', 'account_id', fallback='-')
        vault_name = self.config.get('main', 'vault_name', fallback='default')
        # One upload DB per vault.
        self.db = GlacierDB(os.path.join(CONFDIR, f'glacier.{vault_name}.sqlite3'))
        glacier = boto3.resource('glacier')
        vault = glacier.Vault(account_id, vault_name)
        self.uploader = Uploader(vault)
    def _lock(self) -> None:
        """Acquire the process-wide singleton lock.

        Binds an AF_UNIX socket in the abstract namespace ('\\0' prefix,
        Linux-specific); the name vanishes when the process exits, so no
        stale lockfile cleanup is needed. Raises OngoingUploadException
        when another process already holds the name.
        """
        try:
            self.s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.s.bind('\0' + self.__class__.__name__)
        except socket.error as e:
            # NOTE(review): 98 is EADDRINUSE on Linux only; errno.EADDRINUSE
            # would be portable -- confirm Linux-only deployment.
            if e.errno == 98:  # 'Address already in use'
                raise OngoingUploadException()
            else:
                raise e
    def _unlock(self) -> None:
        """Release the singleton lock socket, if it was acquired."""
        if self.s:
            self.s.close()
    def backup_file(self, path: pathlib.Path) -> None:
        """Backup a single file or directory"""
        if self.dryrun:
            logger.info(f'dry run: would have uploaded {path}')
            return
        tarpath = ''
        if path.is_dir():
            # Directories are uploaded as one uncompressed tarball staged in
            # a temporary directory.
            tarfilename = path.name.replace(' ', '_') + '.tar'
            tempdir = tempfile.mkdtemp(prefix='glacier_backup')
            tarpath = os.path.join(tempdir, tarfilename)
            logger.info(f'creating tar achive for {path}')
            try:
                # Create the tarball.
                subprocess.check_call(['tar', 'cf', tarpath, path.as_posix()], stdout=subprocess.DEVNULL,
                                      stderr=subprocess.DEVNULL)
            except Exception as e:
                logger.error(f'failed to tar: {e}')
                raise e
        if tarpath:
            upload_file_path = tarpath
            upload_description = tarfilename
        else:
            upload_file_path = path.as_posix()
            upload_description = path.name
        # upload can raise, but we will catch it in run()
        try:
            logger.info(f'starting upload for {path}')
            archive_id = self.uploader.upload(upload_file_path, upload_description)
            self.db.mark_uploaded(path.as_posix(), upload_description, archive_id)
        finally:
            # Always remove the staged tarball, even when the upload fails.
            if tarpath:
                os.remove(tarpath)
    def run(self, stop_on_first: bool = True) -> None:
        """Run all configured backups"""
        for candidate in self.backup_candidates():
            try:
                logger.info(f'Starting backup for path {candidate}')
                self.backup_file(candidate)
            except (self.uploader.UploadFailedException, ClientError) as e:
                # A failed candidate is logged and skipped, not fatal.
                logger.error(f'failed to upload {candidate}: {e}')
                continue
            # By default only one successful upload happens per invocation.
            if stop_on_first:
                return
    def needs_upload(self, file: pathlib.Path, upload_if_changed: bool = False) -> bool:
        """True when the path was never uploaded, or (optionally) changed since."""
        uploaded_date = self.db.get_uploaded_date(file.as_posix())
        if not uploaded_date:
            return True
        # mtime newer than the recorded upload date means the content changed.
        if upload_if_changed and file.stat().st_mtime > float(uploaded_date):
            return True
        return False
    def backup_candidates(self) -> Generator[pathlib.Path, None, None]:
        """Yield backup candidates from every config section that names a path.

        NOTE(review): sections that are not paths (e.g. 'main') are skipped
        by the existence check below -- confirm that is intentional.
        """
        sections = self.config.sections()
        for name, cfg in [(x.rstrip('/'), self.config[x]) for x in sections]:
            if not os.path.exists(name):
                logger.debug(f'skipping nonexistent path {name}')
                continue
            logger.info(f'checking [{name}]')
            upload_if_changed = cfg.getboolean('upload_if_changed')
            upload_single_dir = cfg.getboolean('upload_single_dir')
            upload_dirs = cfg.getboolean('upload_dirs')
            upload_files = cfg.getboolean('upload_files')
            exclude = cfg.get('exclude_prefix')
            yield from self.backup_candidates_by_path(name, upload_single_dir, upload_files, upload_dirs,
                                                      upload_if_changed, exclude)
    def backup_candidates_by_path(self, path: str, single_dir: bool = False, upload_files: bool = False,
                                  upload_dirs: bool = False, upload_if_changed: bool = False,
                                  exclude: str = None) -> Generator[pathlib.Path, None, None]:
        """Returns a generator of backup candidates."""
        # A plain file is always a candidate by itself.
        if os.path.isfile(path):
            yield pathlib.Path(path)
            return
        if not os.path.isdir(path):
            return
        # Nothing enabled for this directory: no candidates.
        if not any([single_dir, upload_files, upload_dirs]):
            return
        # single_dir: the directory itself is one candidate (one tarball).
        if single_dir:
            yield pathlib.Path(path)
            return
        # Otherwise scan direct children, honoring the exclude prefix.
        with os.scandir(path) as it:
            for entry in it:
                if exclude and entry.name.startswith(cast(str, exclude)):
                    continue
                if entry.is_dir() and upload_dirs:
                    rentry = pathlib.Path(entry)
                    if self.needs_upload(rentry, upload_if_changed):
                        yield rentry
                if entry.is_file() and upload_files:
                    rentry = pathlib.Path(entry)
                    if self.needs_upload(rentry, upload_if_changed):
                        yield rentry
def setup_logging(logfile: str = None) -> None:
    """Attach INFO-level stream (stdout) and optional file handlers.

    :param logfile: path of the log file; when given, a FileHandler in
        append mode is attached. FileHandler creates the file if missing.
    """
    # Must use root logger here, not __name__, so records from all modules
    # in the package are captured.
    log = logging.getLogger()
    log.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    if logfile:
        # The original attached the file handler only when the file already
        # existed, silently skipping file logging on first run. FileHandler
        # creates the file itself; only skip on OS-level errors (missing
        # parent directory, permissions).
        try:
            fh = logging.FileHandler(logfile, mode='a')
        except OSError:
            pass
        else:
            fh.setFormatter(formatter)
            log.addHandler(fh)
    sh = logging.StreamHandler(sys.stdout)
    sh.setFormatter(formatter)
    log.addHandler(sh)
def main():
    """CLI entry point: parse arguments, build the config, and run one backup.

    Paths given with -p/--path override the config file entirely and are
    each uploaded as a single object (file or tarred directory).
    """
    import argparse
    import configparser
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--dryrun', help='only show what would be backed up', action='store_true')
    parser.add_argument('-c', '--config', help='config file location. defaults'
                        ' to ${HOME}/.config/glacier_backup/glacier_backup.conf',
                        default=os.path.join(CONFDIR, 'glacier_backup.conf'))
    parser.add_argument('-l', '--logfile', help='backup log file. defaults'
                        ' to ${HOME}/.config/glacier_backup/glacier_backup.log',
                        default=os.path.join(CONFDIR, 'glacier_backup.log'))
    parser.add_argument('-v', '--vault', help='name of vault to use')
    parser.add_argument('-a', '--account', help='account ID to use')
    parser.add_argument('-p', '--path', dest='paths', nargs='*', help=('path of file or dir to backup. will'
                        ' override paths specified in config'))
    args = parser.parse_args()
    config = configparser.ConfigParser()
    # args.path overrides config.
    if args.paths:
        for path in args.paths:
            # we treat each provided path as the object to be uploaded, whether file or dir.
            config[path] = {'upload_single_dir': True}
    else:
        if not os.path.exists(args.config):
            print('no config file found, quitting.')
            return
        config.read(args.config)
    # CLI vault/account flags override the config file values.
    if args.vault:
        config['main']['vault_name'] = args.vault
    if args.account:
        config['main']['account_id'] = args.account
    logfile = config.get('main', 'logfile', fallback=None)
    setup_logging(logfile or args.logfile)
    try:
        Backup(config, args.dryrun).run()
    except OngoingUploadException:
        # Another process holds the singleton lock.
        print('backup already in progress, exiting')
        sys.exit(1)
    except KeyboardInterrupt:
        sys.exit(1)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-08-13 03:50
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add a nullable `session` FK from Subscription to proso_user.Session."""
    dependencies = [
        ('proso_user', '0001_initial'),
        ('proso_subscription', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='subscription',
            name='session',
            field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_user.Session'),
        ),
    ]
|
import csv
import io
from io import TextIOWrapper
from io import TextIOBase
from django.urls import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.views import View
from django.views.generic import TemplateView, CreateView
from django.views.generic.list import ListView
from django.contrib.auth.mixins import LoginRequiredMixin
from .permissions import SuperUserMixin
from .forms import ProjectForm
from .models import Document, Project
class IndexView(TemplateView):
    """Public landing page."""
    template_name = 'index.html'
class ProjectView(LoginRequiredMixin, TemplateView):
    """Annotation page for one project; the template depends on the project."""
    def get_template_names(self):
        project = get_object_or_404(Project, pk=self.kwargs['project_id'])
        return [project.get_template_name()]
class ProjectsView(LoginRequiredMixin, CreateView):
    """Project listing/creation page backed by ProjectForm."""
    form_class = ProjectForm
    template_name = 'projects.html'
class DatasetView(SuperUserMixin, LoginRequiredMixin, ListView):
    """Paginated list of a project's documents (admin only)."""
    template_name = 'admin/dataset.html'
    paginate_by = 5
    def get_queryset(self):
        project = get_object_or_404(Project, pk=self.kwargs['project_id'])
        return project.documents.all()
class LabelView(SuperUserMixin, LoginRequiredMixin, TemplateView):
    """Label management page (admin only)."""
    template_name = 'admin/label.html'
class StatsView(SuperUserMixin, LoginRequiredMixin, TemplateView):
    """Project statistics page (admin only)."""
    template_name = 'admin/stats.html'
class GuidelineView(SuperUserMixin, LoginRequiredMixin, TemplateView):
    """Annotation guideline page (admin only)."""
    template_name = 'admin/guideline.html'
class DataUpload(SuperUserMixin, LoginRequiredMixin, TemplateView):
    """Upload page: stores a posted file's full text as a single Document."""
    template_name = 'admin/dataset_upload.html'
    def post(self, request, *args, **kwargs):
        """Create a Document from the uploaded file, then redirect.

        On success, redirects to the project's dataset page; on any failure
        (missing file, undecodable content, DB error) redirects back to the
        upload page instead of surfacing a traceback.
        """
        project = get_object_or_404(Project, pk=kwargs.get('project_id'))
        try:
            uploaded = request.FILES['csv_file']
            # The whole file becomes one document (it is NOT split per line).
            form_data = uploaded.read().decode("utf-8")
            Document.objects.bulk_create([Document(
                text=form_data,
                project=project)])
            return HttpResponseRedirect(reverse('dataset', args=[project.id]))
        except Exception:
            # A bare `except:` would also swallow SystemExit/KeyboardInterrupt;
            # Exception is the broadest class that should be handled here.
            return HttpResponseRedirect(reverse('dataset-upload', args=[project.id]))
class DataDownload(SuperUserMixin, LoginRequiredMixin, View):
    """Stream a project's annotated documents as a CSV attachment."""
    def get(self, request, *args, **kwargs):
        project_id = self.kwargs['project_id']
        project = get_object_or_404(Project, pk=project_id)
        # Only documents that actually have annotations (is_null=False).
        docs = project.get_documents(is_null=False).distinct()
        # Filename derived from the project name: lowercased, underscored.
        filename = '_'.join(project.name.lower().split())
        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="{}.csv"'.format(filename)
        # Write rows directly into the response body.
        writer = csv.writer(response)
        for d in docs:
            writer.writerows(d.make_dataset())
        return response
class DemoTextClassification(TemplateView):
    """Public demo page for text classification."""
    template_name = 'demo/demo_text_classification.html'
class DemoNamedEntityRecognition(TemplateView):
    """Public demo page for named entity recognition."""
    template_name = 'demo/demo_named_entity.html'
class DemoTranslation(TemplateView):
    """Public demo page for translation."""
    template_name = 'demo/demo_translation.html'
|
import cv2 as cv

# cv.imread() requires a file path; the original called it with no argument,
# which raises TypeError at runtime. TODO: replace the placeholder with the
# real image path for this script.
IMAGE_PATH = "image.png"
image = cv.imread(IMAGE_PATH)
# imread returns None (no exception) when the file is missing or unreadable.
if image is None:
    raise FileNotFoundError("could not read image: {}".format(IMAGE_PATH))
__version__ = "0.2"
# flake8: noqa
from .horizon import horizon, KM, MILES
from .sunpos import sunpos
from .tppss import (
above_horizon,
sunrise_sunset,
sunrise_sunset_details,
sunrise_sunset_year,
SunriseSunset,
times_in_day,
)
|
#!/usr/bin/env python3
import sys
import os
from inotify_simple import INotify, flags
# Server name comes from the environment; spaces are normalized to
# underscores so the value is usable as a path component.
SERVER_NAME = os.environ['server_name'].replace(' ', '_')
LOG_DIR = f'/srv/{SERVER_NAME}/logs'
LATEST_LOG = f'{LOG_DIR}/latest.log'
class McDirWatcher:
    """Watches LOG_DIR with inotify for file creation and modification."""
    def __init__(self):
        self.inotify = INotify()
        # CREATE fires on log rotation (new latest.log), MODIFY on appends.
        watch_flags = flags.CREATE | flags.MODIFY
        self.inotify.add_watch(LOG_DIR, watch_flags)
    def events(self):
        """Yield inotify events forever (blocking read)."""
        while True:
            yield from self.inotify.read()
    def close(self):
        self.inotify.close()
class McLatestLog:
    """Context manager that tails latest.log, following rotations.

    Iterating yields new lines appended to the file; when the log is
    rotated (a new latest.log is created), it reopens and continues.
    """
    def __init__(self):
        self.dir_watcher = McDirWatcher()
        self.cur_log = open(LATEST_LOG)
        self.cur_log.seek(0, 2)  # Seek to end: only emit lines written from now on
    def close(self):
        self.cur_log.close()
        self.dir_watcher.close()
    def __enter__(self):
        return self
    def __exit__(self, _1, _2, _3):
        self.close()
    def __iter__(self):
        return self.lines()
    def lines(self):
        # let's assume we don't read partial lines. lol
        for event in self.dir_watcher.events():
            if event.mask & flags.CREATE:
                # Log rotated: switch to the freshly created latest.log.
                self.cur_log.close()
                self.cur_log = open(LATEST_LOG)
            if event.mask & flags.MODIFY:
                # Yield whatever new lines were appended since the last read.
                yield from self.cur_log
def main():
    """Tail latest.log to stdout, following rotations, until interrupted."""
    print('Log watcher starting...', file=sys.stderr)
    with McLatestLog() as log:
        for line in log:
            # Lines already end with '\n'; flush so consumers see them live.
            print(line, end='', flush=True)
if __name__ == '__main__':
    main()
|
from discord.ext import commands
from src.config import OPERATOR_ROLE
class PlaybackCog(commands.Cog):
    """Voice-channel join/leave commands with registerable callbacks."""
    def __init__(self):
        # Callbacks invoked after the bot joins / leaves a voice channel.
        self.on_join_server = []
        self.on_leave_server = []
    def add_join_server_callback(self, callback):
        """Register callback(guild) to run after a successful join."""
        self.on_join_server.append(callback)
    def add_leave_server_callback(self, callback):
        """Register callback(guild_id) to run after a disconnect."""
        self.on_leave_server.append(callback)
    @commands.has_role(OPERATOR_ROLE)
    @commands.command(name='join', help='Join the voice channel the user is in')
    async def join(self, ctx):
        # The command issuer must themselves be in a voice channel.
        if not ctx.message.author.voice:
            await ctx.send("{}, you aren't in a voice channel".format(ctx.message.author.name))
            return
        channel = ctx.message.author.voice.channel
        await channel.connect()
        for callback in self.on_join_server:
            callback(ctx.guild)
    @commands.has_role(OPERATOR_ROLE)
    @commands.command(name='leave', help='Leaves the current voice channel')
    async def leave(self, ctx):
        # NOTE(review): voice_client is None when the bot is not connected;
        # is_connected() would then raise AttributeError -- confirm intended.
        voice_client = ctx.message.guild.voice_client
        if voice_client.is_connected():
            await voice_client.disconnect()
            self.dispatch_leave_server(ctx.guild.id)
    def dispatch_leave_server(self, guild_id):
        """Invoke all registered leave callbacks with the guild id."""
        for callback in self.on_leave_server:
            callback(guild_id)
|
#!/usr/bin/python
import sys
import socket
import time
from random import randint
from rosbridge_library.util import json
# ##################### variables begin ########################################
# these parameters should be changed to match the actual environment #
# ##############################################################################
tcp_socket_timeout = 10 # seconds
max_msg_length = 20000 # bytes
rosbridge_ip = "localhost" # hostname or ip
rosbridge_port = 9090 # port as integer
service_type = "rosbridge_library/SendBytes" # make sure this matches an existing service type on rosbridge-server (in specified srv_module)
service_name = "send_bytes" # service name
send_fragment_size = 1000
# delay between sends to rosbridge is not needed anymore, if using my version of
# protocol (uses buffer to collect data from stream)
send_fragment_delay = 0.000 # 1
receive_fragment_size = 10
receive_message_intervall = 0.0
# ##################### variables end ##########################################
# ##################### service_calculation begin ##############################
# change this function to match whatever service should be provided #
# ##############################################################################
def calculate_service_response(request):
    """Build the JSON service_response for a SendBytes request.

    request -- raw JSON string of the incoming "call_service" message.
    Returns the JSON-encoded response string.
    """
    import base64  # local import: only this function needs it

    request_object = json.loads(request)  # parse string for service request
    args = request_object["args"]         # get parameter field (args)
    count = int(args["count"])            # number of random chars to generate
    # BUG FIX: build the payload with join() instead of quadratic `+=`.
    chunks = []
    for i in range(count):
        chunks.append(chr(randint(32, 126)))
        if i % 100000 == 0:
            print(count - i, "bytes left to generate")
    message = "".join(chunks)
    # IMPORTANT!
    # use base64 encoding to avoid JSON-parsing problems!
    # --> use .decode("base64","strict") at client side
    # BUG FIX for Python 3: the 'base64' codec no longer exists.
    # base64.encodebytes() matches the Python 2 codec output exactly
    # (76-char lines plus trailing newline), preserving the wire format.
    message = base64.encodebytes(message.encode("ascii")).decode("ascii")
    service_response_data = {"data": message}  # as defined in the srv-file
    response_object = {
        "op": "service_response",
        "id": request_object["id"],
        "service": service_name,
        # response value is also named "data" in the srv-file
        "values": service_response_data,
    }
    return json.dumps(response_object)
# ##################### service_calculation end ################################
# ##################### helper functions / and variables begin #################
# should not need to be changed (but could be improved ) #
# ##############################################################################
# Receive buffer: accumulates socket data until a full JSON message parses.
buffer = ""
def connect_tcp_socket():
    """Open a TCP connection to the configured rosbridge server.

    Returns the connected socket with the module-level timeout applied.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(tcp_socket_timeout)
    sock.connect((rosbridge_ip, rosbridge_port))
    return sock
def advertise_service():
    """Advertise our service to rosbridge, with fragmentation settings."""
    payload = json.dumps({
        "op": "advertise_service",
        "type": service_type,
        "service": service_name,
        "fragment_size": receive_fragment_size,
        "message_intervall": receive_message_intervall,
    })
    tcp_socket.send(str(payload))
def unadvertise_service():
    """Tell rosbridge to withdraw the previously advertised service."""
    payload = json.dumps({"op": "unadvertise_service",
                          "service": service_name})
    tcp_socket.send(str(payload))
def wait_for_service_request():
    """Block until a complete 'call_service' message arrives from rosbridge.

    Accumulates socket data in the module-level ``buffer``. Two paths:
      1. the buffer parses directly as a call_service JSON object, or
      2. the buffer holds rosbridge fragments (split on "}{") which are
         reassembled once all announced fragments have arrived.
    Returns the raw JSON request string, or None on error/timeout.
    Exits the process when the peer closes the connection.
    """
    data = None
    global buffer
    try:
        done = False
        global buffer  # redundant repeat of the declaration above
        while not done:
            incoming = tcp_socket.recv(max_msg_length)  # get data from socket
            if incoming == '':
                # recv() returning an empty value means orderly shutdown.
                print("connection closed by peer")
                sys.exit(1)
            buffer = buffer + incoming  # append data to buffer
            try:  # try to parse JSON from buffer
                data_object = json.loads(buffer)
                if data_object["op"] == "call_service":
                    data = buffer
                    done = True
                    return data  # parse successful --> return data string
            except Exception:
                # Buffer is not (yet) a complete JSON document; fall
                # through to the defragmentation path.
                pass
            try:  # opcode was not "call_service" -> try to defragment
                # Split buffer into fragments and restore the braces lost
                # by splitting on "}{".
                result_string = buffer.split("}{")
                result = []
                for fragment in result_string:
                    if fragment[0] != "{":
                        fragment = "{"+fragment
                    if fragment[len(fragment)-1] != "}":
                        fragment = fragment + "}"
                    result.append(json.loads(fragment))
                try:  # try to defragment when received all fragments
                    fragment_count = len(result)
                    # Every fragment carries the announced total count.
                    announced = int(result[0]["total"])
                    if fragment_count == announced:
                        reconstructed = ""
                        sorted_result = [None] * fragment_count  # sort fragments..
                        unsorted_result = []
                        for fragment in result:
                            unsorted_result.append(fragment)
                            sorted_result[int(fragment["num"])] = fragment
                        for fragment in sorted_result:  # reconstruct from fragments
                            reconstructed = reconstructed + fragment["data"]
                        buffer = ""  # empty buffer
                        done = True
                        print("reconstructed message from", len(result), "fragments")
                        return reconstructed
                    except Exception as e:
                        print("not possible to defragment:", buffer)
                        print(e)
            except Exception as e:
                print("defrag_error:", buffer)
                print(e)
                pass
    except Exception:
        # Socket timeout or other network error: give up on this attempt.
        # NOTE(review): on Python 3 recv() returns bytes, so the
        # str-concatenation above would raise and land here — this script
        # appears to target Python 2 despite print() calls; confirm.
        pass
    return data
def send_service_response(response):
    """Write one response (or fragment) message to the rosbridge socket."""
    tcp_socket.send(response)
def list_of_fragments(full_message, fragment_size):
    """Split full_message into 'ready-to-send' fragment messages.

    full_message  -- complete JSON response string
    fragment_size -- maximum payload size per fragment

    Returns a list of JSON fragment messages sharing one random id.
    If the message fits in a single fragment, it is returned unwrapped
    (rosbridge expects the plain service_response in that case).
    """
    message_id = randint(0, 64000)  # random id shared by all fragments
    # Slice the message into fragment_size pieces.
    fragments = [full_message[i:i + fragment_size]
                 for i in range(0, len(full_message), fragment_size)]
    # BUG FIX: an empty message used to raise NameError here (the old code
    # referenced the while-loop variable 'fragment' which was never bound
    # when no fragments were produced). Treat it as one empty fragment.
    if len(fragments) <= 1:
        # only 1 fragment --> do not wrap; send as plain service_response
        return [str(full_message)]
    fragmented_messages_list = []
    for count, fragment in enumerate(fragments):
        fragmented_message_object = {"op": "fragment",
                                     "id": str(message_id),
                                     "data": str(fragment),
                                     "num": count,
                                     "total": len(fragments)
                                     }
        fragmented_messages_list.append(json.dumps(fragmented_message_object))
    return fragmented_messages_list
# ##################### helper functions end ###################################
# ##################### script begin ###########################################
# should not need to be changed (but could be improved ) #
# ##############################################################################
# Main script: connect, advertise, then serve requests until Ctrl-C.
tcp_socket = connect_tcp_socket()  # open tcp_socket
advertise_service()  # advertise service in ROS (via rosbridge)
print("service provider started and waiting for requests")
try:  # allows to catch KeyboardInterrupt
    while True:  # loop forever (or until ctrl-c is pressed)
        data = None
        try:  # allows to catch any Exception (network, json, ..)
            data = wait_for_service_request()  # receive request from rosbridge
            if data == '':  # exit on empty string
                break
            elif data:  # received service_request (or at least some data..)
                response = calculate_service_response(data)  # generate service_response
                print("response calculated, now splitting into fragments..")
                # Split into fragments small enough for rosbridge to accept.
                fragment_list = list_of_fragments(response, send_fragment_size)
                print("sending", len(fragment_list), "messages as response")
                for fragment in fragment_list:
                    send_service_response(fragment)  # send each fragment to rosbridge
                    # Not needed if using the patched rosbridge protocol.py.
                    time.sleep(send_fragment_delay)
        except Exception as e:
            # Keep serving after a failed request; just report the error.
            print(e)
            pass
except KeyboardInterrupt:
    try:
        unadvertise_service()  # unadvertise service
        tcp_socket.close()  # close tcp_socket
    except Exception as e:
        print(e)
    print("non-ros_service_server stopped because user pressed \"Ctrl-C\"")
|
from __future__ import absolute_import

# Re-export the public artist classes from the submodules.
from .vertexartist import *
from .edgeartist import *
from .faceartist import *

# Public API: every imported name that is not private.
__all__ = [name for name in dir() if not name.startswith('_')]
|
# Alex Eidt
import tkinter as tk
import string
import imageio
import numpy as np
import numexpr as ne
import keyboard
from PIL import Image, ImageTk, ImageFont, ImageDraw

# --- User-tunable settings -------------------------------------------------
# Mirror image stream along vertical axis.
MIRROR = True
# Video Stream to use.
STREAM = '<video0>'
# Background color of the ASCII stream.
BACKGROUND_COLOR = 'white'
# Font color used in the ASCII stream. Make sure there's some contrast between the two.
FONT_COLOR = 'black'
# Font size to use with colored/grayscaled ASCII.
FONTSIZE = 12
# Boldness to use with colored/grayscaled ASCII.
BOLDNESS = 1
# Factor to divide image height and width by. 1 For for original size, 2 for half size, etc...
FACTOR = 1
# Characters to use in ASCII.
CHARS = "@%#*+=-:. "
# Font to use in ASCII Graphics.
FONT = 'cour.ttf'

# --- Runtime mode flags, toggled from the keyboard by update() -------------
COLOR = 1
ASCII = 0
FILTER = 0
BLOCKS = 0
TEXT = 0
MONO = 0
# NOTE(review): MIRROR is re-assigned here, shadowing the MIRROR = True
# setting above. Both values are truthy so behaviour is unchanged, but one
# of the two definitions should probably be removed.
MIRROR = 1
def get_font_maps(fontsize, boldness, chars):
    """
    Returns a list of font bitmaps.

    Parameters
        fontsize - Font size to use for ASCII characters
        boldness - Stroke size to use when drawing ASCII characters
        chars - ASCII characters to use in media
    Returns
        Array of font bitmaps sorted by decreasing ink density
        (float32 values in [0, 1], 1.0 = inked pixel).
    """
    fonts = []
    widths, heights = set(), set()
    font = ImageFont.truetype(FONT, size=fontsize)
    for char in chars:
        # NOTE(review): ImageFont.getsize() is deprecated and removed in
        # Pillow 10; getbbox()/getlength() are the replacements — confirm
        # the pinned Pillow version.
        w, h = font.getsize(char)
        widths.add(w)
        heights.add(h)
        # Render each glyph black-on-white, then invert and normalise so
        # inked pixels become 1.0.
        image = Image.new("RGB", (w, h), (255, 255, 255))
        draw = ImageDraw.Draw(image)
        draw.text(
            (0, - (fontsize // 6)),  # nudge up to trim the top bearing
            char,
            fill=(0, 0, 0),
            font=font,
            stroke_width=boldness
        )
        bitmap = np.array(image)[:, :, 0]
        fonts.append(((255 - bitmap) / 255).astype(np.float32))
    # Crop every bitmap to the smallest common width/height so they can be
    # stacked into one array.
    fonts = list(map(lambda x: x[:min(heights), :min(widths)], fonts))
    # Densest glyph first: darker pixels map to denser characters.
    return np.array(sorted(fonts, key=lambda x: x.sum(), reverse=True))
def update():
    """Poll the keyboard and update the global display-mode settings.

    Shift+<key>/<key> pairs toggle each mode, 'o'/'s'/space select the
    convolution filter, and digits 0-9 pick the block/tile size.
    """
    global COLOR, ASCII, FILTER, BLOCKS, TEXT, MONO
    # (global name, primary key, primary value, secondary key, secondary value)
    toggle_table = (
        ('COLOR', 'shift+g', 1, 'g', 0),  # Color/Grayscale Mode.
        ('ASCII', 'shift+a', 0, 'a', 1),  # ASCII Mode.
        ('TEXT', 'shift+t', 0, 't', 1),   # Text Mode.
        ('MONO', 'shift+m', 0, 'm', 1),   # Monochromatic Mode.
    )
    for name, primary, primary_value, secondary, secondary_value in toggle_table:
        if keyboard.is_pressed(primary):
            globals()[name] = primary_value
        elif keyboard.is_pressed(secondary):
            globals()[name] = secondary_value
    if keyboard.is_pressed('o'):        # Outline Filter.
        FILTER = 1
    elif keyboard.is_pressed('s'):      # Sobel Filter.
        FILTER = 2
    elif keyboard.is_pressed('space'):  # No Filter.
        FILTER = 0
    for digit in range(10):
        if keyboard.is_pressed(str(digit)):
            BLOCKS = digit
            break
def tile_tuples(w, h):
    """Return (dw, dh) tile sizes usable for resizing ASCII images.

    Pairs the proper divisors (>= 2, < n) of the width with those of the
    height in increasing order; zip truncates to the shorter sequence.
    """
    def proper_divisors(n):
        return [d for d in range(2, n) if n % d == 0]

    return list(zip(proper_divisors(w), proper_divisors(h)))
def convolve(frame, kernel):
    """Same-size 2D convolution (cross-correlation) of frame with kernel.

    The frame is edge-padded by half the kernel height so the output has
    the same shape as the input. Assumes a square kernel.
    """
    pad = kernel.shape[0] // 2
    padded = np.pad(frame, pad, mode='edge')
    # Build a (kh, kw, H, W) sliding-window view over the padded frame,
    # then contract the kernel against the first two axes.
    window_shape = kernel.shape + tuple(np.subtract(padded.shape, kernel.shape) + 1)
    windows = np.lib.stride_tricks.as_strided(
        padded, window_shape, padded.strides + padded.strides)
    return np.einsum('ij,ijkl->kl', kernel, windows)
def main():
    """Open the video stream, pre-render font bitmaps, and run the Tk loop."""
    # All ASCII characters used in the images sorted by pixel density.
    chars = np.array(list(''.join(c for c in string.printable if c in CHARS)))
    # font_maps[0] uses FONTSIZE; indices 1..9 use fixed sizes selected by
    # the BLOCKS setting (digit keys).
    font_maps = [get_font_maps(FONTSIZE, BOLDNESS, chars)]
    for fontsize in [5, 10, 15, 20, 30, 45, 60, 85, 100]:
        font_maps.append(get_font_maps(fontsize, BOLDNESS, chars))
    # Set up window.
    root = tk.Tk()
    root.title('ASCII Streamer')
    mainframe = tk.Frame()
    image_label = tk.Label(mainframe, borderwidth=5, relief='solid')
    ascii_label = tk.Label(mainframe, font=('courier', 2), fg=FONT_COLOR, bg=BACKGROUND_COLOR, borderwidth=5, relief='solid')
    mainframe.pack(side=tk.LEFT, expand=tk.YES, padx=10)
    root.protocol("WM_DELETE_WINDOW", lambda: (video.close(), root.destroy()))
    # Get image stream from webcam or other source and begin streaming.
    video = imageio.get_reader(STREAM)
    w, h = video.get_meta_data()['source_size']
    tiles = tile_tuples(w, h)

    def stream():
        """Process one frame and reschedule itself via Tk's after()."""
        image = video.get_next_data()
        # Update settings based on pressed keys.
        update()
        h, w, c = image.shape
        # Text image is larger than regular, so multiply scaling factor by 2 if Text mode is on.
        size = FACTOR * 2 if TEXT else FACTOR
        h //= size
        w //= size
        # Resize Image by striding.
        image = image[::size, ::size]
        if not COLOR or TEXT:  # Grayscale using Rec. 601 luma weights.
            image = (image * np.array([0.299, 0.587, 0.114])).sum(axis=2, dtype=np.uint8)
        if MIRROR:  # Mirror Image along vertical axis.
            image = image[:, ::-1]
        # Tile Image into dw x dh blocks for resized ASCII streams.
        if BLOCKS > 0 and TEXT:
            dw, dh = tiles[min(BLOCKS, len(tiles) - 1)]
            # Block-average via reduceat along both axes.
            # NOTE(review): np.int was removed in NumPy >= 1.24; this line
            # needs int or np.int_ on modern NumPy.
            image = (np.add.reduceat(
                np.add.reduceat(image.astype(np.int), np.arange(0, h, dh), axis=0),
                np.arange(0, w, dw),
                axis=1
            ) / (dw * dh)).astype(np.uint8)
            h, w = image.shape
        # Apply image convolutions to stream (grayscale modes only).
        if FILTER > 0 and (not COLOR or TEXT):
            if FILTER == 1:  # Outline Kernel.
                image = convolve(image, np.array([[-1, -1, -1], [-1, -8, -1], [-1, -1, -1]])).astype(np.uint8)
            elif FILTER == 2:  # Sobel Kernel.
                gx = np.array([[1, 0, -1], [2, 0, -2], [1, 0, -1]])
                gy = np.array([[1, 2, 1], [0, 0, 0], [-1, -2, -1]])
                image = np.hypot(convolve(image, gx), convolve(image, gy)).astype(np.uint8)
        if ASCII and not TEXT:
            # Render the frame as an image built from font bitmaps.
            fh, fw = font_maps[BLOCKS][0].shape
            frame = image[::fh, ::fw]
            nh, nw = frame.shape[:2]
            if not MONO:
                # Per-cell brightness, expanded back to glyph resolution.
                colors = np.repeat(np.repeat(255 - frame, fw, axis=1), fh, axis=0)
            if COLOR:
                grayscaled = (frame * np.array([3, 4, 1])).sum(axis=2, dtype=np.uint32).ravel()
            else:
                grayscaled = frame.ravel().astype(np.uint32)
            # Map brightness to a glyph index (shift divides by the value range).
            grayscaled *= len(chars)
            grayscaled >>= 11 if COLOR else 8
            # Create a new list with each font bitmap based on the grayscale value
            grayscaled = grayscaled[range(len(grayscaled))]
            image = font_maps[BLOCKS][grayscaled]
            # Lay glyph bitmaps out in image order.
            image = image.reshape((nh, nw, fh, fw)).transpose(0, 2, 1, 3).ravel()
            if COLOR:
                image = np.tile(image, 3).reshape((3, nh * fh, nw * fw)).transpose(1, 2, 0)
            else:
                image = image.reshape((nh * fh, nw * fw))
            if MONO:
                ne.evaluate('255 - (image * 255)', out=image)
                image = image.astype(np.uint8)
            else:
                image = image[:h, :w]
                colors = colors[:h, :w]
                image = ne.evaluate('255 - image * colors').astype(np.uint8)
        # If TEXT mode is on convert frame to text and display in a label,
        # otherwise display the (possibly ASCII-rendered) image.
        if TEXT:
            # Drop every 4th row to compensate for character aspect ratio.
            image = image[[i for i in range(h) if i % 4]]
            image = image.astype(np.uint32)
            image *= len(chars)
            image >>= 8
            image_label.pack_forget()
            ascii_label.pack()
            # Update label with new ASCII image.
            ascii_label.config(
                text='\n'.join(''.join(x) for x in chars[image]),
                font=('courier', (BLOCKS * 4) + 2)
            )
            ascii_label.after(1, stream)
        else:
            ascii_label.pack_forget()
            image_label.pack()
            frame_image = ImageTk.PhotoImage(Image.fromarray(image))
            image_label.config(image=frame_image)
            # Keep a reference so Tk does not garbage-collect the photo.
            image_label.image = frame_image
            image_label.after(1, stream)

    stream()
    root.state('zoomed')
    root.mainloop()


if __name__ == '__main__':
    main()
import aioredis
from aiohttp.web import Application
def setup_redis(app: Application):
    """Register redis lifecycle hooks on the aiohttp application."""
    for signal, handler in ((app.on_startup, _init_redis),
                            (app.on_shutdown, _close_redis)):
        signal.append(handler)
async def _init_redis(app: Application):
    """on_startup hook: create the redis pool and store it as app['redis'].

    Reads host/port/db from app['config']['redis'].
    NOTE(review): uses the legacy aioredis < 2.0 create_pool API.
    """
    conf = app['config']['redis']
    redis = await aioredis.create_pool((conf['host'], conf['port']),
                                       db=conf['db'])
    app['redis'] = redis
async def _close_redis(app: Application):
    """on_shutdown hook: close the pool and wait for connections to drain."""
    app['redis'].close()
    await app['redis'].wait_closed()
|
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import yaml
from tasklib import config
from tasklib.tests import base
@mock.patch('tasklib.task.os.path.exists')
class TestConfig(base.BaseUnitTest):
    """Unit tests for tasklib.config.Config file-loading behaviour."""

    def test_default_config_when_no_file_exists(self, mexists):
        """A missing config file falls back to the built-in defaults."""
        mexists.return_value = False
        cfg = config.Config(config_file='/etc/tasklib/test.yaml')
        self.assertEqual(cfg.default_config, cfg.config)

    def test_default_when_no_file_provided(self, mexists):
        """Omitting config_file entirely also yields the defaults."""
        cfg = config.Config()
        self.assertEqual(cfg.default_config, cfg.config)

    def test_non_default_config_from_valid_yaml(self, mexists):
        """Values parsed from a YAML config file override the defaults."""
        mexists.return_value = True
        overrides = {
            'library_dir': '/var/run/tasklib',
            'puppet_manifest': 'init.pp',
        }
        fake_open = mock.mock_open(read_data=yaml.dump(overrides))
        with mock.patch('tasklib.config.open', fake_open, create=True):
            cfg = config.Config(config_file='/etc/tasklib/test.yaml')
        self.assertNotEqual(cfg.config['library_dir'],
                            cfg.default_config['library_dir'])
        self.assertEqual(cfg.config['library_dir'], overrides['library_dir'])
        self.assertNotEqual(cfg.config['puppet_manifest'],
                            cfg.default_config['puppet_manifest'])
        self.assertEqual(cfg.config['puppet_manifest'],
                         overrides['puppet_manifest'])
|
# ------------------------------------------------------------------------------
# _dm.py
#
# Parser for the jam 'm' debug flag output - which contains details of
# timestamps, whether or not a file was updated, and gristed targets to file
# bindings.
#
# November 2015, Antony Wallace
# ------------------------------------------------------------------------------
"""jam -dm output parser"""
__all__ = ("DMParser",)
import re
import logging
import time
from datetime import datetime
from .. import database
from ._base import BaseParser
class DMParser(BaseParser):
    """
    Parse the jam 'm' debug flag output from a logfile into the DB supplied at
    initialisation.

    .. attribute:: name

        Name of this parser.
    """

    # FIX: raw strings — '\s' inside a normal string literal is an invalid
    # escape sequence (SyntaxWarning, and a future error, on modern Python).
    _made_re = re.compile(r"made[+*]?\s+([a-z]+)\s+(.+)")
    _time_re = re.compile(r"time\s+--\s+(.+):\s+(.+)")
    _bind_re = re.compile(r"bind\s+--\s+(.+):\s+(.+)")

    def __init__(self, db):
        """Store the target database and configure logging.

        db -- database object exposing get_target(name).
        """
        self.db = db
        self.name = "jam -dm parser"
        # NOTE(review): this sets the *root* logger's level, affecting the
        # whole process rather than just this parser.
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)

    def parse_logfile(self, filename):
        """Open the supplied logfile and parse any '-dm' debug output into
        the DB.

        Raises the original OSError if the file cannot be opened.
        """
        try:
            # errors="ignore": jam logs may contain undecodable bytes.
            f = open(filename, errors="ignore")
        except OSError:
            # FIX: narrowed from a bare 'except:' (which also swallowed
            # KeyboardInterrupt/SystemExit before re-raising).
            print("Unable to open file %s" % filename)
            raise
        # FIX: 'with' guarantees the file is closed even if parsing raises.
        with f:
            for line in f:
                self.parse_line(line)

    def parse_line(self, line):
        """Read the supplied line from a jam debug log file and parse it
        for -dm debug to update the DB with."""
        # The output we are interested in takes one of the following forms:
        #   make -- <target>
        #   time -- <target>:timestamp
        #   made [stable|update] <target>
        #   bind -- <target>:filename
        #
        # Call the parse functions for each of these in turn (apart from the
        # 'make' line which is entirely uninteresting).
        self.parse_time_line(line)
        self.parse_made_line(line)
        self.parse_bind_line(line)

    def parse_time_line(self, line):
        """Parse a 'time -- <target>: <timestamp>' line into the DB."""
        m = self._time_re.match(line)
        if m:
            target_name = m.group(1)
            timestamp = m.group(2)
            target = self.db.get_target(target_name)
            # See `target_bind` in jam. A timestamp is output only for "exists"
            # and not the other binding states.
            if timestamp not in {"missing", "unbound", "parents"}:
                dt = datetime.strptime(timestamp, "%a %b %d %H:%M:%S %Y")
                target.set_timestamp(dt)

    def parse_bind_line(self, line):
        """Parse a 'bind -- <target>: <filename>' line into the DB."""
        m = self._bind_re.match(line)
        if m:
            target_name = m.group(1)
            bind_target = m.group(2)
            target = self.db.get_target(target_name)
            target.set_binding(bind_target)

    def parse_made_line(self, line):
        """Parse a 'made <fate> <target>' line into the DB."""
        m = self._made_re.match(line)
        if m:
            fate_name = m.group(1)
            target_name = m.group(2)
            target = self.db.get_target(target_name)
            fate = database.Fate(fate_name)
            target.set_fate(fate)
|
from django.test import TestCase
# local test models
from models import Holder, Inner, InnerInline
from models import Holder2, Inner2, Holder3, Inner3
class TestInline(TestCase):
    """Admin inline formset behaviour: can_delete and readonly labels."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        self.change_url = '/test_admin/admin/admin_inlines/holder/%i/' % holder.id
        result = self.client.login(username='super', password='secret')
        # FIX: failUnlessEqual is a deprecated unittest alias (removed in
        # Python 3.12); assertEqual is the supported spelling.
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_can_delete(self):
        """
        can_delete should be passed to inlineformset factory.
        """
        response = self.client.get(self.change_url)
        inner_formset = response.context[-1]['inline_admin_formsets'][0].formset
        expected = InnerInline.can_delete
        actual = inner_formset.can_delete
        self.assertEqual(expected, actual, 'can_delete must be equal')

    def test_readonly_stacked_inline_label(self):
        """Bug #13174."""
        holder = Holder.objects.create(dummy=42)
        # Created for its side effect: the inline row must exist on the page.
        Inner.objects.create(holder=holder, dummy=42, readonly='')
        response = self.client.get('/test_admin/admin/admin_inlines/holder/%i/'
                                   % holder.id)
        self.assertContains(response, '<label>Inner readonly label:</label>')
class TestInlineMedia(TestCase):
    """Media (JS) declared on the admin and on inlines reaches the page."""
    fixtures = ['admin-views-users.xml']

    def setUp(self):
        result = self.client.login(username='super', password='secret')
        # FIX: failUnlessEqual is a deprecated unittest alias (removed in
        # Python 3.12); assertEqual is the supported spelling.
        self.assertEqual(result, True)

    def tearDown(self):
        self.client.logout()

    def test_inline_media_only_base(self):
        holder = Holder(dummy=13)
        holder.save()
        Inner(dummy=42, holder=holder).save()
        change_url = '/test_admin/admin/admin_inlines/holder/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_admin_scripts.js')

    def test_inline_media_only_inline(self):
        holder = Holder3(dummy=13)
        holder.save()
        Inner3(dummy=42, holder=holder).save()
        change_url = '/test_admin/admin/admin_inlines/holder3/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_inline_scripts.js')

    def test_all_inline_media(self):
        holder = Holder2(dummy=13)
        holder.save()
        Inner2(dummy=42, holder=holder).save()
        change_url = '/test_admin/admin/admin_inlines/holder2/%i/' % holder.id
        response = self.client.get(change_url)
        self.assertContains(response, 'my_awesome_admin_scripts.js')
        self.assertContains(response, 'my_awesome_inline_scripts.js')
|
# Regex fragments used to validate file and path names in URLs.
# FIX: raw strings — '\-', '\_' and '\.' inside normal string literals are
# invalid escape sequences (SyntaxWarning, future error, on modern Python).
# The resulting string values are unchanged.
FILE_NAME = r"[a-zA-Z0-9\-\_\.]+"
LEGAL_FILENAME_REGEX = r'[a-zA-Z0-9 \._-]+$'
LEGAL_PATHNAME_REGEX = r'[a-zA-Z0-9 \._-]+$'
# URL variants drop the trailing '$' anchor.
LEGAL_FILENAME_URL_REGEX = LEGAL_FILENAME_REGEX[:-1]
LEGAL_PATHNAME_URL_REGEX = LEGAL_PATHNAME_REGEX[:-1]
# Canonical lowercase UUID: 8-4-4-4-12 hex digits.
UUID_URL_REGEX = '[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}'
HEX_REGEX = '[a-fA-F0-9]+$'
HASH_URL_REGEX = '[a-fA-F0-9]+'
#coding:utf-8
#
# id: bugs.core_4806
# title: Regression: generators can be seen/modified by unprivileged users
# decription:
# We create sequence ('g') and three users and one role.
# First user ('big_brother') is granted to use generator directly.
# Second user ('bill_junior') is gratned to use generator via ROLE ('stockmgr').
# Third user ('maverick') has no grants to use neither on role nor on generator.
# Then we try to change value of generator by call gen_id(g,1) by create apropriate
# connections (for each of these users).
# First and second users must have ability both to change generator and to see its
# values using command 'SHOW SEQUENCE'.
# Also, we do additional check for second user: try to connect WITHOUT specifying role
# and see/change sequence. Error must be in this case (SQLSTATE = 28000).
# Third user must NOT see neither value of generator nor to change it (SQLSTATE = 28000).
#
# :::::::::::::::::::::::::::::::::::::::: NB ::::::::::::::::::::::::::::::::::::
# 18.08.2020. FB 4.x has incompatible behaviour with all previous versions since build 4.0.0.2131 (06-aug-2020):
# statement 'CREATE SEQUENCE <G>' will create generator with current value LESS FOR 1 then it was before.
# Thus, 'create sequence g;' followed by 'show sequence;' will output "current value: -1" (!!) rather than 0.
# See also CORE-6084 and its fix: https://github.com/FirebirdSQL/firebird/commit/23dc0c6297825b2e9006f4d5a2c488702091033d
# ::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
# This is considered as *expected* and is noted in doc/README.incompatibilities.3to4.txt
#
# Because of this, it was decided to filter out concrete values that are produced in 'SHOW SEQUENCE' command.
#
# Checked on:
# 4.0.0.2164
# 3.0.7.33356
#
# tracker_id: CORE-4806
# min_versions: ['3.0']
# versions: 3.0
# qmid: None
import pytest
from firebird.qa import db_factory, isql_act, Action, user_factory, User, role_factory, Role
# version: 3.0
# resources: None
# Filter out volatile output: the "-Effective user is" banner and concrete
# sequence values ('SHOW SEQUENCE' output differs between FB 3.x and 4.x;
# see the header notes about CORE-6084).
substitutions_1 = [('-Effective user is.*', ''), ('current value.*', 'current value')]
init_script_1 = """"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
test_script_1 = """
set wng off;
recreate sequence g;
commit;
grant usage on sequence g to big_brother;
grant usage on sequence g to role stockmgr;
grant stockmgr to Bill_Junior;
commit;
show grants;
set list on;
connect '$(DSN)' user 'BIG_BROTHER' password '456';
select current_user, current_role from rdb$database;
show sequ g;
select gen_id(g, -111) as new_gen from rdb$database;
commit;
connect '$(DSN)' user 'BILL_JUNIOR' password '789' role 'STOCKMGR'; -- !! specify role in UPPER case !!
select current_user, current_role from rdb$database;
show sequ g;
select gen_id(g, -222) as new_gen from rdb$database;
commit;
connect '$(DSN)' user 'BILL_JUNIOR' password '789';
select current_user, current_role from rdb$database;
-- 'show sequ' should produce error:
-- Statement failed, SQLSTATE = 28000
-- no permission for USAGE access to GENERATOR G
-- There is no generator G in this database
-- (for user 'Bill_Junior' who connects w/o ROLE and thus has NO rights to see that sequence)
show sequ g;
-- 'select gen_id(...)' should produce error:
-- Statement failed, SQLSTATE = 28000
-- no permission for USAGE access to GENERATOR G
-- (for user 'Bill_Junior' who connects w/o ROLE and thus has NO rights to see that sequence)
select gen_id(g, -333) as new_gen from rdb$database;
commit;
connect '$(DSN)' user 'MAVERICK' password '123';
select current_user, current_role from rdb$database;
-- 'show sequ' should produce error:
-- Statement failed, SQLSTATE = 28000
-- no permission for USAGE access to GENERATOR G
-- There is no generator G in this database
-- (for user 'maverick' who has NO rights at all)
show sequ g;
-- 'select gen_id(...)' should produce error:
-- Statement failed, SQLSTATE = 28000
-- no permission for USAGE access to GENERATOR G
-- (for user 'maverick' who has NO rights at all)
select gen_id(g, -444) as new_gen from rdb$database;
commit;
"""
act_1 = isql_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
/* Grant permissions for this database */
GRANT STOCKMGR TO BILL_JUNIOR
GRANT USAGE ON SEQUENCE G TO USER BIG_BROTHER
GRANT USAGE ON SEQUENCE G TO ROLE STOCKMGR
USER BIG_BROTHER
ROLE NONE
Generator G, current value: 0, initial value: 0, increment: 1
NEW_GEN -111
USER BILL_JUNIOR
ROLE STOCKMGR
Generator G, current value: -111, initial value: 0, increment: 1
NEW_GEN -333
USER BILL_JUNIOR
ROLE NONE
USER MAVERICK
ROLE NONE
"""
expected_stderr_1 = """
Statement failed, SQLSTATE = 28000
no permission for USAGE access to GENERATOR G
There is no generator G in this database
Statement failed, SQLSTATE = 28000
no permission for USAGE access to GENERATOR G
Statement failed, SQLSTATE = 28000
no permission for USAGE access to GENERATOR G
There is no generator G in this database
Statement failed, SQLSTATE = 28000
no permission for USAGE access to GENERATOR G
"""
# Users exercised by the script: one granted USAGE directly (Big_Brother),
# one granted via the stockmgr role (Bill_Junior), and one with no grants
# at all (Maverick).
user_1a = user_factory('db_1', name='Maverick', password='123')
user_1b = user_factory('db_1', name='Big_Brother', password='456')
user_1c = user_factory('db_1', name='Bill_Junior', password='789')
role_1 = role_factory('db_1', name='stockmgr')


@pytest.mark.version('>=3.0')
def test_1(act_1: Action, user_1a: User, user_1b: User, user_1c: User, role_1: Role):
    """Run the ISQL script and compare filtered stdout/stderr with expectations.

    The user/role fixtures must exist before the script runs because the
    script grants privileges to them.
    """
    act_1.expected_stdout = expected_stdout_1
    act_1.expected_stderr = expected_stderr_1
    act_1.execute()
    assert act_1.clean_stderr == act_1.clean_expected_stderr
    assert act_1.clean_stdout == act_1.clean_expected_stdout
|
import requests
import json
import urllib3
import socket
import time
from rmf_door_msgs.msg import DoorMode
class DoorClientAPI:
    """Skeleton client for a door-controller REST API.

    The request methods below are stubs to be filled in for a concrete
    door vendor; as written they all return False.
    """

    def __init__(self, url, api_key, api_value, door_id):
        """Store connection details and probe the API (up to 6 attempts).

        url       -- base URL of the door API server
        api_key   -- HTTP header name used for authentication
        api_value -- HTTP header value used for authentication
        door_id   -- door identifier sent in the request payload
        """
        self.url = url
        self.header = {api_key:api_value}
        self.data = {"id": door_id}
        count = 0
        # Optimistic default; cleared below if all retries fail.
        self.connected = True
        while not self.check_connection():
            if count >= 5:
                print("Unable to connect to door client API.")
                self.connected = False
                break
            else:
                print("Unable to connect to door client API. Attempting to reconnect...")
                count += 1
                time.sleep(1)

    def check_connection(self):
        ''' Return True if connection to the door API server is successful'''
        ## ------------------------ ##
        ## IMPLEMENT YOUR CODE HERE ##
        ## ------------------------ ##
        return False

    def open_door(self):
        ''' Return True if the door API server successfully received the open door command'''
        ## ------------------------ ##
        ## IMPLEMENT YOUR CODE HERE ##
        ## ------------------------ ##
        return False

    def close_door(self):
        ''' Return True if the door API server successfully received the close door command'''
        ## ------------------------ ##
        ## IMPLEMENT YOUR CODE HERE ##
        ## ------------------------ ##
        return False

    def get_mode(self):
        ''' Return the door status with reference rmf_door_msgs.
            Return DoorMode.MODE_CLOSED when door status is closed.
            Return DoorMode.MODE_MOVING when door status is moving.
            Return DoorMode.MODE_OPEN when door status is open.
            Return DoorMode.MODE_OFFLINE when door status is offline.
            Return DoorMode.MODE_UNKNOWN when door status is unknown'''
        ## ------------------------ ##
        ## IMPLEMENT YOUR CODE HERE ##
        ## ------------------------ ##
        return False
|
import imageio
import glob
import os

# Directory containing the numbered PNG frames to stitch into a GIF.
FRAME_DIR = r"C:\Users\emrea\Desktop\'22 codes\deep-ternaloc\deep-ternaloc\Studies\emre\data\plt_files"

# BUG FIX: glob() returns files in arbitrary filesystem order, which could
# scramble the animation; sort for a deterministic (lexicographic) sequence.
images = []
for filename in sorted(glob.glob(os.path.join(FRAME_DIR, '*.png'))):
    images.append(imageio.imread(filename))
# One-second delay per frame.
imageio.mimsave('movie6eniyi.gif', images, duration=1)
|
import bpy
from bpy.types import (
AddonPreferences,
Curve,
Material,
Menu,
Object,
Operator,
Panel,
PropertyGroup,
UIList
)
from bpy.props import (
BoolProperty,
CollectionProperty,
EnumProperty,
FloatProperty,
FloatVectorProperty,
IntProperty,
PointerProperty,
StringProperty
)
from mathutils import Vector
from random import random
from math import (radians, floor)
from contextlib import redirect_stdout
import io
from . import border
# HACK reload
import importlib
border = importlib.reload(border)
# Properties
class SCENE_PG_BorderSettings(PropertyGroup):
    """Property group describing one border of a generated surface."""
    bl_idname = 'SCENE_PG_BorderSettings'

    flip: BoolProperty(
        name='Flip?',
        description='Whether or not this border is flipped from its default orientation'
    )

    def validate_curve(self, curve):
        # Poll callback: only curves accepted by border.is_valid_curve may
        # be assigned to the 'curve' pointer.
        return curve is not None and border.is_valid_curve(curve)

    def validate_deformable_curve(self, deformable):
        # Poll callback: a deformable curve must additionally consist of a
        # single two-point bezier spline.
        return deformable is not None and border.is_valid_curve(deformable) and len(deformable.splines[0].bezier_points) == 2

    curve: PointerProperty(
        type=Curve,
        name='Curve',
        description='2D curve defining shape of border.',
        poll=validate_curve
    )
    deformable: PointerProperty(
        type=Curve,
        name='Deformable',
        description='2D curve with only 4 control points defining how border should be deformed along other adjacent borders',
        poll=validate_deformable_curve
    )
def draw_border(layout, border, name):
    """Draw one labelled border row (deformable, curve, flip) in the UI.

    NOTE(review): the 'border' parameter shadows the imported 'border'
    module; harmless here since the module is not referenced in this body.
    """
    layout.ui_units_x = 0.8
    row = layout.row(align=True)
    # Successive splits: label | deformable | curve | flip checkbox.
    row = row.split(factor=0.23, align=True)
    row.label(text=name + ':')
    row = row.split(factor=0.4, align=True)
    row.prop(border, 'deformable', text='')
    row = row.split(factor=0.66, align=True)
    row.prop(border, 'curve', text='')
    row.prop(border, 'flip')
def generate_surface_screw(context):
    """Generate a 'screw' surface (translate and/or rotate a border).

    Stub: not implemented yet. It only logs and implicitly returns None,
    which generate_surface()'s caller interprets as success.
    """
    # TODO: implement using context.scene.trackmania_surface_settings.
    print('generating screw surface')
def create_pivot(context, name):
    """Create an empty (plain-axes) object flagged as a Trackmania pivot,
    link it into the scene collection and return it."""
    pivot = bpy.data.objects.new(name, None)
    pivot.empty_display_type = 'PLAIN_AXES'
    pivot.trackmania_pivot.is_pivot = True
    context.scene.collection.objects.link(pivot)
    return pivot
def ease(x):
    """Cubic ease-in/ease-out interpolation of x on [0, 1].

    Accelerates from 0, decelerates into 1; ease(0.5) == 0.5.
    """
    if x < 0.5:
        return 4 * x ** 3
    return 1 - ((2 - 2 * x) ** 3) / 2
def generate_surface_connector(context):
    """Generate a 'connector' surface mesh joining the 4 configured borders
    (border_0=East, border_1=West, border_2=North, border_3=South).

    Builds a closed mesh (top and bottom grids plus 4 side walls), assigns
    top/bottom/side materials, places the 4 corner pivots, and creates the
    BaseMaterial and Lightmap UV layers.

    Returns an error message string on failure, or None on success.
    """
    scene = context.scene
    settings = scene.trackmania_surface_settings
    object = settings.surface
    # East border
    if settings.border_0.deformable is None:
        return 'East border is not set'
    if not border.is_valid_curve(settings.border_0.deformable):
        return 'East border is invalid'
    east_border = border.Border.from_curve(settings.border_0.deformable, settings.border_0.flip)
    # West border
    if settings.border_1.deformable is None:
        return 'West border is not set'
    if not border.is_valid_curve(settings.border_1.deformable):
        return 'West border is invalid'
    west_border = border.Border.from_curve(settings.border_1.deformable, settings.border_1.flip)
    # East & West points
    # Subdivision level: flat borders between identical opposite borders need
    # the fewest samples; curved ones the most.
    north_south_same = (settings.border_2.deformable == settings.border_3.deformable) and (settings.border_2.flip == settings.border_3.flip)
    east_west_subdivisions = settings.grid_subdivisions_flat if (east_border.is_flat and west_border.is_flat and north_south_same) else settings.grid_subdivisions_semi_flat if east_border.is_flat and west_border.is_flat else settings.grid_subdivisions_curved
    east_points = east_border.sample(east_west_subdivisions, settings.bezier_precision)
    west_points = west_border.sample(east_west_subdivisions, settings.bezier_precision)
    east_offsets = [Vector((0, 0)) for point in east_points]
    west_offsets = [Vector((0, 0)) for point in west_points]
    # When a real 'curve' is set, the offsets hold its deviation from the
    # deformable so the surface follows the real shape.
    if settings.border_0.curve is not None:
        real_east_border = border.Border.from_curve(settings.border_0.curve, settings.border_0.flip)
        real_east_points = real_east_border.sample(east_west_subdivisions, settings.bezier_precision)
        east_offsets = [real_east_point - east_point for real_east_point, east_point in zip(real_east_points, east_points)]
    if settings.border_1.curve is not None:
        real_west_border = border.Border.from_curve(settings.border_1.curve, settings.border_1.flip)
        real_west_points = real_west_border.sample(east_west_subdivisions, settings.bezier_precision)
        west_offsets = [real_west_point - west_point for real_west_point, west_point in zip(real_west_points, west_points)]
    # m: grid resolution along the East/West axis.
    m = len(east_points)
    # North border
    if settings.border_2.deformable is None:
        return 'North border is not set'
    if not border.is_valid_curve(settings.border_2.deformable):
        return 'North border is invalid'
    north_border = border.Border.from_curve(settings.border_2.deformable, settings.border_2.flip)
    # South border
    if settings.border_3.deformable is None:
        return 'South border is not set'
    if not border.is_valid_curve(settings.border_3.deformable):
        return 'South border is invalid'
    south_border = border.Border.from_curve(settings.border_3.deformable, settings.border_3.flip)
    # North & South points
    east_west_same = (settings.border_0.deformable == settings.border_1.deformable) and (settings.border_0.flip == settings.border_1.flip)
    north_south_subdivisions = settings.grid_subdivisions_flat if (north_border.is_flat and south_border.is_flat and east_west_same) else settings.grid_subdivisions_semi_flat if north_border.is_flat and south_border.is_flat else settings.grid_subdivisions_curved
    north_points = north_border.sample(north_south_subdivisions, settings.bezier_precision)
    south_points = south_border.sample(north_south_subdivisions, settings.bezier_precision)
    north_offsets = [Vector((0, 0)) for point in north_points]
    south_offsets = [Vector((0, 0)) for point in south_points]
    if settings.border_2.curve is not None:
        real_north_border = border.Border.from_curve(settings.border_2.curve, settings.border_2.flip)
        real_north_points = real_north_border.sample(north_south_subdivisions, settings.bezier_precision)
        north_offsets = [real_north_point - north_point for real_north_point, north_point in zip(real_north_points, north_points)]
    if settings.border_3.curve is not None:
        real_south_border = border.Border.from_curve(settings.border_3.curve, settings.border_3.flip)
        real_south_points = real_south_border.sample(north_south_subdivisions, settings.bezier_precision)
        south_offsets = [real_south_point - south_point for real_south_point, south_point in zip(real_south_points, south_points)]
    # n: grid resolution along the North/South axis.
    n = len(north_points)
    # Validate borders join
    epsilon = 0.001
    x_diff = abs(east_border.size.x - west_border.size.x)
    if x_diff > epsilon:
        return 'East and West borders have different length: {}'.format(x_diff)
    y_diff = abs(north_border.size.x - south_border.size.x)
    if y_diff > epsilon:
        return 'North and South borders have different length: {}'.format(y_diff)
    z_diff = abs(east_border.size.y - west_border.size.y + north_border.size.y - south_border.size.y)
    if z_diff > epsilon:
        return 'Borders cannot join in altitude: {}'.format(z_diff)
    # Create Mesh
    # Vertex index layout of the generated mesh:
    '''
    top face: 0 -> m*n
    bottom face: m*n -> 2*m*n
    east face: 2*m*n -> 2*m*n+2*m
    west face: 2*m*n+2*m -> 2*m*n+4*m
    north face: 2*m*n+4*m -> 2*m*n+4*m+2*n
    north face: 2*m*n+4*m+2*n -> 2*m*n+4*m+4*n
    '''
    ## Top / Bottom borders
    vertices = []
    # tmp_vertices/tmp_faces accumulate the bottom face; appended after the top.
    tmp_vertices = []
    faces = []
    tmp_faces = []
    # First pass: sweep East->West, blending North/South border profiles.
    for i, east_point in enumerate(east_points):
        west_point = west_points[i]
        blend_factor = i / (m - 1)
        blend_factor = ease(blend_factor)
        #blend_factor = ease(blend_factor)
        north_border_cpy = north_border.resized(west_point.y + west_offsets[i].y + south_border.size.y - east_point.y - east_offsets[i].y)
        south_border_cpy = south_border.resized(west_point.y + west_offsets[i].y + south_border.size.y - east_point.y - east_offsets[i].y)
        global_offset = Vector((0, east_point.y)) + east_offsets[i]
        '''
        cc1 = north_border_cpy.to_curve()
        oo1 = context.blend_data.objects.new('Curve', cc1)
        context.scene.collection.objects.link(oo1)
        oo1.location = Vector((east_point.x,0.0,east_point.y+settings.height))
        oo1.rotation_euler.z = radians(90)
        cc2 = south_border_cpy.to_curve()
        oo2 = context.blend_data.objects.new('Curve', cc2)
        context.scene.collection.objects.link(oo2)
        oo2.location = Vector((east_point.x,0.0,east_point.y+settings.height))
        oo2.rotation_euler.z = radians(90)
        '''
        north_points_cpy = [point + global_offset + offset for point, offset in zip(north_border_cpy.sample(north_south_subdivisions, settings.bezier_precision), north_offsets)]
        south_points_cpy = [point + global_offset + offset for point, offset in zip(south_border_cpy.sample(north_south_subdivisions, settings.bezier_precision), south_offsets)]
        vertices.extend([Vector((
            east_point.x,
            blend_factor * north_point.x + (1 - blend_factor) * south_point.x,
            blend_factor * north_point.y + (1 - blend_factor) * south_point.y + settings.height
        )) for north_point, south_point in zip(north_points_cpy, south_points_cpy)])
        tmp_vertices.extend([Vector((
            east_point.x,
            blend_factor * north_point.x + (1 - blend_factor) * south_point.x,
            blend_factor * north_point.y + (1 - blend_factor) * south_point.y
        )) for north_point, south_point in zip(north_points_cpy, south_points_cpy)])
        if i != 0:
            faces.extend([((i-1)*n+j,i*n+j,i*n+j+1,(i-1)*n+j+1) for j in range(n-1)])
            tmp_faces.extend([(m*n+(i-1)*n+j,m*n+(i-1)*n+j+1,m*n+i*n+j+1,m*n+i*n+j) for j in range(n-1)])
    # Second pass: sweep South->North and average with the first pass so the
    # surface is symmetric in both sweep directions.
    for j, south_point in enumerate(south_points):
        north_point = north_points[j]
        blend_factor = ease(j / (n - 1))
        west_border_cpy = west_border.resized(north_point.y + north_offsets[j].y + east_border.size.y - south_point.y - south_offsets[j].y)
        east_border_cpy = east_border.resized(north_point.y + north_offsets[j].y + east_border.size.y - south_point.y - south_offsets[j].y)
        global_offset = Vector((0, south_point.y)) + south_offsets[j]
        '''
        cc1 = east_border_cpy.to_curve()
        oo1 = context.blend_data.objects.new('Curve', cc1)
        context.scene.collection.objects.link(oo1)
        oo1.location = Vector((east_point.x,0.0,east_point.y+settings.height))
        oo1.rotation_euler.z = radians(90)
        cc2 = west_border_cpy.to_curve()
        oo2 = context.blend_data.objects.new('Curve', cc2)
        context.scene.collection.objects.link(oo2)
        oo2.location = Vector((0.0,south_point.x,south_point.y+settings.height))
        '''
        east_points_cpy = [point + global_offset + offset for point, offset in zip(east_border_cpy.sample(east_west_subdivisions, settings.bezier_precision), east_offsets)]
        west_points_cpy = [point + global_offset + offset for point, offset in zip(west_border_cpy.sample(east_west_subdivisions, settings.bezier_precision), west_offsets)]
        for i, east_point in enumerate(east_points_cpy):
            west_point = west_points_cpy[i]
            vertices[i*n+j] = 0.5 * (vertices[i*n+j] + Vector((
                blend_factor * west_point.x + (1 - blend_factor) * east_point.x,
                south_point.x,
                blend_factor * west_point.y + (1 - blend_factor) * east_point.y + settings.height
            )))
            tmp_vertices[i*n+j] = 0.5 * (tmp_vertices[i*n+j] + Vector((
                blend_factor * west_point.x + (1 - blend_factor) * east_point.x,
                south_point.x,
                blend_factor * west_point.y + (1 - blend_factor) * east_point.y
            )))
    vertices.extend(tmp_vertices)
    faces.extend(tmp_faces)
    # v: index of the next side-wall vertex (see layout comment above).
    v = 2*m*n
    ## East / West borders
    vertices.extend([Vector((
        east_point.x,
        0.0,
        east_point.y + settings.height
    )) for east_point in east_points])
    vertices.extend([Vector((
        east_point.x,
        0.0,
        east_point.y
    )) for east_point in east_points])
    faces.extend([(v+i, v+m+i, v+m+i+1, v+i+1) for i in range(m-1)])
    v += 2*m
    vertices.extend([Vector((
        west_point.x,
        north_border.length,
        west_point.y + south_border.height + settings.height
    )) for west_point in west_points])
    vertices.extend([Vector((
        west_point.x,
        north_border.length,
        west_point.y + south_border.height
    )) for west_point in west_points])
    faces.extend([(v+i, v+i+1, v+m+i+1, v+m+i) for i in range(m-1)])
    v += 2*m
    ## South / North borders
    vertices.extend([Vector((
        0.0,
        south_point.x,
        south_point.y + settings.height
    )) for south_point in south_points])
    vertices.extend([Vector((
        0.0,
        south_point.x,
        south_point.y
    )) for south_point in south_points])
    faces.extend([(v+j, v+j+1, v+n+j+1, v+n+j) for j in range(n-1)])
    v += 2*n
    vertices.extend([Vector((
        east_border.length,
        north_point.x,
        north_point.y + east_border.height + settings.height
    )) for north_point in north_points])
    vertices.extend([Vector((
        east_border.length,
        north_point.x,
        north_point.y + east_border.height
    )) for north_point in north_points])
    faces.extend([(v+j, v+n+j, v+n+j+1, v+j+1) for j in range(n-1)])
    v += 2*n
    ## Offset Z so lowest corner at 0
    z_corners = [0, east_border.size.y, east_border.size.y + north_border.size.y, south_border.size.y]
    offset = Vector((0, 0, -min(z_corners)))
    for vertice in vertices: vertice += offset
    # Replace current surface mesh, materials, and normals
    mesh = bpy.data.meshes.new('Surface')
    mesh.from_pydata(vertices, [], faces)
    if settings.top_material is None:
        settings.top_material = bpy.data.materials.new('TopMaterial')
    mesh.materials.append(settings.top_material)
    if settings.bottom_material is None:
        settings.bottom_material = bpy.data.materials.new('BottomMaterial')
    mesh.materials.append(settings.bottom_material)
    if settings.side_material is None:
        settings.side_material = bpy.data.materials.new('SideMaterial')
    mesh.materials.append(settings.side_material)
    old_mesh = object.data
    object.data = mesh
    bpy.data.meshes.remove(old_mesh)
    # Material slots: 0 = top, 1 = bottom, 2 = sides (faces were appended in
    # that order).
    for f, face in enumerate(mesh.polygons):
        face.use_smooth = True
        if f < (m-1)*(n-1):
            face.material_index = 0
        elif f < 2*(m-1)*(n-1):
            face.material_index = 1
        else:
            face.material_index = 2
    # Set Pivots
    # Recreate each pivot if missing or unlinked, then snap it to its corner
    # vertex of the bottom face.
    if settings.pivot_0 is None or settings.pivot_0.name not in context.scene.collection.objects:
        if settings.pivot_0 is not None:
            context.blend_data.objects.remove(settings.pivot_0)
        settings.pivot_0 = create_pivot(context, 'Pivot_0')
    settings.pivot_0.location = object.matrix_world @ mesh.vertices[m*n].co
    if settings.pivot_1 is None or settings.pivot_1.name not in context.scene.collection.objects:
        if settings.pivot_1 is not None:
            context.blend_data.objects.remove(settings.pivot_1)
        settings.pivot_1 = create_pivot(context, 'Pivot_1')
    settings.pivot_1.location = object.matrix_world @ mesh.vertices[m*n+n-1].co
    if settings.pivot_2 is None or settings.pivot_2.name not in context.scene.collection.objects:
        if settings.pivot_2 is not None:
            context.blend_data.objects.remove(settings.pivot_2)
        settings.pivot_2 = create_pivot(context, 'Pivot_2')
    settings.pivot_2.location = object.matrix_world @ mesh.vertices[m*n+(m-1)*n].co
    if settings.pivot_3 is None or settings.pivot_3.name not in context.scene.collection.objects:
        if settings.pivot_3 is not None:
            context.blend_data.objects.remove(settings.pivot_3)
        settings.pivot_3 = create_pivot(context, 'Pivot_3')
    settings.pivot_3.location = object.matrix_world @ mesh.vertices[m*n+m*n-1].co
    # Set BaseMaterial's UVs
    # NOTE: the /32 scaling presumably maps one texture tile to 32 world
    # units (one TM grid cell) — confirm against the exporter.
    uv_base_material = mesh.uv_layers.new(name='BaseMaterial')
    mesh.uv_layers.active = uv_base_material
    for f, face in enumerate(mesh.polygons):
        if f < 2*(m-1)*(n-1): # Top or Bottom
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                uv_base_material.data[loop_id].uv = (vertex.co[0] / 32, vertex.co[1] / 32)
        elif f < 2*(m-1)*(n-1)+(m-1): # East
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                z = 0 if vertex_id >= 2*m*n+m else settings.height
                uv_base_material.data[loop_id].uv = (vertex.co[0] / 32, z / 32)
        elif f < 2*(m-1)*(n-1)+2*(m-1): # West
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                z = 0 if vertex_id >= 2*m*n+3*m else settings.height
                uv_base_material.data[loop_id].uv = (vertex.co[0] / 32, z / 32)
        elif f < 2*(m-1)*(n-1)+2*(m-1)+(n-1): # South
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                z = 0 if vertex_id >= 2*m*n+4*m+n else settings.height
                uv_base_material.data[loop_id].uv = (vertex.co[1] / 32, z / 32)
        else: # North
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                z = 0 if vertex_id >= 2*m*n+4*m+3*n else settings.height
                uv_base_material.data[loop_id].uv = (vertex.co[1] / 32, z / 32)
    # Set Normals & Lightmap's UV
    # Lightmap atlas layout: top-left quarter = top, bottom-left = bottom,
    # right half = the 4 side walls, each padded by 'margin'.
    uv_lightmap = mesh.uv_layers.new(name='Lightmap')
    mesh.uv_layers.active = uv_lightmap
    margin = settings.lightmap_margin / 100 * 0.25 / 2
    for f, face in enumerate(mesh.polygons):
        if f < (m-1)*(n-1): # Top
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = margin + (vertex.co[0] / east_border.length) * (0.5 - 2 * margin)
                y = margin + (vertex.co[1] / north_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
        elif f < 2*(m-1)*(n-1): # Bottom
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = margin + (vertex.co[0] / east_border.length) * (0.5 - 2 * margin)
                y = 0.5 + margin + (vertex.co[1] / north_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
        elif f < 2*(m-1)*(n-1)+(m-1): # East
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = 0.5 + margin if vertex_id >= 2*m*n+m else 0.75 - margin
                y = 0.0 + margin + (vertex.co[0] / east_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
        elif f < 2*(m-1)*(n-1)+2*(m-1): # West
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = 0.75 + margin if vertex_id >= 2*m*n+3*m else 1.0 - margin
                y = 0.0 + margin + (vertex.co[0] / east_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
        elif f < 2*(m-1)*(n-1)+2*(m-1)+(n-1): # South
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = 0.5 + margin if vertex_id >= 2*m*n+4*m+n else 0.75 - margin
                y = 0.5 + margin + (vertex.co[1] / north_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
        else: # North
            for vertex_id, loop_id in zip(face.vertices, face.loop_indices):
                vertex = mesh.vertices[vertex_id]
                x = 0.75 + margin if vertex_id >= 2*m*n+4*m+3*n else 1.0 - margin
                y = 0.5 + margin + (vertex.co[1] / north_border.length) * (0.5 - 2 * margin)
                uv_lightmap.data[loop_id].uv = (x, y)
    # Set Item settings
    scene.trackmania_item.ghost_mode = True
    scene.trackmania_item.fly_step = 8
    scene.trackmania_item.grid_horizontal_step = 32
    scene.trackmania_item.grid_vertical_step = 8
def generate_surface(context):
    """Ensure the target surface object exists in the scene, then dispatch to
    the generator matching the configured surface type.

    Returns an error message string on failure, None on success.
    """
    scene = context.scene
    settings = scene.trackmania_surface_settings
    # (Re)create the surface object if it is unset or no longer linked.
    needs_object = (
        settings.surface is None
        or scene.collection.objects.get(settings.surface.name) is None
    )
    if needs_object:
        new_mesh = context.blend_data.meshes.new(scene.name)
        new_object = context.blend_data.objects.new(scene.name, new_mesh)
        scene.collection.objects.link(new_object)
        settings.surface = new_object
    generators = {
        'SCREW': generate_surface_screw,
        'CONNECTOR': generate_surface_connector,
    }
    generator = generators.get(settings.surface_type)
    if generator is None:
        return 'Surface type is None'
    return generator(context)
class SCENE_PG_SurfaceSettings(PropertyGroup):
    """Scene-level settings for generating the active surface: surface type,
    sampling/subdivision parameters, materials, the 4 borders, and pointers
    to the generated surface object and its pivots."""
    bl_idname = 'SCENE_PG_SurfaceSettings'
    def __init__(self):
        # NOTE(review): Blender does not run __init__ for PropertyGroup
        # instances, and no 'borders' collection is declared on this class —
        # this method appears to be dead code; confirm before relying on it.
        super().__init__()
        for _ in range(4):
            self.borders.add()
    def update(self, context):
        # Shared update callback: regenerate the surface whenever a property
        # changes, but only while continuous update is enabled.
        if self.enable_continuous_update:
            generate_surface(context)
    enable_continuous_update: BoolProperty(
        name='Enable Continuous Update',
        description='When set, will update shape whenever one of its attributes is changed',
        default=False,
        update=update
    )
    # Common properties
    surface_type: EnumProperty(
        name='Type',
        description='How the surface is generated',
        items=(
            ('NONE', 'None', 'No surface generated'),
            ('SCREW', 'Screw', 'Translate and/or rotate a border'),
            ('CONNECTOR', 'Connector', 'Connect 4 borders')),
        update=update
    )
    grid_subdivisions_flat: IntProperty(
        name='Flat Subdivisions',
        description='Number of subdivisions per grid unit when surface is flat (TM vanilla uses 4)',
        min=1,
        soft_max=16,
        default=4,
        update=update
    )
    grid_subdivisions_semi_flat: IntProperty(
        name='Semi-flat Subdivisions',
        description='Number of subdivisions per grid unit when borders along this direction are flat but between differently shaped borders',
        min=1,
        soft_max=32,
        default=4,
        update=update
    )
    grid_subdivisions_curved: IntProperty(
        name='Curved Subdivisions',
        description='Number of subdivisions per grid unit when surface is curved (TM vanilla uses 32)',
        min=1,
        soft_max=64,
        default=32,
        update=update
    )
    height: FloatProperty(
        name='Height',
        description='How high surface is compared to ground',
        min=0,
        max=8,
        default=2,
        update=update
    )
    bezier_precision: IntProperty(
        name='Bezier Precision',
        # Fixed typos in user-facing description ("time ... curved is sambled").
        description='How many times bezier curve is sampled before surface is projected on it',
        min=0,
        soft_max=256,
        default=128,
        update=update
    )
    lightmap_margin: FloatProperty(
        name='Lightmap Margin (%)',
        description='Island margin for Lightmap UV unwrapping',
        min=0,
        max=100,
        default=10,
        update=update
    )
    top_material: PointerProperty(
        type=Material,
        name='Top Material',
        description='Material of generated top surface'
    )
    bottom_material: PointerProperty(
        type=Material,
        name='Bottom Material',
        description='Material of generated bottom surface'
    )
    side_material: PointerProperty(
        type=Material,
        name='Side Material',
        description='Material of generated side surfaces'
    )
    # Borders 0/1 are used as East/West and 2/3 as North/South by
    # generate_surface_connector.
    border_0: PointerProperty(
        type=SCENE_PG_BorderSettings,
        name='Border 0',
        description='Border 0 of generated surface',
        update=update
    )
    border_1: PointerProperty(
        type=SCENE_PG_BorderSettings,
        name='Border 1',
        description='Border 1 of generated surface',
        update=update
    )
    border_2: PointerProperty(
        type=SCENE_PG_BorderSettings,
        name='Border 2',
        description='Border 2 of generated surface',
        update=update
    )
    border_3: PointerProperty(
        type=SCENE_PG_BorderSettings,
        name='Border 3',
        description='Border 3 of generated surface',
        update=update
    )
    # Objects owned by the generator (not exposed in the UI).
    surface: PointerProperty(type=Object)
    pivot_0: PointerProperty(type=Object)
    pivot_1: PointerProperty(type=Object)
    pivot_2: PointerProperty(type=Object)
    pivot_3: PointerProperty(type=Object)
    # Screw properties
    # Connector properties
    preserve_tangents: BoolProperty(
        name='Preserve Tangents',
        # Fixed typos in user-facing description ("ran", "opposit").
        description='Whether or not border tangents must be preserved when run across surface. Both opposite borders must have same tangents at their extremities for it to work'
    )
class SCENE_PG_MultiSurfaceMaterial(PropertyGroup):
    """One entry of the multi-surface material list: a material plus the name
    to use for it in export paths."""
    bl_idname = 'SCENE_PG_MultiSurfaceMaterial'
    material: PointerProperty(
        type=Material
    )
    # When set, used instead of the material's own name in export paths
    # (see VIEW3D_OT_MultiSurfaceGenerate).
    export_name: StringProperty(name='Export Name')
class VIEW3D_OT_MultiSurfaceBordersEdit(Operator):
    """Add, remove or reorder entries of the multi-surface border list,
    according to the 'action' property set by the calling UI button."""
    bl_idname = 'scene.tm_multi_surface_borders_edit'
    bl_label = 'Multi Surface - Edit Borders'
    bl_options = {'REGISTER'}
    action: EnumProperty(
        items=(
            ('UP', 'Up', ''),
            ('DOWN', 'Down', ''),
            ('REMOVE', 'Remove', ''),
            ('ADD', 'Add', '')
        )
    )
    @classmethod
    def poll(cls, context):
        # Only available when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'
    def invoke(self, context, event):
        scene = context.scene
        settings = scene.trackmania_multi_surface
        index = settings.selected_border
        try:
            # Probe the selection; IndexError means nothing is selected, in
            # which case only ADD is meaningful.
            settings.borders[index]
        except IndexError:
            pass
        else:
            if self.action == 'DOWN' and index < len(settings.borders) - 1:
                settings.borders.move(index, index+1)
                settings.selected_border += 1
            elif self.action == 'UP' and index >= 1:
                settings.borders.move(index, index-1)
                settings.selected_border -= 1
            elif self.action == 'REMOVE':
                # Clamp so removing the first entry doesn't leave the
                # selection at -1.
                settings.selected_border = max(0, index - 1)
                settings.borders.remove(index)
        # ADD works even when the list is empty (no valid selection).
        if self.action == 'ADD':
            settings.borders.add()
        return {'FINISHED'}
class VIEW3D_UL_MultiSurfaceBorderItems(UIList):
    """UIList rendering one row per multi-surface border entry."""
    bl_idname = 'VIEW3D_UL_MultiSurfaceBorderItems'
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        # One row: deformable curve, real curve, flip toggle.
        row = layout.row()
        row.prop(item, 'deformable')
        row.prop(item, 'curve')
        row.prop(item, 'flip')
    def invoke(self, context, event):
        # No custom invoke behavior.
        pass
class VIEW3D_OT_MultiSurfaceMaterialsEdit(Operator):
    """Add, remove or reorder entries of the multi-surface material list,
    according to the 'action' property set by the calling UI button."""
    bl_idname = 'scene.tm_multi_surface_materials_edit'
    bl_label = 'Multi Surface - Edit Materials'
    bl_options = {'REGISTER'}
    action: EnumProperty(
        items=(
            ('UP', 'Up', ''),
            ('DOWN', 'Down', ''),
            ('REMOVE', 'Remove', ''),
            ('ADD', 'Add', '')
        )
    )
    @classmethod
    def poll(cls, context):
        # Only available when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'
    def invoke(self, context, event):
        scene = context.scene
        settings = scene.trackmania_multi_surface
        index = settings.selected_material
        try:
            # Probe the selection; IndexError means nothing is selected, in
            # which case only ADD is meaningful.
            settings.materials[index]
        except IndexError:
            pass
        else:
            if self.action == 'DOWN' and index < len(settings.materials) - 1:
                settings.materials.move(index, index+1)
                settings.selected_material += 1
            elif self.action == 'UP' and index >= 1:
                settings.materials.move(index, index-1)
                settings.selected_material -= 1
            elif self.action == 'REMOVE':
                # Clamp so removing the first entry doesn't leave the
                # selection at -1.
                settings.selected_material = max(0, index - 1)
                settings.materials.remove(index)
        # ADD works even when the list is empty (no valid selection).
        if self.action == 'ADD':
            settings.materials.add()
        return {'FINISHED'}
class VIEW3D_UL_MultiSurfaceMaterialItems(UIList):
    """UIList rendering one row per multi-surface material entry."""
    bl_idname = 'VIEW3D_UL_MultiSurfaceMaterialItems'
    def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index):
        # One row: material picker plus its export name override.
        row = layout.row()
        row.prop(item, 'material', text='')
        row.prop(item, 'export_name', text='Name')
    def invoke(self, context, event):
        # No custom invoke behavior.
        pass
class SCENE_PG_MultiSurfaceSettings(PropertyGroup):
    """Scene-level settings used by the multi-surface batch generator:
    the pools of borders and top materials to combine, plus the mesh
    simplification threshold."""
    bl_idname = 'SCENE_PG_MultiSurfaceSettings'
    borders: CollectionProperty(
        type=SCENE_PG_BorderSettings,
        name='Borders'
    )
    # Index of the border currently selected in the UI list.
    selected_border: IntProperty()
    materials: CollectionProperty(
        type=SCENE_PG_MultiSurfaceMaterial
    )
    # Index of the material currently selected in the UI list.
    selected_material: IntProperty()
    simplify_threshold: FloatProperty(
        name='Simplify Threshold',
        # Fixed typo in user-facing description ("witch" -> "which").
        description='Deviation angle within which 2 adjacent faces are considered co-planar',
        min=0,
        max=180,
        default=1
    )
class VIEW3D_OT_MultiSurfaceGenerate(Operator):
    """Batch-generate and export a connector surface for every
    material x border^4 combination configured in the multi-surface
    settings, skipping combinations whose borders cannot join."""
    bl_idname = 'scene.tm_multi_surface_generate'
    bl_label = 'Generate'
    bl_options = {'REGISTER'}
    @classmethod
    def poll(cls, context):
        # Only available when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'
    def execute(self, context):
        """Iterate all combinations, configure the single-surface settings
        for each valid one, regenerate via the update operator and export."""
        scene = context.scene
        settings = scene.trackmania_surface_settings
        borders = scene.trackmania_multi_surface.borders
        materials = scene.trackmania_multi_surface.materials
        # Converted to radians for the (currently disabled) dissolve pass.
        simplify_threshold = radians(scene.trackmania_multi_surface.simplify_threshold)
        eps = 0.001
        count = 0
        min_index = 100
        # Swallows the generator's prints so only progress lines show.
        stdout_dump = io.StringIO()
        material_count = len(materials)
        border_count = len(borders)
        max_count = material_count * pow(border_count, 4)
        raw_vertices = 0
        raw_edges = 0
        raw_polygons = 0
        simplified_vertices = 0
        simplified_edges = 0
        simplified_polygons = 0
        scene_name = scene.name
        for m in range(len(materials)):
            material = materials[m]
            if material.material is None:
                continue
            material_name = material.export_name if material.export_name else material.material.name
            settings.top_material = material.material
            base_path = scene_name + '/' + material_name + '/'
            # index0/1/2 count down from 100 so generated folder names sort in
            # generation order; prev0/prev1 detect when the outer loops moved on.
            index0 = 100
            prev0 = -1
            for c0 in range(len(borders)):
                index1 = 100
                prev1 = -1
                ## TMP ##
                base_path = scene_name + '_{}_{}of{}'.format(material_name, floor(c0/2)+1, floor((len(borders)-1)/4+1)) + '/' + material_name + '/'
                for c1 in range(len(borders)):
                    index2 = 100
                    for c2 in range(len(borders)):
                        for c3 in range(len(borders)):
                            current_count = m*pow(border_count,4)+c0*pow(border_count,3)+c1*pow(border_count, 2)+c2*border_count+c3
                            print('{}/{} ({}%)'.format(current_count, max_count, round(current_count/max_count*100)))
                            curve0 = borders[c0].deformable
                            curve1 = borders[c1].deformable
                            curve2 = borders[c2].deformable
                            curve3 = borders[c3].deformable
                            b0 = border.Border.from_curve(curve0, borders[c0].flip)
                            b1 = border.Border.from_curve(curve1, borders[c1].flip)
                            b2 = border.Border.from_curve(curve2, borders[c2].flip)
                            b3 = border.Border.from_curve(curve3, borders[c3].flip)
                            # Opposite borders must have matching lengths.
                            if abs(b0.length - b2.length) > eps or abs(b1.length - b3.length) > eps:
                                continue
                            # Cumulative corner altitudes walking around the
                            # 4 borders; the loop must close (z0 ~ 0).
                            z1 = 00 + b0.height
                            z2 = z1 + b1.height
                            z3 = z2 + b2.height
                            z0 = z3 + b3.height
                            if z1 < -eps or z2 < -eps or z3 < -eps or (z3 <= eps and (z1 > eps or z2 > eps)) or abs(z0) > eps:
                                continue
                            # NOTE(review): appears redundant with the
                            # (z3 <= eps and ...) term above — confirm.
                            if z3 < eps and (z1 > eps or z2 > eps):
                                continue
                            if prev0 != c0:
                                index0 -= 1
                                index1 = 99
                                index2 = 99
                            elif prev1 != c1:
                                index1 -= 1
                                index2 = 99
                            else:
                                index2 -= 1
                            prev0 = c0
                            prev1 = c1
                            if index0 < min_index:
                                min_index = index0
                            if index1 < min_index:
                                min_index = index1
                            if index2 < min_index:
                                min_index = index2
                            name = base_path + '{}-{}{}/{}-{}{}/{}-{}-{}-{}{}-{}{}-{}{}-{}{}'.format(
                                index0,
                                curve0.name, 'f' if borders[c0].flip else '',
                                index1,
                                curve1.name, 'f' if borders[c1].flip else '',
                                index2,
                                scene_name,
                                material_name,
                                curve0.name, 'f' if borders[c0].flip else '',
                                curve1.name, 'f' if borders[c1].flip else '',
                                curve2.name, 'f' if borders[c2].flip else '',
                                curve3.name, 'f' if borders[c3].flip else ''
                            )
                            scene.trackmania_item.export_path = name
                            # Map combination onto the single-surface border
                            # slots (West/South sides use inverted flips).
                            settings.border_0.deformable = borders[c0].deformable
                            settings.border_0.curve = curve0
                            settings.border_0.flip = borders[c0].flip
                            settings.border_2.deformable = borders[c1].deformable
                            settings.border_2.curve = curve1
                            settings.border_2.flip = borders[c1].flip
                            settings.border_1.deformable = borders[c2].deformable
                            settings.border_1.curve = curve2
                            settings.border_1.flip = not borders[c2].flip
                            settings.border_3.deformable = borders[c3].deformable
                            settings.border_3.curve = curve3
                            settings.border_3.flip = not borders[c3].flip
                            print('Generating and exporting: {}'.format(name))
                            with redirect_stdout(stdout_dump):
                                bpy.ops.scene.tm_surface_update()
                            # Simplify mesh
                            raw_vertices += len(settings.surface.data.vertices)
                            raw_edges += len(settings.surface.data.edges)
                            raw_polygons += len(settings.surface.data.polygons)
                            '''
                            settings.surface.select_set(True)
                            context.view_layer.objects.active = settings.surface
                            bpy.ops.object.mode_set(mode='EDIT')
                            bpy.ops.mesh.select_mode(type='VERT')
                            bpy.ops.mesh.select_all(action='SELECT')
                            bpy.ops.mesh.dissolve_limited(angle_limit=simplify_threshold, use_dissolve_boundaries=True)
                            bpy.ops.object.mode_set(mode='OBJECT')
                            '''
                            simplified_vertices += len(settings.surface.data.vertices)
                            simplified_edges += len(settings.surface.data.edges)
                            simplified_polygons += len(settings.surface.data.polygons)
                            bpy.ops.trackmania.export()
                            count += 1
        print('Generated {} surface items.'.format(count))
        print('Max node per folder was {}'.format(100 - min_index))
        print('Generated/Exported vertices: {}/{} (saved {}%)'.format(simplified_vertices, raw_vertices,
            round((raw_vertices - simplified_vertices) * 100 / raw_vertices)))
        print('Generated/Exported edges: {}/{} (saved {}%)'.format(simplified_edges, raw_edges,
            round((raw_edges - simplified_edges) * 100 / raw_edges)))
        print('Generated/Exported polygons: {}/{} (saved {}%)'.format(simplified_polygons, raw_polygons,
            round((raw_polygons - simplified_polygons) * 100 / raw_polygons)))
        return {'FINISHED'}
# Panels
class VIEW3D_OT_UpdateActiveSurface(Operator):
    """Regenerate the active surface from the current scene settings,
    reporting the generator's error message on failure."""
    bl_idname = 'scene.tm_surface_update'
    bl_label = 'Update'
    bl_options = {'REGISTER'}

    @classmethod
    def poll(cls, context):
        # Only available when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'

    def execute(self, context):
        error = generate_surface(context)
        if error is None:
            return {'FINISHED'}
        self.report({'ERROR'}, error)
        return {'CANCELLED'}
class VIEW3D_PT_TM_Surface(Panel):
    """Root 'Surface' panel of the Trackmania tab; it only provides the
    header that the Multi Surface / Active sub-panels attach to."""
    bl_idname = 'VIEW3D_PT_TM_Surface'
    bl_label = 'Surface'
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Trackmania'
    @classmethod
    def poll(cls, context):
        # Only shown when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'
    def draw(self, context):
        # Intentionally empty: content lives in the child panels.
        pass
class VIEW3D_PT_TM_MultiSurface(Panel):
    """Sub-panel with the border/material pools and batch-generate button."""
    bl_parent_id = VIEW3D_PT_TM_Surface.bl_idname
    bl_idname = 'VIEW3D_PT_TM_MultiSurface'
    bl_label = 'Multi Surface'
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Trackmania'

    def draw(self, context):
        layout = self.layout
        settings = context.scene.trackmania_multi_surface

        def draw_list(label, ul_idname, collection_prop, index_prop, op_idname):
            # A labeled template_list with the +/-/up/down operator column.
            layout.label(text=label)
            list_row = layout.row()
            list_row.template_list(ul_idname, '', settings, collection_prop, settings, index_prop, rows=4)
            buttons = list_row.column(align=True)
            buttons.operator(op_idname, text='+').action = 'ADD'
            buttons.operator(op_idname, text='-').action = 'REMOVE'
            buttons.separator()
            buttons.operator(op_idname, icon='TRIA_UP', text='').action = 'UP'
            buttons.operator(op_idname, icon='TRIA_DOWN', text='').action = 'DOWN'

        # Borders
        draw_list('Borders:', 'VIEW3D_UL_MultiSurfaceBorderItems',
                  'borders', 'selected_border',
                  VIEW3D_OT_MultiSurfaceBordersEdit.bl_idname)
        # Materials
        draw_list('Top Materials:', 'VIEW3D_UL_MultiSurfaceMaterialItems',
                  'materials', 'selected_material',
                  VIEW3D_OT_MultiSurfaceMaterialsEdit.bl_idname)
        layout.prop(settings, 'simplify_threshold')
        layout.operator(VIEW3D_OT_MultiSurfaceGenerate.bl_idname)
class VIEW3D_PT_TM_ActiveSurface(Panel):
    """Sub-panel exposing the active surface's generation settings."""
    bl_parent_id = VIEW3D_PT_TM_Surface.bl_idname
    bl_idname = 'VIEW3D_PT_TM_ActiveSurface'
    bl_label = 'Active'
    bl_space_type = 'VIEW_3D'
    bl_region_type = 'UI'
    bl_category = 'Trackmania'

    @classmethod
    def poll(cls, context):
        # Only shown when no object is in an edit mode.
        return context.object is None or context.object.mode == 'OBJECT'

    def _draw_section_header(self, column, title):
        # Centered caption row shared by all sections.
        caption = column.row()
        caption.alignment = 'CENTER'
        caption.label(text=title)

    def draw_common_settings(self, layout, settings):
        section = layout.column()
        self._draw_section_header(section, 'Common')
        props = section.column(align=True)
        for prop_name in (
                'enable_continuous_update', 'bezier_precision',
                'grid_subdivisions_flat', 'grid_subdivisions_semi_flat',
                'grid_subdivisions_curved', 'height', 'lightmap_margin',
                'top_material', 'bottom_material', 'side_material'):
            props.prop(settings, prop_name)

    def draw_screw(self, layout, settings):
        section = layout.column()
        self._draw_section_header(section, 'Screw')
        draw_border(section.column(align=True), settings.border_0, 'Border')

    def draw_connector(self, layout, settings):
        section = layout.column()
        self._draw_section_header(section, 'Connector')
        body = section.column(align=True)
        for border_settings, label in (
                (settings.border_0, 'East Border'),
                (settings.border_1, 'West Border'),
                (settings.border_2, 'North Border'),
                (settings.border_3, 'South Border')):
            draw_border(body, border_settings, label)
        body.row().prop(settings, 'preserve_tangents')

    def draw(self, context):
        layout = self.layout
        settings = context.scene.trackmania_surface_settings
        layout.prop(settings, 'surface_type')
        surface_type = settings.surface_type
        if surface_type == 'NONE':
            return
        self.draw_common_settings(layout, settings)
        if surface_type == 'SCREW':
            self.draw_screw(layout, settings)
        elif surface_type == 'CONNECTOR':
            self.draw_connector(layout, settings)
        layout.operator(VIEW3D_OT_UpdateActiveSurface.bl_idname)
# All add-on classes, in registration order.  NOTE(review): order appears
# deliberate — PropertyGroups come before the operators/panels that use them,
# and VIEW3D_PT_TM_Surface precedes the sub-panels that reference it via
# bl_parent_id; confirm before reordering.
classes = (
    SCENE_PG_BorderSettings,
    SCENE_PG_MultiSurfaceMaterial,
    SCENE_PG_SurfaceSettings,
    SCENE_PG_MultiSurfaceSettings,
    VIEW3D_OT_MultiSurfaceBordersEdit,
    VIEW3D_OT_MultiSurfaceMaterialsEdit,
    VIEW3D_OT_MultiSurfaceGenerate,
    VIEW3D_UL_MultiSurfaceBorderItems,
    VIEW3D_UL_MultiSurfaceMaterialItems,
    VIEW3D_OT_UpdateActiveSurface,
    VIEW3D_PT_TM_Surface,
    VIEW3D_PT_TM_MultiSurface,
    VIEW3D_PT_TM_ActiveSurface
)
def register():
    """Register every add-on class and attach the scene-level settings."""
    for blender_class in classes:
        bpy.utils.register_class(blender_class)
    # Scene-level pointer properties holding the add-on's settings.
    scene = bpy.types.Scene
    scene.trackmania_surface_settings = PointerProperty(type=SCENE_PG_SurfaceSettings)
    scene.trackmania_multi_surface = PointerProperty(type=SCENE_PG_MultiSurfaceSettings)
def unregister():
    """Detach scene-level settings and unregister all add-on classes.

    Mirrors ``register()``: the scene pointer properties are removed first,
    then classes are unregistered in reverse registration order so that
    dependents (e.g. sub-panels referencing a parent via ``bl_parent_id``)
    are removed before the classes they depend on.
    """
    del bpy.types.Scene.trackmania_surface_settings
    del bpy.types.Scene.trackmania_multi_surface
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
|
from django.db import models
from django.conf import settings
from mainapp.models import Product
class Basket(models.Model):
    """One product line in a user's shopping basket.

    Each row links a user to a product with a quantity; deleting either
    the user or the product cascades to the basket line.
    """
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='basket')
    product = models.ForeignKey(Product, on_delete=models.CASCADE)
    quantity = models.PositiveIntegerField(verbose_name='количество', default=1)
    add_datetime = models.DateTimeField(verbose_name='время', auto_now_add=True)

    def __str__(self):
        return f'{self.user.username} - {self.product.name}'

    @property
    def product_cost(self):
        """Total cost of this line: unit price multiplied by quantity."""
        # NOTE(review): original source ended this line with a stray ' |'
        # extraction artifact (a syntax error); removed.
        return self.quantity * self.product.price
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.