import os
os.environ['ComplianceMode'] = 'True'
os.environ['SendToSlack'] = 'True'
os.environ['SendToSNS'] = 'True'
os.environ['RoleName'] = 'HUITPublicResourceCompliance-us-east-1'
os.environ['LogLevel'] = 'INFO'
os.environ['Topic'] = 'arn:aws:sns:us-east-1:077179288803:dynamodb'
os.environ['SlackURL'] = 'https://hooks.slack.com/services/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'
os.environ['DynamoTable'] = 'HUITPublicResourceCheck'
# org-id o-xukgm413dr
from aws_lambda_context import LambdaContext
from huit_public_compliance import lambda_handler
# event = {
# "version": "0",
# "id": "ee376907-2647-4179-9203-343cfb3017a4",
# "detail-type": "EC2 Instance State-change Notification",
# "source": "aws.ec2",
# "account": "929976461491",
# "time": "2021-03-10T12:52:00Z",
# "region": "us-east-1",
# "resources": [
# "arn:aws:ec2:us-east-1:929976461491:instance/i-010fc37020416ca41"
# ],
# "detail": {
# "instance-id": "i-010fc37020416ca41",
# "state": "running"
# }
# }
# event = {
# "version": "0",
# "id": "ee376907-2647-4179-9203-343cfb3017a4",
# "detail-type": "EC2 Instance State-change Notification",
# "source": "aws.ec2",
# "account": "459273849936",
# "time": "2021-02-02T21:30:34Z",
# "region": "us-east-1",
# "resources": [
# "arn:aws:ec2:us-east-1:459273849936:instance/i-011996651ee44c891"
# ],
# "detail": {
# "instance-id": "i-011996651ee44c891",
# "state": "running"
# }
# }
event = {
"version":"0",
"id":"b921dc29-8bce-8330-6fe6-a5aaed8619d4",
"detail-type":"RDS DB Instance Event",
"source":"aws.rds",
"account":"929976461491",
"time":"2021-03-15T17:43:35Z",
"region":"us-east-1",
"resources":[
"arn:aws:rds:us-east-1:929976461491:db:database-2"
],
"detail":{
"EventCategories":[
"empty"
],
"SourceType":"DB_INSTANCE",
"SourceArn":"arn:aws:rds:us-east-1:929976461491:db:database-2",
"Date":"2021-03-15T17:43:35.870Z",
"Message":"Finished moving DB instance to target VPC",
"SourceIdentifier":"database-2",
"EventID":null
}
}
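# NOTE: the assignment below overwrites the event above; only the RDS
# "creation" event is actually passed to the handler on this run.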
event = {
"version":"0",
"id":"9dbdce5f-29f1-1e4a-119e-08ea62e15ce4",
"detail-type":"RDS DB Instance Event",
"source":"aws.rds",
"account":"929976461491",
"time":"2021-03-10T01:21:29Z",
"region":"us-east-1",
"resources":[
"arn:aws:rds:us-east-1:929976461491:db:database-4"
],
"detail":{
"EventCategories":[
"creation"
],
"SourceType":"DB_INSTANCE",
"SourceArn":"arn:aws:rds:us-east-1:929976461491:db:database-4",
"Date":"2021-03-10T01:21:29.548Z",
"Message":"DB instance created",
"SourceIdentifier":"database-4",
"EventID":"RDS-EVENT-0005"
}
}
context = LambdaContext()
response = lambda_handler(event, context)
|
# SPDX-FileCopyrightText: 2021 Melissa LeBlanc-Williams for Adafruit Industries
#
# SPDX-License-Identifier: MIT
import sys
import time
from adafruit_blinka import agnostic
import board
import digitalio
# from Adafruit_GPIO import Platform
# print("Platform = ", Platform.platform_detect(), Platform.pi_version())
print("hello blinka!")
print(
"Found system type: %s (sys.platform %s implementation %s) "
% (agnostic.board_id, sys.platform, sys.implementation.name)
)
print("board contents: ", dir(board))
led = digitalio.DigitalInOut(board.D4)
led.direction = digitalio.Direction.OUTPUT
button = digitalio.DigitalInOut(board.D18)
button.direction = digitalio.Direction.INPUT
button.pull = digitalio.Pull.DOWN
while True:
led.value = button.value
time.sleep(0.1)
|
import math
import torch
import torch.nn as nn
from mmcv.runner import load_checkpoint
from mmedit.models.common import (PixelShufflePack, ResidualBlockNoBN,
make_layer)
from mmedit.models.registry import BACKBONES
from mmedit.utils import get_root_logger
class UpsampleModule(nn.Sequential):
"""Upsample module used in EDSR.
Args:
scale (int): Scale factor. Supported scales: 2^n and 3.
mid_channels (int): Channel number of intermediate features.
"""
def __init__(self, scale, mid_channels):
modules = []
if (scale & (scale - 1)) == 0: # scale = 2^n
for _ in range(int(math.log(scale, 2))):
modules.append(
PixelShufflePack(
mid_channels, mid_channels, 2, upsample_kernel=3))
elif scale == 3:
modules.append(
PixelShufflePack(
mid_channels, mid_channels, scale, upsample_kernel=3))
else:
raise ValueError(f'scale {scale} is not supported. '
'Supported scales: 2^n and 3.')
super(UpsampleModule, self).__init__(*modules)
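# Usage sketch (hypothetical values): a scale of 4 decomposes into two x2
# PixelShufflePack stages via the 2^n branch, while scale=3 uses one x3 stage.
# up = UpsampleModule(scale=4, mid_channels=64)  # len(up) == 2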
@BACKBONES.register_module()
class EDSR(nn.Module):
"""EDSR network structure.
Paper: Enhanced Deep Residual Networks for Single Image Super-Resolution.
Ref repo: https://github.com/thstkdgus35/EDSR-PyTorch
Args:
in_channels (int): Channel number of inputs.
out_channels (int): Channel number of outputs.
mid_channels (int): Channel number of intermediate features.
Default: 64.
num_blocks (int): Block number in the trunk network. Default: 16.
upscale_factor (int): Upsampling factor. Supports 2^n and 3.
Default: 4.
res_scale (float): Used to scale the residual in residual block.
Default: 1.
rgb_mean (tuple[float]): Image mean in RGB orders.
Default: (0.4488, 0.4371, 0.4040), calculated from DIV2K dataset.
rgb_std (tuple[float]): Image std in RGB orders. In EDSR, it uses
(1.0, 1.0, 1.0). Default: (1.0, 1.0, 1.0).
"""
def __init__(self,
in_channels,
out_channels,
mid_channels=64,
num_blocks=16,
upscale_factor=4,
res_scale=1,
rgb_mean=(0.4488, 0.4371, 0.4040),
rgb_std=(1.0, 1.0, 1.0)):
super(EDSR, self).__init__()
self.in_channels = in_channels
self.out_channels = out_channels
self.mid_channels = mid_channels
self.num_blocks = num_blocks
self.upscale_factor = upscale_factor
self.mean = torch.Tensor(rgb_mean).view(1, 3, 1, 1)
self.std = torch.Tensor(rgb_std).view(1, 3, 1, 1)
self.conv_first = nn.Conv2d(in_channels, mid_channels, 3, padding=1)
self.body = make_layer(
ResidualBlockNoBN,
num_blocks,
mid_channels=mid_channels,
res_scale=res_scale)
self.conv_after_body = nn.Conv2d(mid_channels, mid_channels, 3, 1, 1)
self.upsample = UpsampleModule(upscale_factor, mid_channels)
self.conv_last = nn.Conv2d(
mid_channels, out_channels, 3, 1, 1, bias=True)
def forward(self, x):
"""Forward function.
Args:
x (Tensor): Input tensor with shape (n, c, h, w).
Returns:
Tensor: Forward results.
"""
self.mean = self.mean.to(x)
self.std = self.std.to(x)
x = (x - self.mean) / self.std
x = self.conv_first(x)
res = self.conv_after_body(self.body(x))
res += x
x = self.conv_last(self.upsample(res))
x = x * self.std + self.mean
return x
def init_weights(self, pretrained=None, strict=True):
"""Init weights for models.
Args:
pretrained (str, optional): Path for pretrained weights. If given
None, pretrained weights will not be loaded. Defaults to None.
strict (bool, optional): Whether to strictly load the pretrained model.
Defaults to True.
"""
if isinstance(pretrained, str):
logger = get_root_logger()
load_checkpoint(self, pretrained, strict=strict, logger=logger)
elif pretrained is None:
pass # use default initialization
else:
raise TypeError('"pretrained" must be a str or None. '
f'But received {type(pretrained)}.')
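# Usage sketch (assumed shapes, not part of the original file): with the
# default upscale_factor=4, a (1, 3, 32, 32) input yields a (1, 3, 128, 128)
# output.
# model = EDSR(in_channels=3, out_channels=3)
# out = model(torch.rand(1, 3, 32, 32))  # -> torch.Size([1, 3, 128, 128])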
|
#!/usr/bin/env python
# Copyright 2016 Andy Chu. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
"""
oil.py - A busybox-like binary for oil.
Based on argv[0], it acts like a few different programs.
Builtins that can be exposed:
- test / [ -- call BoolParser at runtime
- 'time' -- because it has format strings, etc.
- find/xargs equivalents (even if they are not compatible)
- list/each/every
- echo: most likely don't care about this
"""
from __future__ import print_function
import os
import sys
import time # for perf measurement
# TODO: Set PYTHONPATH from outside?
this_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
sys.path.append(os.path.join(this_dir, '..'))
_trace_path = os.environ.get('_PY_TRACE')
if _trace_path:
from benchmarks import pytrace
_tracer = pytrace.Tracer()
_tracer.Start()
else:
_tracer = None
# Set OIL_TIMING in the environment to see startup time problems.
if os.environ.get('OIL_TIMING'):
start_time = time.time()
def _tlog(msg):
pid = os.getpid() # TODO: Maybe remove PID later.
print('[%d] %.3f %s' % (pid, (time.time() - start_time) * 1000, msg))
else:
def _tlog(msg):
pass
_tlog('before imports')
import errno
#import traceback # for debugging
# Set in Modules/main.c.
HAVE_READLINE = os.getenv('_HAVE_READLINE') != ''
from osh import parse_lib
from core import alloc
from core import args
from core import builtin
from core import completion
from core import cmd_exec
from core import dev
from core import legacy
from core import main_loop
from core import process
from core import reader
from core import state
from core import word_eval
from core import ui
from core import util
if HAVE_READLINE:
import readline
else:
readline = None
from tools import deps
from tools import osh2oil
from tools import readlink
log = util.log
_tlog('after imports')
def _ShowVersion():
util.ShowAppVersion('Oil')
OSH_SPEC = args.FlagsAndOptions()
OSH_SPEC.ShortFlag('-c', args.Str, quit_parsing_flags=True) # command string
OSH_SPEC.ShortFlag('-i') # interactive
# TODO: -h too
OSH_SPEC.LongFlag('--help')
OSH_SPEC.LongFlag('--version')
# the output format when passing -n
OSH_SPEC.LongFlag('--ast-format',
['text', 'abbrev-text', 'html', 'abbrev-html', 'oheap', 'none'],
default='abbrev-text')
OSH_SPEC.LongFlag('--print-status') # TODO: Replace with a shell hook
OSH_SPEC.LongFlag('--hijack-shebang') # TODO: Implement this
OSH_SPEC.LongFlag('--debug-file', args.Str)
OSH_SPEC.LongFlag('--xtrace-to-debug-file')
# For benchmarks/*.sh
OSH_SPEC.LongFlag('--parser-mem-dump', args.Str)
OSH_SPEC.LongFlag('--runtime-mem-dump', args.Str)
# For bash compatibility
OSH_SPEC.LongFlag('--norc')
builtin.AddOptionsToArgSpec(OSH_SPEC)
def OshMain(argv0, argv, login_shell):
arg_r = args.Reader(argv)
try:
opts = OSH_SPEC.Parse(arg_r)
except args.UsageError as e:
ui.usage('osh usage error: %s', e)
return 2
if opts.help:
loader = util.GetResourceLoader()
builtin.Help(['osh-usage'], loader)
return 0
if opts.version:
# OSH version is the only binary in Oil right now, so it's all one version.
_ShowVersion()
return 0
# TODO: This should be in interactive mode only?
builtin.RegisterSigIntHandler()
if arg_r.AtEnd():
dollar0 = argv0
has_main = False
else:
dollar0 = arg_r.Peek() # the script name, or the arg after -c
has_main = True
pool = alloc.Pool()
arena = pool.NewArena()
# NOTE: has_main is only for ${BASH_SOURCE[@]} and family. Could be a
# required arg.
mem = state.Mem(dollar0, argv[arg_r.i + 1:], os.environ, arena,
has_main=has_main)
funcs = {}
comp_lookup = completion.CompletionLookup()
fd_state = process.FdState()
exec_opts = state.ExecOpts(mem, readline)
builtin.SetExecOpts(exec_opts, opts.opt_changes)
aliases = {} # feedback between runtime and parser
parse_ctx = parse_lib.ParseContext(arena, aliases)
if opts.debug_file:
debug_f = util.DebugFile(fd_state.Open(opts.debug_file, mode='w'))
else:
debug_f = util.NullDebugFile()
debug_f.log('Debug file is %s', opts.debug_file)
# Controlled by env variable, flag, or hook?
dumper = dev.CrashDumper(os.getenv('OSH_CRASH_DUMP_DIR', ''))
if opts.xtrace_to_debug_file:
trace_f = debug_f
else:
trace_f = util.DebugFile(sys.stderr)
devtools = dev.DevTools(dumper, debug_f, trace_f)
ex = cmd_exec.Executor(mem, fd_state, funcs, comp_lookup, exec_opts,
parse_ctx, devtools)
# NOTE: The rc file can contain both commands and functions... ideally we
# would only want to save nodes/lines for the functions.
try:
rc_path = 'oilrc'
arena.PushSource(rc_path)
with open(rc_path) as f:
rc_line_reader = reader.FileLineReader(f, arena)
_, rc_c_parser = parse_ctx.MakeParser(rc_line_reader)
try:
status = main_loop.Batch(ex, rc_c_parser, arena)
finally:
arena.PopSource()
except IOError as e:
if e.errno != errno.ENOENT:
raise
# Needed in non-interactive shells for @P
prompt = ui.Prompt(arena, parse_ctx, ex)
ui.PROMPT = prompt
if opts.c is not None:
arena.PushSource('<command string>')
line_reader = reader.StringLineReader(opts.c, arena)
if opts.i: # -c and -i can be combined
exec_opts.interactive = True
elif opts.i: # force interactive
arena.PushSource('<stdin -i>')
line_reader = reader.InteractiveLineReader(arena, prompt)
exec_opts.interactive = True
else:
try:
script_name = arg_r.Peek()
except IndexError:
if sys.stdin.isatty():
arena.PushSource('<interactive>')
line_reader = reader.InteractiveLineReader(arena, prompt)
exec_opts.interactive = True
else:
arena.PushSource('<stdin>')
line_reader = reader.FileLineReader(sys.stdin, arena)
else:
arena.PushSource(script_name)
try:
f = fd_state.Open(script_name)
except OSError as e:
util.error("Couldn't open %r: %s", script_name, os.strerror(e.errno))
return 1
line_reader = reader.FileLineReader(f, arena)
# TODO: assert arena.NumSourcePaths() == 1
# TODO: .rc file needs its own arena.
w_parser, c_parser = parse_ctx.MakeParser(line_reader)
if exec_opts.interactive:
# NOTE: We're using a different evaluator here. The completion system can
# also run functions... it gets the Executor through Executor._Complete.
if HAVE_READLINE:
splitter = legacy.SplitContext(mem) # TODO: share with executor.
ev = word_eval.CompletionWordEvaluator(mem, exec_opts, splitter, arena)
progress_f = ui.StatusLine()
var_action = completion.VariablesActionInternal(ex.mem)
root_comp = completion.RootCompleter(ev, comp_lookup, var_action,
parse_ctx, progress_f, debug_f)
completion.Init(readline, root_comp, debug_f)
from core import comp_builtins
# register builtins and words
comp_builtins.Complete(['-E', '-A', 'command'], ex, comp_lookup)
# register path completion
comp_builtins.Complete(['-D', '-A', 'file'], ex, comp_lookup)
# TODO: Move this into demo/slow-completion.sh
if 1:
# Something for fun, to show off. Also: test that you don't repeatedly hit
# the file system / network / coprocess.
A1 = completion.WordsAction(['foo.py', 'foo', 'bar.py'])
A2 = completion.WordsAction(['m%d' % i for i in range(5)], delay=0.1)
C1 = completion.ChainedCompleter([A1, A2])
comp_lookup.RegisterName('slowc', C1)
return main_loop.Interactive(opts, ex, c_parser, arena)
# TODO: Remove this after removing it from benchmarks/osh-runtime. It's no
# longer relevant with main_loop.
if opts.parser_mem_dump:
# This might be superstition, but we want to let the value stabilize
# after parsing. bash -c 'cat /proc/$$/status' gives different results
# with a sleep.
time.sleep(0.001)
input_path = '/proc/%d/status' % os.getpid()
with open(input_path) as f, open(opts.parser_mem_dump, 'w') as f2:
contents = f.read()
f2.write(contents)
log('Wrote %s to %s (--parser-mem-dump)', input_path,
opts.parser_mem_dump)
nodes_out = [] if exec_opts.noexec else None
_tlog('Execute(node)')
status = main_loop.Batch(ex, c_parser, arena, nodes_out=nodes_out)
if nodes_out is not None:
ui.PrintAst(nodes_out, opts)
# NOTE: 'exit 1' is ControlFlow and gets here, but subshell/commandsub
# don't because they call sys.exit().
if opts.runtime_mem_dump:
# This might be superstition, but we want to let the value stabilize
# after parsing. bash -c 'cat /proc/$$/status' gives different results
# with a sleep.
time.sleep(0.001)
input_path = '/proc/%d/status' % os.getpid()
with open(input_path) as f, open(opts.runtime_mem_dump, 'w') as f2:
contents = f.read()
f2.write(contents)
log('Wrote %s to %s (--runtime-mem-dump)', input_path,
opts.runtime_mem_dump)
# NOTE: We haven't closed the file opened with fd_state.Open
return status
# TODO: Does oil have the same -o syntax? I probably want something else.
OIL_SPEC = args.FlagsAndOptions()
# TODO: -h too
OIL_SPEC.LongFlag('--help')
OIL_SPEC.LongFlag('--version')
#builtin.AddOptionsToArgSpec(OIL_SPEC)
def OilMain(argv):
arg_r = args.Reader(argv)
try:
opts = OIL_SPEC.Parse(arg_r)
except args.UsageError as e:
ui.usage('oil usage error: %s', e)
return 2
if opts.help:
loader = util.GetResourceLoader()
builtin.Help(['oil-usage'], loader)
return 0
if opts.version:
# OSH version is the only binary in Oil right now, so it's all one version.
_ShowVersion()
return 0
raise NotImplementedError('oil')
return 0
def WokMain(main_argv):
raise NotImplementedError('wok')
def BoilMain(main_argv):
raise NotImplementedError('boil')
# TODO: Hook up to completion.
SUBCOMMANDS = [
'translate', 'arena', 'spans', 'format', 'deps', 'undefined-vars'
]
def OshCommandMain(argv):
"""Run an 'oshc' tool.
'oshc' is short for "osh compiler" or "osh command".
TODO:
- oshc --help
oshc deps
--path: the $PATH to use to find executables. What about libraries?
NOTE: we're leaving out su -c, find, xargs, etc.? Those should generally
run functions using the $0 pattern.
--chained-command sudo
"""
try:
action = argv[0]
except IndexError:
raise args.UsageError('oshc: Missing required subcommand.')
if action not in SUBCOMMANDS:
raise args.UsageError('oshc: Invalid subcommand %r.' % action)
try:
script_name = argv[1]
except IndexError:
script_name = '<stdin>'
f = sys.stdin
else:
try:
f = open(script_name)
except IOError as e:
util.error("Couldn't open %r: %s", script_name, os.strerror(e.errno))
return 2
pool = alloc.Pool()
arena = pool.NewArena()
arena.PushSource(script_name)
line_reader = reader.FileLineReader(f, arena)
aliases = {} # Dummy value; not respecting aliases!
parse_ctx = parse_lib.ParseContext(arena, aliases)
_, c_parser = parse_ctx.MakeParser(line_reader)
try:
node = main_loop.ParseWholeFile(c_parser)
except util.ParseError as e:
ui.PrettyPrintError(e, arena, sys.stderr)
return 2
assert node is not None
f.close()
# Columns for list-*
# path line name
# where name is the binary path, variable name, or library path.
# bin-deps and lib-deps can be used to make an app bundle.
# Maybe I should list them together? 'deps' can show 4 columns?
#
# path, line, type, name
#
# --pretty can show the LST location.
# stderr: show how we're following imports?
if action == 'translate':
osh2oil.PrintAsOil(arena, node)
elif action == 'arena': # for debugging
osh2oil.PrintArena(arena)
elif action == 'spans': # for debugging
osh2oil.PrintSpans(arena)
elif action == 'format':
# TODO: autoformat code
raise NotImplementedError(action)
elif action == 'deps':
deps.Deps(node)
elif action == 'undefined-vars': # could be environment variables
raise NotImplementedError
else:
raise AssertionError # Checked above
return 0
# The valid applets right now.
# TODO: Hook up to completion.
APPLETS = ['osh', 'oshc']
def AppBundleMain(argv):
login_shell = False
b = os.path.basename(argv[0])
main_name, ext = os.path.splitext(b)
if main_name.startswith('-'):
login_shell = True
main_name = main_name[1:]
if main_name == 'oil' and ext: # oil.py or oil.ovm
try:
first_arg = argv[1]
except IndexError:
raise args.UsageError('Missing required applet name.')
if first_arg in ('-h', '--help'):
builtin.Help(['bundle-usage'], util.GetResourceLoader())
sys.exit(0)
if first_arg in ('-V', '--version'):
_ShowVersion()
sys.exit(0)
main_name = first_arg
if main_name.startswith('-'): # TODO: Remove duplication above
login_shell = True
main_name = main_name[1:]
argv0 = argv[1]
main_argv = argv[2:]
else:
argv0 = argv[0]
main_argv = argv[1:]
if main_name in ('osh', 'sh'):
status = OshMain(argv0, main_argv, login_shell)
_tlog('done osh main')
return status
elif main_name == 'oshc':
return OshCommandMain(main_argv)
elif main_name == 'oil':
return OilMain(main_argv)
elif main_name == 'wok':
return WokMain(main_argv)
elif main_name == 'boil':
return BoilMain(main_argv)
# For testing latency
elif main_name == 'true':
return 0
elif main_name == 'false':
return 1
elif main_name == 'readlink':
return readlink.main(main_argv)
else:
raise args.UsageError('Invalid applet name %r.' % main_name)
def main(argv):
try:
sys.exit(AppBundleMain(argv))
except NotImplementedError as e:
raise
except args.UsageError as e:
#builtin.Help(['oil-usage'], util.GetResourceLoader())
log('oil: %s', e)
sys.exit(2)
except RuntimeError as e:
log('FATAL: %s', e)
sys.exit(1)
finally:
_tlog('Exiting main()')
if _trace_path:
_tracer.Stop(_trace_path)
if __name__ == '__main__':
# NOTE: This could end up as opy.InferTypes(), opy.GenerateCode(), etc.
if os.getenv('CALLGRAPH') == '1':
from opy import callgraph
callgraph.Walk(main, sys.modules)
else:
main(sys.argv)
|
import numpy as np
import pandas as pd
from ..lsa import LSA
from ..utils import PrimitiveT, find_applicable_primitives, valid_dfs
class TestLSA(PrimitiveT):
primitive = LSA
def test_strings(self):
x = pd.Series(['The dogs ate food.',
'She ate a pineapple',
'Consume Electrolytes, he told me.',
'Hello'])
primitive_func = self.primitive().get_function()
answers = pd.Series(
[[0.06130623793383833, 0.01745556451033845, 0.0057337659660533094, 0.0002763538434776728],
[-0.04393122671005984, 0.04819242528049181, 0.01643423390395579, 0.0011141016579207792]])
results = primitive_func(x)
np.testing.assert_array_almost_equal(np.concatenate(([np.array(answers[0])], [np.array(answers[1])]), axis=0),
np.concatenate(([np.array(results[0])], [np.array(results[1])]), axis=0),
decimal=2)
def test_nan(self):
x = pd.Series([np.nan,
'#;.<',
'This IS a STRING.'])
primitive_func = self.primitive().get_function()
answers = pd.Series(
[[np.nan, 0, 0.074],
[np.nan, 0, -0.024]])
results = primitive_func(x)
np.testing.assert_array_almost_equal(np.concatenate(([np.array(answers[0])], [np.array(answers[1])]), axis=0),
np.concatenate(([np.array(results[0])], [np.array(results[1])]), axis=0),
decimal=2)
def test_with_featuretools(self, es):
transform, aggregation = find_applicable_primitives(self.primitive)
primitive_instance = self.primitive()
transform.append(primitive_instance)
valid_dfs(es, aggregation, transform, self.primitive.name.upper(), multi_output=True)
|
from dataclasses import dataclass, field
from sys import getsizeof
from typing import Any, Dict, List
from paralleldomain.model.annotation.common import Annotation
from paralleldomain.model.geometry.point_3d import Point3DGeometry
@dataclass
class Point3D(Point3DGeometry):
"""Represents a 3D Point.
Args:
x: :attr:`~.Point3D.x`
y: :attr:`~.Point3D.y`
class_id: :attr:`~.Point3D.class_id`
instance_id: :attr:`~.Point3D.instance_id`
attributes: :attr:`~.Point3D.attributes`
Attributes:
x: coordinate along the x-axis
y: coordinate along the y-axis
class_id: Class ID of the point. Can be used to look up more details in :obj:`ClassMap`.
instance_id: Instance ID of the annotated object. Can be used to cross-reference with
other instance annotation types, e.g., :obj:`InstanceSegmentation3D`.
Defaults to -1 if unknown.
attributes: Dictionary of arbitrary object attributes.
"""
class_id: int
instance_id: int = -1
attributes: Dict[str, Any] = field(default_factory=dict)
def __sizeof__(self):
return getsizeof(self.attributes) + 2 * 8 + super().__sizeof__() # 2 * 8 bytes ints or floats
@dataclass
class Points3D(Annotation):
"""Collection of 3D Points
Args:
points: :attr:`~.Points3D.points`
Attributes:
points: Unordered list of :obj:`Point3D` instances
"""
points: List[Point3D]
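# Construction sketch (hypothetical values; Point3DGeometry is assumed to
# provide the x/y/z fields):
# p = Point3D(x=1.0, y=2.0, z=0.5, class_id=3, attributes={"occluded": False})
# pts = Points3D(points=[p])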
|
from django.db.models.signals import post_save
from django.contrib.auth.models import User
from django.dispatch import receiver
from .models import Profile
from .tokens import account_activation_token
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.conf import settings
from django.core.mail import send_mail
from django.template.loader import render_to_string
import datetime
from django.contrib.sites.shortcuts import get_current_site
def create_profile(sender, created=False, **kwargs):
user = kwargs["instance"]
if created:
profile = Profile(user=user)
token = account_activation_token.make_token(user)
uid = urlsafe_base64_encode(force_bytes(user.id))
# current_site = get_current_site(sender.request)
message = render_to_string('users/acc_active_email.html', {
'user': user,
'time': datetime.datetime.now(),
'domain': "localhost:8000",
'uid': uid,
'token': token,
})
# message = "http://localhost:8000/activate/" + str(uid) + "/" + str(token)
send_mail("Confirm your registration", "", settings.EMAIL_HOST_USER,
[user.email], fail_silently=True, html_message=message)
profile.save()
post_save.connect(create_profile, sender=User)
|
from lml.plugin import PluginInfo
from moban import constants
@PluginInfo(
constants.TEMPLATE_ENGINE_EXTENSION, tags=[constants.TEMPLATE_COPY]
)
class ContentForwardEngine(object):
"""
Does no templating; works like 'copy'.
Respects template directories: for example, naughty.template
could exist in any of the template directories dir1,
dir2, or dir3, and this engine will find it for you. With a conventional
copy command, the source file path must be known.
This engine also never really touches the dest file; it only reads
the source file. Everything else is taken care of by moban's
templating mechanism.
"""
ACTION_IN_PRESENT_CONTINUOUS_TENSE = "Copying"
ACTION_IN_PAST_TENSE = "Copied"
def __init__(self, template_fs, extensions=None):
self.template_fs = template_fs
def get_template(self, template_file):
return self.template_fs.readbytes(template_file)
def get_template_from_string(self, string):
return string
def apply_template(self, template, *_):
return template
|
# -*- coding: utf-8 -*-
"""
Feature Engineering
"""
import numpy as np
from functions import inv_log
from helpers import *
from preprocessing import *
def build_poly(x, degree, roots=False):
"""polynomial basis functions for input data x, for j=0 up to j=degree."""
assert type(degree) == int, "degree must be of type int"
assert degree >= 1, "degree must non-negative"
poly = []
for deg in range(1, degree + 1):
if roots:
deg = 1 / deg
poly.append(np.power(x, deg))
return np.concatenate(poly, axis=1)
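# Worked example: for x of shape (n, 2) and degree=3 this stacks
# [x, x**2, x**3] column-wise into shape (n, 6); with roots=True the
# exponents become 1, 1/2 and 1/3 instead.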
def build_x(x_train, x_test, degree, root=None, replace_by="mf", f_mask=None,
log_degree=None, inv_log_degree=None, tanh_degree=None, fn_log=True, fn_inv_log=True,
fn_tanh=True, functions=None, invalid_value=-999, print_=False):
if print_:
print(degree, root, log_degree, inv_log_degree, tanh_degree, fn_log, fn_inv_log, fn_tanh, functions)
assert f_mask is None or len(f_mask) == x_train.shape[1]  # x itself is built below
assert log_degree is None or isinstance(log_degree, int)
assert inv_log_degree is None or isinstance(inv_log_degree, int)
train_size = x_train.shape[0]
test_size = x_test.shape[0]
x = np.concatenate((x_train, x_test))
# Preprocessing
if print_:
print("Starting pre-processing")
if f_mask is not None:
x = x[:, f_mask]
x = replace_invalid(x, x != invalid_value, replace_by=replace_by)
x_non_negative = x - x.min(axis=0)
x_std = standardize(x.copy())
# Features Engineering
# poly
if print_:
print("Starting poly")
x = build_poly(x_std, degree)
if tanh_degree is not None:
x_tanh = standardize(np.tanh(x_std))
x = np.concatenate((x, build_poly(x_tanh, tanh_degree)), axis=1)
if log_degree is not None:
x_log = standardize(np.log(1 + x_non_negative))
x = np.concatenate((x, build_poly(x_log, log_degree)), axis=1)
if inv_log_degree is not None:
x_inv_log = standardize(inv_log(x_non_negative))
x = np.concatenate((x, build_poly(x_inv_log, inv_log_degree)), axis=1)
if root is not None:
x = np.concatenate((x, build_poly(x_non_negative, root, roots=True)[:, x_non_negative.shape[1]:]), axis=1)
# combinations with functions
if print_:
print("Starting combinations")
if functions is not None:
x_comb = x_std.copy()
if fn_tanh:
x_tanh = standardize(np.tanh(x_std))
x_comb = np.concatenate((x_comb, x_tanh), axis=1)
if fn_log:
x_log = np.log(1 + x_non_negative)
x_comb = np.concatenate((x_comb, x_log), axis=1)
if fn_inv_log:
x_inv_log = standardize(inv_log(x_non_negative))
x_comb = np.concatenate((x_comb, x_inv_log), axis=1)
for fn in functions:
x = np.concatenate((x, combinations_of(x_comb, fn, create_pairs(x_comb.shape[1], x_comb.shape[1]))), axis=1)
x = np.concatenate((np.ones(x.shape[0]).reshape((x.shape[0], 1)), x), axis=1)
if print_:
print("Final shape: {}".format(x.shape))
return x[:train_size], x[train_size:]
|
"""
Demo/test program for the DS18B20 driver.
See https://github.com/sensemakersamsterdam/astroplant_explorer
"""
#
# (c) Sensemakersams.org and others. See https://github.com/sensemakersamsterdam/astroplant_explorer
# Author: Ted van der Togt
#
# Warning: if import of ae_* modules fails, then you need to set up PYTHONPATH.
# To test start python, import sys and type sys.path. The ae module directory
# should be included.
from time import sleep
from ae_drivers.ds18b20 import AE_DS18B20
ds18b20_1 = AE_DS18B20('ds18b20_1', '(Water)Temperature')
ds18b20_1.setup()
print('DS18B20 demo. ds18b20_1 prints as:', ds18b20_1)
print('and its description is:', ds18b20_1.description)
for _ in range(10):
print('Water temperature= %s Celsius' % ds18b20_1.value())
sleep(1)
|
#!/usr/bin/python3
#Copyright 2018 Nicolas Bonnand
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import (QFileDialog,QInputDialog,QAbstractScrollArea,QAbstractItemView,QDockWidget,QCompleter,QItemDelegate,QHeaderView,QDesktopWidget,QTreeView,QSizePolicy,QTableView)
from PyQt5.QtCore import (QTimer,QStringListModel,QSettings, QPoint, QSize, QVariant, QSortFilterProxyModel)
from PyQt5.QtGui import (QStandardItemModel,QStandardItem,QPixmap,QPalette)
from PyQt5.QtCore import QT_VERSION_STR
from PyQt5.Qt import PYQT_VERSION_STR
from sip import SIP_VERSION_STR
from PyQt5.QtCore import pyqtRemoveInputHook
import random
import sys
import time
import os
from os.path import expanduser
from pathlib import Path
from lxml import etree
import subprocess
import re
import tempfile
import inspect # for debugging
import json
import paramiko
import socket
import tarfile
import io
import platform
# for debugging only (remove it)
from pprint import pprint
from pdb import set_trace
|
def modulo(dividend, divisor):
return dividend % divisor == 0
for i in range(1, 101):
output = ''
if modulo(i, 3):
output += 'Fizz'
if modulo(i, 5):
output += 'Buzz'
if i != 100:
print(f"{i}," if output == '' else f"{output},", end=" ")
else:
print(output)
|
import os
def breakpoint():
import rpdb2
os.system("winpdb -r &")
rpdb2.start_embedded_debugger("password")
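# Usage sketch: call breakpoint() where you want to stop; it launches the
# winpdb GUI in the background and blocks in start_embedded_debugger until
# you attach with the password above ("password").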
|
from django.shortcuts import render, reverse
from django.core.paginator import Paginator
from django.http import HttpResponseRedirect
from django.views import View
from django.contrib.auth import logout
from django.contrib.auth.password_validation import validate_password
from django.contrib.auth import get_user_model
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic.base import RedirectView
from django.views.generic import TemplateView
from django.contrib import messages
from django.http import JsonResponse
from django.template.loader import render_to_string
from random import randint
from .models import Beaver, ResetPasswordModel
from .forms import BeaverForm, UpdateForm, PasswordForm
from .constants import AuthConstants
from .auth_forms import UserLoginForm, UserSignUpForm
from .utils import getBeaverInstance
from .tasks import sendEmail
User = get_user_model()
class LandingView(View):
template_name = "loginsignup/landing.html"
def get(self, request):
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("posts:feed"))
return render(request, self.template_name)
class LoginView(View):
template_name = "loginsignup/loginpage_.html"
form_class = UserLoginForm
def get(self, request):
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("posts:feed"))
return render(request, self.template_name)
def post(self, request):
userLoginForm = self.form_class(request.POST)
if userLoginForm.login_user(request):
# Use a redirect to the feed page
if Beaver.objects.filter(user=request.user).exists():
next = request.GET.get("next")
if next:
return HttpResponseRedirect(next)
return HttpResponseRedirect(reverse("posts:feed"))
else:
return HttpResponseRedirect(reverse("loginsignup:complete"))
else:
kwargs = {"form": userLoginForm}
return render(request, self.template_name, kwargs)
class SignUpView(View):
template_name = "loginsignup/signup_quiver.html"
form_class = UserSignUpForm
def get(self, request):
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("posts:create-post"))
return render(request, self.template_name)
def post(self, request):
userSignUpForm = self.form_class(request.POST)
if userSignUpForm.signUpUser(request):
return HttpResponseRedirect(reverse("loginsignup:complete"))
else:
kwargs = {"form": userSignUpForm}
return render(request, self.template_name, kwargs)
# TODO : Refactor this view
class ResetPasswordView(View):
template_name = "loginsignup/reset_password.html"
def get(self, request):
if request.user.is_authenticated:
return HttpResponseRedirect(reverse("posts:create-post"))
securityKeyDisplay = False
# Pass the resetpassword page here along with the above variable
return render(
request,
self.template_name,
{"securityKey": securityKeyDisplay, "PasswordKey": False},
)
def post(self, request):
validate = request.POST.get("validate")
if validate != "True":
# Ask for username ( required )
username = request.POST.get("username")
user = User.objects.filter(username=username)
if not user.exists():
securityKeyDisplay = False
errorMessage = AuthConstants.noUser.value
# Pass the error message to the render function
return render(
request,
self.template_name,
{
"securityKey": securityKeyDisplay,
"PasswordKey": False,
"error": errorMessage,
},
)
resetlink, created = ResetPasswordModel.objects.get_or_create(user=user[0])
if created:
securityCode = randint(100000, 999999) # 6 digit security code
resetlink.securityCode = securityCode
resetlink.save()
message_to_be_sent = f"Verification Code : {resetlink.securityCode}"
# log.error(resetlink.securityCode)
sendEmail.delay(
user.first().email, "Password Update Request", message_to_be_sent
)
messages.info(request, AuthConstants.codeMail.value, fail_silently=True)
return render(
request,
self.template_name,
{"securityKey": True, "PasswordKey": False, "user": username},
)
# Mail this security code to the client
# Pass a flag to this page so that the username entry becomes
# disabled and enable password create field
# If all of them match
else:
username = request.POST.get("user")
user = User.objects.get(username=username)
passwordKey = request.POST.get("passkey")
if passwordKey != "True":
securityCodeReceived = int(request.POST.get("securityCode"))
check = ResetPasswordModel.validateCode(securityCodeReceived, user)
if check["status"]:
return render(
request,
self.template_name,
{"securityKey": True, "PasswordKey": True, "user": username},
)
else:
errorMessage = check["error"]
return render(
request,
self.template_name,
{
"securityKey": True,
"PasswordKey": False,
"error": errorMessage.value,
"user": username,
},
)
else:
password = request.POST.get("password")
try:
validate_password(password)
except Exception as error:
errorMessage = list(error)[0]
return render(
request,
self.template_name,
{
"securityKey": True,
"PasswordKey": True,
"error": errorMessage,
"user": username,
},
)
user.set_password(password)
user.save()
messages.success(
request, AuthConstants.passwordUpdated.value, fail_silently=True
)
return HttpResponseRedirect(reverse("loginsignup:login"))
class ResendCodeView(RedirectView):
permanent = False
pattern_name = "loginsignup:reset"
def dispatch(self, request, *args, **kwargs):
message = AuthConstants.askUsername.value
messages.success(request, message, fail_silently=True)
return super().dispatch(request, *args, **kwargs)
class LogoutView(RedirectView):
permanent = False
pattern_name = "loginsignup:login"
def dispatch(self, request, *args, **kwargs):
logout(request)
message = AuthConstants.sucessLogout.value
messages.success(request, message, fail_silently=True)
return super().dispatch(request, *args, **kwargs)
class CompleteView(LoginRequiredMixin, View):
form_class = BeaverForm
template_name = "loginsignup/completeprofile.html"
redirect_field_name = "next"
def get(self, request):
return render(request, self.template_name)
def post(self, request):
beaverForm = self.form_class(request.POST, request.FILES)
if beaverForm.checkProfile(request):
return HttpResponseRedirect(reverse("posts:feed"))
else:
kwargs = {"form": beaverForm}
return render(request, self.template_name, kwargs)
class FriendsListView(LoginRequiredMixin, TemplateView):
template_name = "loginsignup/friend_filter.html"
redirect_field_name = "next"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
user = self.request.user
beaver = Beaver.objects.get(user=user)
search = self.request.GET.get("search") or ""
friends = beaver.friends.filter(user__username__icontains=search)
friend_list = []
for friend in friends:
friend_list.append(
{
"name": friend.user.username,
"profile_photo": friend.profile_photo,
"bio": friend.bio,
}
)
paginator = Paginator(friend_list, 10) # Show 10 contacts per page.
page_number = self.request.GET.get("page")
page_obj = paginator.get_page(page_number)
context["page_obj"] = page_obj
return context
def filter_friends(request):
if request.is_ajax():
user = request.user
if user is None:
return JsonResponse({})
beaver = Beaver.objects.get(user=user)
search = request.GET.get("search") or ""
friend_list = []
friends = beaver.friends.filter(user__username__icontains=search)
for friend in friends:
friend_list.append(
{
"name": friend.user.username,
"profile_photo": friend.profile_photo,
"bio": friend.bio,
}
)
paginator = Paginator(friend_list, 10) # Show 10 contacts per page.
page_number = request.GET.get("page")
page_obj = paginator.get_page(page_number)
data = render_to_string(
template_name="loginsignup/friend_filter_partial.html",
context={"page_obj": page_obj},
)
return JsonResponse(data, safe=False)
def unfriend(request):
if request.is_ajax():
user = request.user
if user is None:
return JsonResponse({})
beaver = Beaver.objects.get(user=user)
username = request.GET.get("username")
if username:
user = User.objects.get(username=username)
Beaver.remove_friend(user, beaver)
friend_list = []
friends = beaver.friends.all()
for friend in friends:
friend_list.append(
{
"name": friend.user.username,
"profile_photo": friend.profile_photo,
"bio": friend.bio,
}
)
paginator = Paginator(friend_list, 10) # Show 10 contacts per page.
page_number = request.GET.get("page")
page_obj = paginator.get_page(page_number)
data = render_to_string(
template_name="loginsignup/friend_filter_partial.html",
context={"page_obj": page_obj},
)
return JsonResponse(data, safe=False)
class BeaverListView(LoginRequiredMixin, TemplateView):
template_name = "loginsignup/discover_filter.html"
redirect_field_name = "next"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
search = self.request.GET.get("search") or ""
friends = Beaver.objects.filter(user__username__icontains=search)
friend_list = []
for friend in friends:
friend_list.append(
{
"name": friend.user.username,
"profile_photo": friend.profile_photo,
"bio": friend.bio,
}
)
paginator = Paginator(friend_list, 10) # Show 10 contacts per page.
page_number = self.request.GET.get("page")
page_obj = paginator.get_page(page_number)
context["page_obj"] = page_obj
return context
def beaver_filter(request):
if request.is_ajax():
user = request.user
if user is None:
return JsonResponse({})
search = request.GET.get("search") or ""
friend_list = []
friends = Beaver.objects.filter(user__username__icontains=search)
for friend in friends:
friend_list.append(
{
"name": friend.user.username,
"profile_photo": friend.profile_photo,
"bio": friend.bio,
}
)
paginator = Paginator(friend_list, 10) # Show 10 contacts per page.
page_number = request.GET.get("page")
page_obj = paginator.get_page(page_number)
data = render_to_string(
template_name="loginsignup/discover_filter_partial.html",
context={"page_obj": page_obj},
)
return JsonResponse(data, safe=False)
class UpdateProfileView(LoginRequiredMixin, View):
template_name = "loginsignup/updateprofile.html"
redirect_field_name = "next"
def get(self, request):
user = request.user
beaver = getBeaverInstance(request)
kwargs = {
"user": user,
"beaver": beaver,
}
return render(request, self.template_name, kwargs)
def post(self, request):
updateForm = UpdateForm(request.POST, request.FILES)
if updateForm.update(request):
message = "Profile updated successfully"
messages.success(request, message, fail_silently=True)
return HttpResponseRedirect(reverse("personal"))
user = request.user
beaver = getBeaverInstance(request)
kwargs = {"form": updateForm, "user": user, "beaver": beaver}
return render(request, self.template_name, kwargs)
class UpdatePasswordView(LoginRequiredMixin, View):
template_name = "loginsignup/update_password.html"
redirect_field_name = "next"
form_class = PasswordForm
def get(self, request):
return render(request, self.template_name)
def post(self, request):
passwordForm = self.form_class(request.POST)
if passwordForm.updatePassword(request):
message = "Password updated successfully"
messages.success(request, message, fail_silently=True)
return HttpResponseRedirect(reverse("personal"))
else:
kwargs = {"form": passwordForm}
return render(request, self.template_name, kwargs)
|
import json
import os
import threading
import time
from queue import Queue
from flask import Flask
app = Flask(__name__)
tasks = {}
@app.route('/deploy/<name>/', methods=['GET', 'POST'])
def run_script(name):
try:
config_file_path = None
for file_path in ['/etc/hook.json', './hook.json']:
if os.path.exists(file_path):
config_file_path = file_path
break
with open(config_file_path, 'r') as f:
hook_config = json.load(f)
if name in hook_config and 'cmd' in hook_config[name]:
tasks.setdefault(name, Queue()).put(hook_config[name]['cmd'])
except Exception as e:
print(e)
return name
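# hook.json sketch (assumed shape, inferred from the lookup above):
# { "myapp": { "cmd": "cd /srv/myapp && git pull && systemctl restart myapp" } }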
def task_consumer():
while True:
time.sleep(2)
try:
for q in tasks.values():
if not q.empty():
cmd = q.get()
print(cmd)
os.system(cmd)
except Exception as e:
print(e)
if __name__ == '__main__':
t = threading.Thread(target=task_consumer)
t.daemon = True
t.start()
app.run(host='0.0.0.0', debug=True)
|
"""
Bombs away
Ported from BASIC to Python3 by Bernard Cooke (bernardcooke53)
Tested with Python 3.8.10, formatted with Black and type checked with mypy.
"""
import random
from typing import Iterable
def _stdin_choice(prompt: str, *, choices: Iterable[str]) -> str:
ret = input(prompt)
while ret not in choices:
print("TRY AGAIN...")
ret = input(prompt)
return ret
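# e.g. _stdin_choice("PICK A SIDE (1 OR 2)? ", choices={"1", "2"}) re-prompts
# with "TRY AGAIN..." until the input is one of the given choices.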
def player_survived() -> None:
print("YOU MADE IT THROUGH TREMENDOUS FLAK!!")
def player_death() -> None:
print("* * * * BOOM * * * *")
print("YOU HAVE BEEN SHOT DOWN.....")
print("DEARLY BELOVED, WE ARE GATHERED HERE TODAY TO PAY OUR")
print("LAST TRIBUTE...")
def mission_success() -> None:
print(f"DIRECT HIT!!!! {int(100 * random.random())} KILLED.")
print("MISSION SUCCESSFUL.")
def death_with_chance(p_death: float) -> bool:
"""
Takes a float between 0 and 1 and returns a boolean
indicating whether the player has died (based on random chance).
Returns True if death, False if survived.
"""
return p_death > random.random()
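# e.g. death_with_chance(0.35) returns True on roughly 35% of calls, since
# random.random() is uniform on [0, 1).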
def commence_non_kamikazi_attack() -> None:
while True:
try:
nmissions = int(input("HOW MANY MISSIONS HAVE YOU FLOWN? "))
while nmissions >= 160:
print("MISSIONS, NOT MILES...")
print("150 MISSIONS IS HIGH EVEN FOR OLD-TIMERS")
nmissions = int(input("NOW THEN, HOW MANY MISSIONS HAVE YOU FLOWN? "))
break
except ValueError:
# In the BASIC implementation this
# wasn't accounted for
print("TRY AGAIN...")
continue
if nmissions >= 100:
print("THAT'S PUSHING THE ODDS!")
if nmissions < 25:
print("FRESH OUT OF TRAINING, EH?")
print()
return (
mission_success() if nmissions >= 160 * random.random() else mission_failure()
)
def mission_failure() -> None:
weapons_choices = {
"1": "GUNS",
"2": "MISSILES",
"3": "BOTH",
}
print(f"MISSED TARGET BY {int(2 + 30 * random.random())} MILES!")
print("NOW YOU'RE REALLY IN FOR IT !!")
print()
enemy_weapons = _stdin_choice(
prompt="DOES THE ENEMY HAVE GUNS(1), MISSILES(2), OR BOTH(3)? ",
choices=weapons_choices,
)
# If there are no gunners (i.e. weapon choice 2) then
# we say that the gunners have 0 accuracy for the purposes
# of calculating probability of player death
enemy_gunner_accuracy = 0.0
if enemy_weapons != "2":
# If the enemy has guns, how accurate are the gunners?
while True:
try:
enemy_gunner_accuracy = float(
input("WHAT'S THE PERCENT HIT RATE OF ENEMY GUNNERS (10 TO 50)? ")
)
break
except ValueError:
# In the BASIC implementation this
# wasn't accounted for
print("TRY AGAIN...")
continue
if enemy_gunner_accuracy < 10:
print("YOU LIE, BUT YOU'LL PAY...")
return player_death()
missile_threat_weighting = 0 if enemy_weapons == "1" else 35
death = death_with_chance(
p_death=(enemy_gunner_accuracy + missile_threat_weighting) / 100
)
return player_survived() if not death else player_death()
def play_italy() -> None:
targets_to_messages = {
# 1 - ALBANIA, 2 - GREECE, 3 - NORTH AFRICA
"1": "SHOULD BE EASY -- YOU'RE FLYING A NAZI-MADE PLANE.",
"2": "BE CAREFUL!!!",
"3": "YOU'RE GOING FOR THE OIL, EH?",
}
target = _stdin_choice(
prompt="YOUR TARGET -- ALBANIA(1), GREECE(2), NORTH AFRICA(3)",
choices=targets_to_messages,
)
print(targets_to_messages[target])
return commence_non_kamikazi_attack()
def play_allies() -> None:
aircraft_to_message = {
"1": "YOU'VE GOT 2 TONS OF BOMBS FLYING FOR PLOESTI.",
"2": "YOU'RE DUMPING THE A-BOMB ON HIROSHIMA.",
"3": "YOU'RE CHASING THE BISMARK IN THE NORTH SEA.",
"4": "YOU'RE BUSTING A GERMAN HEAVY WATER PLANT IN THE RUHR.",
}
aircraft = _stdin_choice(
prompt="AIRCRAFT -- LIBERATOR(1), B-29(2), B-17(3), LANCASTER(4): ",
choices=aircraft_to_message,
)
print(aircraft_to_message[aircraft])
return commence_non_kamikazi_attack()
def play_japan() -> None:
print("YOU'RE FLYING A KAMIKAZE MISSION OVER THE USS LEXINGTON.")
first_mission = input("YOUR FIRST KAMIKAZE MISSION? (Y OR N): ")
if first_mission.lower() == "n":
return player_death()
return mission_success() if random.random() > 0.65 else player_death()
def play_germany() -> None:
targets_to_messages = {
# 1 - RUSSIA, 2 - ENGLAND, 3 - FRANCE
"1": "YOU'RE NEARING STALINGRAD.",
"2": "NEARING LONDON. BE CAREFUL, THEY'VE GOT RADAR.",
"3": "NEARING VERSAILLES. DUCK SOUP. THEY'RE NEARLY DEFENSELESS.",
}
target = _stdin_choice(
prompt="A NAZI, EH? OH WELL. ARE YOU GOING FOR RUSSIA(1),\nENGLAND(2), OR FRANCE(3)? ",
choices=targets_to_messages,
)
print(targets_to_messages[target])
return commence_non_kamikazi_attack()
def play_game() -> None:
print("YOU ARE A PILOT IN A WORLD WAR II BOMBER.")
sides = {"1": play_italy, "2": play_allies, "3": play_japan, "4": play_germany}
side = _stdin_choice(
prompt="WHAT SIDE -- ITALY(1), ALLIES(2), JAPAN(3), GERMANY(4): ", choices=sides
)
return sides[side]()
if __name__ == "__main__":
again = True
while again:
play_game()
again = input("ANOTHER MISSION? (Y OR N): ").upper() == "Y"
|
import unittest
import os
import sys
import nbformat as nbf
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + '/../journal_migration/')
sys.path.insert(0, myPath + '/../util/')
from journal_markdown import journalMarkdown
class testJournalMarkdown(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_generate_new_text(self):
input_keys = ['jm', 'jm2', 'jm3', 'jm4', 'jm5', 'jm6', 'jm7', 'jm8']
input_dict = {
'jm': journalMarkdown(0, "++ test\n", 'heading2', "./tmp/"),
'jm2': journalMarkdown(0, "*test*", 'bold', "./tmp/"),
'jm3': journalMarkdown(0, "* test", 'bullet', "./tmp/"),
'jm4': journalMarkdown(0, "# test\n# test2\n# test3", 'number', "./tmp/"),
'jm5': journalMarkdown(0, "+ test", 'heading1', "./tmp/"),
'jm6': journalMarkdown(0, "+++ test", 'heading3', "./tmp/"),
'jm7': journalMarkdown(0, "++++ test", 'heading4', "./tmp/"),
'jm8': journalMarkdown(0, "<<*test*>>", 'no_format', "./tmp/"),
}
output = {}
xpctdout = {
'jm': '## test',
'jm2': '*test*',
'jm3': '- test',
'jm4': '1. test\n2. test2\n3. test3',
'jm5': '# test',
'jm6': '### test',
'jm7': '#### test',
'jm8': 'test'
}
for i in input_keys:
output[i] = input_dict[i].get_new_text()
for key in input_keys:
self.assertEqual(xpctdout[key], output[key], 'not equal')
def test_convert_heading1(self):
input = [
'+ test\n',
'+ test/test2/test3\n',
'+ test the testing testers\n'
]
output = []
xpctdout = [
'# test',
'# test/test2/test3',
'# test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'heading1', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_heading2(self):
input = [
'++ test\n',
'++ test/test2/test3\n',
'++ test the testing testers\n'
]
output = []
xpctdout = [
'## test',
'## test/test2/test3',
'## test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'heading2', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_heading3(self):
input = [
'+++ test\n',
'+++ test/test2/test3\n',
'+++ test the testing testers\n'
]
output = []
xpctdout = [
'### test',
'### test/test2/test3',
'### test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'heading3', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_heading4(self):
input = [
'++++ test\n',
'++++ test/test2/test3\n',
'++++ test the testing testers\n'
]
output = []
xpctdout = [
'#### test',
'#### test/test2/test3',
'#### test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'heading4', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_bold_text(self):
input = [
'*test*',
'*test/test2/test3*',
'*test the testing testers*'
]
output = []
xpctdout = [
'*test*',
'*test/test2/test3*',
'*test the testing testers*'
]
for i in input:
output.append(journalMarkdown(0, i, 'bold', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_bulleted_text(self):
input = [
'* test',
'* test/test2/test3',
'* test the testing testers'
]
output = []
xpctdout = [
'- test',
'- test/test2/test3',
'- test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'bullet', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_convert_numbered_text(self):
input = [
' # test\n',
' # test\n # test2\n # test3\n',
' # test\n # the\n # testing\n # testers\n # test2\n'
]
output = []
xpctdout = [
' 1. test\n',
' 1. test\n 2. test2\n 3. test3\n',
' 1. test\n 1. the\n 2. testing\n 3. testers\n 2. test2\n'
]
for i in input:
output.append(journalMarkdown(0, i, 'number', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
def test_find_col_count(self):
input = [
' #',
' #',
' #',
' #',
'\t\t\t#',
'\t #'
]
output = []
xpctdout = [
1,
2,
3,
4,
3,
1
]
for i in input:
output.append(journalMarkdown(0, i, 'bullet', "./tmp/").find_col_count(i, 4))
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], str(ndx) + ' not equal')
def test_convert_no_formatted_text(self):
input = [
'<test>',
'<test/test2/test3>',
'<test the testing testers>'
]
output = []
xpctdout = [
'test',
'test/test2/test3',
'test the testing testers'
]
for i in input:
output.append(journalMarkdown(0, i, 'no_format', "./tmp/").get_new_text())
for ndx in range(len(xpctdout)):
self.assertEqual(xpctdout[ndx], output[ndx], 'not equal')
if __name__ == '__main__':
unittest.main()
|
import os
import requests
# configuration, set DOMAIN and NAMECHEAP_DDNS_PW as env vars
HOST = '@'
DOMAIN = os.environ['DOMAIN']
PW = os.environ['NAMECHEAP_DDNS_PW']
ENDPOINT = 'https://dynamicdns.park-your-domain.com/update?'
# get the IP address of the system
def get_ip():
return requests.get('https://dynamicdns.park-your-domain.com/getip').text
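# The update request built in main() below ends up looking like (password elided):
# https://dynamicdns.park-your-domain.com/update?host=@&domain=<DOMAIN>&password=...&ip=<IP>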
# update Namecheap with your current IP address
def main():
payload = {'host': HOST, 'domain': DOMAIN, 'password': PW, 'ip': get_ip()}
r = requests.get(ENDPOINT, params=payload)
print(r.url)
print(r.status_code)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-08 04:56
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('instagram', '0002_auto_20171128_2207'),
]
operations = [
migrations.CreateModel(
name='Review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('comment', models.CharField(blank=True, max_length=140)),
],
),
migrations.AddField(
model_name='post',
name='downvote_count',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='post',
name='upvote_count',
field=models.PositiveIntegerField(default=0),
),
migrations.AddField(
model_name='review',
name='photos',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vote', to='instagram.Post'),
),
migrations.AddField(
model_name='review',
name='users',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='vote', to=settings.AUTH_USER_MODEL),
),
]
|
{
'targets': [
{
'target_name': 'addon',
'sources': [
'../../tmp/linear_model.cc',
'../../tmp/particle.cc',
'../../tmp/fastslam.cc',
'addon.cc'
],
'include_dirs': [
'src',
'.',
'/opt/intel/compilers_and_libraries/linux/mkl/include/',
'/usr/include/hdf5/serial'
],
"link_settings": {
"libraries": [
"-lmkl_core",
"-lmkl_def",
"-lmkl_intel_thread",
"-lmkl_core",
"-liomp5",
"-larmadillo",
"-lm",
"-ldl",
"-lhdf5_cpp",
"-lhdf5"
],
"ldflags": [
"-L.",
"-L/app",
"-L/usr/local/hdf5/lib",
"-L./build/Release"
"-L./build/Release/obj.target",
"-L/opt/intel/compilers_and_libraries/linux/mkl/lib/intel64_lin",
"-L/opt/intel/compilers_and_libraries/linux/lib/intel64",
"-L/usr/lib/x86_64-linux-gnu/hdf5/serial",
"-Wl,-rpath=./build/Release",
"-Wl,-rpath=/app",
"-Wl,-rpath,@loader_path",
"-Wl,-rpath=/usr/local/lib64",
"-Wl,-rpath=/usr/local/lib",
"-Wl,-rpath=/usr/lib",
"-Wl,-rpath=/opt/intel/compilers_and_libraries/linux/mkl/lib/intel64_lin",
"-Wl,-rpath=/opt/intel/compilers_and_libraries/linux/lib/intel64"
]
},
'configurations': {
'Release': {
'cflags_cc': [
'-O3',
'-std=c++17',
'-m64'
],
'cflags_cc!': [
'-fno-rtti',
'-pthread',
'-fno-omit-frame-pointer',
'-fno-exceptions',
'-std=gnu++1y',
]
}
}
}
]
}
|
""""
Script for preprocessing and train-test splitting the white wine dataset.
Usage:
preprocess.py <data_folder> <raw_data_file>
Options:
"""
from docopt import docopt
from sklearn.model_selection import train_test_split, cross_val_score, cross_validate
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import make_pipeline
from sklearn.compose import ColumnTransformer
import pandas as pd
import os
import random
def preprocess_data(data_folder, raw_data_file):
"""Function for preprocessing the white wine quality dataset
Parameters:
data_folder (str): data directory
raw_data_file (str): raw data file
Returns:
train_df (pandas.core.frame.DataFrame): training data
test_df (pandas.core.frame.DataFrame): training data
Example:
train_df, test_df = preprocess_data('data', 'raw_data.csv')
"""
random.seed(2020)
if not isinstance(data_folder, str):
raise ValueError("data_folder argument should be passed as str.")
if not isinstance(raw_data_file, str):
raise ValueError("raw_data_file argument should be passed as str.")
# Read raw data from data folder:
raw_data = pd.read_csv(os.path.join(data_folder, raw_data_file), index_col=0)
numeric_features = [
"fixed acidity",
"volatile acidity",
"citric acid",
"residual sugar",
"chlorides",
"free sulfur dioxide",
"total sulfur dioxide",
"density",
"pH",
"sulphates",
"alcohol",
]
# Set up preprocessing pipeline:
numeric_transformer = make_pipeline(StandardScaler())
preprocessor = ColumnTransformer(
transformers=[("num", numeric_transformer, numeric_features),],
remainder="passthrough",
) # quality should passthrough pipeline since it is the target
# Keep 25% of the data for testing:
train_df, test_df = train_test_split(raw_data, test_size=0.25, random_state=123)
# Apply the preprocessor to the data:
columns = [
"fixed acidity",
"volatile acidity",
"citric acid",
"residual sugar",
"chlorides",
"free sulfur dioxide",
"total sulfur dioxide",
"density",
"pH",
"sulphates",
"alcohol",
"quality",
]
train_df = pd.DataFrame(preprocessor.fit_transform(train_df), columns=columns)
test_df = pd.DataFrame(preprocessor.transform(test_df), columns=columns)
return train_df, test_df
if __name__ == "__main__":
args = docopt(__doc__)
train_df, test_df = preprocess_data(args["<data_folder>"], args["<raw_data_file>"])
train_df.to_feather(
os.path.join(args["<data_folder>"], "train_df.feather"),
compression="uncompressed",
)
test_df.to_feather(
os.path.join(args["<data_folder>"], "test_df.feather"),
compression="uncompressed",
)
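# Hedged usage sketch; the file names follow the docstring example above:
#
#     python preprocess.py data raw_data.csv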
|
number = ["1", "2", "3", "4", "5", "6","7", "8", "9", "10"]; # inputing datas to number
print (number[0]); # print the 0 value from array output : 1
print (number[1]); # print the 1 value from array output : 2
print (number[2]); # print the 2 value from array output : 3
print (number[3]); # print the 3 value from array output : 4
print (number[4]); # print the 4 value from array output : 5
print (number[5]); # print the 5 value from array output : 6
print (number[6]); # print the 6 value from array output : 7
print (number[7]); # print the 7 value from array output : 8
print (number[8]); # print the 8 value from array output : 9
print (number[9]); # print the 9 value from array output : 10
|
from ...abc import Expression
class CONTAINS(Expression):
def __init__(self, app, *, arg_what, arg_substring):
super().__init__(app)
self.Value = arg_what
self.Substring = arg_substring
def __call__(self, context, event, *args, **kwargs):
value = self.evaluate(self.Value, context, event, *args, **kwargs)
substr = self.evaluate(self.Substring, context, event, *args, **kwargs)
        return substr in value  # str has no .contains(); the `in` operator is the membership test
|
# https://leetcode.com/problems/search-in-rotated-sorted-array/
from typing import List
import unittest
MINIMUM_ARRAY_LENGTH = 1
MAXIMUM_ARRAY_LENGTH = 5 * 10**3
MINIMUM_NUMBER_VALUE = -10**4
MAXIMUM_NUMBER_VALUE = 10**4
class SolutionValidator(object):
def validate_array_length(self, array_len: int) -> None:
if MINIMUM_ARRAY_LENGTH > array_len or array_len > MAXIMUM_ARRAY_LENGTH:
raise ValueError('Invalid array length')
def validate_number_value(self, number: int) -> None:
if MINIMUM_NUMBER_VALUE > number or number > MAXIMUM_NUMBER_VALUE:
raise ValueError('Invalid number value')
def validate_unique_numbers(self, len_array: int, len_set: int) -> None:
if len_array != len_set:
raise ValueError('The array numbers are not unique')
class Solution:
def __init__(self):
""" There is an integer array nums sorted in ascending order (with distinct values).
Prior to being passed to your function, nums is possibly rotated at an unknown
pivot index k (1 <= k < nums.length) such that the resulting array is
[nums[k], nums[k+1], ..., nums[n-1], nums[0], nums[1], ..., nums[k-1]] (0-indexed).
For example, [0,1,2,4,5,6,7] might be rotated at pivot index 3 and become [4,5,6,7,0,1,2].
Given the array nums after the possible rotation and an integer target, return the index
of target if it is in nums, or -1 if it is not in nums.
You must write an algorithm with O(log n) runtime complexity.
"""
self.validator = SolutionValidator()
def search(self, nums: List[int], target: int) -> int:
# Basic validations
len_nums = len(nums)
self.validator.validate_array_length(len_nums)
self.validator.validate_unique_numbers(len_nums, len(set(nums)))
self.validator.validate_number_value(target)
result = -1 # target not in array
        end = len_nums - 1
for start in range(len_nums//2 + 1):
self.validator.validate_number_value(nums[start])
if nums[start] == target:
return start
self.validator.validate_number_value(nums[end - start])
if nums[end - start] == target:
return end - start
return result
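    def search_logn(self, nums: List[int], target: int) -> int:
        """Hedged sketch, not part of the original solution: `search` above is
        a linear two-ended scan (O(n)), while the class docstring requires
        O(log n). This modified binary search meets that bound, assuming the
        distinct values the problem guarantees.
        """
        lo, hi = 0, len(nums) - 1
        while lo <= hi:
            mid = (lo + hi) // 2
            if nums[mid] == target:
                return mid
            if nums[lo] <= nums[mid]:  # left half [lo, mid] is sorted
                if nums[lo] <= target < nums[mid]:
                    hi = mid - 1
                else:
                    lo = mid + 1
            else:  # right half [mid, hi] is sorted
                if nums[mid] < target <= nums[hi]:
                    lo = mid + 1
                else:
                    hi = mid - 1
        return -1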
def main():
tc = unittest.TestCase()
sol = Solution()
print('Example 1')
tc.assertEqual(sol.search([4,5,6,7,0,1,2], 0), 4)
print('Example 2')
tc.assertEqual(sol.search([4,5,6,7,0,1,2], 3), -1)
print('Example 3')
tc.assertEqual(sol.search([1], 0), -1)
print('Example 4')
tc.assertEqual(sol.search([9], 0), -1)
print('Example 5')
tc.assertEqual(sol.search([94,95,96,97,98,99,0,1,2,3,4,5,6,7,8,9,10], 5), 11)
if __name__ == "__main__":
main()
|
import cv2
import numpy as np
# load the color image
img_1 = cv2.imread("Images\\sunflower.png", 1)
# load the grayscale image
img_2 = cv2.imread("Images\\sunflower.png", 0)
# resize the color image to half of its original size
resized_img_1 = cv2.resize(img_1, (int(img_1.shape[1]/2), int(img_1.shape[0]/2)))
# print the images' shapes (dimensions)
print(img_1.shape)
print(img_2.shape)
# display the loaded images
cv2.imshow("Colored Image", img_1)
cv2.imshow("Grayscale Image", img_2)
cv2.imshow("Resized Image", resized_img_1)
cv2.waitKey(0)
#cv2.waitKey(2000)
cv2.destroyAllWindows()
|
import contact_angle as cnt
import mdtraj as md
import numpy as np
from contact_angle.utils.general import get_fn
def test_flipped():
"""Same trajectory, but rotated around x-axis - should give same contact angle
"""
traj = md.load(get_fn('chol-tail-original.dcd'),
top=get_fn('chol-wetting.hoomdxml'))
ca = cnt.calc_contact_angle(traj.xyz[:, 14400:]*6, guess_R=4.0, guess_z0=1.8,
guess_rho_n=1.0, left_tol=0.1, z_range=(-0.1, 9), surface_normal='z',
n_bins=100, fit_range=(2, 4.0), droplet_location='above')
traj = md.load(get_fn('chol-tail-rotated.dcd'),
top=get_fn('chol-wetting.hoomdxml'))
ca2 = cnt.calc_contact_angle(traj.xyz[:, 14400:]*6, guess_R=4.0, guess_z0=1.8,
guess_rho_n=1.0, left_tol=0.1, z_range=(-7.0, 0.3), surface_normal='z',
n_bins=100, fit_range=(-6.0, -1.0), droplet_location='below')
assert(np.absolute(ca['theta'] - ca2['theta']) < 5.0)
|
# *****************************************************************************
# Copyright (c) 2019, Intel Corporation All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *****************************************************************************
"""
The performance tests runner runs performance tests implemented as classes with the following attributes:
params (list of lists): values of test parameters
param_names (list): names of test parameters
def setup(): method which runs before each test case
def time_*(): test case
def teardown(): method which runs after each test case
E.g:
class Methods:
params = [
[5000513],
[1, 3, 5, 9, 17, 33],
['interpreted_python', 'compiled_python']
]
param_names = ['size', 'nchars', 'implementation']
def setup(self, size, nchars, implementation):
self.series = StringSeriesGenerator(size=size, nchars=nchars).generate()
@staticmethod
@hpat.jit
def _len(series):
return series.str.len()
def time_len(self, size, nchars, implementation):
if implementation == Impl.compiled_python.value:
return self._len(self.series)
if implementation == Impl.interpreted_python.value:
return self.series.str.len()
Example usages:
1. Run all:
python runner.py
    2. Run tests/strings.py:
python runner.py --bench tests.strings
"""
import argparse
import inspect
import itertools
import json
import logging
import pkgutil
import platform
import statistics
import subprocess
import tempfile
from collections import defaultdict, OrderedDict
from importlib import import_module
from pathlib import Path
from tests_perf.benchmark import BenchmarksType, TimeBenchmark
EXECUTABLE = 'python'
SCRIPT = 'benchmark.py'
def setup_logging():
"""Setup logger"""
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logger = logging.getLogger(__name__)
logger.setLevel(level=logging.INFO)
logger.addHandler(stream_handler)
return logger
def discover_modules(module_name):
    """
    Recursively import a module and all sub-modules in the module
    :param module_name: module name
    :return: modules from the package
    """
    module = import_module(module_name)
    yield module
    if getattr(module, '__path__', None):
        for _, name, _ in pkgutil.iter_modules(module.__path__, f'{module_name}.'):
            yield from discover_modules(name)
def discover_benchmarks(module_name, type_=BenchmarksType.TIME.value, repeat=10, number=1):
"""
Discover benchmarks in the module
:param module_name: benchmarks module
    :param type_: benchmark type
    :param repeat: number of samples to collect per benchmark
    :param number: number of executions per sample
    :return: time benchmarks
"""
for module in discover_modules(module_name):
for attr_name, module_attr in module.__dict__.items():
if attr_name.startswith('_'):
# skip attributes which start with underscore
continue
if inspect.isclass(module_attr):
for name, class_attr in inspect.getmembers(module_attr):
if not name.startswith(f'{type_}_'):
continue
name_parts = module.__name__.split('.', 1)[1:] + [module_attr.__name__, name]
benchmark_name = '.'.join(name_parts)
func = inspect.getattr_static(module_attr, name)
params = inspect.getattr_static(module_attr, 'params', [[]])
for param in itertools.product(*params):
yield TimeBenchmark(benchmark_name, func, param, module_attr, repeat=repeat, number=number)
def run_benchmark(benchmark):
"""
    Run the specified benchmark in a separate process
    :param benchmark: benchmark object
:return: samples of the run
"""
logger = logging.getLogger(__name__)
bench_file_name = benchmark.name.replace('.', '_')
with tempfile.TemporaryDirectory() as temp_dir:
bench_pickle = Path(temp_dir) / f'{bench_file_name}.pickle'
benchmark.to_pickle(bench_pickle)
samples_json = Path(temp_dir) / f'{bench_file_name}.json'
cmd = [EXECUTABLE, SCRIPT, '--bench-pickle', str(bench_pickle), '--res-json', str(samples_json)]
logger.info('Running "%s"', subprocess.list2cmdline(cmd))
        subprocess.run(cmd, check=True)  # cmd is a list, so shell=True would be wrong here
with samples_json.open(encoding='utf-8') as fd:
return json.load(fd)
def compute_stats(samples):
"""Statistical analysis of the samples"""
return {
'min': min(samples),
'max': max(samples),
'mean': statistics.mean(samples),
'std': statistics.stdev(samples)
}
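# Example with hypothetical samples: compute_stats([0.10, 0.12, 0.11])
# returns approximately {'min': 0.1, 'max': 0.12, 'mean': 0.11, 'std': 0.01}.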
def dump_results(results, file_path):
"""Dump benchmarking results to json-file"""
file_path.parent.mkdir(parents=True, exist_ok=True)
with file_path.open('w', encoding='utf-8') as fd:
json.dump(results, fd)
def parse_args():
"""Parse command line arguments"""
parser = argparse.ArgumentParser()
parser.add_argument('--bench', default='tests', help='Module with performance tests')
    parser.add_argument('--number', default=1, type=int, help='Number of executions per sample')
    parser.add_argument('--repeat', default=10, type=int, help='Number of samples to collect')
parser.add_argument('--results-dir', default='../build/tests_perf', type=Path,
help='Path to directory with benchmarking results')
return parser.parse_args()
def main():
args = parse_args()
logger = setup_logging()
results = defaultdict(list)
logger.info('Running benchmarks in "%s"...', args.bench)
for benchmark in discover_benchmarks(args.bench, repeat=args.repeat, number=args.number):
samples = run_benchmark(benchmark)
results[benchmark.name].append(
{'result': statistics.median(samples), 'stats': compute_stats(samples), 'params': benchmark.param}
)
logger.info('%s%s: %ss', benchmark.name, benchmark.param, round(statistics.median(samples), 5))
formatted_results = {}
for name, res in results.items():
formatted_results[name] = {
'result': [r['result'] for r in res],
'stats': [r['stats'] for r in res],
'params': [list(OrderedDict.fromkeys(y)) for y in zip(*[r['params'] for r in res])],
}
data = {'results': formatted_results}
results_json = args.results_dir / platform.node() / 'results.json'
dump_results(data, results_json)
logger.info('Results dumped to "%s"', results_json)
if __name__ == '__main__':
main()
|
from pydantic import EmailStr, SecretStr
from server.domain.common.types import ID
from server.seedwork.application.commands import Command
class CreateUser(Command[ID]):
email: EmailStr
password: SecretStr
class DeleteUser(Command[None]):
id: ID
class ChangePassword(Command[None]):
email: EmailStr
password: SecretStr
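# Hedged usage sketch: commands are plain pydantic models, so their fields are
# validated on construction (the email/password values below are made up):
#
#     cmd = CreateUser(email="admin@example.org", password="s3cret")
#     cmd.password.get_secret_value()  # -> "s3cret"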
|
from rich.console import Console
from rich.progress import Progress, BarColumn, TextColumn, TimeRemainingColumn
from rich.table import Table
from rich.text import Text
from rich.markdown import Markdown
from sheraf.health.utils import discover_models
from .checks import check_conflict_resolution, check_attributes_index, check_model_index
# Set the list of check functions to be run
INSTANCE_CHECK_FUNCS = {}
ATTRIBUTE_CHECK_FUNCS = {
"index": check_attributes_index,
}
MODEL_CHECK_FUNCS = {"index": check_model_index}
OTHER_CHECK_FUNCS = {"check_conflict_resolution": check_conflict_resolution}
def check_health(
*args,
model_checks=None,
instance_checks=None,
attribute_checks=None,
other_checks=None,
console=None,
):
"""
    Takes modules as parameters.
    :param model_checks: If None, also check model consistency (see MODEL_CHECK_FUNCS)
    :param instance_checks: If None, check all instance consistency rules. Otherwise, give the list of
        wanted checking rules (see INSTANCE_CHECK_FUNCS)
    :param attribute_checks: If None, check all attribute consistency rules. Otherwise, give the list of
        wanted checking rules (see ATTRIBUTE_CHECK_FUNCS)
The function will discover models in the modules, analyze every
model instance, and return a health report in JSON. Depending on the
result, you may need to apply some fixes.
"""
models = discover_models(*args)
health_report = {}
instance_checks = instance_checks or []
attribute_checks = attribute_checks or []
model_checks = model_checks or []
other_checks = other_checks or []
if not instance_checks and not attribute_checks and not model_checks:
instance_checks = INSTANCE_CHECK_FUNCS.keys()
attribute_checks = ATTRIBUTE_CHECK_FUNCS.keys()
model_checks = MODEL_CHECK_FUNCS.keys()
other_checks = OTHER_CHECK_FUNCS.keys()
for check_key in other_checks:
health_report[check_key] = OTHER_CHECK_FUNCS[check_key]()
with Progress(
TextColumn("[progress.description]{task.description}"),
BarColumn(bar_width=None),
TextColumn("{task.completed}", justify="right"),
TextColumn("/"),
TextColumn("{task.total}"),
TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
TimeRemainingColumn(),
console=console,
transient=True,
) as progress:
tasks = {}
for model_path, model in models:
tasks[model] = progress.add_task(model.__name__)
for model_path, model in models:
progress.update(tasks[model], total=model.count())
for model_path, model in models:
progress.start_task(tasks[model])
for check_key in instance_checks:
instance_check_func = INSTANCE_CHECK_FUNCS[check_key]
health_report.setdefault(instance_check_func.__name__, {}).setdefault(
model_path, {"ok": 0, "ko": 0}
)
for check_key in model_checks:
model_check_func = MODEL_CHECK_FUNCS[check_key]
health_report.setdefault(model_check_func.__name__, {})[
model_path
] = model_check_func(model)
# Iterate on instances
for m in model.all():
progress.update(tasks[model], advance=1)
for check_key in instance_checks:
check_func = INSTANCE_CHECK_FUNCS[check_key]
model_instance_result = health_report[check_func.__name__][
model_path
]
if check_func(m):
model_instance_result["ok"] += 1
else:
model_instance_result["ko"] += 1
for check_key in attribute_checks:
check_func = ATTRIBUTE_CHECK_FUNCS[check_key]
health_report.setdefault(check_func.__name__, {})
for attribute_name, bool_value in check_func(m).items():
attribute_result = (
health_report[check_func.__name__]
.setdefault(model_path, {})
.setdefault(attribute_name, {"ok": 0, "ko": 0})
)
if bool_value:
attribute_result["ok"] += 1
else:
attribute_result["ko"] += 1
return health_report
def _print_check_other_health_result(console, check_reason, health_table, help):
table = Table(
show_header=True,
header_style="bold magenta",
title=check_reason,
expand=True,
caption=help,
)
table.add_column("Check")
table.add_column("State")
table.add_row(check_reason, "OK" if health_table[check_reason] else "KO")
console.print(table)
def _print_check_model_health_result(console, check_reason, health_table, help):
table_key = "check_model_" + check_reason
table = Table(
show_header=True,
header_style="bold magenta",
title=table_key,
expand=True,
caption=help,
)
table.add_column("Model")
table.add_column("KO")
table.add_column("OK")
for model_path, attributes in health_table.get(table_key, {}).items():
table.add_row(
model_path,
str(sum(values["ko"] for values in attributes.values())),
str(sum(values["ok"] for values in attributes.values())),
)
for attribute_name, values in attributes.items():
table.add_row(f" - {attribute_name}", str(values["ko"]), str(values["ok"]))
if health_table.get(table_key):
console.print(table)
else:
console.print(" No model to visit.")
def _print_check_instance_health_result(console, check_reason, health_table, help):
"""
    :param check_reason: one among instance_checks keys
:param health_table: result of a check function
"""
table_key = "check_instance_" + check_reason
table = Table(
show_header=True,
header_style="bold magenta",
title=table_key,
expand=True,
caption=help,
)
table.add_column("Model")
table.add_column("KO")
table.add_column("OK")
for model_path, values in health_table.get(table_key, {}).items():
table.add_row(model_path, str(values["ko"]), str(values["ok"]))
console.print(table)
def _print_check_attribute_health_result(console, check_reason, health_table, help):
table_key = "check_attributes_" + check_reason
table = Table(
show_header=True,
header_style="bold magenta",
title=table_key,
expand=True,
caption=help,
)
table.add_column("Model")
table.add_column("KO")
table.add_column("OK")
for model_path, attributes in health_table.get(table_key, {}).items():
table.add_row(
model_path,
str(sum(values["ko"] for values in attributes.values())),
str(sum(values["ko"] for values in attributes.values())),
)
for attribute_name, values in attributes.items():
table.add_row(f" - {attribute_name}", str(values["ko"]), str(values["ok"]))
if health_table.get(table_key):
console.print(table)
else:
console.print(" No model to visit.")
def print_health(
*args,
model_checks=None,
instance_checks=None,
attribute_checks=None,
other_checks=None,
):
    """Takes modules as parameters (e.g. "american._class.cowboy_module").
    The function will discover models in the modules, analyze every model instance, and print
    a health report in a human-readable format. Depending on the result, you may need to apply some fixes.
    This function does not edit any data and is safe to be executed in a production shell.
    """
    console = Console(width=100)
console.print(
Text(
"====== _ ================== __ ==\n"
"= | | / _| =\n"
"= ___| |__ ___ _ __ __ _| |_ =\n"
"= / __| '_ \\ / _ \\ '__/ _` | _| =\n"
"= \\__ \\ | | | __/ | | (_| | | =\n"
"= |___/_| |_|\\___|_| \\__,_|_| =\n"
"==================================",
justify="center",
)
)
instance_checks = instance_checks or []
attribute_checks = attribute_checks or []
model_checks = model_checks or []
other_checks = other_checks or []
if not instance_checks and not attribute_checks and not model_checks:
instance_checks = INSTANCE_CHECK_FUNCS.keys()
attribute_checks = ATTRIBUTE_CHECK_FUNCS.keys()
model_checks = MODEL_CHECK_FUNCS.keys()
other_checks = OTHER_CHECK_FUNCS.keys()
health = check_health(
*args,
model_checks=model_checks,
instance_checks=instance_checks,
attribute_checks=attribute_checks,
other_checks=other_checks,
console=console,
)
for other_check_type in other_checks:
console.print()
_print_check_other_health_result(
console,
other_check_type,
health,
Markdown(OTHER_CHECK_FUNCS[other_check_type].__doc__),
)
console.print()
for model_check_type in model_checks:
console.print()
_print_check_model_health_result(
console,
model_check_type,
health,
Markdown(MODEL_CHECK_FUNCS[model_check_type].__doc__),
)
console.print()
for instance_check_type in instance_checks:
console.print()
_print_check_instance_health_result(
console,
instance_check_type,
health,
Markdown(INSTANCE_CHECK_FUNCS[instance_check_type].__doc__),
)
console.print()
for attribute_check_type in attribute_checks:
console.print()
_print_check_attribute_health_result(
console,
attribute_check_type,
health,
Markdown(ATTRIBUTE_CHECK_FUNCS[attribute_check_type].__doc__),
)
console.print()
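# Hedged usage sketch; the module path is the example given in the docstring above:
#
#     print_health("american._class.cowboy_module")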
|
from datetime import date, datetime
from typing import Optional
import numpy as np
import pandas as pd
from autumn.tools.inputs.database import get_input_db
from autumn.tools.inputs.demography.queries import get_population_by_agegroup
from autumn.tools.utils.utils import check_list_increasing
TINY_NUMBER = 1e-6
def get_btn_testing_numbers(subregion: Optional[str]):
"""
Returns number of tests administered in Bhutan or Thimphu.
"""
subregion = "Bhutan" if subregion is False else subregion
cond_map = {
"region": subregion,
}
input_db = get_input_db()
df = input_db.query(
"covid_btn_test", columns=["date_index", "total_tests"], conditions=cond_map
)
df.dropna(inplace=True)
test_dates = df.date_index.to_numpy()
values = df["total_tests"].to_numpy() + TINY_NUMBER
values = values / 6.5
return test_dates, values
def get_btn_vac_coverage(
region: str,
dose: int,
) -> pd.Series:
"""Calculates the vaccination coverage for Bhutan and Thimphu
Args:
region (str): Can be {"Bhutan"|"Thimphu"}.
dose (int): Can be {1|2|3}.
Returns:
pd.Series: A Pandas series of dates and coverage values
"""
# Get the total population
pop_region = {"Bhutan": None, "Thimphu": "Thimphu"}
population = get_population_by_agegroup([0], "BTN", pop_region[region], 2022)
input_db = get_input_db()
cond_map = {
"dose_num": str(dose),
"region": region,
}
df = input_db.query(
"covid_btn_vac", columns=["date_index", "num"], conditions=cond_map
)
df = df.groupby("date_index", as_index=False).sum()
# Calculate the coverage
df["coverage"] = df["num"] / population
vac_dates = df["date_index"].to_numpy()
vac_coverage = df["coverage"].to_numpy()
coverage_too_large = any(vac_coverage >= 0.99)
not_increasing_coverage = check_list_increasing(vac_coverage)
# Validation
    if any([coverage_too_large, not_increasing_coverage]):
        raise AssertionError("Unrealistic coverage")
return pd.Series(vac_coverage, index=vac_dates)
|
import os
class Config:
"""
    This is the parent configuration class; it contains the main configurations the application needs to run.
"""
NEWS_API = os.environ.get('NEWS_API_KEY')
SECRET_KEY = os.environ.get('SECRET_KEY')
BASE_URL = 'https://newsapi.org/v2/{}?q={}&apiKey={}'
class ProdConfig(Config):
"""
    This is a child of the Config class; it contains the configurations to be used in production.
"""
pass
class DevConfig(Config):
"""
    This is a child of the Config class; it contains the configurations for development.
"""
DEBUG = True
config_options = {
'development' : DevConfig,
'production' : ProdConfig
}
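# Hedged usage sketch, assuming a Flask app factory (not shown in this file):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.config.from_object(config_options['development'])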
|
#!/usr/bin/python
# -'''- coding: utf-8 -'''-
import sys, os
from PySide.QtCore import *
from PySide.QtGui import *
class Launcher(QDialog):
nukepressed = Signal()
mayapressed = Signal()
def __init__(self, parent=None):
super(Launcher, self).__init__(parent)
# Define Widgets
# --- BUTTONS ---
self.nukebtn = QPushButton("Nuke")
self.mayabtn = QPushButton("Maya")
# --- LAYOUTS ---
hbox_Nuke = QHBoxLayout()
hbox_Nuke.addStretch(1)
hbox_Nuke.addWidget(self.nukebtn)
hbox_Nuke.addStretch(1)
hbox_Maya = QHBoxLayout()
hbox_Maya.addStretch(1)
hbox_Maya.addWidget(self.mayabtn)
hbox_Maya.addStretch(1)
layout = QVBoxLayout()
layout.addLayout(hbox_Nuke)
layout.addLayout(hbox_Maya)
self.setLayout(layout)
# Define Signals
self.nukebtn.clicked.connect(self.emitnuke)
self.mayabtn.clicked.connect(self.emitmaya)
def emitnuke(self):
self.nukepressed.emit()
def emitmaya(self):
self.mayapressed.emit()
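if __name__ == '__main__':
    # Hedged usage sketch: run the launcher standalone. The real integration
    # presumably connects nukepressed/mayapressed to actual launch commands.
    app = QApplication(sys.argv)
    launcher = Launcher()
    launcher.nukepressed.connect(lambda: sys.stdout.write("Nuke pressed\n"))
    launcher.mayapressed.connect(lambda: sys.stdout.write("Maya pressed\n"))
    launcher.show()
    sys.exit(app.exec_())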
|
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\sims\template_affordance_provider\tunable_affordance_template_discipline.py
# Compiled at: 2017-02-28 00:30:55
# Size of source mod 2**32: 2653 bytes
from interactions.utils.loot_element import LootElement
from sims.template_affordance_provider.tunable_affordance_template_base import TunableAffordanceTemplateBase
from sims4.localization import TunableLocalizedStringFactory
from sims4.tuning.tunable import HasTunableSingletonFactory, AutoFactoryInit, TunableReference, TunableList, TunableVariant
import services
class TunableDisciplineBasicExtras(TunableList):
def __init__(self, **kwargs):
        super().__init__(description='\n        Basic Extras to run at the outcome of this template interaction.\n        ',
            tunable=TunableVariant(loot=LootElement.TunableFactory()), **kwargs)
class TunableAffordanceTemplateDiscipline(HasTunableSingletonFactory, AutoFactoryInit, TunableAffordanceTemplateBase):
FACTORY_TUNABLES = {'template_affordance':TunableReference(description='\n The affordance to use as a template.\n ',
manager=services.affordance_manager(),
class_restrictions=('DisciplineTemplateSuperInteraction', 'DisciplineTemplateSocialSuperInteraction'),
pack_safe=True),
'display_name_override':TunableLocalizedStringFactory(description='\n The name to use for this template interaction.\n '),
'outcome_basic_extras':TunableDisciplineBasicExtras()}
def get_template_affordance(self):
return self.template_affordance
def get_template_kwargs(self):
return {'template_display_name':self.display_name_override,
'template_outcome_basic_extras':self.outcome_basic_extras}
|
_=[print("%s: %d points"%T)for T in sorted([(T[0],sum([int(s)for s in T[1].split()]))for R in __import__("sys").stdin for T in[R.split(":")]if all(x.isdigit()for x in T[1].split())],key=lambda x:-x[1])]
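# A readable sketch equivalent to the one-liner above, kept commented out so it
# does not compete with it for stdin. The "name: score score ..." input format
# is inferred from the expression; lines whose scores are not all digits are
# skipped, and records are printed in descending order of total score.
#
#     import sys
#     totals = []
#     for line in sys.stdin:
#         name, _, scores = line.partition(":")
#         tokens = scores.split()
#         if all(t.isdigit() for t in tokens):
#             totals.append((name, sum(int(t) for t in tokens)))
#     for name, total in sorted(totals, key=lambda t: -t[1]):
#         print("%s: %d points" % (name, total))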
|
import unittest
from flaskr.textHandler import TextHandler
class TestTextHandler(unittest.TestCase):
EXPECTED = "français"
def test_remove(self):
result = TextHandler.remove_indicator(TextHandler.HEADER + self.EXPECTED + TextHandler.FOOTER)
self.assertEqual(self.EXPECTED, result)
def test_insert(self):
result = TextHandler.insert_indicator(self.EXPECTED)
self.assertEqual(TextHandler.HEADER + self.EXPECTED + TextHandler.FOOTER, result)
def test_encode(self):
receive_res = TextHandler.encode_msg(self.EXPECTED)
expected_res = [
[0, 1, 1, 0, 0, 1, 1, 0],
[0, 1, 1, 1, 0, 0, 1, 0],
[0, 1, 1, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 1, 1, 0],
[1, 1, 0, 0, 0, 0, 1, 1],
[1, 0, 1, 0, 0, 1, 1, 1],
[0, 1, 1, 0, 0, 0, 0, 1],
[0, 1, 1, 0, 1, 0, 0, 1],
[0, 1, 1, 1, 0, 0, 1, 1]
]
self.assertEqual(expected_res, receive_res)
def test_decode(self):
expected_res = TextHandler.HEADER + self.EXPECTED + TextHandler.FOOTER
received_res = TextHandler.decode_msg(TextHandler.encode_msg(expected_res))
self.assertEqual(expected_res, received_res)
|
from thundra.application.application_info_provider import ApplicationInfoProvider
from thundra.config import config_names
from thundra.config.config_provider import ConfigProvider
class GlobalApplicationInfoProvider(ApplicationInfoProvider):
def __init__(self, application_info_provider=None):
self.application_info = {}
self.application_info_provider = application_info_provider
if self.application_info_provider:
self.application_info = self.application_info_provider.get_application_info()
app_info_from_config = self.get_application_info_from_config()
self.update(app_info_from_config)
def get_application_info(self):
return self.application_info
def get_application_tags(self):
return self.application_info.get('applicationTags', {}).copy()
@staticmethod
def get_application_info_from_config():
return {
'applicationId': ConfigProvider.get(config_names.THUNDRA_APPLICATION_ID),
'applicationInstanceId': ConfigProvider.get(config_names.THUNDRA_APPLICATION_INSTANCE_ID),
'applicationDomainName': ConfigProvider.get(config_names.THUNDRA_APPLICATION_DOMAIN_NAME),
'applicationClassName': ConfigProvider.get(config_names.THUNDRA_APPLICATION_CLASS_NAME),
'applicationName': ConfigProvider.get(config_names.THUNDRA_APPLICATION_NAME),
'applicationVersion': ConfigProvider.get(config_names.THUNDRA_APPLICATION_VERSION, ''),
'applicationStage': ConfigProvider.get(config_names.THUNDRA_APPLICATION_STAGE, ''),
'applicationRegion': ConfigProvider.get(config_names.THUNDRA_APPLICATION_REGION, ''),
'applicationRuntime': ApplicationInfoProvider.APPLICATION_RUNTIME,
'applicationRuntimeVersion': ApplicationInfoProvider.APPLICATION_RUNTIME_VERSION,
'applicationTags': ApplicationInfoProvider.parse_application_tags()
}
def update(self, opts):
filtered_opts = {k: v for k, v in opts.items() if v is not None}
self.application_info.update(filtered_opts)
|
#!/usr/bin/env python
import argparse
import asyncio
import json
import statistics
import time
from datetime import datetime
from typing import Any, Dict
import aiohttp
import uvicorn
from loguru import logger
from starlette.applications import Starlette
from starlette.responses import PlainTextResponse
from starlette.routing import Route
async def startup():
logger.info("The app is starting up")
async def shutdown():
logger.info("The app is shutting down")
msgs: Dict[int, Dict[str, Any]] = {}
dateformat = "%Y-%m-%dT%H:%M:%S.%f"
async def start(request):
eb_url = request.query_params["eb_url"]
    concurrent_size = (
        int(request.query_params["cc_size"])
        if "cc_size" in request.query_params
        else 30
    )
req_num = (
int(request.query_params["req_num"])
if "req_num" in request.query_params
else 100
)
async def send_one_req(session: aiohttp.ClientSession, data: Any, id: int):
start_time = time.time()
async with session.post(eb_url, data=data) as resp:
msgs[id]["ct"] = time.time() - start_time
logger.info("Resp: {} - {}", resp.status, await resp.text())
async with aiohttp.ClientSession() as session:
tasks = []
for i in range(100, req_num + 100):
pb = datetime.now()
new_msg = {
"id": i,
"title": f"stress_test.event_{i}",
"published": pb.strftime(dateformat),
}
msgs[i] = {"pb": pb}
data = json.dumps(new_msg)
tasks.append(asyncio.create_task(send_one_req(session, data, i)))
if len(tasks) >= concurrent_size:
await asyncio.gather(*tasks)
tasks = []
if tasks:
await asyncio.gather(*tasks)
stats = [m["ct"] for _, m in msgs.items()]
return PlainTextResponse(
f"Total: {len(msgs)}\n Max: {max(stats)}\n Median: {statistics.median(stats)}\n Mean: {statistics.mean(stats)}\n Min: {min(stats)}"
)
async def receive_events(request):
request_body = await request.body()
data = json.loads(request_body)
if isinstance(data, dict):
id = int(data["id"])
# new_pb = datetime.strptime(data["published"], dateformat)
new_pb = datetime.now()
msgs[id]["rt"] = (new_pb - msgs[id]["pb"]).microseconds * pow(10, -6)
return PlainTextResponse("ok")
async def status(request):
ct_stats = [m["ct"] for _, m in msgs.items() if "ct" in m]
rt_stats = [m["rt"] for _, m in msgs.items() if "rt" in m]
status = f"""
Request Count: {len(ct_stats)}
Request Time:
- Max: {max(ct_stats)}
- Median: {statistics.median(ct_stats)}
- Mean: {statistics.mean(ct_stats)}
- Min: {min(ct_stats)}
Received Count: {len(rt_stats)}
Received Time:
- Max: {max(rt_stats)}
- Median: {statistics.median(rt_stats)}
- Mean: {statistics.mean(rt_stats)}
- Min: {min(rt_stats)}
"""
return PlainTextResponse(status)
def home(request):
return PlainTextResponse("ok")
async def reset(request):
global msgs
msgs = {}
return PlainTextResponse("done")
routes = [
Route("/", home),
Route("/start", start),
Route("/receive_events", receive_events, methods=["POST"]),
Route("/status", status),
Route("/reset", reset),
]
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="EventBus 3 stress test script")
parser.add_argument("-p", "--port", type=int, help="Listening port", default=8081)
args = parser.parse_args()
app = Starlette(
debug=False,
routes=routes,
on_startup=[startup],
on_shutdown=[shutdown],
)
logger.info(
"The app is starting, you must set callback address to {} with POST, then call {} with GET",
f"http://hostname:{args.port}/receive_events",
f"http://hostname:{args.port}/start",
)
uvicorn.run(app, host="0.0.0.0", port=args.port)
|
from datetime import timedelta
import numpy as np
import pandas as pd
import pytest
from go_utils.cleanup import (
adjust_timezones,
camel_case,
filter_invalid_coords,
remove_homogenous_cols,
rename_latlon_cols,
replace_column_prefix,
round_cols,
standardize_null_vals,
)
camel_case_data = [
("abcd efg", [" "], "AbcdEfg"),
("abcd", [" "], "Abcd"),
("one two-three,four.five", [" ", ",", "-", "."], "OneTwoThreeFourFive"),
]
@pytest.mark.util
@pytest.mark.parametrize("input_text, delims, expected", camel_case_data)
def test_camel_case(input_text, delims, expected):
assert camel_case(input_text, delims) == expected
latlon_data = [
(
{
"lat": [-90, 90, 50, -9999, 0, 2, -10, 36.5, 89.999],
"lon": [-180, 180, 179.99, -179.99, -9999, 90, -90, 35.6, -17.8],
}
),
(
{
"latitude": [-90, 90, -89.999, -23.26, -9999, 12.75, -10, 36.5, 89.999],
"longitude": [-180, 22.2, -37.85, -179.99, 180, 90, -90, -9999, 179.99],
}
),
]
time_zone_data = [
(48.21, 16.36, 1), # Vienna GMT+1
(41.8781, -87.6298, -6), # Chicago GMT-6
(39.7392, -104.9903, -7), # Denver GMT-7
(40.7128, -74.0060, -5), # NYC GMT-5
(34.0522, -118.2437, -8), # LA GMT-8
    (-33.9249, 18.4241, 2),  # Cape Town GMT+2 (southern latitude, hence negative)
    (3.1390, 101.6869, 8),  # Kuala Lumpur GMT+8
]
@pytest.mark.util
@pytest.mark.cleanup
@pytest.mark.parametrize("lat, lon, offset", time_zone_data)
def test_datetime_convert(lat, lon, offset):
test_times = pd.to_datetime(
np.array(
[
"2021-01-05T17:42:00",
"2021-12-17T03:10:00",
"2018-07-01T00:00:00",
"2020-08-10T23:59:00",
"2020-02-29T3:00:00",
"2020-02-29T23:00:00",
]
)
)
lat_col = np.full((len(test_times)), lat)
lon_col = np.full((len(test_times)), lon)
df = pd.DataFrame.from_dict(
{"lat": lat_col, "lon": lon_col, "measuredAt": test_times}
)
print(type(df["measuredAt"].to_numpy()[0]))
output_df = adjust_timezones(df, "measuredAt", "lat", "lon")
    for i, date in enumerate(test_times):
        assert output_df.loc[i, "measuredAt"] == date + timedelta(hours=offset)
assert not output_df.equals(df)
adjust_timezones(df, "measuredAt", "lat", "lon", inplace=True)
assert output_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
@pytest.mark.parametrize("df_dict", latlon_data)
def test_latlon_filter(df_dict):
df = pd.DataFrame.from_dict(df_dict)
latitude, longitude = df.columns
# Test exclusive filtering
filtered_df = filter_invalid_coords(df, latitude, longitude)
assert np.all(filtered_df[latitude] > -90)
assert np.all(filtered_df[latitude] < 90)
assert np.all(filtered_df[longitude] > -180)
assert np.all(filtered_df[longitude] < 180)
# Test inclusive filtering
filtered_df = filter_invalid_coords(df, latitude, longitude, inclusive=True)
assert np.all(filtered_df[latitude] >= -90)
assert np.all(filtered_df[latitude] <= 90)
assert np.all(filtered_df[longitude] >= -180)
assert np.all(filtered_df[longitude] <= 180)
# Test inplace
assert not filtered_df.equals(df)
filter_invalid_coords(df, latitude, longitude, inclusive=True, inplace=True)
assert filtered_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
def test_homogenous_cols():
df = pd.DataFrame.from_dict({"col_1": [3, 2, 1], "col_2": [0, 0, 0]})
output_df = remove_homogenous_cols(df)
assert "col_2" not in output_df.columns
assert "col_1" in output_df.columns
assert not output_df.equals(df)
remove_homogenous_cols(df, inplace=True)
assert output_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
def test_col_replace():
df = pd.DataFrame.from_dict({"landcoversTest1": [1], "landcoversTest2": [1]})
output_df = replace_column_prefix(df, "landcovers", "lc")
assert "landcoversTest1" not in output_df.columns
assert "landcoversTest2" not in output_df.columns
assert "lc_Test1" in output_df.columns
assert "lc_Test2" in output_df.columns
assert not output_df.equals(df)
replace_column_prefix(df, "landcovers", "lc", inplace=True)
assert output_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
def test_lat_lon_replace():
df = pd.DataFrame.from_dict(
{
"latitude": [1],
"longitude": [2],
"testMeasurementLatitude": [3],
"testMeasurementLongitude": [4],
}
)
output_df = rename_latlon_cols(df)
assert output_df.loc[0, "Latitude"] == 3
assert output_df.loc[0, "Longitude"] == 4
assert output_df.loc[0, "MGRSLatitude"] == 1
assert output_df.loc[0, "MGRSLongitude"] == 2
assert not output_df.equals(df)
rename_latlon_cols(df, inplace=True)
assert output_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
def test_column_round():
df = pd.DataFrame.from_dict(
{
"Latitude": [1.123456],
"longitude": [2.123],
"number": [3.212],
"text": ["text"],
}
)
output_df = round_cols(df)
assert output_df.loc[0, "Latitude"] == 1.12346
assert output_df.loc[0, "longitude"] == 2.123
assert output_df.loc[0, "number"] == 3
assert output_df.loc[0, "text"] == "text"
assert not output_df.equals(df)
round_cols(df, inplace=True)
assert output_df.equals(df)
@pytest.mark.util
@pytest.mark.cleanup
def test_null_standardize():
df = pd.DataFrame.from_dict(
{"data": ["", "nan", "null", "NaN", None, "test", 5, np.nan]}
)
# Using "." to not overlap with null values (not recommended for practical use)
output_df = standardize_null_vals(df, ".")
desired = [".", ".", ".", ".", ".", "test", 5, "."]
for i in range(len(desired)):
assert output_df.loc[i, "data"] == desired[i]
assert not output_df.equals(df)
standardize_null_vals(df, ".", inplace=True)
assert output_df.equals(df)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 4 22:04:11 2018
First version of the Master's thesis code: the initial model.
@author: JingQIN
"""
import math
import numpy as np
import decimal
class Quaternion(object):
"""class of Quaternion that do the simple operations
Attributes:
a -- a float parameter of real part
b -- a float parameter of fundamental quaternion unit i
c -- a float parameter of fundamental quaternion unit j
d -- a float parameter of fundamental quaternion unit k
"""
def __init__(self, a, b, c, d):
        '''initialize a Quaternion with 4 floats'''
self.a = decimal.Decimal(a)
self.b = decimal.Decimal(b)
self.c = decimal.Decimal(c)
self.d = decimal.Decimal(d)
def __add__(self, other):
'''compute Quaternion objects addition
arguments:
other -- another Quaternion object
'''
return Quaternion(self.a + other.a, self.b + other.b, self.c + other.c, self.d + other.d)
def __sub__(self, other):
'''compute Quaternion objects subtraction
arguments:
other -- another Quaternion object
'''
return Quaternion(self.a - other.a, self.b - other.b, self.c - other.c, self.d - other.d)
def __mul__(self, other):
        '''compute Quaternion objects multiplication
arguments:
other -- another Quaternion object
'''
a = self.a * other.a - self.b * other.b - self.c * other.c - self.d * other.d
b = self.a * other.b + self.b * other.a + self.c * other.d - self.d * other.c
c = self.a * other.c - self.b * other.d + self.c * other.a + self.d * other.b
d = self.a * other.d + self.b * other.c - self.c * other.b + self.d * other.a
return Quaternion(a, b, c, d)
def scalar_mul(self, scalar):
        '''compute multiplication of a Quaternion object by a scalar
arguments:
scalar -- a scalar
'''
a = self.a * scalar
b = self.b * scalar
c = self.c * scalar
d = self.d * scalar
return Quaternion(a, b, c, d)
def dot(self, other):
        '''compute Quaternion objects dot product
arguments:
other -- another Quaternion object
'''
        return self.a * other.a + self.b * other.b + self.c * other.c + self.d * other.d
def norm(self):
'''
compute Quaternion object norm
'''
return math.sqrt(pow(self.a, 2) + pow(self.b, 2) + pow(self.c, 2) + pow(self.d, 2))
def norm_q(self):
'''
compute normalized Quaternion
'''
mynorm = self.norm()
my_norm_q = Quaternion(self.a / mynorm, self.b / mynorm, self.c / mynorm, self.d / mynorm)
return my_norm_q
def conj(self):
'''
compute Quaternion object complex conjugate
'''
a = self.a
b = -self.b
c = -self.c
d = -self.d
return Quaternion(a, b, c, d)
def rotator(self, theta, vectors):
'''
        from an angle and an axis vector, rotate this quaternion
        arguments:
        theta -- rotation angle, radians
        vectors -- rotation axis, list, like [1, 0, 0]
'''
sum_v = sum([v * v for v in vectors])
norm_v = math.sqrt(sum_v)
vectors = [v / norm_v for v in vectors]
a = math.cos(theta / 2.)
b = vectors[0] * math.sin(theta / 2.)
c = vectors[1] * math.sin(theta / 2.)
d = vectors[2] * math.sin(theta / 2.)
a = decimal.Decimal(a)
b = decimal.Decimal(b)
c = decimal.Decimal(c)
d = decimal.Decimal(d)
r = Quaternion(a, b, c, d)
return r * self * r.conj()
def toDCM(self):
'''
compute a Quaternion object to a DCM, a list of lists
specifically, a list of three 1*3 list, normalized
'''
q0 = self.a
q1 = self.b
q2 = self.c
q3 = self.d
C11 = pow(q0, 2) + pow(q1, 2) - pow(q2, 2) - pow(q3, 2)
C12 = 2 * (q1 * q2 + q0 * q3)
C13 = 2 * (q1 * q3 - q0 * q2)
C21 = 2 * (q1 * q2 - q0 * q3)
C22 = pow(q0, 2) - pow(q1, 2) + pow(q2, 2) - pow(q3, 2)
C23 = 2 * (q2 * q3 + q0 * q1)
C31 = 2 * (q1 * q3 + q0 * q2)
C32 = 2 * (q2 * q3 - q0 * q1)
C33 = pow(q0, 2) - pow(q1, 2) - pow(q2, 2) + pow(q3, 2)
C3_norm = math.sqrt(pow(C31, 2) + pow(C32, 2) + pow(C33, 2))
C1_norm = math.sqrt(pow(C11, 2) + pow(C12, 2) + pow(C13, 2))
C2_norm = math.sqrt(pow(C21, 2) + pow(C22, 2) + pow(C23, 2))
C3_norm = decimal.Decimal(C3_norm)
C1_norm = decimal.Decimal(C1_norm)
C2_norm = decimal.Decimal(C2_norm)
        DCM = [[C11 / C1_norm, C12 / C1_norm, C13 / C1_norm],
               [C21 / C2_norm, C22 / C2_norm, C23 / C2_norm],
               [C31 / C3_norm, C32 / C3_norm, C33 / C3_norm]]
return np.array(DCM)
def __str__(self):
        '''string representation for printing'''
parameters = {'':self.a, 'i':self.b, 'j':self.c, 'k':self.d}
count = 0
w = ''
for k,v in parameters.items():
if v != 0:
if count == 0:
w = w + str(v) + k
count += 1
else:
if v < 0:
w = w + str(v) + k
else:
w = w + '+' + str(v) + k
return w
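if __name__ == "__main__":
    # Hedged usage sketch (not part of the thesis code): rotate the vector
    # (1, 0, 0), encoded as the pure quaternion 0 + 1i + 0j + 0k, by pi/2
    # around the z-axis. The result is approximately 0 + 0i + 1j + 0k, up to
    # small floating-point/Decimal residue.
    v = Quaternion(0.0, 1.0, 0.0, 0.0)
    rotated = v.rotator(math.pi / 2.0, [0, 0, 1])
    print(rotated)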
|
"""
:mod:`boardinghouse.management.commands.loaddata`
This replaces the ``loaddata`` command with one that takes a new
option: ``--schema``. This is required when non-shared-models are
included in the file(s) to be loaded, and the schema with this name
will be used as a target.
After completing the load, we ensure that any schemata that were
loaded as part of this process exist as schemata in the database.
"""
import django
from django.core.management.commands import loaddata
from django.core.management.base import CommandError
from optparse import make_option
from ...schema import get_schema_model, _create_all_schemata
class Command(loaddata.Command):
if django.VERSION < (1, 8):
option_list = loaddata.Command.option_list + (
make_option('--schema', action='store', dest='schema',
help='Specify which schema to load schema-aware models to'),
)
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument('--schema', action='store', dest='schema',
help='Specify which schema to load schema-aware models to')
def handle(self, *app_labels, **options):
Schema = get_schema_model()
schema_name = options.get('schema')
if schema_name == '__template__':
# Hmm, we don't want to accidentally write data to this, so
# we should raise an exception if we are going to be
# writing any schema-aware objects.
schema = None
elif schema_name:
try:
                schema = Schema.objects.get(schema=schema_name)
except Schema.DoesNotExist:
raise CommandError('No Schema found named "%s"' % schema_name)
schema.activate()
# We should wrap this in a try/except, and present a reasonable
# error message if we think we tried to load data without a schema
# that required one.
super(Command, self).handle(*app_labels, **options)
Schema().deactivate()
# Ensure we create any schemata that are new.
# But, we only want to do this if the schema table has been installed.
_create_all_schemata()
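# Hedged usage sketch; the fixture and schema names are hypothetical:
#
#     python manage.py loaddata tenants.json --schema customer_1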
|
#!/usr/bin/env python
# flake8: noqa: F401, F402, F403, F405
"""A script used to determine unique response keys for each response type"""
import sys
from itertools import chain
from pprint import pprint
from pyinaturalist.constants import PROJECT_DIR
sys.path.insert(0, PROJECT_DIR)
from test.sample_data import *
RESPONSES = {
'annotation': j_annotation_1,
'comment': j_comment_1,
'controlled_term': j_controlled_term_1,
'controlled_term_value': j_controlled_term_value_1,
'identification': j_identification_1,
'observation_field': j_obs_field_1,
'observation': j_observation_1,
'life_list': j_life_list,
'observation_field_value': j_ofv_1_numeric,
'photo': j_photo_1,
'place': j_place_1,
'places_nearby': j_places_nearby,
'project': j_project_1,
'search': j_search_results[0],
'taxon': j_taxon_2_partial,
'taxon_counts': j_obs_species_counts[0],
'user': j_user_2_partial,
}
def get_unique_keys(response_type):
keys = set(RESPONSES[response_type].keys())
other_responses = [v for k, v in RESPONSES.items() if k != response_type]
other_keys = set(chain.from_iterable([x.keys() for x in other_responses]))
return sorted(keys - other_keys)
if __name__ == '__main__':
for k in RESPONSES:
print(f'{k}: ', end='')
pprint(get_unique_keys(k))
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from networkapi.admin_permission import AdminPermission
from networkapi.ambiente.models import EnvironmentVip
from networkapi.auth import has_perm
from networkapi.equipamento.models import Equipamento
from networkapi.equipamento.models import EquipamentoError
from networkapi.equipamento.models import EquipamentoNotFoundError
from networkapi.exception import EnvironmentVipNotFoundError
from networkapi.exception import InvalidValueError
from networkapi.grupo.models import GrupoError
from networkapi.infrastructure.xml_utils import dumps_networkapi
from networkapi.infrastructure.xml_utils import loads
from networkapi.infrastructure.xml_utils import XMLError
from networkapi.ip.models import IpError
from networkapi.ip.models import IpNotFoundByEquipAndVipError
from networkapi.ip.models import NetworkIPv4Error
from networkapi.ip.models import NetworkIPv4NotFoundError
from networkapi.rest import RestResource
from networkapi.rest import UserNotAuthorizedError
from networkapi.util import is_valid_int_greater_zero_param
from networkapi.util import is_valid_regex
from networkapi.util import is_valid_string_maxsize
from networkapi.util import is_valid_string_minsize
class IPEquipEvipResource(RestResource):
log = logging.getLogger('IPEquipEvipResource')
def handle_post(self, request, user, *args, **kwargs):
"""Handles POST requests to get all Ips (v4) or (v6) of equip on Divisao DC and Ambiente Logico of fisrt Network4 and 6 (if exists) of Environment Vip.
URL: ip/getbyequipandevip/
"""
self.log.info('Get Ips by Equip - Evip')
try:
# User permission
if not has_perm(user, AdminPermission.IPS, AdminPermission.READ_OPERATION):
raise UserNotAuthorizedError(
None, u'User does not have permission to perform the operation.')
# Load XML data
xml_map, attrs_map = loads(request.raw_post_data)
# XML data format
networkapi_map = xml_map.get('networkapi')
if networkapi_map is None:
msg = u'There is no value to the networkapi tag of XML request.'
self.log.error(msg)
return self.response_error(3, msg)
ip_map = networkapi_map.get('ip_map')
if ip_map is None:
msg = u'There is no value to the ip tag of XML request.'
self.log.error(msg)
return self.response_error(3, msg)
# Get XML data
id_evip = ip_map.get('id_evip')
equip_name = ip_map.get('equip_name')
# Valid id_evip
if not is_valid_int_greater_zero_param(id_evip):
self.log.error(
u'Parameter id_evip is invalid. Value: %s.', id_evip)
raise InvalidValueError(None, 'id_evip', id_evip)
# Valid equip_name
if not is_valid_string_minsize(equip_name, 3) or not is_valid_string_maxsize(equip_name, 80) or not is_valid_regex(equip_name, '^[A-Z0-9-_]+$'):
self.log.error(
u'Parameter equip_name is invalid. Value: %s', equip_name)
raise InvalidValueError(None, 'equip_name', equip_name)
# Business Rules
# Get Environment VIp
evip = EnvironmentVip.get_by_pk(id_evip)
# Get Equipment
equip = Equipamento.get_by_name(equip_name)
lista_ips_equip = list()
lista_ipsv6_equip = list()
# GET DIVISAO DC AND AMBIENTE_LOGICO OF NET4 AND NET6
lista_amb_div_4 = list()
lista_amb_div_6 = list()
for net in evip.networkipv4_set.select_related(
'vlan',
'ambiente'
).all():
dict_div_4 = dict()
dict_div_4['divisao_dc'] = net.vlan.ambiente.divisao_dc_id
dict_div_4[
'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id
if dict_div_4 not in lista_amb_div_4:
lista_amb_div_4.append(dict_div_4)
for net in evip.networkipv6_set.select_related(
'vlan',
'ambiente'
).all():
dict_div_6 = dict()
dict_div_6['divisao_dc'] = net.vlan.ambiente.divisao_dc_id
dict_div_6[
'ambiente_logico'] = net.vlan.ambiente.ambiente_logico_id
if dict_div_6 not in lista_amb_div_6:
lista_amb_div_6.append(dict_div_6)
# Get all IPV4's Equipment
for ipequip in equip.ipequipamento_set.select_related(
'ip',
'vlan',
'ambiente'
).all():
if ipequip.ip not in lista_ips_equip:
for dict_div_amb in lista_amb_div_4:
# if ipequip.ip.networkipv4.ambient_vip is not None and
# ipequip.ip.networkipv4.ambient_vip.id == evip.id:
if (ipequip.ip.networkipv4.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and ipequip.ip.networkipv4.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')):
lista_ips_equip.append(ipequip.ip)
# Get all IPV6'S Equipment
for ipequip in equip.ipv6equipament_set.select_related(
'ip',
'vlan',
'ambiente'
).all():
if ipequip.ip not in lista_ipsv6_equip:
for dict_div_amb in lista_amb_div_6:
# if ipequip.ip.networkipv6.ambient_vip is not None and
# ipequip.ip.networkipv6.ambient_vip.id == evip.id:
if (ipequip.ip.networkipv6.vlan.ambiente.divisao_dc.id == dict_div_amb.get('divisao_dc') and ipequip.ip.networkipv6.vlan.ambiente.ambiente_logico.id == dict_div_amb.get('ambiente_logico')):
lista_ipsv6_equip.append(ipequip.ip)
# lists and dicts for return
lista_ip_entregue = list()
lista_ip6_entregue = list()
for ip in lista_ips_equip:
dict_ips4 = dict()
dict_network = dict()
dict_ips4['id'] = ip.id
dict_ips4['ip'] = '%s.%s.%s.%s' % (
ip.oct1, ip.oct2, ip.oct3, ip.oct4)
dict_network['id'] = ip.networkipv4_id
dict_network['network'] = '%s.%s.%s.%s' % (
ip.networkipv4.oct1, ip.networkipv4.oct2, ip.networkipv4.oct3, ip.networkipv4.oct4)
dict_network['mask'] = '%s.%s.%s.%s' % (
ip.networkipv4.mask_oct1, ip.networkipv4.mask_oct2, ip.networkipv4.mask_oct3, ip.networkipv4.mask_oct4)
dict_ips4['network'] = dict_network
lista_ip_entregue.append(dict_ips4)
for ip in lista_ipsv6_equip:
dict_ips6 = dict()
dict_network = dict()
dict_ips6['id'] = ip.id
dict_ips6['ip'] = '%s:%s:%s:%s:%s:%s:%s:%s' % (
ip.block1, ip.block2, ip.block3, ip.block4, ip.block5, ip.block6, ip.block7, ip.block8)
dict_network['id'] = ip.networkipv6.id
dict_network['network'] = '%s:%s:%s:%s:%s:%s:%s:%s' % (
ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8)
dict_network['mask'] = '%s:%s:%s:%s:%s:%s:%s:%s' % (
ip.networkipv6.block1, ip.networkipv6.block2, ip.networkipv6.block3, ip.networkipv6.block4, ip.networkipv6.block5, ip.networkipv6.block6, ip.networkipv6.block7, ip.networkipv6.block8)
dict_ips6['network'] = dict_network
lista_ip6_entregue.append(dict_ips6)
lista_ip_entregue = lista_ip_entregue if len(
lista_ip_entregue) > 0 else None
lista_ip6_entregue = lista_ip6_entregue if len(
lista_ip6_entregue) > 0 else None
if (lista_ip_entregue is None and lista_ip6_entregue is None):
raise IpNotFoundByEquipAndVipError(
None, 'Ip não encontrado com equipamento %s e ambiente vip %s' % (equip_name, id_evip))
return self.response(dumps_networkapi({'ipv4': lista_ip_entregue, 'ipv6': lista_ip6_entregue}))
except IpNotFoundByEquipAndVipError:
return self.response_error(317, equip_name, id_evip)
except InvalidValueError, e:
return self.response_error(269, e.param, e.value)
except NetworkIPv4NotFoundError:
return self.response_error(281)
except EquipamentoNotFoundError:
return self.response_error(117, ip_map.get('id_equipment'))
except EnvironmentVipNotFoundError:
return self.response_error(283)
except UserNotAuthorizedError:
return self.not_authorized()
except XMLError, x:
self.log.error(u'Error reading the XML request.')
return self.response_error(3, x)
except (IpError, NetworkIPv4Error, EquipamentoError, GrupoError), e:
self.log.error(e)
return self.response_error(1)
|
brick = """
▗▄▄▄▄▄▄▄▄▄▄▄▄▖
▐████████████▌
▐████████████▌
▝▀▀▀▀▀▀▀▀▀▀▀▀▘
"""
glass = [
"""
┏━━━━━━━━━━━━┓
┃ ┃
┃ ┃
┗━━━━━━━━━━━━┛
""",
"""
┏━┳━━┳━━━━┳━━┓
┃ ┗ ┛ ┏┫
┣┛ ┃
┗━━━━━━┻━━━┻━┛
""",
"""
┏━┳━━━┳━━━┳━━┓
┃ ┗┓ ┗ ┏┛ ━┫
┣━ ┏┛ ┏━ ┏ ┃
┗━━━┻━━┻━━━┻━┛
"""
]
brick_small = [
"""
┏━━━━━━━┓
┃ ┃
┗━━━━━━━┛
""",
"""
┏━┳━┳━━━┓
┣ ┫
┗━━━┻━┻━┛
""",
"""
┏━┳━━━┳━┓
┣┓┏ ┃ ┛┗┫
┗━┻━┻━┻━┛
""",
]
ball = [
"""
◯
""",
"""
⬤
"""
]
powerup = {
'long': """
❶
""",
'short': """
➋
""",
'grab': """
➌
""",
'shoot': """
➍
""",
'thru': """
➎
""",
'mul': """
➏
""",
'many': """
➐
"""
}
paddle = [
"""
██████████
""",
"""
██████
""",
"""
████████████
"""
]
bullet = [
"""
ᐁ
""",
"""
ᐃ
"""
]
alien = [
"""
░░░░▄▄████▄▄░░░░
░░▄██████████▄░░
▄██▄██▄██▄██▄██▄
░░▀█▀░░▀▀░░▀█▀░░
""",
"""
▄░▀▄░░▄▀░▄
█▄██████▄█
███▄██▄███
▀████████▀
▄▀░░░░░░▀▄
"""]
|
""" Unittests """
|
from django.core.urlresolvers import reverse
from django_webtest import WebTest
import webtest
from django.contrib.auth.models import User
from evap.evaluation.models import Semester, Questionnaire, UserProfile, Course, Contribution
import os.path
def lastform(page):
return page.forms[max(page.forms.keys())]
class UsecaseTests(WebTest):
fixtures = ['usecase-tests']
extra_environ = {'HTTP_ACCEPT_LANGUAGE': 'en'}
def test_import(self):
page = self.app.get(reverse("fsr_root"), user='fsr.user')
# create a new semester
page = page.click("[Cc]reate [Nn]ew [Ss]emester")
semester_form = lastform(page)
semester_form['name_de'] = "Testsemester"
semester_form['name_en'] = "test semester"
page = semester_form.submit().follow()
# retrieve new semester
semester = Semester.objects.get(name_de="Testsemester",
name_en="test semester")
self.assertEqual(semester.course_set.count(), 0, "New semester is not empty.")
        # save the original user count
original_user_count = User.objects.all().count()
# import excel file
page = page.click("[Ii]mport")
upload_form = lastform(page)
upload_form['vote_start_date'] = "02/29/2000"
upload_form['vote_end_date'] = "02/29/2012"
upload_form['excel_file'] = (os.path.join(os.path.dirname(__file__), "fixtures", "samples.xls"),)
page = upload_form.submit().follow()
self.assertEqual(semester.course_set.count(), 23, "Wrong number of courses after Excel import.")
self.assertEqual(User.objects.count(), original_user_count + 24, "Wrong number of users after Excel import.")
check_course = Course.objects.get(name_en="Shake")
check_contributions = Contribution.objects.filter(course=check_course)
responsible_count = 0
for contribution in check_contributions:
if contribution.responsible:
responsible_count += 1
self.assertEqual(responsible_count, 1, "Wrong number of responsible contributors after Excel import.")
check_student = User.objects.get(username="Diam.Synephebos")
self.assertEqual(check_student.first_name, "Diam")
self.assertEqual(check_student.email, "Diam.Synephebos@student.hpi.uni-potsdam.de")
check_contributor = User.objects.get(username="Sanctus.Aliquyam")
self.assertEqual(check_contributor.first_name, "Sanctus")
self.assertEqual(check_contributor.last_name, "Aliquyam")
self.assertEqual(check_contributor.email, "567@web.de")
def test_login_key(self):
environ = self.app.extra_environ
self.app.extra_environ = {}
self.assertRedirects(self.app.get(reverse("evap.results.views.index"), extra_environ={}), "/?next=/results/")
self.app.extra_environ = environ
user = User.objects.all()[0]
userprofile = UserProfile.get_for_user(user)
userprofile.generate_login_key()
userprofile.save()
url_with_key = reverse("evap.results.views.index") + "?userkey=%s" % userprofile.login_key
self.app.get(url_with_key)
def test_create_questionnaire(self):
page = self.app.get(reverse("fsr_root"), user="fsr.user")
# create a new questionnaire
page = page.click("[Cc]reate [Nn]ew [Qq]uestionnaire")
questionnaire_form = lastform(page)
questionnaire_form['name_de'] = "Test Fragebogen"
questionnaire_form['name_en'] = "test questionnaire"
questionnaire_form['public_name_de'] = "Oeffentlicher Test Fragebogen"
questionnaire_form['public_name_en'] = "Public Test Questionnaire"
questionnaire_form['question_set-0-text_de'] = "Frage 1"
questionnaire_form['question_set-0-text_en'] = "Question 1"
questionnaire_form['question_set-0-kind'] = "T"
questionnaire_form['index'] = 0
page = questionnaire_form.submit().follow()
# retrieve new questionnaire
questionnaire = Questionnaire.objects.get(name_de="Test Fragebogen", name_en="test questionnaire")
self.assertEqual(questionnaire.question_set.count(), 1, "New questionnaire is empty.")
def test_create_empty_questionnaire(self):
page = self.app.get(reverse("fsr_root"), user="fsr.user")
# create a new questionnaire
page = page.click("[Cc]reate [Nn]ew [Qq]uestionnaire")
questionnaire_form = lastform(page)
questionnaire_form['name_de'] = "Test Fragebogen"
questionnaire_form['name_en'] = "test questionnaire"
questionnaire_form['public_name_de'] = "Oeffentlicher Test Fragebogen"
questionnaire_form['public_name_en'] = "Public Test Questionnaire"
questionnaire_form['index'] = 0
page = questionnaire_form.submit()
assert "You must have at least one of these" in page
# retrieve new questionnaire
with self.assertRaises(Questionnaire.DoesNotExist):
Questionnaire.objects.get(name_de="Test Fragebogen", name_en="test questionnaire")
def test_copy_questionnaire(self):
page = self.app.get(reverse("fsr_root"), user="fsr.user")
# create a new questionnaire
page = page.click("Seminar")
page = page.click("Copy")
questionnaire_form = lastform(page)
questionnaire_form['name_de'] = "Test Fragebogen (kopiert)"
questionnaire_form['name_en'] = "test questionnaire (copied)"
questionnaire_form['public_name_de'] = "Oeffentlicher Test Fragebogen (kopiert)"
questionnaire_form['public_name_en'] = "Public Test Questionnaire (copied)"
page = questionnaire_form.submit().follow()
# retrieve new questionnaire
questionnaire = Questionnaire.objects.get(name_de="Test Fragebogen (kopiert)", name_en="test questionnaire (copied)")
self.assertEqual(questionnaire.question_set.count(), 2, "New questionnaire is empty.")
def test_assign_questionnaires(self):
page = self.app.get(reverse("fsr_root"), user="fsr.user")
# assign questionnaire to courses
page = page.click("Semester 1 \(en\)", index=0)
page = page.click("Assign Questionnaires")
assign_form = lastform(page)
assign_form['Seminar'] = [1]
assign_form['Vorlesung'] = [1]
page = assign_form.submit().follow()
# get semester and check
semester = Semester.objects.get(pk=1)
questionnaire = Questionnaire.objects.get(pk=1)
for course in semester.course_set.all():
self.assertEqual(course.general_contribution.questionnaires.count(), 1)
self.assertEqual(course.general_contribution.questionnaires.get(), questionnaire)
def test_remove_responsibility(self):
page = self.app.get(reverse("fsr_root"), user="fsr.user")
# remove responsibility in lecturer's checkbox
page = page.click("Semester 1 \(en\)", index=0)
page = page.click("Course 1 \(en\)")
form = lastform(page)
        # add one questionnaire so the missing-questionnaire error message does not hide the responsibility error
form['general_questions'] = True
form['contributions-0-responsible'] = False
page = form.submit()
assert "No responsible contributor found" in page
|
# coding=utf-8
#
# @lc app=leetcode id=105 lang=python
#
# [105] Construct Binary Tree from Preorder and Inorder Traversal
#
# https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/description/
#
# algorithms
# Medium (39.51%)
# Likes: 1725
# Dislikes: 49
# Total Accepted: 226.4K
# Total Submissions: 547.8K
# Testcase Example: '[3,9,20,15,7]\n[9,3,15,20,7]'
#
# Given preorder and inorder traversal of a tree, construct the binary tree.
#
# Note:
# You may assume that duplicates do not exist in the tree.
#
# For example, given
#
#
# preorder = [3,9,20,15,7]
# inorder = [9,3,15,20,7]
#
# Return the following binary tree:
#
#
# 3
# / \
# 9 20
# / \
# 15 7
#
#
# Definition for a binary tree node.
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
if not preorder:
return
root_val = preorder[0]
root = TreeNode(root_val)
root_index = inorder.index(root_val)
left_inorder = inorder[:root_index]
right_inorder = inorder[root_index+1:]
left_preorder = preorder[1:1+len(left_inorder)]
right_preorder = preorder[1+len(left_inorder):]
root.left = self.buildTree(left_preorder, left_inorder)
root.right = self.buildTree(right_preorder, right_inorder)
return root
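# A minimal alternative sketch (not part of the original solution): the same
# construction in O(n) time, pre-indexing inorder positions in a dict to avoid
# the repeated O(n) slicing and .index() calls used above.
class SolutionIndexed(object):
    def buildTree(self, preorder, inorder):
        index = {val: i for i, val in enumerate(inorder)}
        self.pre_idx = 0
        def build(lo, hi):
            # build the subtree covering inorder[lo:hi]
            if lo >= hi:
                return None
            root = TreeNode(preorder[self.pre_idx])
            self.pre_idx += 1
            mid = index[root.val]
            root.left = build(lo, mid)
            root.right = build(mid + 1, hi)
            return root
        return build(0, len(inorder))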
# if __name__ == '__main__':
#     s = Solution()
#     print(s.buildTree([3,9,20,15,7], [9,3,15,20,7]))
|
from CursoEmVideoPython import titulo
def cadastro(nome, idade):
    # append the new person to the registry file, then close it
    f = open('pessoas_cadastradas.txt', 'a')
    f.write('\n')
    f.write(f'{nome:<22}{idade} years')
    f.close()
    print(f'{nome}, {idade} years old, registered successfully')
def exibir():
    titulo.destaque('Registered people')
    f = open('pessoas_cadastradas.txt', 'r')
    for linha in f:
        print(linha, end='')
    f.close()
    print()
while True:
    titulo.destaque('Main menu')
    print('\33[32m1 - \33[34mRegister a new person')
    print('\33[32m2 - \33[34mShow registered people')
    print('\33[32m3 - \33[34mQuit\33[m')
    titulo.linha(30)
    try:
        resp = int(input('\33[32mWhat is your command? \33[m'))
    except ValueError:
        print('\33[31mInvalid option!\33[m')
    else:
        if resp == 1:
            while True:
                nome = str(input('Name: '))
                if not nome.isnumeric() and nome != '':
                    try:
                        idade = int(input('Age: '))
                    except ValueError:
                        print('\33[31mEnter a valid age!\33[m')
                    else:
                        cadastro(nome, idade)
                        break
                else:
                    print('\33[31mEnter a valid name!\33[m')
        elif resp == 2:
            exibir()
        elif resp == 3:
            titulo.destaque('Closing the program...')
            break
        else:
            print('\33[31mInvalid option!\33[m')
|
from telegram.ext import Updater, CommandHandler
from telegram import ParseMode
import conf
def start(bot, update):
update.message.reply_text("The chat id to config is: `{}`".format(update.message.chat_id),
parse_mode=ParseMode.MARKDOWN)
tg = Updater(conf.SECRET)
tg.dispatcher.add_handler(CommandHandler('start', start))
tg.start_polling()
tg.idle()
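# Note: `conf` above is assumed to be a local module exposing SECRET, the bot
# token handed to Updater, e.g. a conf.py containing:
#   SECRET = "123456789:AAF..."  # hypothetical token value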
|
"""
Contains the constructs for defining the data model when deserialising
the yaml documents, and forms the basis of the intercomparison workflow.
"""
import math
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple, Union
import attr
from affine import Affine # type: ignore
import numpy # type: ignore
import rasterio # type: ignore
import structlog # type: ignore
_LOG = structlog.get_logger()
@attr.s()
class GqaColours:
"""
    Colours section of the GQA report. Refers to a histogram count of
    GCPs (Ground Control Points).
"""
blue: int = attr.ib(default=0)
green: int = attr.ib(default=0)
red: int = attr.ib(default=0)
teal: int = attr.ib(default=0)
yellow: int = attr.ib(default=0)
def flatten(self):
"""Flatten the class attributes into a dict."""
converted = attr.asdict(self)
result = {f"colour_{key}": val for key, val in converted.items()}
return result
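# Quick illustration of the flatten() convention used throughout this module
# (hypothetical counts): nested sections become prefixed flat keys, e.g.
#   GqaColours(blue=1, green=2).flatten()
#   -> {'colour_blue': 1, 'colour_green': 2, 'colour_red': 0,
#       'colour_teal': 0, 'colour_yellow': 0}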
@attr.s()
class AbsIterativeMean:
"""
Geometric quality: Absolute value iterative mean.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class Abs:
"""
Geometric quality: Absolute value.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class IterativeMean:
"""
Geometric quality: Iterative mean.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class IterativeStddev:
"""
Geometric quality: Iterative standard deviation.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class Mean:
"""
Geometric quality: Mean.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class Stddev:
"""
Geometric quality: Standard deviation.
"""
x: float = attr.ib(default=math.nan)
y: float = attr.ib(default=math.nan)
xy: float = attr.ib(default=math.nan)
@attr.s()
class Cep90:
"""
Geometric quality: Circular Error Probable 90%.
"""
cep90: float = attr.ib(default=math.nan)
@attr.s(auto_attribs=True)
class Residual:
"""
Ground Control Point residual analysis section of the geometric
quality metadata.
"""
abs_iterative_mean: Union[AbsIterativeMean, None] = None
abs: Union[Abs, None] = None
iterative_mean: Union[IterativeMean, None] = None
iterative_stddev: Union[IterativeStddev, None] = None
mean: Union[Mean, None] = None
stddev: Union[Stddev, None] = None
cep90: float = attr.ib(default=math.nan)
def flatten(self):
"""Flatten the class attributes into a dict."""
converted = attr.asdict(self)
result = {}
for key in converted:
if isinstance(converted[key], dict):
for key2 in converted[key]:
result[f"{key}_{key2}"] = converted[key][key2]
else:
result[key] = converted[key]
return result
@attr.s(auto_attribs=True)
class GeometricQuality:
"""
Geometric Quality metadata of the proc-info metadata document.
    Note: colors is intentionally spelt the US way.
"""
colors: GqaColours = attr.ib(default=GqaColours())
error_message: str = attr.ib(default="")
final_qa_count: int = attr.ib(default=0)
granule: str = attr.ib(default="")
ref_date: str = attr.ib(default="")
ref_source: str = attr.ib(default="")
ref_source_path: str = attr.ib(default="")
residual: Union[Residual, None] = None
fields: Dict[str, Any] = attr.ib(init=False, repr=False)
def __attrs_post_init__(self):
data = self.colors.flatten()
data["final_qa_count"] = self.final_qa_count
for key, value in self.residual.flatten().items():
data[key] = value
self.fields = data
@attr.s(auto_attribs=True)
class Brdf:
"""
Refers to the Alpha-1 and Alpha-2 BRDF components for a given band.
"""
alpha_1: Union[Dict[str, float], None] = None
alpha_2: Union[Dict[str, float], None] = None
id: Union[List[Any], None] = None
tier: Union[str, None] = None
def flatten(self):
"""Flatten the class attributes into a dict."""
skip = ["id", "tier"]
data = attr.asdict(self, filter=lambda attr, value: attr.name not in skip)
result = {}
for key, value in data.items():
for key2, value2 in value.items():
result[f"{key}_{key2}"] = value2
return result
@attr.s(auto_attribs=True)
class Aerosol:
"""
Contains the information relating to the aerosol component used
within the processing pipeline.
"""
value: Union[float, None] = None
id: Union[List[Any], None] = None
tier: Union[str, None] = None
@attr.s(auto_attribs=True)
class Ozone:
"""
Contains the information relating to the ozone component used
within the processing pipeline.
"""
value: Union[float, None] = None
id: Union[List[Any], None] = None
tier: Union[str, None] = None
@attr.s(auto_attribs=True)
class WaterVapour:
"""
Contains the information relating to the water vapour component used
within the processing pipeline.
"""
value: Union[float, None] = None
id: Union[List[Any], None] = None
tier: Union[str, None] = None
@attr.s(auto_attribs=True)
class AncillaryInfo:
"""
Ancillary value information of the proc-info metadata document.
"""
aerosol: Union[Aerosol, None] = None
brdf: Union[Brdf, None] = None
ozone: Union[Ozone, None] = None
water_vapour: Union[WaterVapour, None] = None
def flatten(self) -> Dict[str, Any]:
"""
Converts the class attributes and their values to a dict.
"""
skip = ["id", "tier", "brdf"]
data = attr.asdict(self, filter=lambda attr, value: attr.name not in skip)
result = self.brdf.flatten()
for key, value in data.items():
result[key] = value["value"]
return result
@attr.s(auto_attribs=True)
class SoftwareVersion:
"""
Software version information from the proc-info metadata document.
"""
name: Union[str, None] = None
url: Union[str, None] = None
version: Union[str, None] = None
@attr.s(auto_attribs=True)
class GranuleProcInfo:
"""
Basic class containing information pertaining to the processing
info for a given granule.
"""
geometric_quality: Union[GeometricQuality, None] = None
ancillary: Union[AncillaryInfo, None] = None
software_versions: Union[Dict[str, SoftwareVersion], None] = None
def _convert_transform(transform: Optional[List[Any]]) -> Optional[Affine]:
    """Create an Affine transform; None is passed through so attrs defaults work."""
    if transform is None:
        return None
    if len(transform) == 9:
        # a full 3x3 matrix was supplied; Affine needs only the first two rows
        affine_transform = Affine(*transform[:-3])
    else:
        affine_transform = Affine(*transform)
    return affine_transform
@attr.s(auto_attribs=True)
class Measurement:
"""
Refers to an individual measurement within an ODC document.
Attributes:
----------
:path:
Name of the file.
:parent_dir:
Name of the parent directory the file resides in.
:file_format:
E.g. GeoTIFF, HDF5 etc.
:band:
Integer band index (1, n).
:dataset_pathname:
For container files like HDF5, the pathname to the dataset
within the file.
:transform:
Affine transformation.
:shape:
Shape in (height, width) of the dataset.
:nodata:
The no data value of the dataset.
    :closed:
        Whether the file and/or dataset is currently closed.
:dataset:
Data access layer.
"""
# TODO: HDF5 measurement class to simplify IO on measurements
# within the same file. Required for upcoming sensitivity analysis.
path: str
parent_dir: str
file_format: str
band: Optional[int] = attr.ib()
dataset_pathname: Optional[str] = None
transform: List = attr.ib(converter=_convert_transform, default=None)
    shape: Union[Tuple[int, int], None] = attr.ib(converter=lambda value: tuple(value) if value is not None else None, default=None)
nodata: Any = None
closed: bool = True
dataset: Any = None
@band.default
def band_default(self):
"""Set the default band id number."""
# no need to have a band index number with HDF5
if self.file_format == "HDF5":
value = None
else:
value = 1
return value
def pathname(self) -> Path:
"""Return full pathname to the file."""
pathname = Path(self.parent_dir, self.path)
return pathname
def open(self):
"""Open the dataset."""
pathname = self.pathname()
if not pathname.exists():
msg = "pathname not found"
_LOG.info(msg, pathname=str(pathname))
raise OSError(msg)
self.dataset = rasterio.open(pathname)
self.nodata = self.dataset.nodata
self.closed = False
def close(self):
"""Close the dataset."""
self.dataset.close()
self.closed = True
def read(self) -> numpy.ndarray:
"""
Basic method to read the data into memory.
Can very easily be expanded to read subsets, resample etc.
"""
data = self.dataset.read(self.band)
return data
@attr.s(auto_attribs=True)
class Granule:
"""
Basic class containing information pertaining to the granule.
"""
granule_id: str = attr.ib(default="")
region_code: str = attr.ib(default="")
product_name: str = attr.ib(default="")
parent_uuid: str = attr.ib(default="")
framing: str = attr.ib(default="")
measurements: Union[Dict[str, Measurement], None] = None
proc_info: str = attr.ib(default="")
|
import sys
# Package brave split into two paths.
# path2/brave/robin.py and path3/brave/_robin.so
sys.path.insert(0, 'path2')
sys.path.insert(0, 'path3')
from brave import robin
assert robin.run() == "AWAY!"
|
"""empty message
Revision ID: 4b7b5a7ddc5c
Revises: 2a5fdb834d35
Create Date: 2016-02-01 18:31:56.369000
"""
# revision identifiers, used by Alembic.
revision = '4b7b5a7ddc5c'
down_revision = '2a5fdb834d35'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('restaurant', sa.Column('logo_path', sa.String(length=200), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('restaurant', 'logo_path')
### end Alembic commands ###
|
class Complex:
    def __init__(self, x, y):
        self.x = x
        self.y = y
    def add(self, other):
        return Complex(self.x + other.x, self.y + other.y)
    def subtract(self, other):
        return Complex(self.x - other.x, self.y - other.y)
    def multiply(self, other):
        # (x1 + y1*i)(x2 + y2*i) = (x1*x2 - y1*y2) + (x1*y2 + y1*x2)*i
        return Complex(self.x * other.x - self.y * other.y,
                       self.x * other.y + self.y * other.x)
    def mod(self):
        return (self.x ** 2 + self.y ** 2) ** 0.5
    def conjugate(self):
        return Complex(self.x, -self.y)
    def inverse(self):
        return Complex(self.x / (self.mod() ** 2),
                       -self.y / (self.mod() ** 2))
    def display(self):
        if self.y >= 0:
            print("{}+{}i".format(self.x, self.y))
        else:
            print("{}-{}i".format(self.x, -self.y))
|
from main.base_element import BaseElement
from selenium.webdriver.common.by import By
from main.base_page import BasePage
class ProductPage(BasePage):
@property
def product_name(self):
locator = (By.CSS_SELECTOR, 'div.box h1')
return BaseElement(driver=self.driver, locator=locator)
@property
def product_price(self):
locator = (By.CSS_SELECTOR, 'div.box p#price')
return BaseElement(driver=self.driver, locator=locator)
@property
def add_to_card_button(self):
locator = (By.CSS_SELECTOR, 'a[id=buttonCart]')
return BaseElement(driver=self.driver, locator=locator)
@property
def basket_button(self):
locator = (By.XPATH, '//*[@id="numItemsInCart"]')
        # The element is created twice to avoid selenium.common.exceptions.StaleElementReferenceException:
        # "stale element reference: element is not attached to the page document"
_ = BaseElement(driver=self.driver, locator=locator)
return BaseElement(driver=self.driver, locator=locator)
|
#############
# RPS LOGIC
moves = ["rock", "paper", "scissors", "lose"]
def get_win(a, b):
    # Take the lower of the two move indexes chosen by Alicia and Bruce.
    lower = min(moves.index(a), moves.index(b))
    # Take the higher of the two move indexes chosen by Alicia and Bruce.
    higher = max(moves.index(a), moves.index(b))
# Arbitrarily assign players to a lower or higher variable
lower_player = "Alicia"
higher_player = "Bruce"
# Compare moves by player and swap name assignments
if lower == moves.index(b):
lower_player = "Bruce"
if higher == moves.index(a):
higher_player = "Alicia"
    # Compare indexes and accommodate players choosing the same move or not at all
if lower == 0 and higher == 2:
return lower_player
elif lower == higher <= 2:
return "everyone"
elif lower == 3 and higher == 3:
return "no one"
elif higher == 3 and lower <= 2:
return lower_player
else:
return higher_player
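# A few illustrative rounds (hypothetical inputs):
print(get_win("rock", "scissors"))   # rock beats scissors -> "Alicia"
print(get_win("paper", "paper"))     # same move -> "everyone"
print(get_win("lose", "lose"))       # neither played -> "no one"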
|
import codecs
import json
from collections import namedtuple
class Settings:
def __init__(self, data_dir):
self._data_dir = data_dir
self._setting_path = self._data_dir + "/settings.json"
self.default_prefix = "^"
with codecs.open(self._setting_path, "r", encoding="utf8") as f:
_json = json.load(f)
self.token = _json["token"]
self.prefix = _json["prefixes"]
try:
self.extra = namedtuple("settings", _json["extra"].keys())(*_json["extra"].values())
except KeyError:
pass
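# Expected layout of <data_dir>/settings.json (hypothetical values); the
# "extra" key is optional and, when present, is exposed as a namedtuple:
# {
#     "token": "YOUR-BOT-TOKEN",
#     "prefixes": ["^", "!"],
#     "extra": {"owner_id": 1234}
# }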
|
"""
This module handles the execution of the users module. It should ideally
be called in an subprocess (like code_runner does) in a secure enviroment
with all code files prepared.
This overhead is needed to avoid having extra testcases loaded by the grader.
`test_module` loads the tester code loaded in a file. In that For each test, an
async request is fired (run in another process). It is resolved within the
`resolve_testcase_run` function. If that call timeouts, it is then terminated.
See `resolve_testcase_run` for output format description.
"""
import grader
from time import time, sleep
from .program_container import ProgramContainer
from .utils import get_traceback, get_error_message, import_module, dump_json, load_json
RESULT_DEFAULTS = {
"log": [],
"error_message": "",
"traceback": ""
}
def call_all(function_list, *args, **kwargs):
    for fun in function_list:
        fun(*args, **kwargs)
def call_test_function(test_index, tester_path, solution_path):
""" Called in another process. Finds the test `test_name`, calls the
pre-test hooks and tries to execute it.
If an exception was raised by call, prints it to stdout """
import_module(tester_path)
test_name = grader.testcases.get_name(test_index)
test_function = grader.testcases[test_name]
# pre-test hooks
pre_hook_info = {
"test_name": test_name,
"tester_path": tester_path,
"solution_path": solution_path,
"extra_args": [],
"extra_kwargs": {}
}
call_all(grader.get_setting(test_name, "pre-hooks"), pre_hook_info)
    results = RESULT_DEFAULTS.copy()
    # start the user's program; keep `module` defined even if construction fails
    module = None
    try:
        module = ProgramContainer(solution_path, results)
        while not hasattr(module, "module"):
            sleep(0.001)
        module.condition.acquire()
        test_function(
            module,
            *pre_hook_info["extra_args"],
            **pre_hook_info["extra_kwargs"]
        )
    except Exception as e:
        if module is not None and module.caughtException is not None:
            e = module.caughtException
        results["error_message"] = get_error_message(e)
        results["traceback"] = get_traceback(e)
        raise
    finally:
        if module is not None:
            module.restore_io()
        print(dump_json(results))
def do_testcase_run(test_name, tester_path, solution_path, options):
""" Calls the test, checking if it doesn't raise an Exception.
Returns a dictionary in the following form::
        {
            "success": boolean,
            "traceback": string ("" if None),
            "error_message": string,
            "time": string (execution time, rounded to 3 decimal digits),
            "description": string (test name/its description)
        }
    If the test times out, traceback is set to "Timeout".
    Post-hooks can manipulate the test results before returning.
"""
from grader.code_runner import call_test
options["timeout"] = grader.get_setting(test_name, "timeout")
test_index = grader.testcases.indexOf(test_name)
start = time()
success, stdout, stderr = call_test(test_index, tester_path, solution_path, options)
end = time()
result = RESULT_DEFAULTS.copy()
if (end - start) > options["timeout"]:
result["error_message"] = "Timeout"
result["traceback"] = "Timeout"
else:
try:
result = load_json(stdout)
except Exception as e:
result["traceback"] = stdout
result["stderr"] = stderr
result.update(
success=success,
description=test_name,
time=("%.3f" % (end - start))
)
# after test hooks - cleanup
call_all(grader.get_setting(test_name, "post-hooks"), result)
return result
|
import click
import logging
from .utils import get_pangea_group
from pangea_api.contrib.tagging import Tag
from pangea_api import (
Knex,
User,
)
from .api import process_group
logger = logging.getLogger(__name__) # Same name as calling module
@click.group('pangea')
def capalyzer_pangea_cli():
pass
@capalyzer_pangea_cli.command('make-tables')
@click.option('-l', '--log-level', default=20)
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.argument('org_name')
@click.argument('grp_name')
def cli_make_tables(log_level, endpoint, email, password, org_name, grp_name):
logging.basicConfig(
level=log_level,
format='%(levelname)s: %(message)s',
)
pangea_group = get_pangea_group(org_name, grp_name, email=email, password=password, endpoint=endpoint)
process_group(pangea_group)
@capalyzer_pangea_cli.command('tag')
@click.option('-l', '--log-level', default=20)
@click.option('--endpoint', default='https://pangea.gimmebio.com')
@click.option('-e', '--email', envvar='PANGEA_USER')
@click.option('-p', '--password', envvar='PANGEA_PASS')
@click.option('-t', '--tag-name', default='CAPalyzer')
@click.option('--strict/--permissive', default=False)
def cli_process_tag(log_level, endpoint, email, password, tag_name, strict):
logging.basicConfig(
level=log_level,
format='%(levelname)s: %(message)s',
)
knex = Knex(endpoint)
if email and password:
User(knex, email, password).login()
tag = Tag(knex, tag_name).get()
for pangea_group in tag.get_sample_groups():
logger.info(pangea_group)
process_group(pangea_group, strict=strict)
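# Example invocations (hypothetical console-script and org/group names, since
# the entry point mounting `capalyzer_pangea_cli` is not shown in this file):
#   capalyzer pangea make-tables -e user@example.com -p secret MyOrg MyGroup
#   capalyzer pangea tag --tag-name CAPalyzer --strict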
|
"""
Workaround for having too many threads running on 32-bit systems when
logging to buildlogger that still allows periodically flushing messages
to the buildlogger server.
This is because a utils.timer.AlarmClock instance is used for each
buildlogger.BuildloggerTestHandler, but only dismiss()ed when the Python
process is about to exit.
"""
import threading
from ..utils import queue
_LOGGER_QUEUE = queue.Queue()
_FLUSH_THREAD_LOCK = threading.Lock()
_FLUSH_THREAD = None
def start_thread():
"""
Starts the flush thread.
"""
global _FLUSH_THREAD
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is not None:
raise ValueError("FlushThread has already been started")
_FLUSH_THREAD = _FlushThread()
_FLUSH_THREAD.start()
def stop_thread():
"""
Signals the flush thread to stop and waits until it does.
"""
with _FLUSH_THREAD_LOCK:
if _FLUSH_THREAD is None:
raise ValueError("FlushThread hasn't been started")
# Add sentinel value to indicate when there are no more loggers to process.
_LOGGER_QUEUE.put(None)
_FLUSH_THREAD.join()
def close_later(logger):
"""
Adds 'logger' to the queue so that it is closed later by the flush
thread.
"""
_LOGGER_QUEUE.put(logger)
class _FlushThread(threading.Thread):
"""
Asynchronously flushes and closes logging handlers.
"""
def __init__(self):
"""
Initializes the flush thread.
"""
threading.Thread.__init__(self, name="FlushThread")
# Do not wait to flush the logs if interrupted by the user.
self.daemon = True
def run(self):
"""
Continuously shuts down loggers from the queue.
"""
while True:
logger = _LOGGER_QUEUE.get()
try:
if logger is None:
# Sentinel value received, so exit.
break
_FlushThread._shutdown_logger(logger)
finally:
_LOGGER_QUEUE.task_done()
@staticmethod
def _shutdown_logger(logger):
"""
Flushes and closes all handlers of 'logger'.
"""
for handler in logger.handlers:
handler.flush()
handler.close()
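# Typical lifecycle of this module (a sketch using only the functions above):
#   start_thread()            # once, at process startup
#   close_later(some_logger)  # instead of flushing/closing handlers inline
#   stop_thread()             # at shutdown; enqueues the sentinel, then joins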
|
__author__ = "jacksonsr45@gmail.com"
new_skill = {
'knight': {
'attack speed': {
},
'move speed': {
},
'attack distance': {
},
'defense': {
},
'magic attack': {
},
'dodge': {
},
},
'paladin': {
'attack speed': {
},
'move speed': {
},
'attack distance': {
},
'defense': {
},
'magic attack': {
},
'dodge': {
},
},
'mage': {
'attack speed': {
},
'move speed': {
},
'attack distance': {
},
'defense': {
},
'magic attack': {
},
'dodge': {
},
},
'ranger': {
'attack speed': {
},
'move speed': {
},
'attack distance': {
},
'defense': {
},
'magic attack': {
},
'dodge': {
},
},
}
|
"""
tomaty.py
~~~~~~~~
This module is the main module for tomaty and contains Tomaty, the main window
class. tomaty is a gtk based application that implements the Pomodoro technique
to help users focus better on their work and encourage healthy lifestyle habits.
This continues to be a work in progress. Expect many changes between 0.9.0dev
and 2.0.0!
:copyright: @ 2018
:author: elias julian marko garcia
:license: MIT, see LICENSE
"""
from datetime import timedelta, datetime
from simpleaudio import WaveObject
from os import path
from tomaty.tomaty_notebook import TomatyNotebook, TomatyPage
from tomaty.tomaty_label import TimerLabel, StatsLabel
from tomaty.tomaty_button import TomatyButton
from tomaty.lib.serialization import tomaty_serialization
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk, GLib
TOMA_MINUTES = 25
BREAK_MINUTES = 5
EXTENDED_BREAK_MINUTES = 30
TOMA_SET = 4
# messages and or templates with Pango markup used by app.
TIMER_FRMT = """
<span font='34'>{}</span>
"""
TOMA_MSG = """
<span font='16'>Tomatoro Done!\nStart Break?</span>"""
FINAL_TOMA_MSG = """
<span font='16'>Toma Set Completed!\nStart Extended Break?</span>"""
BREAK_MSG = """
<span font='16'>Break Over!\nStart Tomatoro?</span>"""
TOMA_RESTART_MSG = """
<span font='16'>Start Tomatoro?</span>"""
BREAK_RESTART_MSG = """
<span font='16'>Start Break?</span>"""
COUNT = """
<span font='11'><tt>Tomatoros Completed: {}</tt></span>"""
TOTAL_TIME = """
<span font='11'><tt>Total Time: {}</tt></span>"""
class Tomaty(Gtk.Window):
"""Tomaty is the main window for the tomaty app, and holds all data and
objects used by the tomaty app."""
def __init__(self):
# call to super, sets window title
super(Tomaty, self).__init__(title="tomaty :: focus!")
# attributes
self.set_border_width(5)
self.set_resizable(False)
self.tomatosCompleted = 0
self.running = False
self.breakPeriod = False
self.tomaTime = timedelta(minutes=TOMA_MINUTES)
self.breakTime = timedelta(minutes=BREAK_MINUTES)
self.extendedBreakTime = timedelta(minutes=EXTENDED_BREAK_MINUTES)
self.remTime = self.tomaTime
self.tomatoroLength = self.tomaTime + self.breakTime
# source id for tracking counter
self.eventSourceID = None
# create notebook, add as main and sole child widget of window
self.notebook = TomatyNotebook(250, 150)
self.add(self.notebook)
# timer page setup
self.timerPage = TomatyPage()
self.timerLabel = TimerLabel(
label=TIMER_FRMT.format(str(self.remTime)[2:]))
self.timerPage.pack_start(self.timerLabel, True, True, 0)
# start button. connect to clickStart class method for click event.
self.tomatyButton = TomatyButton(tmargin=5, bmargin=5)
self.tomatyButton.connect("clicked", self.clickStart)
self.timerPage.pack_start(self.tomatyButton, False, False, 0)
# statistics page setup
self.tomaty_serializer = tomaty_serialization.TomatySerializer()
self.statsPage = TomatyPage()
# counter label for cycles (1 toma + 1 break = 1 cycle)
self.countLabel = StatsLabel(
label=COUNT.format(self.tomatosCompleted), smargin=10, emargin=10)
self.total_time = self.tomaty_serializer.total_time
self.totalLabel = StatsLabel(
label=TOTAL_TIME.format(self.total_time),
emargin=25,
justify=Gtk.Justification.LEFT)
self.statsPage.pack_start(self.countLabel, False, False, 0)
self.statsPage.pack_start(self.totalLabel, False, False, 0)
# add pages to notebook. setup complete.
self.notebook.append_page(
child=self.timerPage, tab_label=Gtk.Label(label='tomatoro'))
self.notebook.append_page(
child=self.statsPage, tab_label=Gtk.Label(label="stats"))
self.connect('delete_event', self.destory)
def destory(self, widget=None, *data):
"""
        Save data before closing the application
:param widget: widget receiving the 'delete-event' signal
:param data: optional data received from connect
"""
self.tomaty_serializer.save_tomotoro(self.total_time)
Gtk.main_quit()
def clickStart(self, tomatyButton):
"""clickStart initiates the countdown timer for the current phase of a
tomatoro. when the timer is already running, it cancels the current
event and prompts for restarting.
:param tomatyButton: the button object used by the method. this
parameter is mandated by gtk specification when connected to a
gtk.Button
clickStart() uses a set of attribute booleans to check whether the app
is currently, `running` running and what phase it currently is in,
`breakPeriod`. From there, it determines whether to start the counter
or restart it, and correspondingly whether to add an object to Gtk's
event loop via GLib.timeout_add_seconds() or to remove it via
GLib.SOURCE_REMOVE
"""
# check if running
if self.running:
# cancel the timer if running, cleanup for restart.
self.running = False
self.tomatyButton.updateButton()
if self.breakPeriod:
self.timerLabel.set_markup(str=BREAK_RESTART_MSG)
# reset break time to appropriate interval
if not self.tomatosCompleted % TOMA_SET:
self.remTime = self.extendedBreakTime
else:
self.remTime = self.breakTime
else:
self.timerLabel.set_markup(str=TOMA_RESTART_MSG)
self.remTime = self.tomaTime
temp = self.eventSourceID
self.eventSourceID = None
GLib.source_remove(temp)
else:
self.running = True
self.tomatyButton.updateButton()
# check if break, start timer with correct interval
if self.breakPeriod:
# check if cycle finished, set time appropriately
if not self.tomatosCompleted % TOMA_SET:
self.remTime = self.extendedBreakTime
else:
self.remTime = self.breakTime
self.timerLabel.set_markup(
str=TIMER_FRMT.format(str(self.remTime)[2:]))
else:
self.remTime = self.tomaTime
self.timerLabel.set_markup(
str=TIMER_FRMT.format(str(self.remTime)[2:]))
            # always use named arguments, especially with
            # GLib.timeout_add_seconds() due to its other optional params
# we set the time interval to 1 second and add countDown
# to the Gtk event loop.
if self.eventSourceID:
GLib.source_remove(self.eventSourceID)
self.eventSourceID = GLib.timeout_add_seconds(
interval=1, function=self.countDown)
else:
self.eventSourceID = GLib.timeout_add_seconds(
interval=1, function=self.countDown)
def countDown(self):
"""countDown runs the decrement logic of the timer by checking for
whether `remTime` is 0.
countDown checks whether remTime is 0 on each call within the Gtk event
loop which occurs ~each second.
"""
if self.remTime == timedelta(seconds=0):
alarm()
self.running = False
self.tomatyButton.updateButton()
if self.breakPeriod:
self.timerLabel.set_markup(str=BREAK_MSG)
self.breakPeriod = False
self.total_time = self.total_time + self.breakTime
self.totalLabel.set_markup(
str=TOTAL_TIME.format(str(self.total_time)))
else:
self.tomatosCompleted += 1
self.countLabel.set_markup(
str=COUNT.format(self.tomatosCompleted))
self.total_time = self.total_time + self.tomaTime
self.totalLabel.set_markup(
str=TOTAL_TIME.format(str(self.total_time)))
# check if end of cycle, set message accordingly
if not self.tomatosCompleted % TOMA_SET:
self.timerLabel.set_markup(str=FINAL_TOMA_MSG)
else:
self.timerLabel.set_markup(str=TOMA_MSG)
self.breakPeriod = True
temp = self.eventSourceID
self.eventSourceID = None
return GLib.source_remove(temp)
if not self.running:
temp = self.eventSourceID
self.eventSourceID = None
return GLib.source_remove(temp)
self.timerLabel.set_markup(str=TIMER_FRMT.format(self.tickTock()))
# signal to continue countdown within main loop
return GLib.SOURCE_CONTINUE
def tickTock(self):
"""tickTock decrements the counter
:return: a string of the timedelta for remaining time
"""
self.remTime = self.remTime - timedelta(seconds=1)
return str(self.remTime)[2:]
def alarm():
"""calls alarm for the end of a cycle"""
resourcePath = path.join(path.split(__file__)[0], 'resources')
alarmPath = path.join(path.join(resourcePath, 'audio'), 'alarm.wav')
wav_obj = WaveObject.from_wave_file(alarmPath)
wav_obj.play()
def run():
t = Tomaty()
t.show_all()
t.set_keep_above(True)
Gtk.main()
|
from random import randint
def guess(low=1, high=3):
    # parameter names chosen to avoid shadowing the built-ins min and max
    return randint(low, high)
def user_game(default=5):
# Get a limit value from the user, this value is going to be the limit of
# the guessing, if not provided the default will be used as limit
limit = input(f'Enter the limit for the guess (default is {default}): ')
limit = int(limit) if limit else default
# Computer should guess a number here
    comp_guess = guess(high=limit)
user_guess = 0
# Run the loop until user guess is right, then show number of guesses made
count = 0
while user_guess != comp_guess:
user_guess = int(input(f'Enter what the computer guessed between 1 to {limit}: '))
if user_guess > comp_guess:
print('Sorry, guess again, too high')
elif user_guess < comp_guess:
print('Sorry, guess again, too low')
count += 1
return f'You got it right after {count} guesses'
def comp_game():
"""
Computer will randomly select a limit, user will guess a value and computer
will try to get that value
"""
# random limit
    limit = guess(low=5, high=1000)
minimum = 1
# User should guess a number here
user_guess = int(input(f'Pick a number between 1 to {limit}: '))
    # Validate the user's pick
    if user_guess < 1 or user_guess > limit:
        raise ValueError(f'Pick must be between 1 and {limit}')
comp_guess = 0
# Run the loop until user guess is right, then show number of guesses made
count = 0
while comp_guess != user_guess:
        comp_guess = guess(low=minimum, high=limit)
# Block of code below makes computer make faster guesses
if comp_guess > user_guess:
limit = comp_guess
elif comp_guess < user_guess:
minimum = comp_guess
# ======================================================
count += 1
return f'Computer got it right after {count} guesses'
|
# Uses python3
import sys
def get_optimal_value(capacity, weights, values):
    # sort ascending by value-per-weight ratio, so the best item ends up last
    sorted_by_fraction = sorted(zip(weights, values), key=lambda item: item[1] / item[0])
    value = 0.
    while capacity > 0 and sorted_by_fraction:
        weight_i, value_i = sorted_by_fraction.pop()  # pop the item with the best ratio
        x = 1.0
        if weight_i > capacity:
            x = capacity / weight_i
        capacity -= weight_i
        value += x * value_i
    return value
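# Worked example (hypothetical input): with capacity=50, weights=[20, 50, 30]
# and values=[60, 100, 120], the ratios are 3, 2 and 4, so the greedy order is
# the 30-unit item (+120) then the 20-unit item (+60):
#   get_optimal_value(50, [20, 50, 30], [60, 100, 120]) == 180.0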
if __name__ == "__main__":
data = list(map(int, sys.stdin.read().split()))
n, capacity = data[0:2]
values = data[2:(2 * n + 2):2]
weights = data[3:(2 * n + 2):2]
opt_value = get_optimal_value(capacity, weights, values)
print("{:.10f}".format(opt_value))
|
#!/usr/bin/python3
"""
Demonstrates the use of Psi4 from the Python level.
Useful notes:
o Use psi4.core module for most of the work
o Useful modules within psi4.core:
- MintsHelper
- Molecule
- BasisSet
- ExternalPotential
others
o Psi4 defines its own matrix type (psi4.core.Matrix).
Extracting numpy.array is easy:
numpy_array = numpy.asarray(psi4_matrix)
Creating Psi4 matrix from array is also easy:
psi4_matrix = psi4.core.Matrix.from_array(numpy_array)
o To compute 1-el potential matrix for a set of charges
use ExternalPotential (charge positions are to be provided in Angstroms)
unless charges are just nuclei within the basis set (in this case use of ao_potential method
of MintsHelper is easier).
 o ao_potential method of MintsHelper is limited to nuclei within the same basis set
   (the nuclei are taken from the first basis set axis, for example:
      mints = MintsHelper(basis_X)
      mints.ao_potential()                 -> nuclei taken from basis of mints object (basis_X)
      mints.ao_potential(basis_1, basis_2) -> nuclei taken from basis_1
 o Psi4 has an efficient and easy to use method of defining fragments within a molecule (use '--' separator).
   Defining ghost atoms and extracting fragment i in the multimer-centred basis set is also very straightforward
   (method extract_subsets(...) of psi4.core.Molecule)
---
Bartosz Błasiak
"""
import psi4
import numpy
MAX_NBF = 128
class SCF:
"""
---------------------------------------------------------------------------------------------------------------
Self-Consistent Field (SCF) Procedure for Hartree-Fock Model
---------------------------------------------------------------------------------------------------------------
Demo for RHF-SCF method (closed shells). Implements SCF algorithm
with primitive damping of the AO Fock matrix.
Usage:
scf = SCF(molecule)
scf.run(maxit=30, conv=1.0e-7, guess=None, damp=0.01, ndamp=10, verbose=True)
The above example runs SCF on 'molecule' psi4.core.Molecule object
starting from core Hamiltonian as guess (guess=None)
and convergence 1.0E-7 A.U. in total energy with 30 maximum iterations
(10 of which are performed by damping of the Fock matrix with damping coefficient of 0.01).
The SCF iterations are printed to standard output (verbose=True).
---------------------------------------------------------------------------------------------------------------
Last Revision: Gundelfingen, May 4th 2018
"""
    def __init__(self, mol):
        "Initialize BasisSet, Wavefunction and JK objects"
        # NOTE: the right-hand sides below were left blank in the original
        # skeleton; they are filled in here with a typical Psi4 setup
        # (psi4numpy-style calls) so that the class parses and runs.
        # Wavefunction (built from the globally selected basis)
        self._wfn = psi4.core.Wavefunction.build(mol, psi4.core.get_global_option("BASIS"))
        # Basis set
        self._bfs = self._wfn.basisset()
        # Number of alpha electrons (== number of doubly occupied orbitals, closed shell)
        self._ndocc = self._wfn.nalpha()
        # Integral calculator
        self._mints = psi4.core.MintsHelper(self._bfs)
        # JK object
        self._jk = psi4.core.JK.build(self._bfs)
        ### Accessors
        # nuclear repulsion energy
        self.e_nuc = mol.nuclear_repulsion_energy()
        # Total Energy
        self.E = None
        # Density Matrix
        self.D = None
        # LCAO-MO coeffs (occ)
        self.Co = None
        # LCAO-MO coeffs (occ+vir)
        self.C = None
        # Fock matrix
        self.F = None
        # Orbital energies
        self.eps = None
        # Hcore matrix
        self.H = None
        # Overlap integrals and orthogonalizer
        self.S = numpy.asarray(self._mints.ao_overlap())
        self.X = self._orthogonalizer(self.S)
        return
    def run(self, maxit=30, conv=1.0e-7, guess=None, damp=0.01, ndamp=10, verbose=True):
        "Solve SCF (public interface)"
        if guess is None:
            # Form Hcore from kinetic + nuclear attraction integrals
            T = numpy.asarray(self._mints.ao_kinetic())
            V = numpy.asarray(self._mints.ao_potential())
            H = T + V
        else: H = numpy.asarray(guess)
        self.H = H.copy()
        self._run(H, maxit, conv, damp, ndamp, verbose)
        return
# --- protected --- #
    def _run(self, H, maxit, conv, damp, ndamp, verbose):
        "Solve SCF (protected interface)"
        # NOTE: a plain-numpy reference implementation of skeleton steps [1]-[9], using in-core AO ERIs
        I = numpy.asarray(self._mints.ao_eri())
        # [1] Guess density matrix (from the core Hamiltonian guess H)
        eps, Cp = numpy.linalg.eigh(self.X.T @ H @ self.X)
        C = self.X @ Cp
        D = C[:, :self._ndocc] @ C[:, :self._ndocc].T
        e_old, e_new, niter, F_old = 0.0, 1.0, 0, None
        # [2] Start iteration cycles
        while (abs(e_old - e_new) > conv):
            niter += 1
            # [3] form Fock matrix (damped during the first ndamp iterations)
            J = numpy.einsum('pqrs,rs->pq', I, D)
            K = numpy.einsum('prqs,rs->pq', I, D)
            F = H + 2.0 * J - K
            if F_old is not None and niter <= ndamp:
                F = damp * F_old + (1.0 - damp) * F
            # [4] compute total energy
            e_old, e_new = e_new, numpy.sum(D * (H + F)) + self.e_nuc
            if verbose:
                print (" @SCF Iter {:02} E = {:14.8f}".format(niter, e_new))
            # [5-6] transform Fock matrix to orthogonal AO basis and diagonalize
            eps, Cp = numpy.linalg.eigh(self.X.T @ F @ self.X)
            # [7] convert LCAO-MO coefficients to non-orthogonal AO basis
            C = self.X @ Cp
            # [8] form density matrix
            Co = C[:, :self._ndocc]
            D = Co @ Co.T
            # [9] save current data
            self.E, self.D, self.Co, self.C, self.F, self.eps, F_old = e_new, D, Co, C, F, eps, F
            if niter > maxit: break
        return
    def _orthogonalizer(self, S):
        "Form the symmetric orthogonalizer X = S^(-1/2)"
        s, U = numpy.linalg.eigh(S)
        return U @ numpy.diag(1.0 / numpy.sqrt(s)) @ U.T
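# Hypothetical driver for the class above (assumes a working Psi4 installation;
# the geometry and basis chosen here are illustrative only):
if __name__ == "__main__":
    psi4.set_options({"basis": "sto-3g"})
    mol = psi4.geometry("""
    0 1
    O  0.000  0.000  0.000
    H  0.000  0.757  0.587
    H  0.000 -0.757  0.587
    units angstrom
    """)
    scf = SCF(mol)
    scf.run(maxit=30, conv=1.0e-7, verbose=True)
    print("Final RHF energy: %.8f" % scf.E)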
|
#! /usr/bin/python3
# p104
animals = ['bear','python','peacock','kangaroo','whale','platypus']
print("The animal at 1 is %s",animals[0])
print("The third animal is %s",animals[2])
print("The first animal is %s",animals[0])
print("The animal at 3 is %s",animals[2])
print("The fifth animal is %s",animals[4])
print("The animal at 2 is %s",animals[1])
print("The sixth animal is %s",animals[5])
print("the animal at 4 is %s",animals[3])
|
#! /usr/bin/env python3
# coding=utf-8
#================================================================
# Copyright (C) 2018 * Ltd. All rights reserved.
#
# Editor : VIM
# File name : yolov3.py
# Author : YunYang1994
# Created date: 2018-11-21 18:41:35
# Description : YOLOv3: An Incremental Improvement
#
#================================================================
import tensorflow as tf
from core import common, utils
slim = tf.contrib.slim
class darknet53(object):
"""network for performing feature extraction"""
def __init__(self, inputs):
self.outputs = self.forward(inputs)
def _darknet53_block(self, inputs, filters):
"""
implement residuals block in darknet53
"""
shortcut = inputs
inputs = common._conv2d_fixed_padding(inputs, filters * 1, 1)
inputs = common._conv2d_fixed_padding(inputs, filters * 2, 3)
inputs = inputs + shortcut
return inputs
def forward(self, inputs):
inputs = common._conv2d_fixed_padding(inputs, 32, 3, strides=1)
inputs = common._conv2d_fixed_padding(inputs, 64, 3, strides=2)
inputs = self._darknet53_block(inputs, 32)
inputs = common._conv2d_fixed_padding(inputs, 128, 3, strides=2)
for i in range(2):
inputs = self._darknet53_block(inputs, 64)
inputs = common._conv2d_fixed_padding(inputs, 256, 3, strides=2)
for i in range(8):
inputs = self._darknet53_block(inputs, 128)
route_1 = inputs
inputs = common._conv2d_fixed_padding(inputs, 512, 3, strides=2)
for i in range(8):
inputs = self._darknet53_block(inputs, 256)
route_2 = inputs
inputs = common._conv2d_fixed_padding(inputs, 1024, 3, strides=2)
for i in range(4):
inputs = self._darknet53_block(inputs, 512)
return route_1, route_2, inputs
class yolov3(object):
def __init__(self, num_classes=80,
batch_norm_decay=0.9, leaky_relu=0.1, anchors_path='./data/yolo_anchors.txt'):
# self._ANCHORS = [[10 ,13], [16 , 30], [33 , 23],
# [30 ,61], [62 , 45], [59 ,119],
# [116,90], [156,198], [373,326]]
self._ANCHORS = utils.get_anchors(anchors_path)
self._BATCH_NORM_DECAY = batch_norm_decay
self._LEAKY_RELU = leaky_relu
self._NUM_CLASSES = num_classes
self.feature_maps = [] # [[None, 13, 13, 255], [None, 26, 26, 255], [None, 52, 52, 255]]
def _yolo_block(self, inputs, filters):
inputs = common._conv2d_fixed_padding(inputs, filters * 1, 1)
inputs = common._conv2d_fixed_padding(inputs, filters * 2, 3)
inputs = common._conv2d_fixed_padding(inputs, filters * 1, 1)
inputs = common._conv2d_fixed_padding(inputs, filters * 2, 3)
inputs = common._conv2d_fixed_padding(inputs, filters * 1, 1)
route = inputs
inputs = common._conv2d_fixed_padding(inputs, filters * 2, 3)
return route, inputs
def _detection_layer(self, inputs, anchors):
num_anchors = len(anchors)
feature_map = slim.conv2d(inputs, num_anchors * (5 + self._NUM_CLASSES), 1,
stride=1, normalizer_fn=None,
activation_fn=None,
biases_initializer=tf.zeros_initializer())
return feature_map
def get_boxes_confs_scores(self, feature_map, anchors):
num_anchors = len(anchors) # num_anchors=3
grid_size = tf.shape(feature_map)[1:3]
stride = tf.cast(self.img_size // grid_size, tf.float32)
anchors = [(a[0] / stride[0], a[1] / stride[1]) for a in anchors]
feature_map = tf.reshape(feature_map, [-1, grid_size[0], grid_size[1], num_anchors, 5 + self._NUM_CLASSES])
box_centers, box_sizes, confs, probs = tf.split(
feature_map, [2, 2, 1, self._NUM_CLASSES], axis=-1)
box_centers = tf.nn.sigmoid(box_centers)
confs = tf.nn.sigmoid(confs)
probs = tf.nn.sigmoid(probs)
grid_x = tf.range(grid_size[0], dtype=tf.int32)
grid_y = tf.range(grid_size[1], dtype=tf.int32)
a, b = tf.meshgrid(grid_x, grid_y)
x_offset = tf.reshape(a, (-1, 1))
y_offset = tf.reshape(b, (-1, 1))
x_y_offset = tf.concat([x_offset, y_offset], axis=-1)
x_y_offset = tf.reshape(x_y_offset, [grid_size[0], grid_size[1], 1, 2])
x_y_offset = tf.cast(x_y_offset, tf.float32)
box_centers = box_centers + x_y_offset
box_centers = box_centers * stride
box_sizes = tf.exp(box_sizes) * anchors
box_sizes = box_sizes * stride
boxes = tf.concat([box_centers, box_sizes], axis=-1)
return x_y_offset, boxes, confs, probs
@staticmethod
def _upsample(inputs, out_shape):
new_height, new_width = out_shape[1], out_shape[2]
inputs = tf.image.resize_nearest_neighbor(inputs, (new_height, new_width))
inputs = tf.identity(inputs, name='upsampled')
return inputs
# @staticmethod
# def _upsample(inputs, out_shape):
# """
# replace resize_nearest_neighbor with conv2d_transpose To support TensorRT 5 optimization
# """
# new_height, new_width = out_shape[1], out_shape[2]
# filters = 256 if (new_height == 26 and new_width==26) else 128
# inputs = tf.layers.conv2d_transpose(inputs, filters, kernel_size=3, padding='same',
# strides=(2,2), kernel_initializer=tf.ones_initializer())
# return inputs
def forward(self, inputs, is_training=False, reuse=False):
"""
Creates YOLO v3 model.
:param inputs: a 4-D tensor of size [batch_size, height, width, channels].
Dimension batch_size may be undefined. The channel order is RGB.
:param is_training: whether is training or not.
:param reuse: whether or not the network and its variables should be reused.
:return:
"""
# it will be needed later on
self.img_size = tf.shape(inputs)[1:3]
# set batch norm params
batch_norm_params = {
'decay': self._BATCH_NORM_DECAY,
'epsilon': 1e-05,
'scale': True,
'is_training': is_training,
'fused': None, # Use fused batch norm if possible.
}
# Set activation_fn and parameters for conv2d, batch_norm.
with slim.arg_scope([slim.conv2d, slim.batch_norm, common._fixed_padding],reuse=reuse):
with slim.arg_scope([slim.conv2d], normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params,
biases_initializer=None,
activation_fn=lambda x: tf.nn.leaky_relu(x, alpha=self._LEAKY_RELU)):
with tf.variable_scope('darknet-53'):
route_1, route_2, inputs = darknet53(inputs).outputs
with tf.variable_scope('yolo-v3'):
route, inputs = self._yolo_block(inputs, 512)
feature_map_1 = self._detection_layer(inputs, self._ANCHORS[6:9])
feature_map_1 = tf.identity(feature_map_1, name='feature_map_1')
inputs = common._conv2d_fixed_padding(route, 256, 1)
upsample_size = route_2.get_shape().as_list()
inputs = self._upsample(inputs, upsample_size)
inputs = tf.concat([inputs, route_2], axis=3)
route, inputs = self._yolo_block(inputs, 256)
feature_map_2 = self._detection_layer(inputs, self._ANCHORS[3:6])
feature_map_2 = tf.identity(feature_map_2, name='feature_map_2')
inputs = common._conv2d_fixed_padding(route, 128, 1)
upsample_size = route_1.get_shape().as_list()
inputs = self._upsample(inputs, upsample_size)
inputs = tf.concat([inputs, route_1], axis=3)
route, inputs = self._yolo_block(inputs, 128)
feature_map_3 = self._detection_layer(inputs, self._ANCHORS[0:3])
feature_map_3 = tf.identity(feature_map_3, name='feature_map_3')
return feature_map_1, feature_map_2, feature_map_3
def _reshape(self, x_y_offset, boxes, confs, probs):
grid_size = x_y_offset.shape.as_list()[:2]
boxes = tf.reshape(boxes, [-1, grid_size[0]*grid_size[1]*3, 4])
confs = tf.reshape(confs, [-1, grid_size[0]*grid_size[1]*3, 1])
probs = tf.reshape(probs, [-1, grid_size[0]*grid_size[1]*3, self._NUM_CLASSES])
return boxes, confs, probs
def predict(self, feature_maps):
"""
Note: given by feature_maps, compute the receptive field
and get boxes, confs and class_probs
input_argument: feature_maps -> [None, 13, 13, 255],
[None, 26, 26, 255],
[None, 52, 52, 255],
"""
feature_map_1, feature_map_2, feature_map_3 = feature_maps
feature_map_anchors = [(feature_map_1, self._ANCHORS[6:9]),
(feature_map_2, self._ANCHORS[3:6]),
(feature_map_3, self._ANCHORS[0:3]),]
results = [self.get_boxes_confs_scores(feature_map, anchors) for (feature_map, anchors) in feature_map_anchors]
boxes_list, confs_list, probs_list = [], [], []
for result in results:
boxes, confs, probs = self._reshape(*result)
boxes_list.append(boxes)
confs_list.append(confs)
probs_list.append(probs)
boxes = tf.concat(boxes_list, axis=1)
confs = tf.concat(confs_list, axis=1)
probs = tf.concat(probs_list, axis=1)
        center_x, center_y, width, height = tf.split(boxes, [1,1,1,1], axis=-1)
        x0 = center_x - width / 2
        y0 = center_y - height / 2
        x1 = center_x + width / 2
        y1 = center_y + height / 2
        boxes = tf.concat([x0, y0, x1, y1], axis=-1)
return boxes, confs, probs
def compute_loss(self, feature_maps, boxes_true):
"""
Note: compute the loss
Arguments: feature_maps, list -> [feature_map_1, feature_map_2, feature_map_3]
the shape of [None, 13, 13, 3*85]. etc
"""
_ANCHORS = [self._ANCHORS[6:9], self._ANCHORS[3:6], self._ANCHORS[0:3]]
loss = 0.
for i, feature_map in enumerate(feature_maps):
loss += self.loss_layer(feature_map, boxes_true[i], _ANCHORS[i])
return loss
def loss_layer(self, feature_map_i, y_true, anchors):
NO_OBJECT_SCALE = 1.0
OBJECT_SCALE = 5.0
COORD_SCALE = 1.0
CLASS_SCALE = 1.0
grid_size = tf.shape(feature_map_i)[1:3]
# stride = [self.img_size[0] // grid_size[0], self.img_size[1] // grid_size[1]]
stride = tf.cast(self.img_size//grid_size, dtype=tf.float32)
pred_result = self.get_boxes_confs_scores(feature_map_i, anchors)
xy_offset, pred_box, pred_box_conf, pred_box_class = pred_result
# print(pred_box_class)
true_box_xy = y_true[...,:2] # absolute coordinate
true_box_wh = y_true[...,2:4] # absolute size
pred_box_xy = pred_box[...,:2]# absolute coordinate
pred_box_wh = pred_box[...,2:4]# absolute size
        # calculate iou between true boxes and pred boxes
        intersect_xy1 = tf.maximum(true_box_xy - true_box_wh / 2.0,
                                   pred_box_xy - pred_box_wh / 2.0)
intersect_xy2 = tf.minimum(true_box_xy + true_box_wh / 2.0,
pred_box_xy + pred_box_wh / 2.0)
intersect_wh = tf.maximum(intersect_xy2 - intersect_xy1, 0.)
intersect_area = intersect_wh[..., 0] * intersect_wh[..., 1]
true_area = true_box_wh[..., 0] * true_box_wh[..., 1]
pred_area = pred_box_wh[..., 0] * pred_box_wh[..., 1]
union_area = true_area + pred_area - intersect_area
iou_scores = tf.truediv(intersect_area, union_area)
iou_scores = tf.expand_dims(iou_scores, axis=-1)
# true_box_conf = iou_scores * y_true[...,4:5]
true_box_conf = y_true[...,4:5]
# best_ious = tf.reduce_max(iou_scores, axis=-1)
conf_mask = tf.to_float(iou_scores < 0.6) * (1 - y_true[..., 4:5]) * NO_OBJECT_SCALE
        # penalize the confidence of the boxes which are responsible for the corresponding ground truth box
conf_mask = conf_mask + y_true[..., 4:5] * OBJECT_SCALE
### adjust x and y => relative position to the containing cell
true_box_xy = true_box_xy / stride - xy_offset
pred_box_xy = pred_box_xy / stride - xy_offset
### adjust w and h => relative size to the containing cell
true_box_wh_logit = true_box_wh / (anchors * stride)
pred_box_wh_logit = pred_box_wh / (anchors * stride)
true_box_wh_logit = tf.where(condition=tf.equal(true_box_wh_logit,0),
x=tf.ones_like(true_box_wh_logit), y=true_box_wh_logit)
pred_box_wh_logit = tf.where(condition=tf.equal(pred_box_wh_logit,0),
x=tf.ones_like(pred_box_wh_logit), y=pred_box_wh_logit)
true_box_wh = tf.log(true_box_wh_logit)
pred_box_wh = tf.log(pred_box_wh_logit)
### adjust class probabilities
class_mask = y_true[..., 4:5] * CLASS_SCALE
### class mask: simply the position of the ground truth boxes (the predictors)
coord_mask = y_true[..., 4:5] * COORD_SCALE
nb_coord_box = tf.reduce_sum(tf.to_float(coord_mask > 0.0))
nb_conf_box = tf.reduce_sum(tf.to_float(conf_mask > 0.0))
nb_class_box = tf.reduce_sum(tf.to_float(class_mask > 0.0))
# print("nb_conf_box", nb_conf_box)
# print("conf_mask,", conf_mask)
# print("true_box_conf", true_box_conf)
loss_coord = tf.reduce_sum(tf.square(true_box_xy-pred_box_xy) * coord_mask) / (nb_coord_box + 1e-6) / 2.
loss_sizes = tf.reduce_sum(tf.square(true_box_wh-pred_box_wh) * coord_mask) / (nb_coord_box + 1e-6) / 2.
loss_confs = tf.reduce_sum(tf.square(true_box_conf-pred_box_conf) * conf_mask) / (nb_conf_box + 1e-6) / 2.
loss_class = tf.nn.sigmoid_cross_entropy_with_logits(labels=y_true[...,5:], logits=pred_box_class)
# loss_class = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=tf.argmax(y_true[...,5:], axis=-1),
# logits=tf.argmax(pred_box_class, axis=-1))
loss_class = tf.reduce_sum(loss_class * class_mask) / (nb_class_box + 1e-6)
# loss = loss_coord + loss_sizes + loss_confs + loss_class
# loss = loss_xy + loss_wh + loss_conf
loss = loss_coord
loss = tf.Print(loss, [loss_coord], message='LOSS COORD\t', summarize=1000)
loss = tf.Print(loss, [loss_sizes], message='LOSS SIZES\t', summarize=1000)
loss = tf.Print(loss, [loss_confs], message='LOSS CONFS\t', summarize=1000)
loss = tf.Print(loss, [loss_class], message='Loss CLASS\t', summarize=1000)
loss = tf.Print(loss, [loss], message='LOSS TOTAL\t', summarize=1000)
return loss
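# Sketch of typical inference usage (hypothetical; TF 1.x graph mode, with
# trained weights restored separately):
#   inputs = tf.placeholder(tf.float32, [None, 416, 416, 3])
#   model = yolov3(num_classes=80)
#   feature_maps = model.forward(inputs, is_training=False)
#   boxes, confs, probs = model.predict(feature_maps)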
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import pprint
from selenium.webdriver.support.ui import WebDriverWait
from pytesseract import image_to_string
from PIL import Image
import time
import io
import pytesseract
import pytesseract as tss
import requests
import argparse
from selenium.webdriver.firefox.options import Options as FirefoxOptions
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.action_chains import ActionChains
def get_captcha_text(location, size):
# pytesseract.pytesseract.tesseract_cmd = 'path/to/pytesseract'
# tss.pytesseract.tesseract_cmd = r'C:\Program Files (x86)\Tesseract-OCR\tesseract.exe'
im = Image.open('screenshot.png')
left = location['x']
top = location['y']
right = location['x'] + size['width']
bottom = location['y'] + size['height']
im = im.crop((left, top, right, bottom)) # defines crop points
im.save('screenshot.png')
# response = requests.get("http://main.sci.gov.in/php/captcha.php")
captcha_text = image_to_string(Image.open('screenshot.png'))
print(captcha_text)
print(type(captcha_text))
return captcha_text
def get_date_select_elm(date_str='17-10-2020'):
print("in get_date_select_elm")
date_tds = driver.find_elements_by_css_selector("tr[bgcolor='#0099CC'] ~ tr td a")
for date_td in date_tds :
date_href = date_td.get_attribute("href")
if date_href is None:
continue
date_href = date_href.split(".value=")[1].split(";")[0]
#print(date_href)
#print("'"+date_str+"'")
if date_href == "'"+date_str+"'":
return date_td
return date_tds[0]
parser = argparse.ArgumentParser(description='Short sample app')
parser.add_argument('-id','--id', required=False, type=int)
args = vars(parser.parse_args())
# print("Hi there {}, it's nice to meet you!".format(args["id"]))
try:
frm_date = '19-10-2020'
to_date = '22-10-2020'
side_selected = ''
act_selected = ''
driver = webdriver.Firefox()
driver.implicitly_wait(30)
# driver.maximize_window()
driver.get('https://bombayhighcourt.nic.in/index.html')
WebDriverWait(driver, 10).until(lambda d: d.execute_script('return document.readyState') == 'complete')
time.sleep(2)
driver.find_element_by_link_text('Court Orders').click()
cour_order_elm = driver.find_element_by_link_text('Court Orders')
time.sleep(5)
ActionChains(driver).move_to_element(cour_order_elm).perform()
time.sleep(2)
judgement_order_elm = driver.find_element_by_link_text('Rep. Judgment/Orders')
ActionChains(driver).move_to_element(judgement_order_elm).perform()
time.sleep(2)
judgement_order_elm.click()
side_select = Select(driver.find_element_by_css_selector('[name="m_sideflg"]'))
side_selected = side_select.first_selected_option
print("side selected" + side_selected.text)
act_select = Select(driver.find_element_by_css_selector('[name="actcode"]'))
act_selected = act_select.first_selected_option
print("act selected" + act_selected.text)
pick_from_to = driver.find_elements_by_css_selector("img[alt='Pick a Date']")
print(len(pick_from_to))
from_element = pick_from_to[0]
to_element = pick_from_to[1]
window_main = driver.window_handles[0]
time.sleep(5)
from_element.click()
time.sleep(2)
window_calender = driver.window_handles[1]
driver.switch_to.window(window_calender)
time.sleep(2)
#date_elm = driver.find_element_by_css_selector("td[bgcolor='#FFFF33'] a")
date_elm = get_date_select_elm(frm_date)
href_from_date = date_elm.get_attribute("href")
#### value of date
href_from_date = href_from_date.split(".value=")[1].split(";")[0]
print("selected from date"+href_from_date)
date_elm.click()
time.sleep(2)
driver.switch_to.window(window_main)
to_element.click()
time.sleep(5)
window_calenders = driver.window_handles
print(len(window_calenders))
driver.switch_to.window(window_calenders[1])
time.sleep(5)
#date_elm = driver.find_element_by_css_selector("td[bgcolor='#FFFF33'] + td a")
time.sleep(2)
date_elm = get_date_select_elm(to_date)
href_to_date = date_elm.get_attribute("href")
#### value of date
href_to_date = href_to_date.split(".value=")[1].split(";")[0]
print("selected to date"+href_from_date)
date_elm.click()
driver.switch_to.window(window_main)
time.sleep(5)
element = driver.find_element_by_id('captchaimg')
location = element.location
size = element.size
driver.save_screenshot('screenshot.png')
captcha = driver.find_element_by_id('captcha_code')
captcha.clear()
captcha_text = get_captcha_text(location, size)
print(captcha_text)
captcha.send_keys(captcha_text)
driver.find_element_by_css_selector('[name="submit1"]').click()
time.sleep(5)
output = driver.find_element_by_id('JBJ')
source_code = output.get_attribute("outerHTML")
print(source_code)
#https://medium.com/@vineet_c/using-tesseract-to-solve-captcha-while-logging-in-to-a-website-with-selenium-899a810cf14
driver.quit()
except Exception as err:
    print('ERROR: %s\n' % str(err))
# mydb.close()
driver.quit()
|
from enum import Enum
class OperationTypes(Enum):
Single = 0
Multiple = 1
|
import traceback
import inspect
import platform
from time import time
import zlib
import bz2
import lzma
import warnings
from typing import (
Optional,
List,
Tuple,
Dict,
Any,
AsyncIterator
)
from pymprpc.errors import (
ProtocolSyntaxException,
LoginError,
RequestError,
MethodError,
ServerException
)
from pymprpc.mixins.encoder_decoder_mixin import EncoderDecoderMixin
from .log import logger, access_logger
if platform.system() == "Windows":
try:
import aio_windows_patch as asyncio
    except ImportError:
warnings.warn(
"you should install aio_windows_patch to support windows",
RuntimeWarning,
stacklevel=3)
import asyncio
else:
import asyncio
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
class MPProtocolServer(asyncio.StreamReaderProtocol, EncoderDecoderMixin):
"""message-pack Protocol 的服务器协议实现.
Attributes:
CONNECTIONS (Set[MPProtocolServer]): - 当前的所有连接的池
TASKS (List[asyncio.Future]): - 当前的所有连接下的所有任务
SEPARATOR (bytes): - 读请求时的终止符,默认为`b"##PRO-END##"`
VERSION (str): - 协议版本,以`x.x`的形式表现版本
COMPRESERS (Dict[str,model]): - 支持的压缩解压工具
version (int): - 协议版本
auth (List[Tuple[str, str]]): - 允许的登录用户名密码对
method_wrapper (SimpleMPRPServer): - 远程调用包装
tasks (Dict[str,asyncio.Future]): - 当前连接执行的任务
timeout (float): - 连接的过期时间,一旦超时没有一条消息写入那么就强行关闭连接.默认为60.0s
debug (bool): - 是否使用debug模式,默认不使用
compreser (Optional[str]): - 是否压缩传输的信息,默认不压缩
Property:
connections (Set): - 当前的连接池.
"""
CONNECTIONS = set()
TASKS = []
SEPARATOR = b"##PRO-END##"
VERSION = "0.1"
COMPRESERS = {
"zlib": zlib,
"bz2": bz2,
"lzma": lzma
}
@property
def connections(self):
"""当前的连接数."""
return self.__class__.CONNECTIONS
def __init__(self,
method_wrapper: "SimpleMPRPServer",
loop: Optional[asyncio.AbstractEventLoop]=None,
auth: List[Tuple[str, str]]=None,
timeout: Optional[float] = 180.0,
debug: bool=False,
compreser: Optional[str]=None
):
"""实例化一个协议对象,用于管理一个连接.
初始化主要是设置成员的值,包括一些用于管理状态的变量和容器
Parameters:
method_wrapper (SimpleMPRPServer): - pmprpc服务的实例
loop (Optional[asyncio.AbstractEventLoop]): - 事件循环
auth (Optional[List[Tuple[str, str]]]): - 合法的认证信息,保存在一个列表中,
以username,password的成对形式保存
timeout (Optional[float]): - 过期时间,如果timeout为None则不会因过期而关闭
debug (bool): - 是否使用debug模式,默认为否
compreser(Optional[str]): - 是否使用压缩工具压缩传输信息,以及压缩工具是什么,默认为不使用.
"""
# public
self.auth = auth
self.timeout = timeout
self.debug = debug
self.method_wrapper = method_wrapper
self.tasks = {}
if compreser is not None:
_compreser = self.COMPRESERS.get(compreser)
if _compreser is not None:
self.compreser = _compreser
else:
raise RuntimeError("compreser unsupport")
else:
self.compreser = None
# protected
        self._handlertask = None  # the running request-handling loop task
self._loop = loop or asyncio.get_event_loop()
        self._last_response_time = time()  # time of the last response
self._timeout_handler = None
self._transport = None
self._remote_host = None
self._extra = None
self._stream_writer = None
self._stream_reader = None
self._client_connected_cb = None
self._over_ssl = False
self._paused = False
self._drain_waiter = None
self._connection_lost = False
    # ------------------------ connection handling ------------------------------
def connection_made(self, transport: asyncio.transports.Transport):
"""连接建立起来触发的回调函数.
用于设定一些参数,并将监听任务放入事件循环,如果设置了timeout,也会将timeout_callback放入事件循环
Parameters:
transport (asyncio.Transports): - 连接的传输对象
"""
self._transport = transport
self._remote_host = self._transport.get_extra_info('peername')
self._extra = {"client": str(self._remote_host)}
self.connections.add(self)
self._stream_reader = asyncio.StreamReader(loop=self._loop)
self._stream_writer = asyncio.StreamWriter(transport, self,
self._stream_reader,
self._loop)
super().connection_made(transport)
if self.timeout:
self._timeout_handler = self._loop.call_soon(
self.timeout_callback)
self._handlertask = asyncio.ensure_future(self.query_handler())
if self.debug:
access_logger.info("connected", extra=self._extra)
def connection_lost(self, exc: Exception=None):
"""连接丢失时触发的回调函数.
用于清理一些任务和关闭连接,包括:
+ 取消监听任务
+ 取消过期监控任务
+ 取消其他还没执行完的任务
+ 将流读写器都重置
+ 将本连接从当前的连接池中去除
Parameters:
exc (Exception): - 异常,如果是None的话说明不是因为异常而关闭的连接
"""
self._handlertask.cancel()
super().connection_lost(exc)
if self._timeout_handler:
self._timeout_handler.cancel()
self._transport = None
for i, task in self.tasks.items():
task.cancel()
self.connections.discard(self)
if self.debug:
access_logger.info("lost connection", extra=self._extra)
def shutdown(self):
"""关停当前连接.
用于主动关停连接,并清理一些任务,包括:
+ 取消监听任务
+ 取消过期监控任务
+ 取消其他还没执行完的任务
+ 将流读写器都重置
"""
self._handlertask.cancel()
if self._timeout_handler:
self._timeout_handler.cancel()
self._transport = None
self._stream_reader = None
self._stream_writer.close()
for i, task in self.tasks.items():
task.cancel()
self._stream_writer = None
logger.info("close connection with {}".format(
self._remote_host
))
def close(self):
"""强制关闭当前连接.
用于主动关闭连接,并清理一些任务,包括:
+ 取消监听任务
+ 取消过期监控任务
+ 取消其他还没执行完的任务
+ 将流读写器都重置
+ 将本连接从当前的连接池中去除
"""
self.shutdown()
self.connections.discard(self)
    # -------------------------------- timeout handling -----------------------------
def timeout_callback(self):
"""过期回调函数.
如果设置了timeout则会启动一个协程按当前时间和最近的响应时间只差递归的执行这个回调
"""
# Check if elapsed time since last response exceeds our configured
# maximum keep alive timeout value
now = time()
time_elapsed = now - self._last_response_time
if time_elapsed < self.timeout:
time_left = self.timeout - time_elapsed
self._timeout_handler = (
self._loop.call_later(
time_left,
self.timeout_callback
)
)
else:
logger.info('KeepAlive Timeout. Closing connection.')
responseb = self.encoder({
"MPRPC": self.VERSION,
"CODE": 504
})
self._stream_writer.write(responseb)
self.close()
    # ------------------------ stream read/write wrappers ---------------------------
def writer(self, response: Dict[str, Any]):
"""将响应的python结构转化为字节,并写入到流中,同时刷新最后一次响应时间为当前时间
Parameters:
response (Dict[str,Any]): - 要写入给客户端的响应的python结构
"""
responseb = self.encoder(response)
self._stream_writer.write(responseb)
if self.debug:
access_logger.info("write {}".format(responseb), extra=self._extra)
self._last_response_time = time()
async def read(self):
"""读取请求,并转化为python的字典结构.如果读入了EOF,那么触发回调函数connection_lost."""
try:
data = await self._stream_reader.readuntil(self.SEPARATOR)
except asyncio.IncompleteReadError:
self.connection_lost()
else:
query = self.decoder(data)
if self.debug:
access_logger.info("get query: {}".format(
query
), extra=self._extra)
return query
    # ------------------------------- request handling --------------------------------
async def query_handler(self):
"""根据获取到的不同请求执行不同的动作.会在建立连接后被放入事件循环.
主要为3种请求:
+ 验证请求
+ 心跳请求
+ 任务调用请求
如果中途有步骤报错也负责将对应的错误转化为错误信息发送给客户端.
"""
while True:
request = await self.read()
if request is None:
break
ID = request.get('ID')
try:
if request.get("AUTH"):
self._check_auth_handler(request)
elif request.get("HEARTBEAT"):
response = {
"MPRPC": self.VERSION,
"CODE": 101,
"HEARTBEAT": "pong"
}
self.writer(response)
elif ID:
fut = asyncio.ensure_future(
self._RPC_handler(request),
loop=self._loop)
if asyncio.isfuture(fut):
self.tasks[ID] = fut
self.__class__.TASKS.append(fut)
else:
raise ProtocolSyntaxException("Protocol Syntax Error")
except MethodError as se:
exinfo = traceback.TracebackException.from_exception(
se).format(chain=True)
frames = "".join([i + "/n" for i in exinfo])
response = {
"MPRPC": self.VERSION,
"CODE": se.status_code,
"MESSAGE": {
"ID": ID,
'EXCEPTION': str(type(se)),
'MESSAGE': str(se),
"DATA": {
'METHOD': request.get("METHOD"),
"ARGS": request.get("ARGS"),
"KWARGS": request.get("KWARGS"),
'FRAME': frames}
}
}
self.writer(response)
except ServerException as me:
response = {
"MPRPC": self.VERSION,
"CODE": me.status_code,
}
self.writer(response)
except Exception as e:
if self.debug:
logger.info("Unknow Error: {}[{}]".format(
type(e).__name__, str(e)
))
    # ------------------------------- user authentication ---------------------------
def _check_auth_handler(self, request: Dict[str, Any]):
"""用于验证客户端是否有权限调服务.
如果服务端有验证信息,则会根据验证信息判断是否合法
+ 如果合法,那么返回一条信息用于响应验证请求
+ 如果不合法,那么返回验证错误
如果服务端没有验证信息
+ 如果验证信息都为空,直接返回响应
+ 如果信息不为空,那么返回验证错误
Parameters:
request (Dict[str, Any]): - python字典形式的请求
Return:
(bool): - 请求是否被验证通过,通过了返回True
Raise:
(LoginError): - 当验证不通过时抛出
"""
a_username = request.get("AUTH").get("USERNAME")
a_password = request.get("AUTH").get("PASSWORD")
        auth_len = len(self.auth) if self.auth else 0
if auth_len == 0:
if any([a_username, a_password]):
if self.debug:
access_logger.info("login failed", extra=self._extra)
raise LoginError("login error ,unknown username/password")
else:
return True
else:
for username, password in self.auth:
if all([a_username == username, a_password == password]):
response = {
"MPRPC": self.VERSION,
"CODE": 100,
"VERSION": self.method_wrapper.version,
"DESC": self.method_wrapper.__doc__,
"DEBUG": self.debug,
"COMPRESER": self.compreser.__name__ if (
self.compreser) else None,
"TIMEOUT": self.timeout,
}
self.writer(response)
if self.debug:
access_logger.info("login succeed", extra=self._extra)
break
else:
if self.debug:
access_logger.info("login failed", extra=self._extra)
raise LoginError("login error ,unknown username/password")
return True
    # ---------------------------------- RPC call handling --------------------------
async def _RPC_handler(self, request: Dict[str, Any]):
"""用于调用函数并执行.同时如果执行出错也负责将错误转化为对应的调用错误返回给客户端.
执行成功后根据结果进行不同的处理,如果注册的是函数,实例中的方法,或者协程,则获取计算得的结果,并返回给客户端.
如果是异步生成器函数,那么返回的就是一个对应的异步生成器,我们通过对其包装后循环调用实现流传输.
Parameters:
request (Dict[str, Any]): - python字典形式的请求
Raise:
(Exception): - 当执行调用后抛出了异常,且异常不在定义范围内,则抛出
Return:
(bool): - 当正常调用则返回True,如果抛出了规定范围内的异常则返回False
"""
ID = request.get("ID")
method = request.get("METHOD")
with_return = request.get("RETURN")
args = request.get("ARGS") or []
kwargs = request.get("KWARGS") or {}
try:
if method is None:
raise RequestError(
"request do not have method", request.get("ID"))
if method == "system.getresult":
await self._get_result(ID, *args, **kwargs)
else:
result = await self.method_wrapper.apply(ID, method,
*args, **kwargs)
except MethodError as se:
exinfo = traceback.TracebackException.from_exception(
se).format(chain=True)
frames = "".join([i + "/n" for i in exinfo])
response = {
"MPRPC": self.VERSION,
"CODE": se.status_code,
"MESSAGE": {
"ID": ID,
'EXCEPTION': str(type(se)),
'MESSAGE': str(se),
"DATA": {
'METHOD': request.get("METHOD"),
"ARGS": request.get("ARGS"),
"KWARGS": request.get("KWARGS"),
'FRAME': frames}
}
}
self.writer(response)
return False
except ServerException as me:
response = {
"MPRPC": self.VERSION,
"CODE": me.status_code,
}
self.writer(response)
return False
except Exception as e:
if self.debug is True:
raise e
else:
logger.info(
"Task[{}]: Unknown Error {}:\nmessage:{}".format(
ID, e.__class__.__name__, str(e))
)
else:
if with_return:
if inspect.isasyncgen(result):
await self._asyncgen_wrap(result, ID)
else:
response = {
"MPRPC": self.VERSION,
"CODE": 200,
"MESSAGE": {
"ID": ID,
'RESULT': result
}
}
self.writer(response)
if self.debug:
access_logger.info(
"Task[{}]: response answered".format(ID),
extra=self._extra
)
return result
async def _asyncgen_wrap(self, cor: AsyncIterator, ID: str):
"""流包装器.
通过调用异步生成器传输流数据.
Parameters:
cor (AsyncIterator): - 异步迭代器
ID (str): - 任务的ID
Return:
(bool): - 当正常调用则返回True
"""
response = {
"MPRPC": self.VERSION,
"CODE": 201,
"MESSAGE": {
"ID": ID
}
}
self.writer(response)
if self.debug:
access_logger.info(
"Task[{}]: response stream start".format(ID),
extra=self._extra
)
async for i in cor:
response = {
"MPRPC": self.VERSION,
"CODE": 202,
"MESSAGE": {
"ID": ID,
'RESULT': i
}
}
self.writer(response)
if self.debug:
access_logger.info(
"Task[{}]: response stream yield".format(ID),
extra=self._extra
)
response = {
"MPRPC": self.VERSION,
"CODE": 206,
"MESSAGE": {
"ID": ID
}
}
self.writer(response)
if self.debug:
access_logger.info(
"Task[{}]: response stream end".format(ID),
extra=self._extra
)
return True
# ---------------------------------getresult----------------------------------
    async def _get_result(self, ID, _ID):
        """Await the task registered under `_ID` and write its result back for request `ID`."""
try:
result = await self.tasks[_ID]
if inspect.isasyncgen(result):
await self._asyncgen_wrap(result, _ID)
else:
response = {
"MPRPC": self.VERSION,
"CODE": 200,
"MESSAGE": {
"ID": ID,
'RESULT': result
}
}
self.writer(response)
if self.debug:
access_logger.info(
"Task[{}]: response answered".format(ID),
extra=self._extra
)
        except Exception as e:
            logger.info("Task[{}]: get result failed {}: {}".format(
                ID, e.__class__.__name__, str(e)))
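# Minimal usage sketch (not part of the original module; assumes a
# SimpleMPRPServer instance named `wrapper`). As with any asyncio protocol,
# the class is handed to loop.create_server via a factory:
#
#   loop = asyncio.get_event_loop()
#   coro = loop.create_server(
#       lambda: MPProtocolServer(wrapper, loop=loop, auth=[("user", "pass")]),
#       host="127.0.0.1", port=5000)
#   server = loop.run_until_complete(coro)
#   loop.run_forever()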
|
# ###########################################################################
#
# CLOUDERA APPLIED MACHINE LEARNING PROTOTYPE (AMP)
# (C) Cloudera, Inc. 2021
# All rights reserved.
#
# Applicable Open Source License: Apache 2.0
#
# NOTE: Cloudera open source products are modular software products
# made up of hundreds of individual components, each of which was
# individually copyrighted. Each Cloudera open source product is a
# collective work under U.S. Copyright Law. Your license to use the
# collective work is as provided in your written agreement with
# Cloudera. Used apart from the collective work, this file is
# licensed for your use pursuant to the open source license
# identified above.
#
# This code is provided to you pursuant a written agreement with
# (i) Cloudera, Inc. or (ii) a third-party authorized to distribute
# this code. If you do not have a written agreement with Cloudera nor
# with an authorized and properly licensed third party, you do not
# have any rights to access nor to use this code.
#
# Absent a written agreement with Cloudera, Inc. (“Cloudera”) to the
# contrary, A) CLOUDERA PROVIDES THIS CODE TO YOU WITHOUT WARRANTIES OF ANY
# KIND; (B) CLOUDERA DISCLAIMS ANY AND ALL EXPRESS AND IMPLIED
# WARRANTIES WITH RESPECT TO THIS CODE, INCLUDING BUT NOT LIMITED TO
# IMPLIED WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE; (C) CLOUDERA IS NOT LIABLE TO YOU,
# AND WILL NOT DEFEND, INDEMNIFY, NOR HOLD YOU HARMLESS FOR ANY CLAIMS
# ARISING FROM OR RELATED TO THE CODE; AND (D)WITH RESPECT TO YOUR EXERCISE
# OF ANY RIGHTS GRANTED TO YOU FOR THE CODE, CLOUDERA IS NOT LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, PUNITIVE OR
# CONSEQUENTIAL DAMAGES INCLUDING, BUT NOT LIMITED TO, DAMAGES
# RELATED TO LOST REVENUE, LOST PROFITS, LOSS OF INCOME, LOSS OF
# BUSINESS ADVANTAGE OR UNAVAILABILITY, OR LOSS OR CORRUPTION OF
# DATA.
#
# ###########################################################################
import numpy as np
import os
import csv
import torch
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader
from collections import deque
class Train:
def __init__(self, net, handler, n_epoch, lr, data, model_dir):
self.data = data
self.clf = net
self.handler = handler
use_cuda = torch.cuda.is_available()
self.device = torch.device("cuda" if use_cuda else "cpu")
self.n_epoch = n_epoch
self.lr = lr
self.model_dir = model_dir
self.step = deque()
self.train_loss = deque()
self.val_loss = deque()
self.train_acc = deque()
self.val_acc = deque()
self.clf = self.clf().to(self.device)
def save_checkpoint(self, checkpoint, model_dir):
f_path = os.path.join(model_dir, 'checkpoint.pt')
torch.save(checkpoint, f_path)
def train(self):
        print('train: training with {} datapoints'.format(self.data.X.shape[0]))
checkpoint_fpath = os.path.join(self.model_dir, 'checkpoint.pt')
optimizer = optim.SGD(self.clf.parameters(), lr=self.lr, momentum=0.5)
start_epoch = 1
if not os.path.exists(self.model_dir):
os.mkdir(self.model_dir)
# load checkpoint if available
if os.path.isfile(checkpoint_fpath):
checkpoint = torch.load(checkpoint_fpath)
self.clf.load_state_dict(checkpoint['state_dict'])
optimizer.load_state_dict(checkpoint['optimizer'])
start_epoch = checkpoint['epoch']
train_loader = self.data.load_train_data()
test_loader = self.data.load_test_data()
runlog_filename = os.path.join(self.model_dir, "run_log.csv")
fieldnames = ['epoch', 'train_loss', 'val_loss', 'train_acc', 'val_acc']
if not os.path.isfile(runlog_filename):
csvfile = open(runlog_filename, 'w', newline='', )
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
else: # else it exists so append without writing the header
csvfile = open(runlog_filename, 'a', newline='')
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
for epoch in range(start_epoch, start_epoch + self.n_epoch):
self._train(train_loader, optimizer, epoch)
self._test(test_loader, epoch)
            writer.writerow({'epoch': self.step[-1], 'train_loss': self.train_loss[-1],
                             'val_loss': self.val_loss[-1], 'train_acc': self.train_acc[-1],
                             'val_acc': self.val_acc[-1]})
        csvfile.close()  # flush the run log to disk
checkpoint = {
'epoch': self.n_epoch + start_epoch,
'state_dict': self.clf.state_dict(),
'optimizer': optimizer.state_dict()
}
self.save_checkpoint(checkpoint, self.model_dir)
'''
The following function is adapted from the original Pytorch example code:
https://github.com/pytorch/examples/blob/master/mnist/main.py
'''
def _train(self, train_loader, optimizer, epoch):
self.clf.train()
train_loss = 0
correct = 0
for batch_idx, (data, target, idxs) in enumerate(train_loader):
data, target = data.to(self.device), target.to(self.device)
optimizer.zero_grad()
output, _ = self.clf(data)
loss = F.nll_loss(output, target)
train_loss += F.nll_loss(output, target, reduction='sum').item()
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
loss.backward()
optimizer.step()
train_loss /= len(train_loader.dataset)
print('\nTrain set: Epoch: {}, Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
epoch, train_loss, correct, len(train_loader.dataset),
100. * correct / len(train_loader.dataset)))
self.step.append(epoch)
self.train_loss.append(train_loss)
self.train_acc.append(correct / len(train_loader.dataset))
'''
The following function is adapted from the original Pytorch example code:
https://github.com/pytorch/examples/blob/master/mnist/main.py
'''
def _test(self, test_loader, epoch):
self.clf.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for batch_idx, (data, target, idxs) in enumerate(test_loader):
data, target = data.to(self.device), target.to(self.device)
output, _ = self.clf(data)
loss = F.nll_loss(output, target)
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print('Test set: Epoch: {}, Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)'.format(
epoch, test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
self.val_loss.append(test_loss)
self.val_acc.append(correct / len(test_loader.dataset))
def get_trained_embedding(self):
loader = self.data.load_train_data()
self.clf.eval()
emb = np.zeros((self.data.X.shape[0], self.clf.get_embedding_dim()))
with torch.no_grad():
for x, y, idxs in loader:
# x, y = x.to(self.device), y.to(self.device)
out, e1 = self.clf(x)
emb[idxs] = e1
return emb
def get_prev_trained_embedding(self):
loader = self.data.load_prev_data()
self.clf.eval()
emb = np.zeros((self.data.X_Prev.shape[0],
self.clf.get_embedding_dim()))
with torch.no_grad():
for x, y, idxs in loader:
# x, y = x.to(self.device), y.to(self.device)
out, e1 = self.clf(x)
emb[idxs] = e1
return emb
def get_test_embedding(self):
loader = self.data.load_test_data()
self.clf.eval()
emb = np.zeros((self.data.X_TE.shape[0], self.clf.get_embedding_dim()))
with torch.no_grad():
for x, y, idxs in loader:
out, e1 = self.clf(x)
emb[idxs] = e1
return emb
def get_nolb_embedding(self):
loader = self.data.load_nolabel_data()
self.clf.eval()
emb = np.zeros((self.data.X_NOLB.shape[0], self.clf.get_embedding_dim()))
with torch.no_grad():
for x, y, idxs in loader:
out, e1 = self.clf(x)
emb[idxs] = e1
return emb
def get_tolb_embedding(self):
loader = self.data.load_tolabel_data()
self.clf.eval()
emb = np.zeros((self.data.X_TOLB.shape[0], self.clf.get_embedding_dim()))
with torch.no_grad():
for x, y, idxs in loader:
out, e1 = self.clf(x)
emb[idxs] = e1
return emb
def update_data(self, data):
self.data = data
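# Minimal usage sketch (not part of the original module; `Net`, `handler` and
# `data` are hypothetical stand-ins for the network class, dataset handler and
# data wrapper this class expects):
#
#   trainer = Train(net=Net, handler=handler, n_epoch=10, lr=0.01,
#                   data=data, model_dir="./checkpoints")
#   trainer.train()
#   test_emb = trainer.get_test_embedding()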
|
"""
Mass constants
Base unit: Gram
"""
from fractions import Fraction
# US customary (mass in g) (avoirdupois - avdp)
OZ = Fraction(28.349523125) # ounce
GR = Fraction(2, 875) * OZ  # grain (16/7000); integer ratio keeps the Fraction exact
DR = Fraction(1, 16) * OZ  # dram
LB = 16 * OZ # pound
CWT = 100 * 16 * OZ # US hundredweight(=100 lb)
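# Minimal usage sketch (not part of the original module): the constants are
# exact Fractions, so conversions stay lossless until cast to float.
if __name__ == "__main__":
    print(float(LB))          # grams per pound: 453.59237
    print(float(CWT) / 1000)  # kilograms per US hundredweight: 45.359237
    assert 7000 * GR == LB    # 7000 grains make exactly one pound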
|
from typing import Any, List, Literal, Optional, Tuple
import torch
from pytorch_lightning.metrics.classification.confusion_matrix import (
ConfusionMatrix as ConfusionMatrixBase,
)
from torch import Tensor
from ..custom_types import Annotation
from ..visualization import plot_confusion_matrix
from .utilities import calculate_iou_matrix
class ConfusionMatrix(ConfusionMatrixBase):
"""Confusion matrix metric for object detection.
:param num_classes: Number of classes in the dataset (including the background class).
:param iou_type: Type of Intersection over Union (IOU) used to determine if a prediction matches
a target. Either "box" or "mask".
:param iou_threshold: IOU threshold, above which a prediction is considered a match for a
target.
:param score_threshold: Score threshold, above which a prediction is considered non-background.
default: 0.5
:param normalize: Normalization mode for confusion matrix. Choose from
- ``None``: no normalization (default)
- ``'true'``: normalization over the targets (most commonly used)
- ``'pred'``: normalization over the predictions
- ``'all'``: normalization over the whole matrix
:param compute_on_step: Forward only calls ``update()`` and return None if this is set to False.
default: True
:param dist_sync_on_step: Synchronize metric state across processes at each ``forward()``
before returning the value at the step. default: False
:param process_group: Specify the process group on which synchronization is called.
default: None (which selects the entire world)
"""
def __init__(
self,
num_classes: int,
iou_type: Literal["box", "mask"],
iou_threshold: float,
score_threshold: float = 0.5,
normalize: Optional[str] = None,
compute_on_step: bool = True,
dist_sync_on_step: bool = False,
process_group: Optional[Any] = None,
):
super().__init__(
num_classes, normalize, 1, compute_on_step, dist_sync_on_step, process_group
)
self.iou_threshold = iou_threshold
self.iou_type = iou_type
self.score_threshold = score_threshold
# Remove ambiguous attribute from parent class.
delattr(self, "threshold")
def update(self, predictions: List[Annotation], targets: Tuple[Annotation, ...]) -> None:
"""Updates the confusion matrix based on the supplied targets and predictions.
:param predictions: List of dictionaries with prediction data, such as boxes and masks.
:param targets: Tuple of dictionaries with target data, such as boxes and masks.
"""
for prediction, target in zip(predictions, targets):
confusion_matrix = self._evaluate_image(prediction, target)
self.confmat += confusion_matrix
def _evaluate_image(self, prediction: Annotation, target: Annotation) -> Tensor:
"""Evaluates the target and prediction instances of a single image.
:param prediction: Dictionary with prediction data, such as boxes and masks.
:param target: Dictionary with target data, such as boxes and masks.
        :return: The confusion matrix contribution of this image.
"""
device = target["boxes"].device
boxes_pred = prediction["boxes"]
boxes_gt = target["boxes"]
labels_pred = prediction["labels"]
labels_gt = target["labels"]
scores_pred = prediction["scores"]
if self.iou_type == "mask":
masks_pred = prediction["masks"]
masks_gt = target["masks"]
else:
masks_pred = [None] * len(boxes_pred)
masks_gt = [None] * len(boxes_gt)
descending_score_indices = torch.argsort(scores_pred, descending=True)
is_assigned_gt = torch.zeros_like(labels_gt, dtype=torch.bool)
confusion_matrix = torch.zeros(self.num_classes, self.num_classes, device=device)
# Iterate all predictions.
for box_pred, label_pred, mask_pred, score_pred in zip(
boxes_pred[descending_score_indices],
labels_pred[descending_score_indices],
            [masks_pred[i] for i in descending_score_indices],  # works for tensors and plain lists
scores_pred[descending_score_indices],
):
ious = calculate_iou_matrix(
torch.unsqueeze(box_pred, dim=0),
boxes_gt,
self.iou_type,
mask_pred,
masks_gt,
)
            # Assign predictions with a score at or below score_threshold to the background class.
if score_pred <= self.score_threshold:
label_pred = 0
best_iou, best_gt_index = ious.squeeze(0).max(0)
if best_iou > self.iou_threshold and not is_assigned_gt[best_gt_index]:
# We have a match, so the predicted label should be that of the matching ground
# truth.
label_gt = labels_gt[best_gt_index]
# mark the ground truth with the highest iou as assigned
is_assigned_gt[best_gt_index] = True
else:
# We don't have a matching ground truth, so the predicted label should have been
# that of the background class (0)
label_gt = 0
confusion_matrix[label_gt, label_pred] += 1
        # Iterate all ground truths that were not detected.
label_pred = 0 # background class
for label_gt in labels_gt[~is_assigned_gt]:
confusion_matrix[label_gt, label_pred] += 1
return confusion_matrix
def plot(self, class_names: Optional[List[str]] = None):
"""Compute and plot the confusion matrix.
:param class_names: Optional class names to be used as labels.
:return: figure handle
"""
cm = self.compute()
cm = _convert_to_int_if_lossless(cm)
return plot_confusion_matrix(cm, class_names=class_names)
def _convert_to_int_if_lossless(a: torch.Tensor) -> torch.Tensor:
"""Converts the supplied tensor to int, if the conversion can be done lossless.
:param a: tensor
:return: Input tensor as int, if lossless conversion is possible or as original data type, if
not.
"""
if torch.sum(a % 1) == 0:
a = a.int()
return a
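# Minimal usage sketch (hypothetical data; assumes box IOU and 3 classes
# including background):
#
#   cm = ConfusionMatrix(num_classes=3, iou_type="box", iou_threshold=0.5)
#   predictions = [{"boxes": torch.tensor([[0., 0., 10., 10.]]),
#                   "labels": torch.tensor([1]),
#                   "scores": torch.tensor([0.9])}]
#   targets = ({"boxes": torch.tensor([[0., 0., 10., 10.]]),
#               "labels": torch.tensor([1])},)
#   cm.update(predictions, targets)
#   fig = cm.plot(class_names=["background", "cat", "dog"])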
|
from conformance_checking.algorithms import (
Act2VecWmdConformance,
Act2VecIctConformance,
Trace2VecCosineConformance,
)
import numpy as np
def main():
model_traces = [
["hi", "foo"],
["hi", "foo"],
["bar"],
[],
["a", "long", "trace", "with", "doubled", "words", "like", "long"],
]
real_traces = [
["foobar", "hi"],
["bar"],
["bar"],
[],
["a", "long", "long", "trace", "but", "not", "the", "same"],
]
print("Model traces: %s" % str(model_traces))
print("Real traces: %s" % str(real_traces))
print("Executing Act2VecWmdConformance algorithm...")
dissimilarity_matrix_a = Act2VecWmdConformance().execute(model_traces, real_traces)
print("Executing Act2VecIctConformance algorithm...")
dissimilarity_matrix_b = Act2VecIctConformance().execute(model_traces, real_traces)
print("Executing Trace2VecCosineConformance algorithm...")
dissimilarity_matrix_c = Trace2VecCosineConformance().execute(
model_traces, real_traces
)
np.set_printoptions(precision=4, suppress=True) # similar effect to %.4f
print_results("Act2VecWmdConformance", dissimilarity_matrix_a)
print_results("Act2VecIctConformance", dissimilarity_matrix_b)
print_results("Trace2VecCosineConformance", dissimilarity_matrix_c)
def print_results(name, dissimilarity_matrix):
print("Dissimilarity matrix of %s: " % name)
print(dissimilarity_matrix.get_dissimilarity_matrix())
print("Precision: %.4f" % dissimilarity_matrix.calc_precision())
print("Fitness: %.4f" % dissimilarity_matrix.calc_fitness())
if __name__ == "__main__":
main()
|
import wxconfig as cfg
import unittest
from unittest.mock import patch
import definitions
import algotrader.connections.ds as ds
class TestDataSource(unittest.TestCase):
def setUp(self) -> None:
# Setup config
cfg.Config().load(fr"{definitions.ROOT_DIR}\tests\testconfig.yaml")
def test_instance(self):
# Test that we get an instance of MetaTrader5 datasource from its configured name mt5
datasource = ds.DataSource.instance('mt5')
        self.assertTrue(isinstance(datasource, ds.MT5DataSource), "DataSource should be an instance of MT5DataSource")
class Symbol:
""" A Mock symbol class returned to test MT5 as MT5 returns Symbol object with name and visible properties"""
name = None
visible = None
def __init__(self, name, visible):
self.name = name
self.visible = visible
class TestMT5DataSource(unittest.TestCase):
__mock_symbols = [Symbol(name='SYMBOL1', visible=True),
Symbol(name='SYMBOL2', visible=True),
Symbol(name='SYMBOL3', visible=False),
Symbol(name='SYMBOL4', visible=True),
Symbol(name='SYMBOL5', visible=True)]
def setUp(self) -> None:
# Setup config
cfg.Config().load(fr"{definitions.ROOT_DIR}\tests\testconfig.yaml")
@patch('algotrader.connections.ds.MetaTrader5')
def test_get_symbols(self, mock):
# Mock return values
mock.symbols_get.return_value = self.__mock_symbols
mock.symbols_total.return_value = len(self.__mock_symbols)
# Get the MT5 datasource
datasource = ds.DataSource.instance('mt5')
# Call get_symbols
symbols = datasource.get_symbols()
# There should be 4 as 1 is not visible in market watch
self.assertTrue(len(symbols) == 4, "There should be 4 symbols. 5 Returned from MT5, 1 of which is not visible.")
|
from unittest.mock import Mock, patch
import pytest
def test_subscription_inst():
from tartiflette.subscription.subscription import Subscription
r = Subscription("a")
assert r._implementation is None
assert r._schema_name == "default"
assert r._name == "a"
@pytest.fixture
def a_subscription():
from tartiflette.subscription.subscription import Subscription
a_subscription = Subscription("a_subscription")
a_subscription._implementation = "A"
return a_subscription
def test_subscription_bake_raise_missing_implementation(a_subscription):
from tartiflette.types.exceptions.tartiflette import MissingImplementation
schema_mock = Mock()
a_subscription._implementation = None
with pytest.raises(MissingImplementation):
a_subscription.bake(schema_mock)
def test_subscription_bake_unknown_field_definition(a_subscription):
from tartiflette.types.exceptions.tartiflette import UnknownFieldDefinition
a_field_mock = Mock()
a_field_mock.parent_type = Mock()
schema_mock = Mock()
schema_mock.get_field_by_name = Mock(side_effect=KeyError())
with pytest.raises(UnknownFieldDefinition):
a_subscription.bake(schema_mock)
assert schema_mock.get_field_by_name.call_args_list == [
(("a_subscription",),)
]
@pytest.mark.skip
def test_subscription_bake_not_subscription_field(a_subscription):
from tartiflette.types.exceptions.tartiflette import NotSubscriptionField
schema_mock = Mock()
schema_mock.subscription_type = "Subscription"
schema_mock.find_type = Mock()
with pytest.raises(NotSubscriptionField):
a_subscription.bake(schema_mock)
assert schema_mock.find_type.call_args_list == [(("Subscription",),)]
def test_subscription_bake(a_subscription):
subscription_type_mock = Mock()
a_field_mock = Mock()
a_field_mock.parent_type = subscription_type_mock
schema_mock = Mock()
schema_mock.get_field_by_name = Mock(return_value=a_field_mock)
schema_mock.find_type = Mock(return_value=subscription_type_mock)
assert a_subscription.bake(schema_mock) is None
assert schema_mock.get_field_by_name.call_args_list == [
(("a_subscription",),)
]
assert a_field_mock.subscribe is a_subscription._implementation
def test_subscription__call__not_async_generator(a_subscription):
from tartiflette.types.exceptions.tartiflette import (
NonAsyncGeneratorSubscription,
)
with pytest.raises(NonAsyncGeneratorSubscription):
def implementation():
pass
a_subscription(implementation)
def test_subscription__call__(a_subscription):
async def implementation():
yield None
with patch(
"tartiflette.schema.registry.SchemaRegistry.register_subscription"
) as register_subscription_mock:
subscription = a_subscription(implementation)
assert subscription is implementation
assert register_subscription_mock.call_args_list == [
(("default", a_subscription),)
]
assert a_subscription._implementation is implementation
|
from models.models import User
import os
import sys
from werkzeug.utils import secure_filename
from flask import Flask, render_template, url_for, request, redirect, session, jsonify, Blueprint, flash
from scoss import smoss
import requests
from sctokenizer import Source
from scoss import Scoss
from scoss.metrics import all_metrics
from models.models import db, MessageStatus
from werkzeug.security import generate_password_hash, check_password_hash
from jinja2 import Environment
from config import URL
import config
user = Blueprint('users_page', __name__)
@user.route('/admin', methods=['GET', 'POST'])
def admin():
if 'logged_in' in session:
        if session['logged_in']:
if session['role'] == 0:
if request.method == 'GET':
url = URL + '/api/users'
headers = {'Authorization': "Bearer {}".format(session['token'])}
data = requests.get(url=url, headers=headers)
# print(data.json())
if data.status_code != 200 and 'msg' in data.json():
session.clear()
return redirect(url_for('login_page.login_page'))
if 'error' in data.json().keys():
flash(data.json()['error'], MessageStatus.error)
return render_template('admin.html', data=data.json()['users'])
else:
username = request.form['username']
email = request.form['email']
password = '12345'
role = 1
data_form = {'username': username, 'email': email, 'role': role, 'password': password}
url = URL + '/api/users/add'
headers = {'Authorization': "Bearer {}".format(session['token'])}
req = requests.post(url=url,json=data_form, headers=headers)
if req.status_code != 200 and 'msg' in req.json():
session.clear()
return redirect(url_for('login_page.login_page'))
if 'error' in req.json().keys():
flash(req.json()['error'], MessageStatus.error)
                return redirect(url_for('users_page.admin'))
    return redirect(url_for('login_page.login_page'))  # fall through: not logged in or not an admin
@user.route('/admin/redis', methods=['GET'])
def admin_rq():
return redirect('/rq')
@user.route('/admin/mongo', methods=['GET'])
def admin_mg():
if not config.server_name:
config.server_name = request.host.split(":")[0]
url = 'http://{}:{}'.format(config.server_name, config.MONGO_PORT)
return redirect(url)
@user.route('/users/<user_id>/update', methods=['GET', 'POST'])
def update_password(user_id):
if 'logged_in' in session:
        if session['logged_in']:
if request.method == 'GET':
data = User.objects.get(user_id=user_id)
return render_template('profile.html', data=data.to_mongo())
if request.method == 'POST':
email = request.form['email']
old_pass = request.form['old_password']
new_pass = request.form['new_password']
data_form = {
'email': email,
'old_password': old_pass,
'new_password': new_pass
}
base_url = request.referrer
url = URL + '/api/users/{}'.format(user_id)
headers = {'Authorization': "Bearer {}".format(session['token'])}
req = requests.put(url=url, json=data_form, headers=headers)
if req.status_code != 200 and 'msg' in req.json():
session.clear()
return redirect(url_for('login_page.login_page'))
if 'error' in req.json().keys():
flash(req.json()['error'], MessageStatus.error)
else:
flash(req.json()['info'], MessageStatus.success)
return redirect(base_url)
else:
return redirect(url_for('login_page.login_page'))
|
from collections import OrderedDict
import heterocl as hcl
import heterocl.tvm as tvm
class PPAC_config:
"""Wrap PPAC parameters and function names."""
def __init__(self, multi_bit=False, word_bits=None, elem_bits=None):
"""Initialize PPAC configurations
Parameters
----------
multi_bit : Whether to use specialized ppac accelerator
or generalized ppac module.
See hardware implementation for more.
word_bits : Number of bits in a row in ppac.
elem_bits : Number of bits in a number in matrix (datatype)
"""
self.word_bits = (word_bits if word_bits else 256) if multi_bit else 64
self.elem_bits = (elem_bits if elem_bits else 8) if multi_bit else 1
self.elem_num = self.word_bits // self.elem_bits
self.depth = self.elem_num
assert self.elem_bits in [1, 2, 4, 8, 16, 32], "elem_bits must be in {1, 2, 4, 8, 16, 32}"
assert (self.word_bits % 64 == 0) and (self.elem_num*self.elem_bits == self.word_bits), \
"word_bits must be times of 64 and times of elem_bits"
if multi_bit:
self.func_call = ['PPACFunc_GeMMUInt', 'PPACFunc_GeMMSInt']
else:
self.func_call = ['PPACFunc_HmmSim', 'PPACFunc_GeMMBin']
class PPAC_func_params:
"""
names of PPAC function call parameters
used as annotation key on the stage
"""
def __init__(self):
self.func_name = '_ppac_func_name'
self.ret = '_ret'
self.arg0 = '_arg0'
self.arg1 = '_arg1'
self.b_n = '_batch_num'
self.i_b_n = '_in_block_num'
self.o_c_n = '_out_channel_num'
ppac_params = PPAC_func_params()
def hmm_sim(x, y, name=None):
"""Compute hamming-similarity between each element in x and y
Parameters
----------
x : 1-d tensor of datatype uint64
y : 1-d tensor of datatype uint64
Returns
-------
res: 2-d tensor of shape (x.shape[0], y.shape[0]) and datatype uint64
"""
assert x.dtype == 'uint64' and y.dtype == 'uint64', "only support datatype uint64"
assert len(x.shape) == 1 and len(y.shape) == 1, "only support 1-dim hamming-similarity operation"
ppac_config = PPAC_config()
try:
res_shape = x.shape + y.shape
batch_num = x.shape[0]
    except (AttributeError, TypeError):
# x is scalar
res_shape = y.shape
batch_num = 1
res_name = name if name else 'res'
in_block_num = 1
out_channel_num = y.shape[0]
def _assign_val(*args):
temp = hcl.local(0, name='sim_acc', dtype=hcl.UInt(64))
temp[0] = tvm.popcount(~(x[args[0]] ^ y[args[1]]))
return temp[0]
return hcl.compute( res_shape, _assign_val, res_name, dtype=hcl.UInt(64),
attrs=OrderedDict([(ppac_params.func_name, tvm.make.StringImm(ppac_config.func_call[0])),
(ppac_params.ret, tvm.make.StringImm(res_name)),
(ppac_params.arg0, tvm.make.StringImm(x.name)),
(ppac_params.arg1, tvm.make.StringImm(y.name)),
(ppac_params.b_n, batch_num),
(ppac_params.i_b_n, in_block_num),
(ppac_params.o_c_n, out_channel_num)]) )
def gemm_binary(d, w, name=None):
"""Compute general matrix multiplication of datatype {1, -1}
Parameters
----------
d : 2-d tensor of datatype uint1
w : 2-d tensor of datatype uint1
Returns
-------
res: 2-d tensor of shape (d.shape[0], w.shape[0]) and datatype uint64
res = dot(d, w.T) (with datatype {1, -1})
"""
assert d.dtype == 'uint1' and w.dtype == 'uint1', 'only support binary data'
assert len(w.shape) == 2 and len(d.shape) == 2, "only support 2-dim binary gemm"
assert d.shape[1] == w.shape[1]
ppac_config = PPAC_config()
assert d.shape[1] % ppac_config.elem_num == 0, \
"input channel should be times of " + str(ppac_config.elem_num)
res_name = name if name else 'res'
batch_num = d.shape[0]
in_channel_num = w.shape[1]
in_block_num = in_channel_num // ppac_config.elem_num
out_channel_num = w.shape[0]
res_shape = (batch_num, out_channel_num)
block_size = ppac_config.elem_num // 8
def _bin_pack_uint8(tensor):
"""Pack uint1 to uint8.
uint1 is cast to uint8 in c backend.
This operation squeezes memory 8 times.
"""
assert tensor.dtype == 'uint1'
ishape = tensor.shape
n = len(ishape)
oshape = ishape[:-1] + (ishape[n-1] // 8, )
def _assign_val(*args):
temp = hcl.local(0, name='pack_acc', dtype=hcl.UInt(8))
with hcl.for_(0, 8) as i:
temp[0] = temp[0] | (tensor[args[0], i + args[1]*8] << i)
return temp[0]
return hcl.compute(oshape, _assign_val,
name=tensor.name+'_packed', dtype=hcl.UInt(8))
def _mvpodd_reduce(*args):
"""compute {1, -1} dot product on packed data."""
temp = hcl.local(0, name='mvpodd_acc', dtype=hcl.Int(64))
with hcl.for_(0, in_block_num) as o:
with hcl.for_(0, block_size) as i:
temp[0] += tvm.popcount(d_packed[args[0], i+block_size*o] ^ w_packed[args[1], i+block_size*o])
temp[0] = ppac_config.elem_num - temp[0]*2
return temp[0]
d_packed = _bin_pack_uint8(d)
w_packed = _bin_pack_uint8(w)
return hcl.compute(res_shape, _mvpodd_reduce, name=res_name, dtype=hcl.Int(64),
attrs=OrderedDict([(ppac_params.func_name, tvm.make.StringImm(ppac_config.func_call[1])),
(ppac_params.ret, tvm.make.StringImm(res_name)),
(ppac_params.arg0, tvm.make.StringImm(d_packed.name)),
(ppac_params.arg1, tvm.make.StringImm(w_packed.name)),
(ppac_params.b_n, batch_num),
(ppac_params.i_b_n, in_block_num),
(ppac_params.o_c_n, out_channel_num)]) )
def gemm_multi_bit(d, w, name=None):
"""Compute general matrix multiplication of multi-bit data
Parameters
----------
d : 2-d tensor
w : 2-d tensor
Returns
-------
res: 2-d tensor of shape (d.shape[0], w.shape[0]) and datatype uint64
res = dot(d, w.T)
"""
assert w.dtype == d.dtype
assert w.dtype in ['uint8', 'int8', 'uint16', 'int16', 'uint32', 'int32']
assert len(w.shape) == 2 and len(d.shape) == 2, "only support 2-dim gemm"
assert d.shape[1] == w.shape[1]
ppac_config = PPAC_config(multi_bit=True)
assert d.shape[1] % ppac_config.elem_num == 0, \
"only support data with size of times of " + str(ppac_config.elem_num)
res_name = name if name else 'res'
res_dtype = hcl.UInt(64) if ('u' in d.dtype) else hcl.Int(64)
batch_num = d.shape[0]
in_channel_num = d.shape[1]
in_block_num = in_channel_num // ppac_config.elem_num
out_channel_num = w.shape[0]
res_shape = (batch_num, out_channel_num)
func_name = ppac_config.func_call[0] if ('u' in d.dtype) else ppac_config.func_call[1]
r = hcl.reduce_axis(0, in_channel_num, name='k')
return hcl.compute(res_shape,
lambda i, j: hcl.sum(d[i, r] * w[j, r], axis=r),
name=res_name, dtype=res_dtype,
attrs=OrderedDict([(ppac_params.func_name, tvm.make.StringImm(func_name)),
(ppac_params.ret, tvm.make.StringImm(res_name)),
(ppac_params.arg0, tvm.make.StringImm(d.name)),
(ppac_params.arg1, tvm.make.StringImm(w.name)),
(ppac_params.b_n, batch_num),
(ppac_params.i_b_n, in_block_num),
(ppac_params.o_c_n, out_channel_num)]))
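# Minimal usage sketch (hypothetical shapes; assumes the usual HeteroCL flow of
# placeholders plus hcl.create_schedule):
#
#   d = hcl.placeholder((4, 64), "d", dtype=hcl.UInt(1))
#   w = hcl.placeholder((8, 64), "w", dtype=hcl.UInt(1))
#   s = hcl.create_schedule([d, w], lambda d, w: gemm_binary(d, w))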
|
from typing import Iterable, Tuple
import numpy as np
from unittest import TestCase
import numpy.testing as npt
from distancematrix.util import diag_indices_of
from distancematrix.consumer.contextual_matrix_profile import ContextualMatrixProfile
from distancematrix.consumer.contextmanager import GeneralStaticManager
from distancematrix.consumer.contextmanager import AbstractContextManager
class TestContextualMatrixProfile(TestCase):
def setUp(self):
self.dist_matrix = np.array([
[8.67, 1.10, 1.77, 1.26, 1.91, 4.29, 6.32, 4.24, 4.64, 5.06, 6.41, 4.07, 4.67, 9.32, 5.09],
[4.33, 4.99, 0.14, 2.79, 2.10, 6.26, 9.40, 4.14, 5.53, 4.26, 8.21, 5.91, 6.83, 9.26, 6.19],
[0.16, 9.05, 1.35, 4.78, 7.01, 4.36, 5.24, 8.81, 7.90, 5.84, 8.90, 7.88, 3.37, 4.70, 6.94],
[0.94, 8.70, 3.87, 6.29, 0.32, 1.79, 5.80, 2.61, 1.43, 6.32, 1.62, 0.20, 2.28, 7.11, 2.15],
[9.90, 4.51, 2.11, 2.83, 5.52, 8.55, 6.90, 0.24, 1.58, 4.26, 8.75, 3.71, 9.93, 8.33, 0.38],
[7.30, 5.84, 9.63, 1.95, 3.76, 3.61, 9.42, 5.56, 5.09, 7.07, 1.90, 4.78, 1.06, 0.69, 3.67],
[2.17, 8.37, 3.99, 4.28, 4.37, 2.86, 8.61, 3.39, 8.37, 6.95, 6.57, 1.79, 7.40, 4.41, 7.64],
[6.26, 0.29, 6.44, 8.84, 1.24, 2.52, 6.25, 3.07, 5.55, 3.19, 8.16, 5.32, 9.01, 0.39, 9.],
[4.67, 8.88, 3.05, 3.06, 2.36, 8.34, 4.91, 5.46, 9.25, 9.78, 0.03, 5.64, 5.10, 3.58, 6.92],
[1.01, 0.91, 6.28, 7.79, 0.68, 5.50, 6.72, 5.11, 0.80, 9.30, 9.77, 4.71, 3.26, 7.29, 6.26]])
self.m = 5
def mock_initialise(self, cdm):
cdm.initialise(1, self.dist_matrix.shape[0], self.dist_matrix.shape[1])
def bruteforce_cdm(self, dist_matrix, query_ranges, series_ranges):
"""
Brute force calculation of contextual distance matrix.
:param dist_matrix: 2D matrix
:param query_ranges: list of length m containing: ranges or list of ranges
:param series_ranges: list of length n containing: ranges or list of ranges
:return: 2D matrix (m by n)
"""
correct = np.full((len(query_ranges), len(series_ranges)), np.inf, dtype=float)
correct_qi = np.full((len(query_ranges), len(series_ranges)), -1, dtype=int)
correct_si = np.full((len(query_ranges), len(series_ranges)), -1, dtype=int)
for i, r0s in enumerate(query_ranges):
if not isinstance(r0s, list):
r0s = [r0s]
for r0 in r0s:
r0 = slice(r0.start, r0.stop)
for j, r1s in enumerate(series_ranges):
if not isinstance(r1s, list):
r1s = [r1s]
for r1 in r1s:
r1 = slice(r1.start, r1.stop)
view = dist_matrix[r0, :][:, r1]
if view.size == 0:
continue
                        min_value = np.min(view)
                        if correct[i, j] > min_value:
                            correct[i, j] = min_value
                            correct_qi[i, j], correct_si[i, j] = next(zip(*np.where(view == min_value)))
                            correct_qi[i, j] += r0.start
                            correct_si[i, j] += r1.start
return correct, correct_qi, correct_si
def test_process_diagonal(self):
query_ranges = [range(0, 4), range(7, 8)]
series_ranges = [range(1, 4), range(6, 8)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_diagonal_partial_calculation(self):
# Ranges selected so that the diagonals have a mix of the number of numbers that are filled in
query_ranges = [range(0, 2), range(2, 5), range(7, 8), range(8, 9), range(9, 10)]
series_ranges = [range(0, 2), range(2, 4), range(4, 10), range(13, 14)]
part_dist_matrix = np.full_like(self.dist_matrix, np.inf)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-8, self.dist_matrix.shape[1], 4):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
part_dist_matrix[diag_ind] = self.dist_matrix[diag_ind]
correct, correct_qi, correct_si = self.bruteforce_cdm(part_dist_matrix, query_ranges, series_ranges)
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_diagonal_complex_ranges(self):
# Overlapping ranges and contexts consisting of multiple ranges
query_ranges = [range(0, 10), range(1, 5), range(1, 2), range(4, 5),
[range(1, 2), range(3, 4), range(7, 9)]]
series_ranges = [range(0, 2), range(1, 3), range(2, 4), range(3, 6), range(4, 8), range(4, 10),
[range(0, 3), range(3, 5), range(13, 15)]]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_diagonal_context_goes_beyond_distancematrix(self):
query_ranges = [range(0, 8), range(8, 16)]
series_ranges = [range(0, 10), range(10, 20)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_diagonal_context_goes_beyond_distancematrix_2(self):
self.dist_matrix = self.dist_matrix.T
query_ranges = [range(0, 8), range(13, 20)]
series_ranges = [range(0, 10), range(9, 13)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_diagonal_context_falls_outside_distancematrix(self):
query_ranges = [range(0, 8), range(8, 16), range(20, 30)]
series_ranges = [range(0, 10), range(10, 20), range(30, 40)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for diag in range(-self.dist_matrix.shape[0] + 1, self.dist_matrix.shape[1]):
diag_ind = diag_indices_of(self.dist_matrix, diag)
cdm.process_diagonal(diag, np.atleast_2d(self.dist_matrix[diag_ind]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_column(self):
query_ranges = [range(0, 4), range(7, 8)]
series_ranges = [range(1, 4), range(6, 8)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for column in range(0, self.dist_matrix.shape[1]):
cdm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_column_partial_calculation(self):
# Ranges selected so that the contexts will receive 0, 1, 2 or 3 columns
query_ranges = [range(0, 2), range(2, 5), range(7, 10)]
series_ranges = [range(0, 2), range(2, 4), range(4, 6), range(9, 14)]
part_dist_matrix = np.full_like(self.dist_matrix, np.inf)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for column in [2, 3, 4, 5, 10, 11, 12]:
cdm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
part_dist_matrix[:, column] = self.dist_matrix[:, column]
correct, correct_qi, correct_si = self.bruteforce_cdm(part_dist_matrix, query_ranges, series_ranges)
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_column_complex_ranges(self):
# Overlapping ranges and contexts consisting of multiple ranges
query_ranges = [range(0, 10), range(1, 5), range(1, 2), range(4, 5),
[range(1, 2), range(3, 4), range(7, 9)]]
series_ranges = [range(0, 2), range(1, 3), range(2, 4), range(3, 6), range(4, 8), range(4, 10),
[range(0, 3), range(3, 5), range(13, 15)]]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for column in range(0, self.dist_matrix.shape[1]):
cdm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_column_context_goes_beyond_distancematrix(self):
query_ranges = [range(0, 8), range(8, 16)]
series_ranges = [range(0, 10), range(10, 20)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for column in range(0, self.dist_matrix.shape[1]):
cdm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
def test_process_column_context_falls_outside_distancematrix(self):
query_ranges = [range(0, 8), range(8, 16), range(20, 30)]
series_ranges = [range(0, 10), range(10, 20), range(30, 40)]
correct, correct_qi, correct_si = self.bruteforce_cdm(self.dist_matrix, query_ranges, series_ranges)
cdm = ContextualMatrixProfile(GeneralStaticManager(series_ranges, query_ranges))
self.mock_initialise(cdm)
for column in range(0, self.dist_matrix.shape[1]):
cdm.process_column(column, np.atleast_2d(self.dist_matrix[:, column]))
npt.assert_allclose(cdm.distance_matrix, correct)
npt.assert_equal(cdm.match_index_query, correct_qi)
npt.assert_equal(cdm.match_index_series, correct_si)
class MockContextManager(AbstractContextManager):
def __init__(self) -> None:
self.series_shift = 0
self.query_shift = 0
    def context_matrix_shape(self) -> Tuple[int, int]:
return 2, 4
def shift_query(self, amount: int) -> int:
self.query_shift += amount
if self.query_shift == 2:
return 0
elif self.query_shift == 4:
return 1
else:
raise RuntimeError("Invalid test state")
def shift_series(self, amount: int) -> int:
self.series_shift += amount
if self.series_shift == 1:
return 0
elif self.series_shift == 2:
return 1
else:
raise RuntimeError("Invalid test state")
def series_contexts(self, start: int, stop: int) -> Iterable[Tuple[int, int, int]]:
if self.series_shift == 0:
ctxs = [(0, 2, 0), (2, 4, 1), (4, 6, 2)]
elif self.series_shift == 1:
ctxs = [(0, 1, 0), (1, 3, 1), (3, 5, 2), (5, 6, 3)]
elif self.series_shift == 2:
ctxs = [(0, 2, 0), (2, 4, 1), (4, 6, 2)]
else:
raise RuntimeError("Invalid test state")
return filter(lambda c: c[0] < stop and c[1] > start, ctxs)
def query_contexts(self, start: int, stop: int) -> Iterable[Tuple[int, int, int]]:
if self.query_shift == 0:
ctxs = [(0, 3, 0), (3, 10, 1)]
elif self.query_shift == 2:
ctxs = [(0, 1, 0), (1, 10, 1)]
elif self.query_shift == 4:
ctxs = [(0, 10, 0), (2, 8, 1)]
else:
raise RuntimeError("Invalid test state")
return filter(lambda c: c[0] < stop and c[1] > start, ctxs)
class TestStreamingContextualMatrixProfile(TestCase):
def setUp(self):
self.dist_matrix = np.array([
[9.2, 4.6, 0.5, 0.0, 9.7, 8.4, 8.8, 7.2],
[8.4, 3.3, 3.8, 6.2, 2.3, 7.3, 1.0, 9.7],
[7.2, 3.1, 6.1, 3.2, 8.0, 5.9, 2.7, 8.1],
[5.3, 7.3, 5.8, 9.0, 2.6, 1.0, 2.9, 8.8],
[8.9, 4.8, 6.4, 6.7, 2.1, 6.4, 1.2, 5.2],
[4.2, 8.1, 2.3, 4.4, 3.4, 6.6, 1.5, 4.2],
[1.7, 8.8, 1.2, 0.4, 3.7, 9.3, 4.4, 3.7],
[1.6, 7.8, 4.0, 8.9, 0.1, 7.5, 2.5, 8.]
])
def test_streaming_process_column(self):
cdm = ContextualMatrixProfile(MockContextManager())
cdm.initialise(1, 4, 6)
cdm.process_column(0, np.atleast_2d(self.dist_matrix[0:4, 0]))
cdm.process_column(2, np.atleast_2d(self.dist_matrix[0:4, 2]))
npt.assert_equal(cdm.distance_matrix, [[7.2, 0.5, np.Inf, np.Inf], [5.3, 5.8, np.Inf, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, -1, -1], [3, 3, -1, -1]])
npt.assert_equal(cdm.match_index_series, [[0, 2, -1, -1], [0, 2, -1, -1]])
cdm.shift_series(1)
cdm.shift_query(2)
npt.assert_equal(cdm.distance_matrix, [[7.2, 0.5, np.Inf, np.Inf], [5.3, 5.8, np.Inf, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, -1, -1], [3, 3, -1, -1]])
npt.assert_equal(cdm.match_index_series, [[0, 2, -1, -1], [0, 2, -1, -1]])
cdm.process_column(0, np.atleast_2d(self.dist_matrix[2:6, 1]))
cdm.process_column(1, np.atleast_2d(self.dist_matrix[2:6, 2]))
cdm.process_column(3, np.atleast_2d(self.dist_matrix[2:6, 4]))
npt.assert_equal(cdm.distance_matrix, [[3.1, 0.5, 8.0, np.Inf], [4.8, 2.3, 2.1, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, 2, -1], [4, 5, 4, -1]])
npt.assert_equal(cdm.match_index_series, [[1, 2, 4, -1], [1, 2, 4, -1]])
cdm.shift_series(1)
cdm.shift_query(2)
npt.assert_equal(cdm.distance_matrix, [[2.3, 2.1, np.Inf, np.Inf], [np.Inf, np.Inf, np.Inf, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[5, 4, -1, -1], [-1, -1, -1, -1]])
npt.assert_equal(cdm.match_index_series, [[2, 4, -1, -1], [-1, -1, -1, -1]])
cdm.process_column(0, np.atleast_2d(self.dist_matrix[4:8, 2]))
cdm.process_column(3, np.atleast_2d(self.dist_matrix[4:8, 5]))
cdm.process_column(4, np.atleast_2d(self.dist_matrix[4:8, 6]))
cdm.process_column(5, np.atleast_2d(self.dist_matrix[4:8, 7]))
npt.assert_equal(cdm.distance_matrix, [[1.2, 2.1, 1.2, np.Inf], [1.2, 7.5, 2.5, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[6, 4, 4, -1], [6, 7, 7, -1]])
npt.assert_equal(cdm.match_index_series, [[2, 4, 6, -1], [2, 5, 6, -1]])
def test_streaming_process_diagonal(self):
cdm = ContextualMatrixProfile(MockContextManager())
cdm.initialise(1, 4, 6)
v = self.dist_matrix[0:4, 0:6]
cdm.process_diagonal(0, np.atleast_2d(v[diag_indices_of(v, 0)]))
cdm.process_diagonal(2, np.atleast_2d(v[diag_indices_of(v, 2)]))
cdm.process_diagonal(-1, np.atleast_2d(v[diag_indices_of(v, -1)]))
npt.assert_equal(cdm.distance_matrix, [[3.1, 0.5, 8., np.Inf], [np.Inf, 5.8, 1., np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, 2, -1], [-1, 3, 3, -1]])
npt.assert_equal(cdm.match_index_series, [[1, 2, 4, -1], [-1, 2, 5, -1]])
cdm.shift_series(1)
cdm.shift_query(2)
v = self.dist_matrix[2:6, 1:7]
npt.assert_equal(cdm.distance_matrix, [[3.1, 0.5, 8., np.Inf], [np.Inf, 5.8, 1., np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, 2, -1], [-1, 3, 3, -1]])
npt.assert_equal(cdm.match_index_series, [[1, 2, 4, -1], [-1, 2, 5, -1]])
cdm.process_diagonal(5, np.atleast_2d(v[diag_indices_of(v, 5)]))
cdm.process_diagonal(1, np.atleast_2d(v[diag_indices_of(v, 1)]))
cdm.process_diagonal(0, np.atleast_2d(v[diag_indices_of(v, 0)]))
cdm.process_diagonal(-3, np.atleast_2d(v[diag_indices_of(v, -3)]))
npt.assert_equal(cdm.distance_matrix, [[3.1, 0.5, 8., 2.7], [8.1, 5.8, 1., np.Inf]])
npt.assert_equal(cdm.match_index_query, [[2, 0, 2, 2], [5, 3, 3, -1]])
npt.assert_equal(cdm.match_index_series, [[1, 2, 4, 6], [1, 2, 5, -1]])
cdm.shift_series(1)
cdm.shift_query(2)
v = self.dist_matrix[4:8, 2:8]
npt.assert_equal(cdm.distance_matrix, [[5.8, 1., np.Inf, np.Inf], [np.Inf, np.Inf, np.Inf, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[3, 3, -1, -1], [-1, -1, -1, -1]])
npt.assert_equal(cdm.match_index_series, [[2, 5, -1, -1], [-1, -1, -1, -1]])
cdm.process_diagonal(-2, np.atleast_2d(v[diag_indices_of(v, -2)]))
cdm.process_diagonal(0, np.atleast_2d(v[diag_indices_of(v, 0)]))
cdm.process_diagonal(1, np.atleast_2d(v[diag_indices_of(v, 1)]))
cdm.process_diagonal(2, np.atleast_2d(v[diag_indices_of(v, 2)]))
cdm.process_diagonal(3, np.atleast_2d(v[diag_indices_of(v, 3)]))
cdm.process_diagonal(4, np.atleast_2d(v[diag_indices_of(v, 4)]))
npt.assert_equal(cdm.distance_matrix, [[1.2, 1., 1.2, np.Inf], [1.2, 3.7, 2.5, np.Inf]])
npt.assert_equal(cdm.match_index_query, [[6, 3, 4, -1], [6, 6, 7, -1]])
npt.assert_equal(cdm.match_index_series, [[2, 5, 6, -1], [2, 4, 6, -1]])
|
from . import BaseAnnotationDefinition
class Subtitle(BaseAnnotationDefinition):
"""
Subtitle annotation object
"""
type = "subtitle"
def __init__(self, text, label, attributes=None, description=None):
super().__init__(description=description, attributes=attributes)
self.text = text
self.label = label
def to_coordinates(self, color):
return {"text": self.text}
@staticmethod
def from_coordinates(coordinates):
return coordinates["text"]
@classmethod
def from_json(cls, _json):
if "coordinates" in _json:
text = cls.from_coordinates(coordinates=_json["coordinates"])
elif "data" in _json:
text = cls.from_coordinates(coordinates=_json["data"])
else:
raise ValueError('Bad json, "coordinates" or "data" not found')
return cls(
text=text,
label=_json["label"],
attributes=_json.get("attributes", None)
)
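# Illustrative round-trip, assuming the JSON shape handled in from_json above:
#   ann = Subtitle.from_json({"coordinates": {"text": "Hi"}, "label": "greeting"})
#   ann.to_coordinates(color=None)   # -> {"text": "Hi"}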
|
import string
class Base62(object):
"""
Helper class for encoding/decoding with Base 62. The code is borrowed from:
https://stackoverflow.com/a/2549514/1396314
"""
BASE_LIST = string.digits + string.ascii_letters
BASE_DICT = dict((c, i) for i, c in enumerate(BASE_LIST))
@staticmethod
def encode(num):
if num == 0:
return Base62.BASE_LIST[0]
length = len(Base62.BASE_LIST)
s = ''
while num != 0:
s = Base62.BASE_LIST[num % length] + s
num //= length
return s
@staticmethod
def decode(s):
length = len(Base62.BASE_DICT)
ret = 0
for i, c in enumerate(s[::-1]):
ret += (length ** i) * Base62.BASE_DICT[c]
return ret
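# Illustrative sanity check (not part of the original module): 62 encodes as
# '10' because the alphabet is digits followed by ASCII letters.
if __name__ == '__main__':
    assert Base62.encode(62) == '10'
    assert Base62.decode(Base62.encode(123456789)) == 123456789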
|
#!/usr/bin/env python
# encoding=utf8
import os
import re
import sys
import struct
import pprint
import random
import argparse
import datetime
import tiddlywiki as tiddly
import cdam_gen_files as gen
import importlib
import bitarray
importlib.reload(sys)
# sys.setdefaultencoding('utf8')
VERSION = "1.0"
BINARY_VER = "1.0.5"
# For holding binary variable keys and values.
VARIABLES = {}
FLAGS = {}
TITLE_MAP = {}
STORY_MAP = {}
PASSAGES = {}
STORY_TITLE = ""
STORY_AUTHOR = ""
STORY_SUBTITLE = ""
STORY_CREDITS = ""
STORY_VERSION = ""
STORY_CONTACT = ""
STORY_LANGUAGE = ""
REPORT = ""
OPERATION_TEST = bytearray()
TOTAL_OPS = 0
VERBOSE = False
LINEAR = False
HTML = False
SEED = None
PP = pprint.PrettyPrinter(indent = 4)
kAppend = "<append>"
kContinue = "<continue>"
kContinueCopy = '<continue>'
kGotoTempTag = "-GOTO-"
class CDAMParser(argparse.ArgumentParser):
def error(self, message):
sys.stderr.write('error: %s\n' % message)
self.print_help()
sys.exit(2)
class CDAMTwine(tiddly.Tiddler):
def GetPassages(self):
return self.tiddlers
def main():
global STORY_TITLE
global STORY_AUTHOR
global STORY_SUBTITLE
global STORY_CREDITS
global STORY_CONTACT
global STORY_LANGUAGE
global STORY_VERSION
    global LINEAR
    global HTML
    global VERBOSE
    global SEED
# To Make a Linear Story:
# python ./cdam_convert_twine.py --title
parser = CDAMParser(description='CDAM Twine Source Code Converter')
parser.add_argument('--dirname', default='NONE', action='store', help='Directory name for story on the file system.')
parser.add_argument('--title', default='Untitled', action='store', help='The story title.')
parser.add_argument('--subtitle', default='NONE', action='store', help='The story subtitle.')
parser.add_argument('--author', default='Anonymous', action='store', help='The author of the story.')
parser.add_argument('--pubdate', default='', action='store', help='The date this story was published')
parser.add_argument('--credits', default='', action='store', help='Additional story credits.')
parser.add_argument('--contact', default='Follow creator @j3rrytron online!', action='store', help='Misc contact info.')
parser.add_argument('--lang', default='eng', action='store', help='Up to four character language code.')
parser.add_argument('--ver', default='0.0.0', action='store', help='Story version in three parts, x.x.x')
parser.add_argument('--source', default='', action='store', help='The Twine source code file.')
parser.add_argument('--output', default='./', action='store', help='The location to create the output files.')
parser.add_argument('--filename', default='', action='store', help='The output filename.')
parser.add_argument('--json', action='store_true', help='Output as a JSON text file.')
parser.add_argument('--linear', action='store_true', help='Output as a linear text file for humans.')
parser.add_argument('--html', action='store_true', help='Output as html document.')
parser.add_argument('--randseed', default='', action='store', help='Optional seed to control random output.')
parser.add_argument('--binary', action='store_true', help='Output as a CDAM binary for the Choosatron v2.')
parser.add_argument('--verbose', action='store_true', help='Print additional info, including warnings.')
parser.add_argument('--operation', action='store_true', help='Output operations file too for debugging.')
parser.add_argument('--family', action='store_true', help='Mark this story as family friendly.')
parser.add_argument('--vartext', action='store_true', help='This story uses variable text logic.')
parser.add_argument('--mono', action='store_true', help='This story requires a monospaced font.')
#parser.add_argument('--update', action='store_true', help='Attempt to safely add to/update existing files without damaging existing data.')
#parser.add_argument('--force', action='store_true', help='Overwrite files that already exist.')
args = parser.parse_args()
STORY_SUBTITLE = args.subtitle
STORY_CREDITS = args.credits
STORY_CONTACT = args.contact
STORY_LANGUAGE = args.lang
STORY_VERSION = args.ver
print("--- " + args.title + " ---")
if args.randseed:
SEED = int(args.randseed)
random.seed(SEED)
else:
SEED = datetime.datetime.now().microsecond
#print "Random Seed for " + args.title + ": " + str(SEED)
random.seed(SEED)
LINEAR = args.linear
HTML = args.html
if HTML:
LINEAR = True
# Uncomment to override output and place wherever source was.
#args.output = os.path.dirname(args.source)
VERBOSE = args.verbose
if VERBOSE:
print(args.title)
FLAGS['family_friendly'] = args.family
FLAGS['variable_text'] = args.vartext
FLAGS['retry'] = True # Just default true for now.
FLAGS['monospace'] = args.mono
storyWiki = LoadSource(args.source)
if storyWiki == False:
return
result = BuildCDAMStory(storyWiki)
if result == False:
return
if args.dirname.upper() in PASSAGES:
print("[ERROR] Value of --dirname can't be the same as a passage title. Passage already exists named: " + args.dirname.upper())
return
SimplifyNaming()
genFile = gen.CDAMGenFiles()
if args.binary == True:
if args.title != "Untitled":
STORY_TITLE = args.title
if args.author != "Anonymous":
STORY_AUTHOR = args.author
# Generate Story Body
storyBody = genFile.GenerateBody(STORY_MAP, PASSAGES, VARIABLES)
if storyBody == False:
return
if len(VARIABLES) == 0:
FLAGS['logic'] = False
else:
FLAGS['logic'] = True
if 'ending_quality' not in FLAGS:
FLAGS['ending_quality'] = False
if 'points' not in FLAGS:
FLAGS['points'] = False
# Currently no images are supported.
FLAGS['images'] = False
# Generate Story Header
storyHeader = genFile.GenerateHeader(args.lang, args.title, args.subtitle, args.author, args.pubdate, args.credits, args.contact, BINARY_VER, args.ver, FLAGS, len(storyBody), len(VARIABLES))
if storyHeader == False:
return
bookPath = STORY_TITLE.lower().replace(" ", "_") + "_BIN.dam"
if args.filename != "":
bookPath = args.filename + "_BIN.dam"
bookPath = os.path.join(args.output, bookPath)
if os.path.exists(bookPath):
os.remove(bookPath)
genFile.WriteToFile(bookPath, storyHeader + storyBody)
if args.operation:
opPath = STORY_TITLE.lower().replace(" ", "_") + "_OPS.dam"
if args.filename != "":
opPath = args.filename + "_OPS.dam"
opPath = os.path.join(args.output, opPath)
opData = bytearray()
opData += bytearray(struct.pack('<H', TOTAL_OPS))
opData += OPERATION_TEST
genFile.WriteToFile(opPath, opData)
elif args.linear == True:
if args.title != "Untitled":
STORY_TITLE = args.title
if args.author != "Anonymous":
STORY_AUTHOR = args.author
if HTML:
bookPath = STORY_TITLE.lower().replace(" ", "_") + "_LINEAR.html"
if args.filename != "":
bookPath = args.filename + "_LINEAR.html"
else:
bookPath = STORY_TITLE.lower().replace(" ", "_") + "_LINEAR.txt"
if args.filename != "":
bookPath = args.filename + "_LINEAR.txt"
bookPath = os.path.join(args.output, bookPath)
book = ""
if HTML:
# Look for an HTML header to insert.
sourcePath = os.path.dirname(args.source)
headerPath = os.path.join(sourcePath, "header.txt")
try:
file = open(headerPath, 'r')
book += file.read()
except IOError:
print("[WARNING] No HTML header found at: " + headerPath)
book += "Title: " + STORY_TITLE + "\nSubtitle: " + STORY_SUBTITLE + "\nAuthor: " + STORY_AUTHOR
book += "\nCredits: " + STORY_CREDITS + "\nContact: " + STORY_CONTACT + "\nLanguage: " + STORY_LANGUAGE + "\nVersion: " + STORY_VERSION + "\nSeed: " + str(SEED) + "\n\n\n"
psgList = []
newMap = {}
allKeys = list(PASSAGES.keys())
key = "0"
p = PASSAGES[key]
psgList.append(p)
allKeys.remove(key)
newMap[key] = key
index = 0
while len(allKeys) > 0:
index += 1
if "cs" in p and len(p["cs"]) == 1 and p["cs"][0]["link"] in allKeys:
p = PASSAGES[p["cs"][0]["link"]]
key = p["key"]
# Map from old to new index.
newMap[key] = str(index)
if key in allKeys:
allKeys.remove(key)
psgList.append(p)
else:
key = random.choice(allKeys)
# If this passage has a single entrance, that passage should be
# put in first.
if "ik" in PASSAGES[key]:
while len(PASSAGES[key]["ik"]) == 1:
# Keep tracing back until we find the first passage in a series
# of single paths, or until we hit a passage already used.
if PASSAGES[key]["ik"][0] in allKeys:
key = PASSAGES[key]["ik"][0]
else:
break
if key in allKeys:
allKeys.remove(key)
p = PASSAGES[key]
newMap[key] = str(index)
psgList.append(p)
index = 0
for psg in psgList:
book += linearPassageText(psg, newMap)
index += 1
if index < len(psgList):
book += "\n\n\n"
# Look for an HTML footer to insert.
if HTML:
sourcePath = os.path.dirname(args.source)
footerPath = os.path.join(sourcePath, "footer.txt")
try:
file = open(footerPath, 'r')
book += file.read()
#print book
except IOError:
print("[WARNING] No HTML footer found at: " + footerPath)
if os.path.exists(bookPath):
os.remove(bookPath)
genFile.WriteToFile(bookPath, book)
else:
        result = False
if args.json == False:
result = genFile.UpdateManifest(args.output, args.title, args.dirname, args.author, args.json)
if result == False:
print("[ERROR] Failed to update manifest.")
else:
result = args.dirname
result = genFile.BuildCDAMStory(result, STORY_MAP, PASSAGES, args.output, args.title, args.author, args.json)
if result == False:
print("[ERROR] Failed to build story.")
print("--- Complete! ---\n")
#print STORY_MAP
#print PASSAGES
def linearPassageText(aPassage, aMap):
global HTML
psgText = ""
goto = " (go to "
key = aMap[aPassage["key"]]
if HTML:
psgText += "<p class='paragraph'><span class='number'>" + "[" + key + "] </span>" + aPassage['pt'] + "</p>"
psgText += "\n"
else:
psgText += "[" + key + "] " + aPassage['pt']
if aPassage['en'] == True:
psgText += "\n--- THE END ---"
#if aPassage['eq'] == 1:
# psgText += "\n* - THE END"#* Oh no! Better luck next adventure. * - THE END"
#elif aPassage['eq'] == 2:
# psgText += "\n** - THE END"#** I'm sure you can do better. ** - THE END"
#elif aPassage['eq'] == 3:
# psgText += "\n*** - THE END"#*** You win some, you lose some. *** - THE END"
#elif aPassage['eq'] == 4:
# psgText += "\n**** - THE END"#**** Not too bad! **** - THE END"
#elif aPassage['eq'] == 5:
# psgText += "\n***** - THE END"#***** Congratulations! You sure know your stuff. ***** - THE END"
else:
choiceText = ""
if HTML == False:
            # Add a delimiter so we know it is done
choiceText += "\n---"
for choice in aPassage['choices']:
m = re.search(kGotoTempTag, psgText)
if HTML:
if psgText[m.start() - 1] == '\n':
choiceText += ("<span class='choice-title choice-standalone'>" + choice['text'] + "</span>" + "<span class='goto'>" + goto + aMap[choice['link']] + ")</span>")
else:
choiceText += ("<span class='choice-title'>" + choice['text'] + "</span>" + "<span class='goto'>" + goto + aMap[choice['link']] + ")</span>")
else:
choiceText += ("\n- " + choice['text'] + goto + aMap[choice['link']] + ")")
            psgText = re.sub(kGotoTempTag, choiceText, psgText, 1)
choiceText = ""
return psgText
def linearPassageTextFull(aPassages, aStoryMap, aKey):
psgText = ""
goto = " (go to "
p = aPassages[aKey]
m = aStoryMap[aKey]
psgText += "[" + aKey + "] " + p['pt']
    # Add a delimiter so we know it is done
psgText += "\n---"
if p['en'] == True:
if p['eq'] == 1:
psgText += "\n* - THE END"#* Oh no! Better luck next adventure. * - THE END"
elif p['eq'] == 2:
psgText += "\n** - THE END"#** I'm sure you can do better. ** - THE END"
elif p['eq'] == 3:
psgText += "\n*** - THE END"#*** You win some, you lose some. *** - THE END"
elif p['eq'] == 4:
psgText += "\n**** - THE END"#**** Not too bad! **** - THE END"
elif p['eq'] == 5:
psgText += "\n***** - THE END"#***** Congratulations! You sure know your stuff. ***** - THE END"
else:
if len(p['cs']) == 1:
psgText += ("\n- " + p['cs'][0] + goto + m[0] + ")")
else:
for index in range(0, len(p['cs'])):
psgText += ("\n- " + p['cs'][index] + goto + m[index] + ")")
return psgText
def twineBuild(storySource, path, storyDir, title, author):
STORY_MAP.clear()
PASSAGES.clear()
result = BuildCDAMStory(storySource)
if result == False:
return
SimplifyNaming()
genFile = gen.CDAMGenFiles()
result = genFile.UpdateManifest(path, title, storyDir, author)
if result == False:
print("[ERROR] Failed to update manifest.")
result = genFile.BuildCDAMStory(storyDir, STORY_MAP, PASSAGES, path, title, author)
if result == False:
print("[ERROR] Failed to build story.")
def LoadSource(path):
try:
file = open(path, 'r')
except IOError:
print("[ERROR] File not found: " + path)
return False
sourceStr = file.read()
file.close()
# Start reading from the first ':' character
index = 0
for char in sourceStr:
if char == ':':
break
index += 1
sourceStr = sourceStr[index:]
wiki = tiddly.TiddlyWiki()
wiki.addTwee(sourceStr)
return wiki
def BuildCDAMStory(wiki):
global STORY_TITLE
global STORY_AUTHOR
global LINEAR
for key in list(wiki.tiddlers.keys()):
upKey = key.strip().upper()
if upKey not in list(wiki.tiddlers.keys()):
wiki.tiddlers[upKey] = wiki.tiddlers[key]
del wiki.tiddlers[key]
for key in wiki.tiddlers:
if wiki.tiddlers[key].title == "StoryTitle":
if STORY_TITLE == "":
STORY_TITLE = wiki.tiddlers[key].text
continue
if wiki.tiddlers[key].title == "StorySubtitle":
continue
if wiki.tiddlers[key].title == "StoryAuthor":
if STORY_AUTHOR == "":
STORY_AUTHOR = wiki.tiddlers[key].text
continue
#print "Passage: " + key
passage = ParseForAttributes(wiki.tiddlers[key].tags)
if passage == False:
continue
# Is this the starting passage?
if key == "START":
if "ps" not in passage:
passage["ps"] = 0
if "cp" not in passage:
passage["cp"] = 0
if "sv" not in passage:
passage["sv"] = "1.0"
else:
if "ps" in passage:
if VERBOSE:
print("[WARNING] Only set perfect score ('ps' or 'perfect') in the story passage titled 'Start'.")
del passage["ps"]
if "cp" in passage:
if VERBOSE:
print("[WARNING] Only set continue penalty ('cp' or 'penalty') in the story passage titled 'Start'.")
del passage["cp"]
if "sv" in passage:
if VERBOSE:
print("[WARNING] Only set story version ('sv' or 'version') in the story passage titled 'Start'.")
del passage["sv"]
passage["pv"] = VERSION
if "pp" not in passage:
passage["pp"] = 0
else:
# Set the 'points' flag.
FLAGS['points'] = True
rss = wiki.tiddlers[key].toRss()
choicePairs = ParseForChoices(rss.description)
#PP.pprint(choicePairs) # Print pretty!
passage["pt"] = ParseForBody(rss.description)
if type(choicePairs) is bool:
# No choices in this passage.
if choicePairs == True:
if "eq" not in passage:
if VERBOSE:
print("[WARNING] Ending quality 'eq' not set for " + key)
# Default to average.
passage["eq"] = 3
else:
# Set the 'ending quality' flag.
FLAGS['ending_quality'] = True
STORY_MAP[key] = passage["eq"]
passage["en"] = True
if "cc" not in passage:
passage["cc"] = True
else:
print("[ERROR] Failed to parse for choices.")
return False
if type(choicePairs) is list:
nodes = []
choices = []
for item in choicePairs:
nodes.append(item['link'].strip().upper())
choices.append(item['text'])
if ValidateChoices(wiki.tiddlers, nodes) == False:
print("[ERROR] Failed to validate choices for node.")
return False
else:
STORY_MAP[key] = nodes
passage["en"] = False
#passage["cs"] = choices
#passage["ck"] = nodes
passage["cs"] = choicePairs
#print "Validating passage for node " + key
if ValidatePassage(passage) == False:
print("[ERROR] Failed to validate passage.")
return False
else:
PASSAGES[key] = passage
    #print PASSAGES
    return True
def ParseOperation(opParts, iteration):
global REPORT
data = bytearray()
REPORT += "( "
types = ""
leftName = ""
rightName = ""
command = opParts.pop(0)
leftType = opParts.pop(0)
leftValue = bytearray()
rightValue = bytearray()
#print "Command: " + command
#print "LeftType: " + leftType
if leftType == "cmd":
types += "0011"
leftValue = ParseOperation(opParts, iteration + 1)
REPORT += " " + command + " "
else:
tempValue = opParts.pop(0)
if leftType == "var":
#print tempValue
leftName = tempValue
types += "0010"
if leftName not in VARIABLES:
VARIABLES[leftName] = len(VARIABLES)
REPORT += leftName + "[" + str(VARIABLES[leftName]) + "] " + command + " "
#print "Var #: " + str(VARIABLES[leftName])
leftValue = bytearray(struct.pack('<H', VARIABLES[leftName]))
else:
types += "0001"
leftValue = bytearray(struct.pack('<H', int(tempValue)))
REPORT += str(tempValue) + " " + command + " "
#print str(leftValue)
rightType = opParts.pop(0)
#print "RightType: " + rightType
rightPrintVal = 0
if rightType == "cmd":
types += "0011"
rightValue = ParseOperation(opParts, iteration + 1)
else:
tempValue = opParts.pop(0)
if rightType == "var":
#print tempValue
rightName = tempValue
types += "0010"
if rightName not in VARIABLES:
VARIABLES[rightName] = len(VARIABLES)
#print "Index: " + str(VARIABLES[rightName])
rightValue = bytearray(struct.pack('<H', VARIABLES[rightName]))
else:
types += "0001"
rightValue = bytearray(struct.pack('<H', int(tempValue)))
rightPrintVal = tempValue
#print str(rightValue)
data += bitarray.bitarray(types)
if command == "equal" or command == "==":
data += bytes.fromhex('01')
elif command == "not_equal" or command == "!=":
data += bytes.fromhex('02')
elif command == "greater" or command == ">":
data += bytes.fromhex('03')
elif command == "less" or command == "<":
data += bytes.fromhex('04')
elif command == "greater_equal" or command == ">=":
data += bytes.fromhex('05')
elif command == "less_equal" or command == "<=":
data += bytes.fromhex('06')
elif command == "and":
data += bytes.fromhex('07')
elif command == "or":
data += bytes.fromhex('08')
elif command == "xor":
data += bytes.fromhex('09')
elif command == "nand":
data += bytes.fromhex('0A')
elif command == "nor":
data += bytes.fromhex('0B')
elif command == "xnor":
data += bytes.fromhex('0C')
elif command == "visible":
data += bytes.fromhex('0D')
elif command == "mod" or command == "%":
data += bytes.fromhex('0E')
elif command == "set" or command == "=":
data += bytes.fromhex('0F')
elif command == "plus" or command == "add" or command == "+":
data += bytes.fromhex('10')
elif command == "minus" or command == "-":
data += bytes.fromhex('11')
elif command == "multiply" or command == "mult" or command == "*":
data += bytes.fromhex('12')
elif command == "divide" or command == "/":
data += bytes.fromhex('13')
elif command == "rand" or command == "random":
data += bytes.fromhex('14')
elif command == "dice" or command == "roll":
data += bytes.fromhex('15')
elif command == "if":
data += bytes.fromhex('16')
if rightType == "var":
REPORT += rightName + "[" + str(VARIABLES[rightName]) + "]"
elif rightType == "raw":
REPORT += str(rightPrintVal)
REPORT += " )"
data += leftValue
data += rightValue
return data
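# Worked example (derived from the parsing above): a passage tag such as
#   vu:set:var:gold:raw:10
# is split on ':' by ParseForAttributes; ParseOperation then consumes
# ['set', 'var', 'gold', 'raw', '10'], emitting the type nibbles 0010 (var)
# and 0001 (raw), opcode 0x0F for 'set', the 16-bit index of 'gold' in
# VARIABLES, and the 16-bit raw value 10.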
def ParseForAttributes(tags):
global REPORT
global OPERATION_TEST
global TOTAL_OPS
attributes = {}
attributes["vu"] = []
attributes["cvu"] = {}
#attributes["cvu"]["totalBytes"] = 0
attributes["cdc"] = {}
for attr in tags:
attr = attr.lower()
if attr == "ignore":
return False
#print attr
pair = attr.split(':')
#if pair[0] == "vars":
# pair.pop(0)
# for var in pair:
# varSet = var.split('|')
# VARIABLES[varSet[0]] = { "default" : varSet[1], "index" : len(VARIABLES) }
if pair[0] == "vu":
print(pair)
pair.pop(0)
REPORT = ""
data = bytearray()
data = ParseOperation(pair, 0)
print(":".join("{:02x}".format(ord(chr(c))) for c in data))
OPERATION_TEST += data
TOTAL_OPS += 1
print(REPORT)
#updates = { "operation" : pair[1], "leftType" : pair[2], "leftValue" : pair[3], "rightType" : pair[4], "rightValue" : pair[5] }
#if updates["leftType"] == "var":
# if updates["leftValue"] not in VARIABLES:
# print "New var added: " + updates["leftValue"]
# VARIABLES[updates["leftValue"]] = { "default" : 0, "index" : len(VARIABLES) }
#if updates["rightType"] == "var":
# if updates["rightValue"] not in VARIABLES:
# print "New var added: " + updates["rightValue"]
# VARIABLES[updates["rightValue"]] = { "default" : 0, "index" : len(VARIABLES) }
attributes["vu"].append(data)
elif pair[0] == "choice":
pair.pop(0)
index = int(pair.pop(0)) - 1
if attributes["cvu"].setdefault(index, None) == None:
attributes["cvu"][index] = { "data" : [], "totalBytes" : 0}
opType = pair.pop(0)
REPORT = ""
data = bytearray()
data = ParseOperation(pair, 0)
OPERATION_TEST += data
TOTAL_OPS += 1
#attributes["cvu"]["totalBytes"] = 0
#components = { "valueOne" : pair[3], "operation" : pair[4], "valueTwoType" : pair[5], "valueTwo" : pair[6] }
if opType == "vu": # Value updates
print("[VU] " + str(index) + " : " + REPORT)
#if attributes["cvu"].setdefault(index, None) == None:
#print "Fresh Choice: " + str(index)
#attributes["cvu"][index] = { "data" : [], "totalBytes" : 0}
#attributes["cvu"][index]["data"] = bytearray()
#attributes["cvu"][index]["totalBytes"] = 0
attributes["cvu"][index]["data"].append(data)
attributes["cvu"][index]["totalBytes"] += len(data)
elif opType == "dc": # Display conditionals
print("[DC] " + str(index) + " : " + REPORT)
attributes["cdc"].setdefault(index, []).append(data)
elif len(pair) == 2:
# Set Default Values
if pair[0] == "pp" or pair[0] == "points":
attributes["pp"] = int(pair[1])
elif pair[0] == "eq" or pair[0] == "quality":
attributes["eq"] = int(pair[1])
elif pair[0] == "cc" or pair[0] == "continue":
if pair[1] in ['true', '1', 't']:
attributes["cc"] = True
elif pair[1] in ['false', '0', 'f']:
attributes["cc"] = False
else:
if VERBOSE:
print("[WARNING] Invalid boolean value provided for tag: " + pair[0])
elif pair[0] == "ps" or pair[0] == "perfect":
attributes["ps"] = int(pair[1])
elif pair[0] == "cp" or pair[0] == "penalty":
attributes["cp"] = int(pair[1])
elif pair[0] == "lc" or pair[0] == "color":
if VERBOSE:
print("[WARNING] Color not currently supported.")
#attributes["lc"] = int(pair[1])
elif pair[0] == "sn" or pair[0] == "sound":
if VERBOSE:
print("[WARNING] Sound not currently supported.")
#attributes["sn"] = int(pair[1])
elif pair[0] == "sv" or pair[0] == "version":
attributes["sv"] = pair[1]
return attributes
def ParseForChoices(bodyText):
global LINEAR
global HTML
# Cleanse choices of carriage returns.
bodyText = bodyText.replace('\r', '\n')
if HTML:
bodyText = bodyText.replace('\n\n', '<br>\n')
#else:
#bodyText = bodyText.replace('\n\n', '\n')
choices = []
# Search for either [[Choice Text|Choice Key]] or [[Choice Key]] and warn about missing text.
matchCount = len(re.findall(r"\n*\[\[([^\[\]|]+)(?:\|([\w\d\s]+))?\]\]", bodyText))
for index in range(0, matchCount):
m = re.search(r"\n*\[\[([^\[\]|]+)(?:\|([\w\d\s]+))?\]\]", bodyText)
#for m in re.finditer(r"\[\[([^\[\]|]+)(?:\|([\w\d\s]+))?\]\]", text):
# For [[Run away.|1B]], m.group(0) is whole match, m.group(1) = 'Run away.', and m.group(2) = '1B'
# For [[Run away.]], same but there is no m.group(2)
choice = {}
choice['index'] = m.start()
choice['length'] = m.end() - m.start()
text = m.group(1)
link = m.group(2)
# No link means copy text & link text are the same.
if not link:
link = text
# Link is meant for auto-jumping.
if text.lower() == kAppend:
if len(choices) == 0:
# If only a choice key, label it for an auto jump to the passage.
if LINEAR:
text = "Continue..."
else:
text = "*"
else:
print("[ERROR] Can only have a single auto-jump choice per passage.")
return False
elif text.lower() == kContinue:
text = kContinueCopy # Set to <continue>
elif text.lower() == 'continue':
text = kContinueCopy # Set to <continue>
elif text.lower() == 'continue...':
text = kContinueCopy # Set to <continue>
choice['link'] = link.strip().upper()
choice['text'] = text.strip()
choices.append(choice)
replaceChoices = ""
if LINEAR:
replaceChoices = kGotoTempTag
bodyText = re.sub(r"\n*\s*\[\[([^\[\]|]+)(?:\|([\w\d\s]+))?\]\]\s*", replaceChoices, bodyText, 1)
if len(choices) == 0:
return True
return choices
def ParseForBody(text):
global LINEAR
global HTML
# Cleanse of carriage returns (but leave newlines!).
#
body = text
body = body.replace('\r', '\n')
if HTML:
body = body.replace('\n\n', '<br>\n')
#else:
#body = body.replace('\n\n', '\n')
replaceChoices = ""
if LINEAR:
replaceChoices = kGotoTempTag
body = re.sub(r"\n*\s*\[\[([^\[\]|]+)(?:\|([\w\d\s]+))?\]\]\s*", replaceChoices, text)
return body
def ValidateChoices(tiddlers, nodes):
#print tiddlers
for node in nodes:
if node not in tiddlers:
#print tiddlers
print("[ERROR] Choice key found without matching passage: " + node)
return False
return True
def ValidatePassage(passage):
if "cc" in passage:
if passage["cc"] == True and passage["en"] == False:
if VERBOSE:
print("[WARNING] Continue flag useless if a passage isn't an ending. Setting False.")
passage["cc"] = False
elif passage["cc"] == True and passage["eq"] == 5:
#print "[WARNING] Continue flag should be false if ending quality is 5."
passage["cc"] = False
if passage["en"] == True and "eq" not in passage:
print("[ERROR] Ending Quality (eq|quality) missing from ending passage.")
return False
if "eq" in passage:
if passage["eq"] > 5 or passage["eq"] < 1:
print("[ERROR] Ending Quality (eq|quality) value outside range of 1-5.")
return False
if passage["pp"] > 255 or passage["pp"] < 0:
print("[ERROR] Points (pp|points) value outside range of 0-255.")
return False
def SimplifyNaming():
i = 1
newMap = STORY_MAP.copy()
STORY_MAP.clear()
newPassages = PASSAGES.copy()
PASSAGES.clear()
for titleKey in newMap:
upTitleKey = titleKey.strip().upper()
if upTitleKey != "START":
# Create a map from all passage titles to its new numbered title.
TITLE_MAP[upTitleKey] = str(i)
i += 1
else:
TITLE_MAP["START"] = "0"
for titleKey in newMap:
upTitleKey = titleKey.strip().upper()
if type(newMap[upTitleKey]) is list:
i = 0
for val in newMap[upTitleKey]:
# Links always referenced in uppercase.
#print "HERE: " + titlekey + " : " + i
newMap[upTitleKey][i] = TITLE_MAP[val.strip().upper()]
i += 1
STORY_MAP[TITLE_MAP[upTitleKey]] = newMap[upTitleKey]
PASSAGES[TITLE_MAP[upTitleKey]] = newPassages[upTitleKey]
PASSAGES[TITLE_MAP[upTitleKey]]['key'] = TITLE_MAP[upTitleKey]
# Create array for all incoming links on a passage.
for key in PASSAGES:
psg = PASSAGES[key]
if "cs" in psg and len(psg["cs"]) > 0:
for choice in psg["cs"]:
choice["link"] = TITLE_MAP[choice["link"].strip().upper()]
psgKey = choice["link"].strip().upper()
if "ik" not in PASSAGES[psgKey]:
PASSAGES[psgKey]["ik"] = [""]
PASSAGES[psgKey]["ik"].append(psg["key"])
if __name__ == '__main__':
#global _UPDATE
#global _FORCE
main()
|
import numpy as np
from numpy.linalg import inv
import matplotlib.pyplot as plt
from matplotlib import animation
from matplotlib import patches
# import pylab
import time
import math
class KalmanFilter:
"""
Class to keep track of the estimate of the robots current state using the
Kalman Filter
"""
def __init__(self, markers):
"""
Initialize all necessary components for Kalman Filter, using the
markers (AprilTags) as the map
Input:
markers - an N by 4 array loaded from the parameters, with each element
consisting of (x,y,theta,id) where x,y gives the 2D position of a
marker/AprilTag, theta gives its orientation, and id gives its
unique id to identify which one you are seeing at any given
moment
"""
self.markers = markers
self.last_time = 0.0 # None # Used to keep track of time between measurements
self.Q_t = np.eye(2)
self.R_t = np.eye(3)
        self.x_t = np.array([[0.345], [0.345], [1.570796]])  # Hard-coded initial pose; should come from configuration.
self.x_t_prediction = np.array([[0], [0], [0]])
self.P_t = 1000 * np.eye(3)
def prediction(self, v, imu_meas):
"""
Performs the prediction step on the state x_t and covariance P_t
Inputs:
v - a number representing in m/s the commanded speed of the robot
imu_meas - a 5 by 1 numpy array consisting of the values
(acc_x,acc_y,acc_z,omega,time), with the fourth of the values giving
the gyroscope measurement for angular velocity (which you should
use as ground truth) and time giving the current timestamp. Ignore
the first three values (they are for the linear acceleration which
we don't use)
Outputs: a tuple with two elements
predicted_state - a 3 by 1 numpy array of the prediction of the state
predicted_covariance - a 3 by 3 numpy array of the prediction of the
covariance
"""
# todo: change v[2] to omega from imu and dt from imu time
dt = imu_meas[4, 0] - self.last_time
omega = v[1]
# omega = imu_meas[3, 0]
self.last_time = imu_meas[4, 0]
G = None
N = None
# # G = df/dx
# # N = df/dn
# # Coursera's way
# if omega == 0.0:
# G = np.eye(3) + dt * np.array([[0, 0, -v[0] * np.sin(self.x_t[2, 0])],
# [0, 0, v[0] * np.cos(self.x_t[2, 0])], [0, 0, 0]])
# N = dt * np.array([[-np.sin(self.x_t[2, 0]), 0], [np.cos(self.x_t[2, 0]), 0], [0, 1]])
# self.x_t_prediction = self.x_t + dt * np.array(
# [[v[0] * np.cos(self.x_t[2, 0])], [v[0] * np.sin(self.x_t[2, 0])], [omega]])
# else:
# # Thurn's way
# # print "Predict: \n", self.x_t
# # print "V: \n", v
G = np.eye(3) + np.array([[0, 0, -(v[0] / omega * np.cos(self.x_t[2, 0])) + (v[0] / omega * np.cos(self.x_t[2, 0] + omega * dt))],
[0, 0, -(v[0] / omega * np.sin(self.x_t[2, 0])) + (v[0] / omega * np.sin(self.x_t[2, 0] + omega * dt))],
[0, 0, 0]])
N = np.array([[(-np.sin(self.x_t[2, 0]) + np.sin(self.x_t[2, 0] + omega * dt)) / omega,
(v[0] * (np.sin(self.x_t[2, 0]) - np.sin(self.x_t[2, 0] + omega * dt)) / (omega ** 2)) +
(v[0] * (np.cos(self.x_t[2, 0] + omega * dt) * dt) / omega)],
[(np.cos(self.x_t[2, 0]) - np.cos(self.x_t[2, 0] + omega * dt)) / omega,
(-v[0] * (np.cos(self.x_t[2, 0]) - np.cos(self.x_t[2, 0] + omega * dt)) / (omega ** 2)) +
(v[0] * (np.sin(self.x_t[2, 0] + omega * dt) * dt) / omega)],
                      [0, dt]])
self.x_t_prediction = self.x_t + \
np.array([[-(v[0] / omega * np.sin(self.x_t[2, 0])) + (
v[0] / omega * np.sin(self.x_t[2, 0] + omega * dt))],
[(v[0] / omega * np.cos(self.x_t[2, 0])) - (
v[0] / omega * np.cos(self.x_t[2, 0] + omega * dt))],
[omega * dt]])
self.P_t_prediction = (G.dot(self.P_t)).dot(np.transpose(G)) + (N.dot(self.Q_t)).dot(np.transpose(N))
return (self.x_t_prediction, self.P_t_prediction)
def update(self, z_t):
"""
Performs the update step on the state x_t and covariance P_t
Inputs:
z_t - an array of length N with elements that are 4 by 1 numpy arrays.
Each element has the same form as the markers, (x,y,theta,id), with
x,y gives the 2D position of the measurement with respect to the
robot, theta the orientation of the marker with respect to the
robot, and the unique id of the marker, which you can find the
corresponding marker from your map
Outputs:
predicted_state - a 3 by 1 numpy array of the updated state
predicted_covariance - a 3 by 3 numpy array of the updated covariance
"""
H = np.eye(3)
# print "PT PREDICT: ", self.P_t_prediction
K = (self.P_t_prediction.dot(np.transpose(H))).dot(
inv((H.dot(self.P_t_prediction)).dot(np.transpose(H)) + self.R_t))
# print "Z_T: ", z_t, type(z_t)
        if z_t is not None and len(z_t) > 0:
for tag in z_t:
# retrieve pose of the tag in world frame from the map(markers)
tag_w_pose = self.tag_pos(tag[3])
# pose of the tag as measured from the robot
tag_r_pose = tag[:3]
# pose of the robot in the world frame
robot_pose = self.robot_pos(tag_w_pose, tag_r_pose)
self.x_t = self.x_t_prediction + K.dot(robot_pose - self.x_t_prediction)
else:
self.x_t = self.x_t_prediction
self.P_t = self.P_t_prediction - (K.dot(H)).dot(self.P_t_prediction)
return self.x_t, self.P_t
def robot_pos(self, w_pos, r_pos):
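        # H_W maps the tag frame into the world frame and H_R maps the tag
        # frame into the robot frame, so H_W.dot(inv(H_R)) maps the robot
        # frame into the world frame; translation and heading are read off it.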
H_W = np.array([[math.cos(w_pos[2]), -math.sin(w_pos[2]), w_pos[0]],
[math.sin(w_pos[2]), math.cos(w_pos[2]), w_pos[1]],
[0, 0, 1]])
H_R = np.array([[math.cos(r_pos[2]), -math.sin(r_pos[2]), r_pos[0]],
[math.sin(r_pos[2]), math.cos(r_pos[2]), r_pos[1]],
[0, 0, 1]])
w_r = H_W.dot(inv(H_R))
robot_pose = np.array([[w_r[0, 2]], [w_r[1, 2]], [math.atan2(w_r[1, 0], w_r[0, 0])]])
return robot_pose
def tag_pos(self, marker_id):
for i in range(len(self.markers)):
marker_i = np.copy(self.markers[i])
if marker_i[3] == marker_id:
return marker_i[0:3]
return None
def step_filter(self, v, imu_meas, z_t):
"""
Perform step in filter, called every iteration (on robot, at 60Hz)
Inputs:
v, imu_meas - descriptions in prediction. Will be None value if
values are not available
z_t - description in update. Will be None value if measurement is not
available
Outputs:
x_t - current estimate of the state
"""
        if imu_meas is not None and imu_meas.shape == (5, 1):
            if self.last_time is None:
self.last_time = imu_meas[4, 0]
else:
self.prediction(v, imu_meas)
self.update(z_t)
return self.x_t
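# Minimal usage sketch (illustrative only; the marker layout and measurement
# values below are made up):
#   markers = np.array([[1.0, 2.0, 0.0, 42]])      # one AprilTag with id 42
#   kf = KalmanFilter(markers)
#   v = np.array([0.3, 0.1])                       # (linear speed, angular rate)
#   imu = np.array([[0], [0], [0], [0.1], [0.5]])  # (ax, ay, az, omega, time)
#   z = [np.array([0.5, 0.0, 0.0, 42])]            # tag pose seen from the robot
#   x_est = kf.step_filter(v, imu, z)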
|
"""
This script allows the user to find out actual full names of GitHub repositories
to avoid duplicated projects in a dataset.
The script makes requests to GitHub API, saves unique full names of repositories
and optionally saves all JSON responses containing repositories metadata.
It accepts
* path to csv file -- dataset, f.e. downloaded from https://seart-ghs.si.usi.ch/
* path to output directory
* optional parameter to save metadata
* index to start from (default 0)
"""
import os
import pandas as pd
import argparse
import logging
from typing import Set, TextIO, Tuple
from data_collection.data_collection_utils import save_repo_json, get_github_token, create_github_session
from utils import create_directory, Extensions
def main():
logging.basicConfig(level=logging.DEBUG)
args = parse_args()
jsons_dir_path, results_path = create_output_paths(args.output, args.save_metadata)
dataset = pd.read_csv(args.csv_path)
token = get_github_token()
headers = {'Authorization': f'token {token}'}
session = create_github_session()
with open(results_path, 'a') as results_fout:
if args.start_from == 0:
results_fout.truncate(0)
results_fout.write("full_name\n")
unique_names = set()
for index, project in enumerate(dataset.name[args.start_from:]):
username, project_name = project.split('/')
query_url = f"https://api.github.com/repos/{username}/{project_name}"
r = session.get(query_url, headers=headers)
response_json = r.json()
if r.status_code != 200:
if response_json["message"] == "Moved Permanently": # 301
logging.info(f"Repository {username}#{project_name} moved permanently, redirecting")
r = session.get(r.url, headers=headers)
response_json = r.json()
else:
logging.info(
f"Request failed with status code: {r.status_code}. Message: {response_json['message']}",
)
continue
save_full_name(response_json["full_name"], unique_names, results_fout)
if args.save_metadata:
save_repo_json(response_json, jsons_dir_path, f"{username}#{project_name}.{Extensions.JSON}")
logging.info(f"Last processed project: {args.start_from + index}")
def create_output_paths(output_directory: str, save_metadata: bool = False) -> Tuple[str, str]:
create_directory(output_directory)
jsons_dir_path = os.path.join(output_directory, "jsons/")
if save_metadata:
create_directory(jsons_dir_path)
results_path = os.path.join(output_directory, "results.csv")
return jsons_dir_path, results_path
def save_full_name(full_name: str, unique_names: Set[str], file_to_write: TextIO):
if full_name not in unique_names:
file_to_write.write(f"{full_name}\n")
unique_names.add(full_name)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument("csv_path", metavar="csv-path", help="Path to csv file with github repositories data")
parser.add_argument("output", help="Output directory")
parser.add_argument('--save-metadata', help="Enable saving jsons containing project metadata", action='store_true')
parser.add_argument("--start-from", help="Index of the project to start from", nargs='?', default=0, type=int)
return parser.parse_args()
if __name__ == "__main__":
main()
|
import numpy as np
import tensorflow as tf
import tempfile
import urllib
import os
import math
import pandas as pd
import argparse
import sys
import csv
DIR_SUMMARY = './summary'
DIR_MODEL = './model'
class ML:
    def __init__(self, learning_rate=0.05, epochs=10, batch_size=100,
                 data_path='./data/test_cases.csv', test_train_p=0.1, model_name="ls_s2_1"):
self.learning_rate = learning_rate
self.epochs = epochs
self.batch_size = batch_size
self.data_path = data_path
self.model_name = model_name
self.g1_dfs, self.g2_dfs, self.label_dfs = self.data_process(data_path, test_train_p)
def to_dtype(self, df, dtype):
        for i in range(df.shape[1]):
df[i] = df[i].astype(dtype=dtype)
return df
def data_process(self, path, test_train_p=0.1):
print('loading file: ' + path)
df = pd.read_csv(path, header=0)
self.batch_number = int(df.shape[0] * (1 - test_train_p) / self.batch_size + 1)
# g1_df = self.to_dtype(df['g1'].str.split(', ', expand=True), np.float32)
# g2_df = self.to_dtype(df['g2'].str.split(', ', expand=True), np.float32)
# self.tupe_length = g1_df.shape[1]
# label_df = self.to_dtype(pd.get_dummies(df['bis']), np.float32)
g1_df = df['g1'].str.split(', ', expand=True)
g2_df = df['g2'].str.split(', ', expand=True)
self.tupe_length = g1_df.shape[1]
label_df = pd.get_dummies(df['bis'])
test_g1 = g1_df.sample(frac=test_train_p)
train_g1 = np.array_split(g1_df.drop(test_g1.index), self.batch_number)
test_g2 = g2_df.loc[test_g1.index]
train_g2 = np.array_split(g2_df.drop(test_g1.index), self.batch_number)
test_label = label_df.loc[test_g1.index]
train_label = np.array_split(label_df.drop(test_g1.index), self.batch_number)
return [train_g1, test_g1], [train_g2, test_g2], [train_label, test_label]
def clean_old_record(self):
        for dir_path in [DIR_MODEL + '/' + self.model_name,
                         DIR_SUMMARY + '/' + self.model_name,
                         DIR_SUMMARY + '/' + self.model_name + '_test']:
            if tf.gfile.Exists(dir_path):
                tf.gfile.DeleteRecursively(dir_path)
            tf.gfile.MakeDirs(dir_path)
def fc(self, continue_train=False):
kernel_initializer = tf.initializers.glorot_normal()
tf.reset_default_graph()
small_layer_number = int(math.log(self.tupe_length) * 5)
# print small_layer_number, self.tupe_length
with tf.name_scope('input'):
g1 = tf.placeholder(tf.float32, [None, self.tupe_length])
g2 = tf.placeholder(tf.float32, [None, self.tupe_length])
y = tf.placeholder(tf.float32, [None, 2])
with tf.name_scope('g1_p'):
with tf.variable_scope('graph_pross'):
g1_dence1 = tf.layers.dense(g1, self.tupe_length, activation=tf.nn.relu,
kernel_initializer=kernel_initializer,
bias_initializer=tf.random_normal_initializer(),
name='dence1')
g1_s_dence1 = tf.layers.dense(g1_dence1, small_layer_number, activation=tf.nn.relu,
kernel_initializer=kernel_initializer,
bias_initializer=tf.random_normal_initializer(),
name='s_dence1')
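        # The second graph is embedded with the same dense-layer weights as the
        # first (variable_scope 'graph_pross' with reuse=True), i.e. a
        # siamese-style setup so both inputs share one encoder.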
with tf.name_scope('g2_p'):
with tf.variable_scope('graph_pross', reuse=True):
g2_dence1 = tf.layers.dense(g2, self.tupe_length, activation=tf.nn.relu,
name='dence1',
reuse=True)
g2_s_dence1 = tf.layers.dense(g2_dence1, small_layer_number, activation=tf.nn.relu,
name='s_dence1',
reuse=True)
with tf.name_scope('merge'):
two_graphs = tf.concat([g1_s_dence1, g2_s_dence1], 1)
merge_layer = tf.layers.dense(two_graphs, small_layer_number, activation=tf.nn.relu,
kernel_initializer=kernel_initializer,
bias_initializer=tf.random_normal_initializer())
with tf.name_scope('logits'):
logits = tf.layers.dense(merge_layer, 2, activation=tf.identity,
kernel_initializer=kernel_initializer,
bias_initializer=tf.random_normal_initializer())
with tf.name_scope('loss'):
loss = tf.losses.softmax_cross_entropy(y, logits)
# loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=logits,labels=y))
tf.summary.scalar('loss', loss)
global_step = tf.Variable(0, trainable=False, name='global_step')
with tf.name_scope('train'):
train = tf.train.AdadeltaOptimizer(learning_rate=self.learning_rate).minimize(loss=loss, global_step=global_step)
        # tf.metrics takes (labels, predictions), so the ground truth y goes first.
        acc_metric, acc_metric_update = tf.metrics.accuracy(tf.argmax(y, 1), tf.argmax(logits, 1), name='metric_acc')
        pre_metric, pre_metric_update = tf.metrics.precision(tf.argmax(y, 1), tf.argmax(logits, 1), name='metric_pre')
        recall_metric, recall_metric_update = tf.metrics.recall(tf.argmax(y, 1), tf.argmax(logits, 1), name='metric_recall')
tf.summary.scalar('accuracy', acc_metric_update)
tf.summary.scalar('precision', pre_metric_update)
tf.summary.scalar('recall', recall_metric_update)
metric_acc_var = tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope="metric_acc")
acc_initializer = tf.variables_initializer(var_list=metric_acc_var)
metric_pre_var = tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope="metric_pre")
pre_initializer = tf.variables_initializer(var_list=metric_pre_var)
metric_recall_var = tf.get_collection(tf.GraphKeys.LOCAL_VARIABLES, scope="metric_recall")
recall_initializer = tf.variables_initializer(var_list=metric_recall_var)
merged_summary = tf.summary.merge_all()
saver = tf.train.Saver()
# if not os.access('./data/', os.R_OK):
# os.mkdir('./data/')
# with open('./data/' + "temp" + '.csv', 'w') as csvfile:
# print('write in path: ' + './data/' + "temp" + '.csv')
# fieldnames = ['acc', 'pre', 'recall','g_step', 'acc_', 'pre_', 'recall_','g_step_']
# writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
# writer.writeheader()
with tf.Session() as sess:
if continue_train:
saver.restore(sess, DIR_MODEL + '/' + self.model_name + '/model.ckpt')
print('continue training, model loaded')
else:
self.clean_old_record()
print('new training, old record cleaned')
# initial
sess.run(tf.global_variables_initializer())
train_writer = tf.summary.FileWriter(DIR_SUMMARY + '/' + self.model_name, sess.graph)
test_writer = tf.summary.FileWriter(DIR_SUMMARY + '/' + self.model_name + '_test')
for epoch in range(self.epochs):
sess.run([acc_initializer, pre_initializer, recall_initializer])
loss_p = None
g_step = None
summary_11, g_step_1= sess.run([merged_summary,global_step],
feed_dict={g1: self.g1_dfs[1],
g2: self.g2_dfs[1],
y: self.label_dfs[1]})
test_writer.add_summary(summary_11, g_step_1)
for i in range(self.batch_number):
_, loss_p, summary, g_step = sess.run([train, loss, merged_summary, global_step],
feed_dict={g1: self.g1_dfs[0][i],
g2: self.g2_dfs[0][i],
y: self.label_dfs[0][i]})
train_writer.add_summary(summary, g_step)
# summary, g_step_1= sess.run([merged_summary,global_step],
# feed_dict={g1: self.g1_dfs[1],
# g2: self.g2_dfs[1],
# y: self.label_dfs[1]})
#
# # summary, g_step_1, acc, pre, recall= sess.run([merged_summary,global_step, acc_metric, pre_metric, recall_metric],
# # feed_dict={g1: self.g1_dfs[1],
# # g2: self.g2_dfs[1],
# # y: self.label_dfs[1]})
# test_writer.add_summary(summary, g_step_1)
#
# #
# # writer.writerow({fieldnames[0]: _1, fieldnames[1]: _2, fieldnames[2]: _3, fieldnames[3]: g_step,
# # fieldnames[4]: acc, fieldnames[5]: pre, fieldnames[6]: recall, fieldnames[7]: g_step_1})
if epoch % 10 == 0:
# summary,acc = sess.run([merged_summary ,acc_metric])
acc, pre, recall = sess.run([acc_metric, pre_metric, recall_metric])
log_str = "Epoch %d \t G_step %d \t Loss=%f \t Accuracy=%f \t Precision=%f \t Recall=%f "
print(log_str % (epoch, g_step, loss_p, acc, pre, recall))
saver.save(sess, DIR_MODEL + '/' + self.model_name + '/model.ckpt')
train_writer.close()
test_writer.close()
if __name__ == "__main__":
os.chdir(os.path.join(os.path.dirname(__file__), os.path.pardir))
# a = ML(epochs=10)
# a.fc()
parser = argparse.ArgumentParser()
parser.add_argument("-e", "--epoch", type=int, default=100, dest="epoch",
help="Number of training epochs")
parser.add_argument("-l", "--learning_rate", type=float, default=0.2, dest="learning_rate",
help="Initial learning rate")
parser.add_argument("-b", "--batch_size", type=int, default=100, dest="batch_size",
help="Number of data for one batch")
parser.add_argument("-p", "--data_path", default="./data/test_cases.csv", dest="data_path",
help="Path to input data")
parser.add_argument("-r", "--test_train_rate", type=float, default=0.5, dest="test_train_rate",
help="The rate of test cases and train cases")
parser.add_argument("-c", "--continue", type=bool, default=False, dest="continue_train",
help="Continue last training")
parser.add_argument("-n", "--model_name", type=str, default="ls_s2", dest="model_name",
help="The name of the model")
args = parser.parse_args()
# print args
print("=============== star training ===============")
    trainer = ML(learning_rate=args.learning_rate,
                 epochs=args.epoch,
                 batch_size=args.batch_size,
                 data_path=args.data_path,
                 test_train_p=args.test_train_rate,
                 model_name=args.model_name)
trainer.fc(args.continue_train)
# self, learning_rate=0.05, epochs=10, batch_size=100, data_path = 'random_pairs.csv',test_train_p=0.1
|
# Copyright 2021-2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import (
Any,
Hashable,
Iterable,
Iterator,
MutableSet,
Optional,
TypeVar,
)
T = TypeVar("T", bound="Hashable")
class OrderedSet(MutableSet[T]):
"""
A set() variant whose iterator returns elements in insertion order.
The implementation of this class piggybacks off of the corresponding
iteration order guarantee for dict(), starting with Python 3.7. This is
useful for guaranteeing symmetric execution of algorithms on different
shards in a replicated context.
"""
def __init__(self, copy_from: Optional[Iterable[T]] = None) -> None:
self._dict: dict[T, None] = {}
if copy_from is not None:
for obj in copy_from:
self.add(obj)
def add(self, obj: T) -> None:
self._dict[obj] = None
def update(self, other: Iterable[T]) -> None:
for obj in other:
self.add(obj)
def discard(self, obj: T) -> None:
self._dict.pop(obj, None)
def __len__(self) -> int:
return len(self._dict)
def __contains__(self, obj: object) -> bool:
return obj in self._dict
def __iter__(self) -> Iterator[T]:
return iter(self._dict)
def remove_all(self, other: OrderedSet[T]) -> OrderedSet[T]:
return OrderedSet(obj for obj in self if obj not in other)
def cast_tuple(value: Any) -> tuple[Any, ...]:
return value if isinstance(value, tuple) else tuple(value)
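# Illustrative usage (not part of the original module):
#   s = OrderedSet([3, 1, 2]); s.add(1)       # iterates as 3, 1, 2
#   s.remove_all(OrderedSet([1]))             # new set iterating as 3, 2
#   cast_tuple([1, 2])                        # -> (1, 2)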
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import mail
from django.template import Context, Template
from django.test.client import RequestFactory
from djblets.avatars.tests import DummyAvatarService
from djblets.extensions.extension import ExtensionInfo
from djblets.extensions.manager import ExtensionManager
from djblets.extensions.models import RegisteredExtension
from djblets.features.testing import override_feature_check
from djblets.mail.utils import build_email_address_for_user
from djblets.registries.errors import AlreadyRegisteredError, RegistrationError
from kgb import SpyAgency
from mock import Mock
from reviewboard.admin.widgets import (BaseAdminWidget,
Widget,
admin_widgets_registry)
from reviewboard.avatars import avatar_services
from reviewboard.deprecation import RemovedInReviewBoard50Warning
from reviewboard.extensions.base import Extension
from reviewboard.extensions.hooks import (AdminWidgetHook,
APIExtraDataAccessHook,
AvatarServiceHook,
BaseReviewRequestActionHook,
CommentDetailDisplayHook,
DiffViewerActionHook,
EmailHook,
HeaderActionHook,
HeaderDropdownActionHook,
HostingServiceHook,
NavigationBarHook,
ReviewPublishedEmailHook,
ReviewReplyPublishedEmailHook,
ReviewRequestActionHook,
ReviewRequestApprovalHook,
ReviewRequestClosedEmailHook,
ReviewRequestDropdownActionHook,
ReviewRequestFieldSetsHook,
ReviewRequestPublishedEmailHook,
UserInfoboxHook,
WebAPICapabilitiesHook)
from reviewboard.hostingsvcs.service import (get_hosting_service,
HostingService)
from reviewboard.reviews.actions import (BaseReviewRequestAction,
BaseReviewRequestMenuAction,
clear_all_actions)
from reviewboard.scmtools.errors import FileNotFoundError
from reviewboard.reviews.features import ClassBasedActionsFeature
from reviewboard.reviews.models.review_request import ReviewRequest
from reviewboard.reviews.fields import (BaseReviewRequestField,
BaseReviewRequestFieldSet)
from reviewboard.reviews.signals import (review_request_published,
review_published, reply_published,
review_request_closed)
from reviewboard.site.urlresolvers import local_site_reverse
from reviewboard.testing.testcase import TestCase
from reviewboard.webapi.base import ExtraDataAccessLevel, WebAPIResource
from reviewboard.webapi.tests.base import BaseWebAPITestCase
from reviewboard.webapi.tests.mimetypes import root_item_mimetype
from reviewboard.webapi.tests.urls import get_root_url
class ExtensionManagerMixin(object):
"""Mixin used to setup a default ExtensionManager for tests."""
def setUp(self):
super(ExtensionManagerMixin, self).setUp()
self.manager = ExtensionManager('')
class DummyExtension(Extension):
registration = RegisteredExtension()
class ActionHookTests(ExtensionManagerMixin, TestCase):
"""Tests the action hooks in reviewboard.extensions.hooks."""
class _TestAction(BaseReviewRequestAction):
action_id = 'test-action'
label = 'Test Action'
class _TestMenuAction(BaseReviewRequestMenuAction):
action_id = 'test-menu-instance-action'
label = 'Menu Instance'
def setUp(self):
super(ActionHookTests, self).setUp()
self.extension = DummyExtension(extension_manager=self.manager)
def tearDown(self):
super(ActionHookTests, self).tearDown()
self.extension.shutdown()
clear_all_actions()
def test_review_request_action_hook(self):
"""Testing ReviewRequestActionHook renders on a review request page but
not on a file attachment or a diff viewer page
"""
with override_feature_check(ClassBasedActionsFeature.feature_id,
enabled=True):
self._test_base_review_request_action_hook(
'review-request-detail', ReviewRequestActionHook, True)
self._test_base_review_request_action_hook(
'file-attachment', ReviewRequestActionHook, False)
self._test_base_review_request_action_hook(
'view-diff', ReviewRequestActionHook, False)
def test_diffviewer_action_hook(self):
"""Testing DiffViewerActionHook renders on a diff viewer page but not
on a review request page or a file attachment page
"""
with override_feature_check(ClassBasedActionsFeature.feature_id,
enabled=True):
self._test_base_review_request_action_hook(
'review-request-detail', DiffViewerActionHook, False)
self._test_base_review_request_action_hook(
'file-attachment', DiffViewerActionHook, False)
self._test_base_review_request_action_hook(
'view-diff', DiffViewerActionHook, True)
def test_review_request_dropdown_action_hook(self):
"""Testing ReviewRequestDropdownActionHook renders on a review request
page but not on a file attachment or a diff viewer page
"""
with override_feature_check(ClassBasedActionsFeature.feature_id,
enabled=True):
self._test_review_request_dropdown_action_hook(
'review-request-detail', ReviewRequestDropdownActionHook, True)
self._test_review_request_dropdown_action_hook(
'file-attachment', ReviewRequestDropdownActionHook, False)
self._test_review_request_dropdown_action_hook(
'view-diff', ReviewRequestDropdownActionHook, False)
def test_action_hook_init_raises_key_error(self):
"""Testing that action hook __init__ raises a KeyError"""
missing_url_action = {
'id': 'missing-url-action',
'label': 'This action dict is missing a mandatory URL key.',
}
missing_key = 'url'
error_message = ('ActionHook-style dicts require a %s key'
% repr(missing_key))
action_hook_classes = [
BaseReviewRequestActionHook,
ReviewRequestActionHook,
DiffViewerActionHook,
]
for hook_cls in action_hook_classes:
with self.assertRaisesMessage(KeyError, error_message):
hook_cls(extension=self.extension, actions=[
missing_url_action,
])
def test_action_hook_init_raises_value_error(self):
"""Testing that BaseReviewRequestActionHook __init__ raises a
ValueError"""
unsupported_type_action = [{
'id': 'unsupported-type-action',
'label': 'This action is a list, which is an unsupported type.',
'url': '#',
}]
error_message = ('Only BaseReviewRequestAction and dict instances are '
'supported')
action_hook_classes = [
BaseReviewRequestActionHook,
ReviewRequestActionHook,
DiffViewerActionHook,
ReviewRequestDropdownActionHook,
]
with override_feature_check(ClassBasedActionsFeature.feature_id,
enabled=True):
for hook_cls in action_hook_classes:
with self.assertRaisesMessage(ValueError, error_message):
hook_cls(extension=self.extension, actions=[
unsupported_type_action,
])
def test_dropdown_action_hook_init_raises_key_error(self):
"""Testing that ReviewRequestDropdownActionHook __init__ raises a
KeyError"""
missing_items_menu_action = {
'id': 'missing-items-menu-action',
'label': 'This menu action dict is missing a mandatory items key.',
}
missing_key = 'items'
error_message = ('ReviewRequestDropdownActionHook-style dicts require '
'a %s key' % repr(missing_key))
with self.assertRaisesMessage(KeyError, error_message):
ReviewRequestDropdownActionHook(extension=self.extension, actions=[
missing_items_menu_action,
])
def _test_base_review_request_action_hook(self, url_name, hook_cls,
should_render):
"""Test if the action hook renders or not at the given URL.
Args:
url_name (unicode):
The name of the URL where each action is to be rendered.
hook_cls (class):
The class of the action hook to be tested.
should_render (bool):
The expected rendering behaviour.
"""
hook = hook_cls(extension=self.extension, actions=[
{
'id': 'with-id-action',
'label': 'Yes ID',
'url': 'with-id-url',
},
self._TestAction(),
{
'label': 'No ID',
'url': 'without-id-url',
},
])
try:
context = self._get_context(url_name=url_name)
entries = hook.get_actions(context)
self.assertEqual(len(entries), 3)
self.assertEqual(entries[0].action_id, 'with-id-action')
self.assertEqual(entries[1].action_id, 'test-action')
self.assertEqual(entries[2].action_id, 'no-id-dict-action')
template = Template(
'{% load reviewtags %}'
'{% review_request_actions %}'
)
content = template.render(context)
self.assertNotIn('action', context)
self.assertEqual(should_render, 'href="with-id-url"' in content)
self.assertIn('>Test Action<', content)
self.assertEqual(should_render,
'id="no-id-dict-action"' in content)
finally:
hook.disable_hook()
content = template.render(context)
self.assertNotIn('href="with-id-url"', content)
self.assertNotIn('>Test Action<', content)
self.assertNotIn('id="no-id-dict-action"', content)
def _test_review_request_dropdown_action_hook(self, url_name, hook_cls,
should_render):
"""Test if the dropdown action hook renders or not at the given URL.
Args:
url_name (unicode):
The name of the URL where each action is to be rendered.
hook_cls (class):
The class of the dropdown action hook to be tested.
should_render (bool):
The expected rendering behaviour.
"""
hook = hook_cls(extension=self.extension, actions=[
self._TestMenuAction([
self._TestAction(),
]),
{
'id': 'test-menu-dict-action',
'label': 'Menu Dict',
'items': [
{
'id': 'with-id-action',
'label': 'Yes ID',
'url': 'with-id-url',
},
{
'label': 'No ID',
'url': 'without-id-url',
},
]
},
])
try:
context = self._get_context(url_name=url_name)
entries = hook.get_actions(context)
self.assertEqual(len(entries), 2)
self.assertEqual(entries[0].action_id, 'test-menu-instance-action')
self.assertEqual(entries[1].action_id, 'test-menu-dict-action')
dropdown_icon_html = \
'<span class="rb-icon rb-icon-dropdown-arrow"></span>'
template = Template(
'{% load reviewtags %}'
'{% review_request_actions %}'
)
content = template.render(context)
self.assertNotIn('action', context)
self.assertInHTML('<a href="#" id="test-action">Test Action</a>',
content)
self.assertInHTML(
('<a class="menu-title" href="#" id="test-menu-instance-action">'
'Menu Instance %s</a>'
% dropdown_icon_html),
content)
for s in (('id="test-menu-dict-action"',
'href="with-id-url"',
'id="no-id-dict-action"')):
if should_render:
self.assertIn(s, content)
else:
self.assertNotIn(s, content)
if should_render:
self.assertInHTML(
('<a class="menu-title" href="#" id="test-menu-dict-action">'
'Menu Dict %s</a>'
% dropdown_icon_html),
content)
else:
self.assertNotIn('Menu Dict', content)
finally:
hook.disable_hook()
content = template.render(context)
self.assertNotIn('Test Action', content)
self.assertNotIn('Menu Instance', content)
self.assertNotIn('id="test-menu-dict-action"', content)
self.assertNotIn('href="with-id-url"', content)
self.assertNotIn('id="no-id-dict-action"', content)
def _test_action_hook(self, template_tag_name, hook_cls):
action = {
'label': 'Test Action',
'id': 'test-action',
'image': 'test-image',
'image_width': 42,
'image_height': 42,
'url': 'foo-url',
}
hook = hook_cls(extension=self.extension, actions=[action])
context = Context({})
entries = hook.get_actions(context)
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], action)
t = Template(
"{% load rb_extensions %}"
"{% " + template_tag_name + " %}")
self.assertEqual(t.render(context).strip(),
self._build_action_template(action))
def _test_dropdown_action_hook(self, template_tag_name, hook_cls):
action = {
'id': 'test-menu',
'label': 'Test Menu',
'items': [
{
'id': 'test-action',
'label': 'Test Action',
'url': 'foo-url',
'image': 'test-image',
'image_width': 42,
'image_height': 42
}
]
}
hook = hook_cls(extension=self.extension,
actions=[action])
context = Context({})
entries = hook.get_actions(context)
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], action)
t = Template(
"{% load rb_extensions %}"
"{% " + template_tag_name + " %}")
content = t.render(context).strip()
self.assertIn(('id="%s"' % action['id']), content)
self.assertInHTML(
('<a href="#" id="test-menu">%s '
'<span class="rb-icon rb-icon-dropdown-arrow"></span></a>'
% action['label']),
content)
self.assertInHTML(self._build_action_template(action['items'][0]),
content)
def _build_action_template(self, action):
return ('<li><a id="%(id)s" href="%(url)s">'
'<img src="%(image)s" width="%(image_width)s" '
'height="%(image_height)s" border="0" alt="" />'
'%(label)s</a></li>' % action)
def test_header_hooks(self):
"""Testing HeaderActionHook"""
self._test_action_hook('header_action_hooks', HeaderActionHook)
def test_header_dropdown_action_hook(self):
"""Testing HeaderDropdownActionHook"""
self._test_dropdown_action_hook('header_dropdown_action_hooks',
HeaderDropdownActionHook)
def _get_context(self, user_pk='123', is_authenticated=True,
url_name='review-request-detail', local_site_name=None,
status=ReviewRequest.PENDING_REVIEW, submitter_id='456',
is_public=True, display_id='789', has_diffs=True,
can_change_status=True, can_edit_reviewrequest=True,
delete_reviewrequest=True):
request = Mock()
request.resolver_match = Mock()
request.resolver_match.url_name = url_name
request.user = Mock()
request.user.pk = user_pk
request.user.is_authenticated.return_value = is_authenticated
request._local_site_name = local_site_name
review_request = Mock()
review_request.status = status
review_request.submitter_id = submitter_id
review_request.public = is_public
review_request.display_id = display_id
if not has_diffs:
review_request.get_draft.return_value = None
review_request.get_diffsets.return_value = None
context = Context({
'request': request,
'review_request': review_request,
'perms': {
'reviews': {
'can_change_status': can_change_status,
'can_edit_reviewrequest': can_edit_reviewrequest,
'delete_reviewrequest': delete_reviewrequest,
},
},
})
return context
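# A minimal sketch (not exercised by the tests above) of how an extension
# might register the dict-based actions these tests cover; the IDs, labels,
# and URLs are illustrative assumptions.
class ExampleActionExtension(Extension):
    """Hypothetical extension registering a dropdown action."""

    def initialize(self):
        # Register a menu action with a single item, mirroring the dict
        # format accepted by the hooks under test.
        ReviewRequestDropdownActionHook(extension=self, actions=[
            {
                'id': 'example-menu',
                'label': 'Example Menu',
                'items': [
                    {
                        'id': 'example-item',
                        'label': 'Example Item',
                        'url': '#',
                    },
                ],
            },
        ])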
class NavigationBarHookTests(TestCase):
"""Tests the navigation bar hooks."""
def setUp(self):
super(NavigationBarHookTests, self).setUp()
manager = ExtensionManager('')
self.extension = DummyExtension(extension_manager=manager)
def tearDown(self):
super(NavigationBarHookTests, self).tearDown()
self.extension.shutdown()
def test_navigation_bar_hooks(self):
"""Testing navigation entry extension hooks"""
entry = {
'label': 'Test Nav Entry',
'url': 'foo-url',
}
hook = NavigationBarHook(extension=self.extension, entries=[entry])
request = self.client.request()
request.user = User(username='text')
context = Context({
'request': request,
'local_site_name': 'test-site',
})
entries = hook.get_entries(context)
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], entry)
t = Template(
'{% load rb_extensions %}'
'{% navigation_bar_hooks %}')
self.assertEqual(t.render(context).strip(),
'<li><a href="%(url)s">%(label)s</a></li>' % entry)
def test_navigation_bar_hooks_with_is_enabled_for_true(self):
"""Testing NavigationBarHook.is_enabled_for and returns true"""
def is_enabled_for(**kwargs):
self.assertEqual(kwargs['user'], request.user)
self.assertEqual(kwargs['request'], request)
self.assertEqual(kwargs['local_site_name'], 'test-site')
return True
entry = {
'label': 'Test Nav Entry',
'url': 'foo-url',
}
hook = NavigationBarHook(extension=self.extension, entries=[entry],
is_enabled_for=is_enabled_for)
request = self.client.request()
request.user = User(username='text')
context = Context({
'request': request,
'local_site_name': 'test-site',
})
entries = hook.get_entries(context)
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], entry)
t = Template(
'{% load rb_extensions %}'
'{% navigation_bar_hooks %}')
self.assertEqual(t.render(context).strip(),
'<li><a href="%(url)s">%(label)s</a></li>' % entry)
def test_navigation_bar_hooks_with_is_enabled_for_false(self):
"""Testing NavigationBarHook.is_enabled_for and returns false"""
def is_enabled_for(**kwargs):
self.assertEqual(kwargs['user'], request.user)
self.assertEqual(kwargs['request'], request)
self.assertEqual(kwargs['local_site_name'], 'test-site')
return False
entry = {
'label': 'Test Nav Entry',
'url': 'foo-url',
}
hook = NavigationBarHook(extension=self.extension, entries=[entry],
is_enabled_for=is_enabled_for)
request = self.client.request()
request.user = User(username='text')
context = Context({
'request': request,
'local_site_name': 'test-site',
})
entries = hook.get_entries(context)
self.assertEqual(len(entries), 0)
t = Template(
'{% load rb_extensions %}'
'{% navigation_bar_hooks %}')
self.assertEqual(t.render(context).strip(), '')
def test_navigation_bar_hooks_with_url_name(self):
"Testing navigation entry extension hooks with url names"""
entry = {
'label': 'Test Nav Entry',
'url_name': 'dashboard',
}
hook = NavigationBarHook(extension=self.extension, entries=[entry])
request = self.client.request()
request.user = User(username='text')
context = Context({
'request': request,
'local_site_name': 'test-site',
})
entries = hook.get_entries(context)
self.assertEqual(len(entries), 1)
self.assertEqual(entries[0], entry)
t = Template(
'{% load rb_extensions %}'
'{% navigation_bar_hooks %}')
self.assertEqual(
t.render(context).strip(),
'<li><a href="%(url)s">%(label)s</a></li>' % {
'label': entry['label'],
'url': '/dashboard/',
})
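# A minimal sketch, separate from the tests above, of wiring up a
# navigation entry that only shows for staff users. The entry values and
# the staff check are illustrative assumptions; the is_enabled_for
# signature (user/request/local_site_name kwargs) matches the tests above.
def example_navigation_bar_setup(extension):
    """Register a hypothetical staff-only navigation entry."""
    def is_enabled_for(user=None, **kwargs):
        # Only display the entry to staff members.
        return user is not None and user.is_staff

    return NavigationBarHook(
        extension=extension,
        entries=[{
            'label': 'Example Entry',
            'url_name': 'dashboard',
        }],
        is_enabled_for=is_enabled_for)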
class TestService(HostingService):
hosting_service_id = 'test-service'
name = 'Test Service'
def get_file(self, repository, path, revision, *args, **kwargs):
"""Return the specified file from the repository.
If the given file path is ``/invalid-path``, the file will be assumed
to not exist and
:py:exc:`reviewboard.scmtools.errors.FileNotFoundError` will be raised.
Args:
repository (reviewboard.scmtools.models.Repository):
The repository the file belongs to.
path (unicode):
The file path.
revision (unicode):
The file revision.
*args (tuple):
Additional positional arguments.
**kwargs (dict):
Additional keyword arguments.
Returns:
unicode: The file data.
Raises:
reviewboard.scmtools.errors.FileNotFoundError:
Raised if the file does not exist.
"""
if path == '/invalid-path':
raise FileNotFoundError(path, revision)
return super(TestService, self).get_file(repository, path, revision,
*args, **kwargs)
def get_file_exists(self, repository, path, revision, *args, **kwargs):
"""Return the specified file from the repository.
If the given file path is ``/invalid-path``, the file will
be assumed to not exist.
Args:
repository (reviewboard.scmtools.models.Repository):
The repository the file belongs to.
path (unicode):
The file path.
revision (unicode):
The file revision.
*args (tuple):
Additional positional arguments.
**kwargs (dict):
Additional keyword arguments.
Returns:
bool: Whether or not the file exists.
"""
if path == '/invalid-path':
return False
return super(TestService, self).get_file_exists(
repository, path, revision, *args, **kwargs)
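# A minimal sketch of registering the service above from an extension; the
# helper name is hypothetical. Note that the service class must define
# hosting_service_id, or HostingServiceHook raises ValueError (covered by
# the tests below).
def example_register_test_service(extension):
    """Register TestService under the 'test-service' ID."""
    return HostingServiceHook(extension, TestService)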
class HostingServiceHookTests(ExtensionManagerMixin, TestCase):
"""Testing HostingServiceHook."""
def setUp(self):
super(HostingServiceHookTests, self).setUp()
self.extension = DummyExtension(extension_manager=self.manager)
def tearDown(self):
super(HostingServiceHookTests, self).tearDown()
self.extension.shutdown()
def test_register(self):
"""Testing HostingServiceHook initializing"""
HostingServiceHook(self.extension, TestService)
self.assertEqual(get_hosting_service('test-service'),
TestService)
def test_register_without_hosting_service_id(self):
"""Testing HostingServiceHook initializing without hosting_service_id
"""
class TestServiceWithoutID(TestService):
hosting_service_id = None
message = 'TestServiceWithoutID.hosting_service_id must be set.'
with self.assertRaisesMessage(ValueError, message):
HostingServiceHook(self.extension, TestServiceWithoutID)
def test_unregister(self):
"""Testing HostingServiceHook uninitializing"""
hook = HostingServiceHook(self.extension, TestService)
hook.disable_hook()
self.assertIsNone(get_hosting_service('test-service'))
class MyLegacyAdminWidget(Widget):
widget_id = 'legacy-test-widget'
title = 'Legacy Testing Widget'
class MyAdminWidget(BaseAdminWidget):
widget_id = 'test-widget'
name = 'Testing Widget'
class AdminWidgetHookTests(ExtensionManagerMixin, TestCase):
"""Testing AdminWidgetHook."""
def setUp(self):
super(AdminWidgetHookTests, self).setUp()
self.extension = DummyExtension(extension_manager=self.manager)
def tearDown(self):
super(AdminWidgetHookTests, self).tearDown()
self.extension.shutdown()
def test_initialize(self):
"""Testing AdminWidgetHook.initialize"""
AdminWidgetHook(self.extension, MyAdminWidget)
self.assertIn(MyAdminWidget, admin_widgets_registry)
def test_initialize_with_legacy_widget(self):
"""Testing AdminWidgetHook.initialize with legacy Widget subclass"""
message = (
"AdminWidgetHook's support for legacy "
"reviewboard.admin.widgets.Widget subclasses is deprecated "
"and will be removed in Review Board 5.0. Rewrite %r "
"to subclass the modern "
"reviewboard.admin.widgets.baseAdminWidget instead. This "
"will require a full rewrite of the widget's functionality."
% MyLegacyAdminWidget
)
with self.assertWarns(RemovedInReviewBoard50Warning, message):
AdminWidgetHook(self.extension, MyLegacyAdminWidget)
self.assertIn(MyLegacyAdminWidget, admin_widgets_registry)
def test_shutdown(self):
"""Testing AdminWidgetHook.shutdown"""
hook = AdminWidgetHook(self.extension, MyAdminWidget)
hook.disable_hook()
self.assertNotIn(MyAdminWidget, admin_widgets_registry)
class WebAPICapabilitiesExtension(Extension):
registration = RegisteredExtension()
metadata = {
'Name': 'Web API Capabilities Extension',
}
id = 'WebAPICapabilitiesExtension'
def __init__(self, *args, **kwargs):
super(WebAPICapabilitiesExtension, self).__init__(*args, **kwargs)
class WebAPICapabilitiesHookTests(ExtensionManagerMixin, BaseWebAPITestCase):
"""Testing WebAPICapabilitiesHook."""
def setUp(self):
super(WebAPICapabilitiesHookTests, self).setUp()
self.extension = WebAPICapabilitiesExtension(
extension_manager=self.manager)
self.url = get_root_url()
def tearDown(self):
super(WebAPICapabilitiesHookTests, self).tearDown()
def test_register(self):
"""Testing WebAPICapabilitiesHook initializing"""
WebAPICapabilitiesHook(
extension=self.extension,
caps={
'sandboxed': True,
'thorough': True,
})
rsp = self.api_get(path=self.url,
expected_mimetype=root_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('capabilities', rsp)
caps = rsp['capabilities']
self.assertIn('WebAPICapabilitiesExtension', caps)
extension_caps = caps[self.extension.id]
self.assertTrue(extension_caps['sandboxed'])
self.assertTrue(extension_caps['thorough'])
self.extension.shutdown()
def test_register_fails_no_id(self):
"""Testing WebAPICapabilitiesHook initializing with ID of None"""
self.extension.id = None
self.assertRaisesMessage(
ValueError,
'The capabilities_id attribute must not be None',
WebAPICapabilitiesHook,
self.extension,
{
'sandboxed': True,
'thorough': True,
})
rsp = self.api_get(path=self.url,
expected_mimetype=root_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('capabilities', rsp)
caps = rsp['capabilities']
self.assertNotIn('WebAPICapabilitiesExtension', caps)
self.assertNotIn(None, caps)
# Note that the hook failed to enable, so there's no need to test
# shutdown().
def test_register_fails_default_capability(self):
"""Testing WebAPICapabilitiesHook initializing with default key"""
self.extension.id = 'diffs'
self.assertRaisesMessage(
KeyError,
'"diffs" is reserved for the default set of capabilities',
WebAPICapabilitiesHook,
self.extension,
{
'base_commit_ids': False,
'moved_files': False,
})
rsp = self.api_get(path=self.url,
expected_mimetype=root_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('capabilities', rsp)
caps = rsp['capabilities']
self.assertIn('diffs', caps)
diffs_caps = caps['diffs']
self.assertTrue(diffs_caps['base_commit_ids'])
self.assertTrue(diffs_caps['moved_files'])
# Note that the hook failed to enable, so there's no need to test
# shutdown().
def test_unregister(self):
"""Testing WebAPICapabilitiesHook uninitializing"""
hook = WebAPICapabilitiesHook(
extension=self.extension,
caps={
'sandboxed': True,
'thorough': True,
})
hook.disable_hook()
rsp = self.api_get(path=self.url,
expected_mimetype=root_item_mimetype)
self.assertEqual(rsp['stat'], 'ok')
self.assertIn('capabilities', rsp)
caps = rsp['capabilities']
self.assertNotIn('WebAPICapabilitiesExtension', caps)
self.extension.shutdown()
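# For reference, a sketch of the JSON shape the capabilities hook above
# contributes to the root API resource (key names taken from the
# assertions in these tests):
#
#     {
#         "stat": "ok",
#         "capabilities": {
#             "WebAPICapabilitiesExtension": {
#                 "sandboxed": true,
#                 "thorough": true
#             }
#         }
#     }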
class GenericTestResource(WebAPIResource):
name = 'test'
uri_object_key = 'test_id'
extra_data = {}
item_mimetype = 'application/vnd.reviewboard.org.test+json'
fields = {
'extra_data': {
'type': dict,
'description': 'Extra data as part of the test resource. '
'This can be set by the API or extensions.',
},
}
allowed_methods = ('GET', 'PUT')
def get(self, *args, **kwargs):
return 200, {
'test': {
'extra_data': self.serialize_extra_data_field(self)
}
}
def put(self, request, *args, **kwargs):
fields = request.POST.dict()
self.import_extra_data(self, self.extra_data, fields)
return 200, {
'test': self.extra_data
}
def has_access_permissions(self, request, obj, *args, **kwargs):
return True
class APIExtraDataAccessHookTests(ExtensionManagerMixin, SpyAgency,
BaseWebAPITestCase):
"""Testing APIExtraDataAccessHook."""
fixtures = ['test_users']
class EverythingPrivateHook(APIExtraDataAccessHook):
"""Hook which overrides callable to return all fields as private."""
def get_extra_data_state(self, key_path):
self.called = True
return ExtraDataAccessLevel.ACCESS_STATE_PRIVATE
class InvalidCallableHook(APIExtraDataAccessHook):
"""Hook which implements an invalid callable"""
get_extra_data_state = 'not a callable'
def setUp(self):
super(APIExtraDataAccessHookTests, self).setUp()
self.resource_class = GenericTestResource
self.resource = self.resource_class()
class DummyExtension(Extension):
resources = [self.resource]
registration = RegisteredExtension()
self.extension_class = DummyExtension
entry_point = Mock()
entry_point.load = lambda: self.extension_class
entry_point.dist = Mock()
entry_point.dist.project_name = 'TestProjectName'
entry_point.dist.get_metadata_lines = lambda *args: [
'Name: Resource Test Extension',
]
self.manager._entrypoint_iterator = lambda: [entry_point]
self.manager.load()
self.extension = self.manager.enable_extension(self.extension_class.id)
self.registered = True
self.extension_class.info = ExtensionInfo.create_from_entrypoint(
entry_point, self.extension_class)
self.url = self.resource.get_item_url(test_id=1)
self.resource.extra_data = {
'public': 'foo',
'private': 'secret',
'readonly': 'bar',
}
def tearDown(self):
super(APIExtraDataAccessHookTests, self).tearDown()
if self.registered is True:
self.manager.disable_extension(self.extension_class.id)
def test_register(self):
"""Testing APIExtraDataAccessHook registration"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
])
self.assertNotEqual(set(),
set(self.resource.extra_data_access_callbacks))
def test_register_overridden_hook(self):
"""Testing overridden APIExtraDataAccessHook registration"""
self.EverythingPrivateHook(self.extension, self.resource, [])
self.assertNotEqual(set(),
set(self.resource.extra_data_access_callbacks))
def test_overridden_hook_get(self):
"""Testing overridden APIExtraDataAccessHook get"""
hook = self.EverythingPrivateHook(self.extension, self.resource, [])
rsp = self.api_get(self.url,
expected_mimetype=self.resource.item_mimetype)
# Since the hook registers the callback function on initialization,
# which stores a pointer to the method, we can't use SpyAgency after
# the hook has already been initialized. Since SpyAgency's spy_on
# function requires an instance of a class, we also cannot spy on the
# hook function before initialization. Therefore, as a workaround,
# we're setting a variable in the function to ensure that it is in
# fact being called.
self.assertTrue(hook.called)
self.assertNotIn('public', rsp['test']['extra_data'])
self.assertNotIn('readonly', rsp['test']['extra_data'])
self.assertNotIn('private', rsp['test']['extra_data'])
def test_overridden_hook_put(self):
"""Testing overridden APIExtraDataAccessHook put"""
hook = self.EverythingPrivateHook(self.extension, self.resource, [])
original_value = self.resource.extra_data['readonly']
modified_extra_fields = {
'extra_data.public': 'modified',
}
rsp = self.api_put(self.url, modified_extra_fields,
expected_mimetype=self.resource.item_mimetype)
# Since the hook registers the callback function on initialization,
# which stores a pointer to the method, we can't use SpyAgency after
# the hook has already been initialized. Since SpyAgency's spy_on
# function requires an instance of a class, we also cannot spy on the
# hook function before initialization. Therefore, as a workaround,
# we're setting a variable in the function to ensure that it is in
# fact being called.
self.assertTrue(hook.called)
self.assertEqual(original_value, rsp['test']['readonly'])
def test_register_invalid_hook(self):
"""Testing hook registration with invalid hook"""
self.registered = False
with self.assertRaises(RegistrationError):
self.InvalidCallableHook(self.extension, self.resource, [])
self.assertSetEqual(set(),
set(self.resource.extra_data_access_callbacks))
def test_register_hook_already_registered(self):
"""Testing hook registration with already registered callback"""
hook = APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
])
with self.assertRaises(AlreadyRegisteredError):
hook.resource.extra_data_access_callbacks.register(
hook.get_extra_data_state)
self.assertNotEqual(set(),
set(self.resource.extra_data_access_callbacks))
def test_public_state_get(self):
"""Testing APIExtraDataAccessHook public state GET"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
])
rsp = self.api_get(self.url,
expected_mimetype=self.resource.item_mimetype)
self.assertIn('public', rsp['test']['extra_data'])
def test_public_state_put(self):
"""Testing APIExtraDataAccessHook public state PUT"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
])
modified_extra_fields = {
'extra_data.public': 'modified',
}
rsp = self.api_put(self.url, modified_extra_fields,
expected_mimetype=self.resource.item_mimetype)
self.assertEqual(modified_extra_fields['extra_data.public'],
rsp['test']['public'])
def test_readonly_state_get(self):
"""Testing APIExtraDataAccessHook readonly state get"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('readonly',),
ExtraDataAccessLevel.ACCESS_STATE_PUBLIC_READONLY)
])
rsp = self.api_get(self.url,
expected_mimetype=self.resource.item_mimetype)
self.assertIn('readonly', rsp['test']['extra_data'])
def test_readonly_state_put(self):
"""Testing APIExtraDataAccessHook readonly state put"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('readonly',),
ExtraDataAccessLevel.ACCESS_STATE_PUBLIC_READONLY)
])
original_value = self.resource.extra_data['readonly']
modified_extra_fields = {
'extra_data.readonly': 'modified',
}
rsp = self.api_put(self.url, modified_extra_fields,
expected_mimetype=self.resource.item_mimetype)
self.assertEqual(original_value, rsp['test']['readonly'])
def test_private_state_get(self):
"""Testing APIExtraDataAccessHook private state get"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('private',), ExtraDataAccessLevel.ACCESS_STATE_PRIVATE)
])
rsp = self.api_get(self.url,
expected_mimetype=self.resource.item_mimetype)
self.assertNotIn('private', rsp['test']['extra_data'])
def test_private_state_put(self):
"""Testing APIExtraDataAccessHook private state put"""
APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('private',), ExtraDataAccessLevel.ACCESS_STATE_PRIVATE)
])
original_value = self.resource.extra_data['private']
modified_extra_fields = {
'extra_data.private': 'modified',
}
rsp = self.api_put(self.url, modified_extra_fields,
expected_mimetype=self.resource.item_mimetype)
self.assertEqual(original_value, rsp['test']['private'])
def test_unregister(self):
"""Testing APIExtraDataAccessHook unregistration"""
hook = APIExtraDataAccessHook(
self.extension,
self.resource,
[
(('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC)
])
hook.shutdown()
self.assertSetEqual(set(),
set(self.resource.extra_data_access_callbacks))
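# A minimal sketch of the access-state registration pattern these tests
# exercise: each entry pairs a key path tuple with an access level. The
# helper name and key names are illustrative assumptions.
def example_extra_data_access(extension, resource):
    """Register hypothetical public/read-only/private extra_data keys."""
    return APIExtraDataAccessHook(
        extension,
        resource,
        [
            (('public',), ExtraDataAccessLevel.ACCESS_STATE_PUBLIC),
            (('readonly',),
             ExtraDataAccessLevel.ACCESS_STATE_PUBLIC_READONLY),
            (('private',), ExtraDataAccessLevel.ACCESS_STATE_PRIVATE),
        ])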
class SandboxExtension(Extension):
registration = RegisteredExtension()
metadata = {
'Name': 'Sandbox Extension',
}
id = 'reviewboard.extensions.tests.SandboxExtension'
def __init__(self, *args, **kwargs):
super(SandboxExtension, self).__init__(*args, **kwargs)
class SandboxReviewRequestApprovalTestHook(ReviewRequestApprovalHook):
def is_approved(self, review_request, prev_approved, prev_failure):
raise Exception
class SandboxNavigationBarTestHook(NavigationBarHook):
def get_entries(self, context):
raise Exception
class SandboxDiffViewerActionTestHook(DiffViewerActionHook):
def get_actions(self, context):
raise Exception
class SandboxHeaderActionTestHook(HeaderActionHook):
def get_actions(self, context):
raise Exception
class SandboxHeaderDropdownActionTestHook(HeaderDropdownActionHook):
def get_actions(self, context):
raise Exception
class SandboxReviewRequestActionTestHook(ReviewRequestActionHook):
def get_actions(self, context):
raise Exception
class SandboxReviewRequestDropdownActionTestHook(
ReviewRequestDropdownActionHook):
def get_actions(self, context):
raise Exception
class SandboxCommentDetailDisplayTestHook(CommentDetailDisplayHook):
def render_review_comment_detail(self, comment):
raise Exception
def render_email_comment_detail(self, comment, is_html):
raise Exception
class SandboxBaseReviewRequestTestShouldRenderField(BaseReviewRequestField):
field_id = 'should_render'
def should_render(self, value):
raise Exception
class SandboxBaseReviewRequestTestInitField(BaseReviewRequestField):
field_id = 'init_field'
def __init__(self, review_request_details):
raise Exception
class SandboxUserInfoboxHook(UserInfoboxHook):
def get_etag_data(self, user, request, local_site):
raise Exception
def render(self, user, request, local_site):
raise Exception
class TestIsEmptyField(BaseReviewRequestField):
field_id = 'is_empty'
class TestInitField(BaseReviewRequestField):
field_id = 'test_init'
class TestInitFieldset(BaseReviewRequestFieldSet):
fieldset_id = 'test_init'
field_classes = [SandboxBaseReviewRequestTestInitField]
class TestShouldRenderFieldset(BaseReviewRequestFieldSet):
fieldset_id = 'test_should_render'
field_classes = [SandboxBaseReviewRequestTestShouldRenderField]
class BaseReviewRequestTestIsEmptyFieldset(BaseReviewRequestFieldSet):
fieldset_id = 'is_empty'
field_classes = [TestIsEmptyField]
@classmethod
def is_empty(cls):
raise Exception
class BaseReviewRequestTestInitFieldset(BaseReviewRequestFieldSet):
fieldset_id = 'init_fieldset'
field_classes = [TestInitField]
def __init__(self, review_request_details):
raise Exception
class SandboxTests(ExtensionManagerMixin, TestCase):
"""Testing extension sandboxing"""
def setUp(self):
super(SandboxTests, self).setUp()
self.extension = SandboxExtension(extension_manager=self.manager)
self.factory = RequestFactory()
self.user = User.objects.create_user(username='reviewboard',
email='reviewboard@example.com',
password='password')
def tearDown(self):
super(SandboxTests, self).tearDown()
self.extension.shutdown()
def test_is_approved_sandbox(self):
"""Testing sandboxing ReviewRequestApprovalHook when
is_approved function throws an error"""
SandboxReviewRequestApprovalTestHook(extension=self.extension)
review = ReviewRequest()
review._calculate_approval()
def test_get_entries(self):
"""Testing sandboxing NavigationBarHook when get_entries function
throws an error"""
entry = {
'label': 'Test get_entries Function',
'url': '/dashboard/',
}
SandboxNavigationBarTestHook(extension=self.extension, entries=[entry])
context = Context({})
t = Template(
"{% load rb_extensions %}"
"{% navigation_bar_hooks %}")
t.render(context).strip()
def test_render_review_comment_details(self):
"""Testing sandboxing CommentDetailDisplayHook when
render_review_comment_detail throws an error"""
SandboxCommentDetailDisplayTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
t = Template(
"{% load rb_extensions %}"
"{% comment_detail_display_hook comment 'review'%}")
t.render(context).strip()
def test_email_review_comment_details(self):
"""Testing sandboxing CommentDetailDisplayHook when
render_email_comment_detail throws an error"""
SandboxCommentDetailDisplayTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
t = Template(
"{% load rb_extensions %}"
"{% comment_detail_display_hook comment 'html-email'%}")
t.render(context).strip()
def test_action_hooks_diff_viewer_hook(self):
"""Testing sandboxing DiffViewerActionHook when
action_hooks throws an error"""
SandboxDiffViewerActionTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
template = Template(
'{% load reviewtags %}'
'{% review_request_actions %}')
template.render(context)
def test_action_hooks_header_hook(self):
"""Testing sandboxing HeaderActionHook when
action_hooks throws an error"""
SandboxHeaderActionTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
t = Template(
"{% load rb_extensions %}"
"{% header_action_hooks %}")
t.render(context).strip()
def test_action_hooks_header_dropdown_hook(self):
"""Testing sandboxing HeaderDropdownActionHook when
action_hooks throws an error"""
SandboxHeaderDropdownActionTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
t = Template(
"{% load rb_extensions %}"
"{% header_dropdown_action_hooks %}")
t.render(context).strip()
def test_action_hooks_review_request_hook(self):
"""Testing sandboxing ReviewRequestActionHook when
action_hooks throws an error"""
SandboxReviewRequestActionTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
template = Template(
'{% load reviewtags %}'
'{% review_request_actions %}')
template.render(context)
def test_action_hooks_review_request_dropdown_hook(self):
"""Testing sandboxing ReviewRequestDropdownActionHook when
action_hooks throws an error"""
SandboxReviewRequestDropdownActionTestHook(extension=self.extension)
context = Context({'comment': 'this is a comment'})
template = Template(
'{% load reviewtags %}'
'{% review_request_actions %}')
template.render(context)
def test_is_empty_review_request_fieldset(self):
"""Testing sandboxing ReviewRequestFieldset is_empty function in
for_review_request_fieldset"""
fieldset = [BaseReviewRequestTestIsEmptyFieldset]
ReviewRequestFieldSetsHook(extension=self.extension,
fieldsets=fieldset)
review = ReviewRequest()
request = self.factory.get('test')
request.user = self.user
context = Context({
'review_request_details': review,
'request': request
})
t = Template(
"{% load reviewtags %}"
"{% for_review_request_fieldset review_request_details %}"
"{% end_for_review_request_fieldset %}")
t.render(context).strip()
def test_field_cls_review_request_field(self):
"""Testing sandboxing ReviewRequestFieldset init function in
for_review_request_field"""
fieldset = [TestInitFieldset]
ReviewRequestFieldSetsHook(extension=self.extension,
fieldsets=fieldset)
review = ReviewRequest()
context = Context({
'review_request_details': review,
'fieldset': TestInitFieldset
})
t = Template(
"{% load reviewtags %}"
"{% for_review_request_field review_request_details 'test_init' %}"
"{% end_for_review_request_field %}")
t.render(context).strip()
def test_fieldset_cls_review_request_fieldset(self):
"""Testing sandboxing ReviewRequestFieldset init function in
for_review_request_fieldset"""
fieldset = [BaseReviewRequestTestInitFieldset]
ReviewRequestFieldSetsHook(extension=self.extension,
fieldsets=fieldset)
review = ReviewRequest()
request = self.factory.get('test')
request.user = self.user
context = Context({
'review_request_details': review,
'request': request
})
t = Template(
"{% load reviewtags %}"
"{% for_review_request_fieldset review_request_details %}"
"{% end_for_review_request_fieldset %}")
t.render(context).strip()
def test_should_render_review_request_field(self):
"""Testing sandboxing ReviewRequestFieldset should_render function in
for_review_request_field"""
fieldset = [TestShouldRenderFieldset]
ReviewRequestFieldSetsHook(extension=self.extension,
fieldsets=fieldset)
review = ReviewRequest()
context = Context({
'review_request_details': review,
'fieldset': TestShouldRenderFieldset
})
t = Template(
"{% load reviewtags %}"
"{% for_review_request_field review_request_details"
" 'test_should_render' %}"
"{% end_for_review_request_field %}")
t.render(context).strip()
def test_user_infobox_hook(self):
"""Testing sandboxing of the UserInfoboxHook"""
SandboxUserInfoboxHook(self.extension, 'template.html')
self.client.get(
local_site_reverse('user-infobox', kwargs={
'username': self.user.username,
}))
class EmailHookTests(ExtensionManagerMixin, SpyAgency, TestCase):
"""Testing the e-mail recipient filtering capacity of EmailHooks."""
fixtures = ['test_users']
def setUp(self):
super(EmailHookTests, self).setUp()
self.extension = DummyExtension(extension_manager=self.manager)
mail.outbox = []
def tearDown(self):
super(EmailHookTests, self).tearDown()
self.extension.shutdown()
def test_review_request_published_email_hook(self):
"""Testing the ReviewRequestPublishedEmailHook"""
class DummyHook(ReviewRequestPublishedEmailHook):
def get_to_field(self, to_field, review_request, user):
return set([user])
def get_cc_field(self, cc_field, review_request, user):
return set([user])
hook = DummyHook(self.extension)
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
review_request = self.create_review_request()
admin = User.objects.get(username='admin')
call_kwargs = {
'user': admin,
'review_request': review_request,
}
with self.siteconfig_settings({'mail_send_review_mail': True}):
review_request.publish(admin)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to,
[build_email_address_for_user(admin)])
self.assertTrue(hook.get_to_field.called)
self.assertTrue(hook.get_to_field.called_with(**call_kwargs))
self.assertTrue(hook.get_cc_field.called)
self.assertTrue(hook.get_cc_field.called_with(**call_kwargs))
def test_review_published_email_hook(self):
"""Testing the ReviewPublishedEmailHook"""
class DummyHook(ReviewPublishedEmailHook):
def get_to_field(self, to_field, review, user, review_request,
to_owner_only):
return set([user])
def get_cc_field(self, cc_field, review, user, review_request,
to_owner_only):
return set([user])
hook = DummyHook(self.extension)
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
admin = User.objects.get(username='admin')
review_request = self.create_review_request(public=True)
review = self.create_review(review_request)
call_kwargs = {
'user': admin,
'review_request': review_request,
'review': review,
'to_owner_only': False,
}
with self.siteconfig_settings({'mail_send_review_mail': True}):
review.publish(admin)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to,
[build_email_address_for_user(admin)])
self.assertTrue(hook.get_to_field.called)
self.assertTrue(hook.get_to_field.called_with(**call_kwargs))
self.assertTrue(hook.get_cc_field.called)
self.assertTrue(hook.get_cc_field.called_with(**call_kwargs))
def test_review_reply_published_email_hook(self):
"""Testing the ReviewReplyPublishedEmailHook"""
class DummyHook(ReviewReplyPublishedEmailHook):
def get_to_field(self, to_field, reply, user, review,
review_request):
return set([user])
def get_cc_field(self, cc_field, reply, user, review,
review_request):
return set([user])
hook = DummyHook(self.extension)
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
admin = User.objects.get(username='admin')
review_request = self.create_review_request(public=True)
review = self.create_review(review_request)
reply = self.create_reply(review)
call_kwargs = {
'user': admin,
'review_request': review_request,
'review': review,
'reply': reply,
}
with self.siteconfig_settings({'mail_send_review_mail': True}):
reply.publish(admin)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue(hook.get_to_field.called)
self.assertTrue(hook.get_to_field.called_with(**call_kwargs))
self.assertTrue(hook.get_cc_field.called)
self.assertTrue(hook.get_cc_field.called_with(**call_kwargs))
def test_review_request_closed_email_hook_submitted(self):
"""Testing the ReviewRequestClosedEmailHook for a review request being
submitted
"""
class DummyHook(ReviewRequestClosedEmailHook):
def get_to_field(self, to_field, review_request, user, close_type):
return set([user])
def get_cc_field(self, cc_field, review_request, user, close_type):
return set([user])
hook = DummyHook(self.extension)
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
admin = User.objects.get(username='admin')
review_request = self.create_review_request(public=True)
call_kwargs = {
'user': admin,
'review_request': review_request,
'close_type': ReviewRequest.SUBMITTED,
}
with self.siteconfig_settings({'mail_send_review_close_mail': True}):
review_request.close(ReviewRequest.SUBMITTED, admin)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue(hook.get_to_field.called)
self.assertTrue(hook.get_to_field.called_with(**call_kwargs))
self.assertTrue(hook.get_cc_field.called)
self.assertTrue(hook.get_cc_field.called_with(**call_kwargs))
def test_review_request_closed_email_hook_discarded(self):
"""Testing the ReviewRequestClosedEmailHook for a review request being
discarded
"""
class DummyHook(ReviewRequestClosedEmailHook):
def get_to_field(self, to_field, review_request, user, close_type):
return set([user])
def get_cc_field(self, cc_field, review_request, user, close_type):
return set([user])
hook = DummyHook(self.extension)
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
admin = User.objects.get(username='admin')
review_request = self.create_review_request(public=True)
call_kwargs = {
'user': admin,
'review_request': review_request,
'close_type': ReviewRequest.DISCARDED,
}
with self.siteconfig_settings({'mail_send_review_close_mail': True}):
review_request.close(ReviewRequest.DISCARDED, admin)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue(hook.get_to_field.called)
self.assertTrue(hook.get_to_field.called_with(**call_kwargs))
self.assertTrue(hook.get_cc_field.called)
self.assertTrue(hook.get_cc_field.called_with(**call_kwargs))
def test_generic_hook(self):
"""Testing that a generic e-mail hook works for all e-mail signals"""
hook = EmailHook(self.extension,
signals=[
review_request_published,
review_published,
reply_published,
review_request_closed,
])
self.spy_on(hook.get_to_field)
self.spy_on(hook.get_cc_field)
user = User.objects.create_user(username='testuser')
review_request = self.create_review_request(public=True,
target_people=[user])
review = self.create_review(review_request)
reply = self.create_reply(review)
siteconfig_settings = {
'mail_send_review_mail': True,
'mail_send_review_close_mail': True,
}
with self.siteconfig_settings(siteconfig_settings):
self.assertEqual(len(mail.outbox), 0)
review.publish()
call_kwargs = {
'user': review.user,
'review': review,
'review_request': review_request,
'to_owner_only': False,
}
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(len(hook.get_to_field.spy.calls), 1)
self.assertEqual(len(hook.get_cc_field.spy.calls), 1)
self.assertEqual(hook.get_to_field.spy.calls[-1].kwargs,
call_kwargs)
self.assertEqual(hook.get_cc_field.spy.calls[-1].kwargs,
call_kwargs)
reply.publish(reply.user)
call_kwargs.pop('to_owner_only')
call_kwargs['reply'] = reply
call_kwargs['user'] = reply.user
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(len(hook.get_to_field.spy.calls), 2)
self.assertEqual(len(hook.get_cc_field.spy.calls), 2)
self.assertEqual(hook.get_to_field.spy.calls[-1].kwargs,
call_kwargs)
self.assertEqual(hook.get_cc_field.spy.calls[-1].kwargs,
call_kwargs)
review_request.close(ReviewRequest.DISCARDED)
call_kwargs = {
'review_request': review_request,
'user': review_request.submitter,
'close_type': ReviewRequest.DISCARDED,
}
self.assertEqual(len(mail.outbox), 3)
self.assertEqual(len(hook.get_to_field.spy.calls), 3)
self.assertEqual(len(hook.get_cc_field.spy.calls), 3)
self.assertEqual(hook.get_to_field.spy.calls[-1].kwargs,
call_kwargs)
self.assertEqual(hook.get_cc_field.spy.calls[-1].kwargs,
call_kwargs)
review_request.reopen()
review_request.publish(review_request.submitter)
call_kwargs = {
'review_request': review_request,
'user': review_request.submitter,
}
self.assertEqual(len(mail.outbox), 4)
self.assertEqual(len(hook.get_to_field.spy.calls), 4)
self.assertEqual(len(hook.get_cc_field.spy.calls), 4)
self.assertEqual(hook.get_to_field.spy.calls[-1].kwargs,
call_kwargs)
self.assertEqual(hook.get_cc_field.spy.calls[-1].kwargs,
call_kwargs)
review_request.close(ReviewRequest.SUBMITTED)
call_kwargs['close_type'] = ReviewRequest.SUBMITTED
self.assertEqual(len(mail.outbox), 5)
self.assertEqual(len(hook.get_to_field.spy.calls), 5)
self.assertEqual(len(hook.get_cc_field.spy.calls), 5)
self.assertEqual(hook.get_to_field.spy.calls[-1].kwargs,
call_kwargs)
self.assertEqual(hook.get_cc_field.spy.calls[-1].kwargs,
call_kwargs)
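# A minimal sketch of a recipient-filtering hook in the style of the
# DummyHook classes above: route review request publish e-mail to the
# acting user only. The class name is an illustrative assumption; the
# method signatures match those used in the tests.
class ExampleToUserOnlyEmailHook(ReviewRequestPublishedEmailHook):
    """Hypothetical hook narrowing recipients to the acting user."""

    def get_to_field(self, to_field, review_request, user):
        # Replace the computed To: list with just the acting user.
        return {user}

    def get_cc_field(self, cc_field, review_request, user):
        # Drop all CC recipients.
        return set()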
class AvatarServiceHookTests(ExtensionManagerMixin, TestCase):
"""Test for reviewboard.extensions.hooks.AvatarServiceHook."""
@classmethod
def setUpClass(cls):
super(AvatarServiceHookTests, cls).setUpClass()
avatar_services.reset()
def setUp(self):
super(AvatarServiceHookTests, self).setUp()
self.extension = DummyExtension(extension_manager=self.manager)
def tearDown(self):
super(AvatarServiceHookTests, self).tearDown()
self.extension.shutdown()
avatar_services.reset()
def test_register(self):
"""Testing AvatarServiceHook registers services"""
self.assertNotIn(DummyAvatarService, avatar_services)
AvatarServiceHook(self.extension, DummyAvatarService,
start_enabled=True)
self.assertIn(DummyAvatarService, avatar_services)
avatar_services.enable_service(DummyAvatarService, save=False)
self.assertTrue(avatar_services.is_enabled(DummyAvatarService))
def test_unregister(self):
"""Testing AvatarServiceHook unregisters services on shutdown"""
self.assertNotIn(DummyAvatarService, avatar_services)
AvatarServiceHook(self.extension, DummyAvatarService,
start_enabled=True)
self.assertIn(DummyAvatarService, avatar_services)
self.extension.shutdown()
self.assertNotIn(DummyAvatarService, avatar_services)
|
from .about import NAME, VERSION, __version__
from .sonyflake import SonyFlake
__all__ = ["SonyFlake"]
|
# Copyright 2018 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import netaddr
from neutron._i18n import _
from neutron.api import extensions as neutron_extensions
from neutron_lib.api import validators
from neutron_lib import context as n_ctx
from neutron_lib.db import api as lib_db_api
from neutron_lib.db import model_query
from neutron_lib.db import utils as ndb_utils
from neutron_lib import exceptions as n_exc
from neutron_lib.services import base as service_base
from oslo_config import cfg
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
from nuage_neutron.plugins.common import base_plugin
from nuage_neutron.plugins.common import constants
from nuage_neutron.plugins.common import exceptions as nuage_exc
from nuage_neutron.plugins.common import gateway
from nuage_neutron.plugins.common import nuage_models
from nuage_neutron.plugins.common import nuagedb
from nuage_neutron.plugins.common import port_security
from nuage_neutron.plugins.common.service_plugins import externalsg
from nuage_neutron.plugins.common import utils as nuage_utils
from nuage_neutron.plugins.nuage_ml2 import extensions
from nuage_neutron.vsdclient.restproxy import RESTProxyError
LOG = logging.getLogger(__name__)
class NuageApi(base_plugin.BaseNuagePlugin,
service_base.ServicePluginBase,
externalsg.NuageexternalsgMixin,
gateway.NuagegatewayMixin):
supported_extension_aliases = ['net-partition', 'nuage-gateway',
'vsd-resource',
'nuage-external-security-group']
def __init__(self):
super(NuageApi, self).__init__()
# Prepare default and shared netpartitions
self._prepare_netpartitions()
neutron_extensions.append_api_extensions_path(extensions.__path__)
self.psec_handler = port_security.NuagePortSecurityHandler(
self.vsdclient, self)
def get_plugin_type(self):
return constants.NUAGE_APIS
def get_plugin_description(self):
return "Plugin providing Nuage-specific APIs."
def get_default_np_id(self):
return self._default_np_id
def _make_net_partition_dict(self, net_partition,
context=None, fields=None):
res = {
'id': net_partition['id'],
'name': net_partition['name'],
'l3dom_tmplt_id': net_partition['l3dom_tmplt_id'],
'l2dom_tmplt_id': net_partition['l2dom_tmplt_id'],
'isolated_zone': net_partition['isolated_zone'],
'shared_zone': net_partition['shared_zone']
}
if context:
res['tenant_id'] = context.tenant_id
return self._fields(res, fields)
def _make_project_net_partition_mapping(self, context, mapping,
fields=None):
res = {
'project': mapping['project'],
'net_partition_id': mapping['net_partition_id'],
}
if context:
res['tenant_id'] = context.tenant_id
return self._fields(res, fields)
@log_helpers.log_method_call
def _create_net_partition(self, session, net_part_name):
params = {
'name': net_part_name,
'fp_quota': str(cfg.CONF.RESTPROXY.default_floatingip_quota),
'externalID': net_part_name + '@openstack'
}
nuage_net_partition = self.vsdclient.create_net_partition(params)
net_partitioninst = None
if nuage_net_partition:
with session.begin(subtransactions=True):
net_partitioninst = NuageApi._add_net_partition(
session,
nuage_net_partition,
net_part_name)
if not net_partitioninst:
return {}
return self._make_net_partition_dict(net_partitioninst)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def _validate_create_net_partition(self, context, net_part_name):
"""Sync net partition configuration between plugin and VSD"""
# About parallelism: This method could be executed in parallel
# by different neutron instances. The decorator
        # retry_if_session_inactive makes sure that the method will be
        # retried when database transaction errors such as deadlocks and
        # duplicate inserts occur.
self._validate_net_partition_name(net_part_name)
session = context.session
with session.begin(subtransactions=True):
netpart_db = nuagedb.get_net_partition_by_name(session,
net_part_name)
nuage_netpart = self.vsdclient.get_netpartition_data(net_part_name)
if nuage_netpart:
if netpart_db:
# Net-partition exists in neutron and vsd
def_netpart = (
cfg.CONF.RESTPROXY.default_net_partition_name)
share_netpart = constants.SHARED_INFRASTRUCTURE
if (def_netpart == net_part_name or
share_netpart == net_part_name):
if nuage_netpart['np_id'] != netpart_db['id']:
msg = ("Net-partition %s exists in "
"Neutron and VSD, but the id is different"
% net_part_name)
raise n_exc.BadRequest(resource='net_partition',
msg=msg)
self._update_net_partition(session,
net_part_name,
netpart_db,
nuage_netpart)
LOG.info("Net-partition %s already exists,"
" so will just use it", net_part_name)
return self._make_net_partition_dict(netpart_db)
else:
if nuage_netpart['np_id'] != netpart_db['id']:
msg = (('Net-partition %s already exists in '
'Neutron and VSD, but the id is '
'different') % net_part_name)
else:
msg = (('Net-partition %s already exists in '
'Neutron and VSD with same id') %
net_part_name)
raise n_exc.BadRequest(resource='net_partition',
msg=msg)
# Net-partition exists in vsd and not in neutron
netpart_db = NuageApi._add_net_partition(session,
nuage_netpart,
net_part_name)
return self._make_net_partition_dict(netpart_db)
else:
if netpart_db:
# Net-partition exists in neutron and not VSD
LOG.info("Existing net-partition %s will be deleted and "
"re-created in db", net_part_name)
nuagedb.delete_net_partition(session, netpart_db)
# Net-partition does not exist in neutron and VSD
return self._create_net_partition(session, net_part_name)
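    # For illustration only: the retry pattern relied on above. Decorating
    # a method with lib_db_api.retry_if_session_inactive() lets parallel
    # neutron workers retry on deadlocks and duplicate inserts, so the
    # create-or-update logic converges to a single DB row. A hedged sketch:
    #
    #     @lib_db_api.retry_if_session_inactive()
    #     def _sync_netpartition(self, context, name):
    #         with context.session.begin(subtransactions=True):
    #             ...  # idempotent create-or-update logic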
@staticmethod
def _validate_net_partition_name(name):
try:
name.encode('ascii')
except UnicodeEncodeError:
msg = _('Invalid netpartition name: Only ascii names are allowed')
raise n_exc.BadRequest(resource='net_partition', msg=msg)
@staticmethod
@log_helpers.log_method_call
def _add_net_partition(session, netpart, netpart_name):
l3dom_id = netpart['l3dom_tid']
if netpart_name == constants.SHARED_INFRASTRUCTURE:
l3isolated = None
l3shared = constants.SHARED_ZONE_TEMPLATE
else:
l3isolated = constants.DEF_NUAGE_ZONE_PREFIX + '-' + l3dom_id
l3shared = constants.DEF_NUAGE_ZONE_PREFIX + '-pub-' + l3dom_id
return nuagedb.add_net_partition(session,
netpart['np_id'],
l3dom_id,
netpart['l2dom_tid'],
netpart_name,
l3isolated,
l3shared)
@log_helpers.log_method_call
def _update_net_partition(self, session,
netpart_name,
net_partition_db,
vsd_net_partition):
l3dom_id = vsd_net_partition['l3dom_tid']
if netpart_name == constants.SHARED_INFRASTRUCTURE:
l3isolated = None
l3shared = constants.SHARED_ZONE_TEMPLATE
else:
l3isolated = constants.DEF_NUAGE_ZONE_PREFIX + '-' + l3dom_id
l3shared = constants.DEF_NUAGE_ZONE_PREFIX + '-pub-' + l3dom_id
with session.begin(subtransactions=True):
nuagedb.update_netpartition(net_partition_db, {
'l3dom_tmplt_id': l3dom_id,
'l2dom_tmplt_id': vsd_net_partition['l2dom_tid'],
'isolated_zone': l3isolated,
'shared_zone': l3shared,
})
@log_helpers.log_method_call
def _link_default_netpartition(self, netpart_name,
l2template, l3template,
l3isolated, l3shared):
params = {
'name': netpart_name,
'l3template': l3template,
'l2template': l2template
}
(np_id, l3dom_tid,
l2dom_tid) = self.vsdclient.link_default_netpartition(params)
# verify that the provided zones have been created already
shared_match, isolated_match = self.vsdclient.validate_zone_create(
l3dom_tid, l3isolated, l3shared)
if not shared_match or not isolated_match:
msg = ('Default zone names must be provided for '
                   'default net-partition')
raise n_exc.BadRequest(resource='net_partition', msg=msg)
# basic verifications passed. add default netpartition to the DB
session = lib_db_api.get_writer_session()
netpartition = nuagedb.get_net_partition_by_name(session,
netpart_name)
with session.begin():
if netpartition:
nuagedb.delete_net_partition(session, netpartition)
nuagedb.add_net_partition(session,
np_id,
l3dom_tid,
l2dom_tid,
netpart_name,
l3isolated,
l3shared)
self._default_np_id = np_id
@log_helpers.log_method_call
def _prepare_netpartitions(self):
# prepare shared netpartition
shared_netpart_name = constants.SHARED_INFRASTRUCTURE
self._validate_create_net_partition(n_ctx.get_admin_context(),
shared_netpart_name)
# prepare default netpartition
default_netpart_name = cfg.CONF.RESTPROXY.default_net_partition_name
l3template = cfg.CONF.RESTPROXY.default_l3domain_template
l2template = cfg.CONF.RESTPROXY.default_l2domain_template
l3isolated = cfg.CONF.RESTPROXY.default_isolated_zone
l3shared = cfg.CONF.RESTPROXY.default_shared_zone
        # If any default templates/zones are configured, all of them must
        # be; otherwise default templates are created.
if l2template or l3template or l3isolated or l3shared:
if (not l2template or not l3template or not l3isolated or
not l3shared):
msg = 'Configuration of default net-partition not complete'
raise n_exc.BadRequest(resource='net_partition',
msg=msg)
            # NetPartition and templates already created. Just sync the
            # neutron DB. They must all be in VSD; if not, it's an error.
self._link_default_netpartition(default_netpart_name,
l2template,
l3template,
l3isolated,
l3shared)
else:
default_netpart = self._validate_create_net_partition(
n_ctx.get_admin_context(),
default_netpart_name)
self._default_np_id = default_netpart['id']
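    # For illustration, the RESTPROXY options consulted above could be set
    # as follows in the plugin configuration (all values here are
    # assumptions; only the option names are taken from the code):
    #
    #     [RESTPROXY]
    #     default_net_partition_name = OpenStack_default
    #     default_l3domain_template = L3Template
    #     default_l2domain_template = L2Template
    #     default_isolated_zone = ZoneIsolated
    #     default_shared_zone = ZoneShared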
@nuage_utils.handle_nuage_api_errorcode
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def create_net_partition(self, context, net_partition):
ent = net_partition['net_partition']
return self._validate_create_net_partition(context, ent['name'])
@nuage_utils.handle_nuage_api_errorcode
@log_helpers.log_method_call
def _validate_delete_net_partition(self, context, id, net_partition_name):
if net_partition_name == constants.SHARED_INFRASTRUCTURE:
msg = _("Can't delete net_partition {}").format(net_partition_name)
raise n_exc.BadRequest(resource='net_partition', msg=msg)
ent_rtr_mapping = nuagedb.get_ent_rtr_mapping_by_entid(
context.session, id)
ent_l2dom_mapping = nuagedb.get_ent_l2dom_mapping_by_entid(
context.session, id)
if ent_rtr_mapping:
msg = (_("One or more router still attached to "
"net_partition %s") % net_partition_name)
raise n_exc.BadRequest(resource='net_partition', msg=msg)
if ent_l2dom_mapping:
msg = (_("One or more L2 Domain Subnet present in the "
"net_partition %s") % net_partition_name)
raise n_exc.BadRequest(resource='net_partition', msg=msg)
@nuage_utils.handle_nuage_api_errorcode
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def delete_net_partition(self, context, id):
net_partition = nuagedb.get_net_partition_by_id(context.session, id)
if not net_partition:
raise nuage_exc.NuageNotFound(resource='net_partition',
resource_id=id)
self._validate_delete_net_partition(context, id, net_partition['name'])
self.vsdclient.delete_net_partition(net_partition['id'])
with context.session.begin(subtransactions=True):
nuagedb.delete_net_partition(context.session,
net_partition)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def get_net_partition(self, context, id, fields=None):
net_partition = nuagedb.get_net_partition_by_id(context.session,
id)
if not net_partition:
raise nuage_exc.NuageNotFound(resource='net_partition',
resource_id=id)
return self._make_net_partition_dict(net_partition, context=context,
fields=fields)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def get_net_partitions(self, context, filters=None, fields=None):
net_partitions = nuagedb.get_net_partitions(context.session,
filters=filters)
return [self._make_net_partition_dict(net_partition, context, fields)
for net_partition in net_partitions]
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def create_project_net_partition_mapping(self, context,
project_net_partition_mapping):
session = context.session
p2n = project_net_partition_mapping['project_net_partition_mapping']
project = p2n['project']
net_partition_id = p2n['net_partition_id']
err = validators.validate_uuid(project)
if err:
raise nuage_exc.NuageBadRequest(resource='net_partition', msg=err)
# Validate netpartition
netpart = nuagedb.get_net_partition_by_id(session, net_partition_id)
if not netpart:
msg = _('Net partition {} is not a valid netpartition '
'ID.').format(net_partition_id)
raise nuage_exc.NuageBadRequest(
resource='project_net_partition_mapping', msg=msg)
with session.begin(subtransactions=True):
existing_mapping = nuagedb.get_project_net_partition_mapping(
session, project)
if existing_mapping:
session.delete(existing_mapping)
mapping = nuagedb.add_project_net_partition_mapping(
session, net_partition_id, project)
return self._make_project_net_partition_mapping(context, mapping)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def delete_project_net_partition_mapping(self, context,
project_id):
session = context.session
err = validators.validate_uuid(project_id)
if err:
raise nuage_exc.NuageBadRequest(
resource='project_net_partition_mapping', msg=err)
with session.begin(subtransactions=True):
existing_mapping = nuagedb.get_project_net_partition_mapping(
session, project_id)
if existing_mapping:
session.delete(existing_mapping)
else:
msg = _('Project {} does not currently '
'have a default net-partition associated.').format(
project_id)
raise nuage_exc.NuageBadRequest(
resource='project_net_partition_mapping', msg=msg)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def get_project_net_partition_mapping(self, context, id, fields=None):
mapping = nuagedb.get_project_net_partition_mapping(context.session,
id)
if not mapping:
raise nuage_exc.NuageNotFound(
resource='project_net_partition_mapping', resource_id=id)
return self._make_project_net_partition_mapping(context, mapping,
fields)
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def get_project_net_partition_mappings(
self, context, filters=None, fields=None, sorts=None,
limit=None, marker=None, page_reverse=False):
marker_obj = ndb_utils.get_marker_obj(
self, context, 'project_net_partition_mapping', limit, marker)
return model_query.get_collection(
context, nuage_models.NetPartitionProject,
functools.partial(self._make_project_net_partition_mapping,
context),
filters=filters, fields=fields, sorts=sorts,
limit=limit, marker_obj=marker_obj, page_reverse=page_reverse)
@nuage_utils.handle_nuage_api_errorcode
@lib_db_api.retry_if_session_inactive()
@log_helpers.log_method_call
def get_vsd_subnet(self, context, id, fields=None):
subnet = self.vsdclient.get_nuage_subnet_by_id(
id, required=True)
subnet_dict = self._calculate_vsd_subnet_dict(subnet)
if subnet['type'] == constants.SUBNET:
domain_id = self.vsdclient.get_l3domain_id_by_domain_subnet_id(
subnet_dict['id'])
netpart_id = self.vsdclient.get_router_np_id(domain_id)
else:
netpart_id = subnet['parentID']
net_partition = self.vsdclient.get_net_partition_name_by_id(
netpart_id)
subnet_dict['net_partition'] = net_partition
return self._fields(subnet_dict, fields)
@nuage_utils.handle_nuage_api_errorcode
@log_helpers.log_method_call
def get_vsd_subnets(self, context, filters=None, fields=None):
        if not filters or 'vsd_zone_id' not in filters:
msg = _('vsd_zone_id is a required filter parameter for this API')
raise n_exc.BadRequest(resource='vsd-subnets', msg=msg)
l3subs = self.vsdclient.get_domain_subnet_by_zone_id(
filters['vsd_zone_id'][0])
sub_dicts = [self._calculate_vsd_subnet_dict(sub) for sub in l3subs]
return [self._fields(subnet_dict, fields) for subnet_dict in sub_dicts]
def _calculate_vsd_subnet_dict(self, subnet):
backend_subnet = subnet
if subnet['associatedSharedNetworkResourceID']:
backend_subnet = self.vsdclient.get_nuage_subnet_by_id(
subnet['associatedSharedNetworkResourceID'])
subnet_dict = {'id': subnet['ID'],
'name': subnet['name'],
'cidr': self._calc_cidr(backend_subnet),
'ipv6_cidr': backend_subnet.get('IPv6Address'),
'gateway': backend_subnet.get('gateway'),
'ipv6_gateway': backend_subnet.get('IPv6Gateway'),
'ip_version': backend_subnet['IPType'],
'enable_dhcpv4': backend_subnet['enableDHCPv4'],
'enable_dhcpv6': backend_subnet['enableDHCPv6']}
return subnet_dict
@nuage_utils.handle_nuage_api_errorcode
@log_helpers.log_method_call
def get_vsd_domains(self, context, filters=None, fields=None):
l3domains = []
l2domains = []
        if filters and 'vsd_organisation_id' in filters:
# get domains by enterprise id
l3domains.extend(self.vsdclient.get_routers_by_netpart(
filters['vsd_organisation_id'][0]))
l2domains.extend(self.vsdclient.get_subnet_by_netpart(
filters['vsd_organisation_id'][0]))
        elif filters and 'os_router_ids' in filters:
# get domain by Openstack router id
for os_id in filters['os_router_ids']:
l3_domain = self.vsdclient.get_l3domain_by_external_id(os_id)
if l3_domain:
l3domains.append(l3_domain)
else:
msg = _('vsd_organisation_id or os_router_ids is a required filter'
' parameter for this API')
raise n_exc.BadRequest(resource='vsd-domains', msg=msg)
# add type to the domains (used by horizon linkedselect)
for domain in l3domains:
domain.update({'type': 'L3'})
for domain in l2domains:
domain.update({'type': 'L2'})
vsd_to_os = {
'ID': 'id',
'name': 'name',
'type': 'type',
# L2
'net_partition_id': 'net_partition_id',
'dhcp_managed': 'dhcp_managed',
'IPType': 'ip_version',
'ipv4_cidr': 'cidr',
'IPv6Address': 'ipv6_cidr',
'ipv4_gateway': 'gateway',
'IPv6Gateway': 'ipv6_gateway',
'enableDHCPv4': 'enable_dhcpv4',
'enableDHCPv6': 'enable_dhcpv6',
# L3
'parentID': 'net_partition_id',
'routeDistinguisher': 'rd',
'routeTarget': 'rt',
'backHaulVNID': 'backhaul_vnid',
'backHaulRouteDistinguisher': 'backhaul_rd',
'backHaulRouteTarget': 'backhaul_rt',
'templateID': 'router_template_id',
'tunnelType': 'tunnel_type',
'ECMPCount': 'ecmp_count'
}
return self._trans_vsd_to_os(l3domains + l2domains, vsd_to_os,
filters, fields)
def _calc_cidr(self, subnet):
if subnet.get('address'):
ip = netaddr.IPNetwork(subnet['address'] + '/' +
subnet['netmask'])
return str(ip)
else:
return None
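    # _calc_cidr sketch: netaddr normalizes 'address/netmask' input to CIDR
    # prefix form, e.g. address '10.0.0.0' with netmask '255.255.255.0'
    # yields '10.0.0.0/24'.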
@nuage_utils.handle_nuage_api_errorcode
@log_helpers.log_method_call
def get_vsd_zones(self, context, filters=None, fields=None):
        if not filters or 'vsd_domain_id' not in filters:
msg = _('vsd_domain_id is a required filter parameter for this '
'API')
raise n_exc.BadRequest(resource='vsd-zones', msg=msg)
try:
vsd_zones = self.vsdclient.get_zone_by_domainid(
filters['vsd_domain_id'][0])
except RESTProxyError as e:
if e.code == 404:
return []
else:
                raise
vsd_zones = [self._update_dict(zone, 'vsd_domain_id',
filters['vsd_domain_id'][0])
for zone in vsd_zones]
vsd_to_os = {
'zone_id': 'id',
'zone_name': 'name',
'vsd_domain_id': 'vsd_domain_id'
}
return self._trans_vsd_to_os(vsd_zones, vsd_to_os, filters, fields)
    def _update_dict(self, d, key, val):
        d[key] = val
        return d
@nuage_utils.handle_nuage_api_errorcode
@log_helpers.log_method_call
def get_vsd_organisations(self, context, filters=None, fields=None):
netpartitions = self.vsdclient.get_net_partitions()
vsd_to_os = {
'net_partition_id': 'id',
'net_partition_name': 'name'
}
return self._trans_vsd_to_os(netpartitions, vsd_to_os, filters, fields)
def _trans_vsd_to_os(self, vsd_list, mapping, filters, fields):
os_list = []
if not filters:
filters = {}
        for filter_name in filters:
            filters[filter_name] = [value.lower()
                                    for value in filters[filter_name]]
for vsd_obj in vsd_list:
os_obj = {}
for vsd_key in mapping:
if callable(vsd_key):
os_obj[mapping[vsd_key]] = vsd_key(vsd_obj)
elif vsd_key in vsd_obj:
os_obj[mapping[vsd_key]] = vsd_obj[vsd_key]
if self._passes_filters(os_obj, filters):
self._fields(os_obj, fields)
os_list.append(os_obj)
return os_list
@staticmethod
def _passes_filters(obj, filters):
        for filter_name in filters:
            if (filter_name in obj and
                    str(obj[filter_name]).lower() not in filters[filter_name]):
return False
return True
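    # Illustrative sketch of _trans_vsd_to_os (values hypothetical): given
    #   vsd_obj = {'ID': 'abc123', 'name': 'zone-1', 'extra': 'dropped'}
    #   mapping = {'ID': 'id', 'name': 'name'}
    # the result is {'id': 'abc123', 'name': 'zone-1'}; keys absent from
    # `mapping` are dropped, callable mapping keys are applied to the whole
    # VSD object, and `filters`/`fields` then prune the resulting dicts.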
|
from web.api import BaseAPI
from data_models import government_models
from utils import mongo
class LordApi(BaseAPI):
def __init__(self):
BaseAPI.__init__(self)
self._db = mongo.MongoInterface()
self._db_table = 'api_lords'
def request(self, args):
name = args["name"]
result, _ = self._db.query(self._db_table, query=args)
if len(result) > 0:
lord = government_models.Lord(name)
result = {
'name': result[0]['name'],
'influences_summary': result[0]['influences'],
'influences_detail': {
"meetings": self._influencer_urls(lord.meetings),
"register_of_interests": self._interest_urls(lord.interests),
"electoral_commission": self._recipient_urls(lord.donations),
},
"government_departments": self._department_detail_urls(
result[0]["government_departments"]
),
"government_positions": result[0]["government_positions"],
}
return result
def _interest_urls(self, interests):
results = []
for category in interests:
updated_interests = []
for interest in category["interests"]:
updated = interest
interest_name = interest["interest"]["name"]
interest_labels = interest["interest"]["labels"]
urls = self.named_entity_resources(interest_name, interest_labels)
updated["interest"]["details_url"] = urls[0]
updated["interest"]["api_url"] = urls[1]
updated_interests.append(updated)
if len(updated_interests) > 0:
category["interests"] = updated_interests
results.append(category)
return results
def _recipient_urls(self, donations):
results = []
for donation in donations:
updated = donation
recipient_name = donation["recipient"]["name"]
recipient_labels = donation["recipient"]["labels"]
urls = self.named_entity_resources(recipient_name, recipient_labels)
updated["recipient"]["details_url"] = urls[0]
updated["recipient"]["api_url"] = urls[1]
results.append(updated)
return results
def _influencer_urls(self, meetings):
results = []
for meeting in meetings:
updated = meeting
attendee_name = {"name": meeting["attendee"], "details_url": None}
if meeting["attendee"]:
urls = self.named_entity_resources(meeting["attendee"], "influencer")
attendee_name["details_url"] = urls[0]
updated["attendee"] = attendee_name
results.append(updated)
return results
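    # Shape of a successful response from request() (sketch; values
    # illustrative):
    #   {
    #       "name": ...,
    #       "influences_summary": ...,
    #       "influences_detail": {"meetings": [...],
    #                             "register_of_interests": [...],
    #                             "electoral_commission": [...]},
    #       "government_departments": [...],
    #       "government_positions": [...],
    #   }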
|
# unp.py - functions for handling Belarusian UNP numbers
# coding: utf-8
#
# Copyright (C) 2020 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""УНП, UNP (Учетный номер плательщика, the Belarus VAT number).
The УНП (UNP) or Учетный номер плательщика (Uchetniy nomer platel'shika,
Payer account number) is issued to organisations and individuals for tax
purposes. The number consists of 9 digits (numeric for organisations,
alphanumeric for individuals) and contains a region identifier, a serial per
region and a check digit.
More information:
* https://be.wikipedia.org/wiki/Уліковы_нумар_плацельшчыка
* http://pravo.levonevsky.org/bazaby09/sbor37/text37892/index3.htm
>>> validate('200988541')
'200988541'
>>> validate('УНП MA1953684')
'MA1953684'
>>> validate('200988542')
Traceback (most recent call last):
...
InvalidChecksum: ...
"""
from stdnum.exceptions import *
from stdnum.util import clean, isdigits, to_unicode
# Mapping of Cyrillic letters to Latin letters
_cyrillic_to_latin = dict(zip(
u'АВЕКМНОРСТ',
u'ABEKMHOPCT',
))
def compact(number):
"""Convert the number to the minimal representation. This strips the
number of any valid separators and removes surrounding whitespace."""
number = clean(number, ' ').upper().strip()
for prefix in ('УНП', u'УНП', 'UNP', u'UNP'):
if type(number) == type(prefix) and number.startswith(prefix):
number = number[len(prefix):]
# Replace Cyrillic letters with Latin letters
cleaned = ''.join(_cyrillic_to_latin.get(x, x) for x in to_unicode(number))
if type(cleaned) != type(number): # pragma: no cover (Python2 only)
cleaned = cleaned.encode('utf-8')
return cleaned
def calc_check_digit(number):
"""Calculate the check digit for the number."""
number = compact(number)
alphabet = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ'
weights = (29, 23, 19, 17, 13, 7, 5, 3)
if not isdigits(number):
number = number[0] + str('ABCEHKMOPT'.index(number[1])) + number[2:]
c = sum(w * alphabet.index(n) for w, n in zip(weights, number)) % 11
if c > 9:
raise InvalidChecksum()
return str(c)
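# Worked example (digits-only case): for '200988541' the weighted sum is
#   29*2 + 23*0 + 19*0 + 17*9 + 13*8 + 7*8 + 5*5 + 3*4 = 408
# and 408 % 11 = 1, which matches the final digit '1'. For alphanumeric
# numbers the second character is first replaced by its index in
# 'ABCEHKMOPT', as done above.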
def validate(number):
"""Check if the number is a valid number. This checks the length,
formatting and check digit."""
number = compact(number)
if len(number) != 9:
raise InvalidLength()
if not isdigits(number[2:]):
raise InvalidFormat()
if not isdigits(number[:2]) and not all(x in 'ABCEHKMOPT' for x in number[:2]):
raise InvalidFormat()
if number[0] not in '1234567ABCEHKM':
raise InvalidComponent()
if number[-1] != calc_check_digit(number):
raise InvalidChecksum()
return number
def is_valid(number):
"""Check if the number is a valid number."""
try:
return bool(validate(number))
except ValidationError:
return False
def check_nalog(number, timeout=30): # pragma: no cover (not part of normal test suite)
"""Retrieve registration information from the portal.nalog.gov.by web site.
This basically returns the JSON response from the web service as a dict.
Will return ``None`` if the number is invalid or unknown.
"""
# this function isn't automatically tested because it would require
# network access for the tests and unnecessarily load the web service
import requests
from pkg_resources import resource_filename
# Since the nalog.gov.by web site currently provides an incomplete
# certificate chain, we provide our own.
certificate = resource_filename(__name__, 'portal.nalog.gov.by.crt')
response = requests.get(
'https://www.portal.nalog.gov.by/grp/getData',
params={
'unp': compact(number),
'charset': 'UTF-8',
'type': 'json'},
timeout=timeout,
verify=certificate)
if response.ok:
return response.json()['ROW']
|
import ase
import ase.calculators.lj
from interlayer_energies_demo import generate_geometry
import scipy.optimize
import numpy as np
def eval_energy(df, sigma, epsilon):
    """Evaluate the Lennard-Jones energy per atom for each geometry in `df`,
    shifted so its minimum coincides with the minimum of the reference
    energies in df['energy'].
    """
    print(sigma, epsilon)
    energy = []
    for it, row in df.iterrows():
        atoms = generate_geometry.create_graphene_geom(row['d'], row['disregistry'])
        calc = ase.calculators.lj.LennardJones(sigma=sigma, epsilon=epsilon)
        atoms.calc = calc
        energy.append(atoms.get_potential_energy() / len(atoms))
    lj_en = np.asarray(energy) - np.min(energy) + np.min(df['energy'])
    return lj_en
def fit_lj(df, sigma0=3.5, epsilon0=3e-2):
ydata = df['energy']
popt, pcov = scipy.optimize.curve_fit(eval_energy, df, ydata, p0=(sigma0, epsilon0))
return popt
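# Note: scipy.optimize.curve_fit calls eval_energy(df, sigma, epsilon), i.e.
# the whole DataFrame is passed as the `xdata` argument with the fitted
# parameters after it, which is why eval_energy takes the frame first.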
if __name__=="__main__":
import load
df = load.load_data()
import matplotlib.pyplot as plt
import seaborn as sns
g = sns.FacetGrid(hue='disregistry', data =df, height=3)
g.map(plt.errorbar,'d', 'energy', 'energy_err', marker='o', mew=1, mec='k')
g.add_legend()
plt.xlabel("Interlayer distance (Angstroms)")
plt.ylabel("Energy (eV/atom)")
plt.savefig("qmc_data.pdf", bbox_inches='tight')
sigma, epsilon = fit_lj(df, sigma0=3.5, epsilon0=3e-2)
df['lj_en'] = eval_energy(df, sigma=sigma, epsilon=epsilon)
g = sns.FacetGrid(hue='disregistry', col='disregistry',data =df)
g.map( plt.plot,'d','lj_en')
g.map( plt.errorbar,'d','energy', 'energy_err', marker='o', mew=1, mec='k', linestyle="")
print(df)
g.add_legend()
plt.show()
|
database = 'medidas.db'
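# Assumed schema (sketch): each table (peso, peito, braco, quadril, perna)
# is expected to yield rows as (data, medida, ...) for "SELECT *" and to
# have an integer `id` column, matching the INSERT/SELECT statements below.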
def selecionar_tabela():
"Escolhe a tabela desejada"
opcoes = {"1": "peso", "2": "peito", "3": "braco", "4": "quadril", "5": "perna", "q": "sair"}
print("Digite o numero da tabela a seguir:")
print("[1] peso")
print("[2] peito")
print("[3] braco")
print("[4] quadril")
print("[5] perna")
print("[q] sair")
print()
opcao_tabela = input(">> ")
return opcoes[opcao_tabela]
def selecionar_funcao():
"Escolhe a funcao desejada"
print("Digite o numero da opcao desejada:")
print("[1] Visualizar Desenvolvimento")
print("[2] Visualizar Tudo")
print("[3] Diferenca entre a Simetria Perfeita")
print("[0] Inserir Dados")
print("[q] Sair")
print()
opcao_funcao = input(">> ")
return opcao_funcao
def inserir_dados(tabela):
import sqlite3
conexao = sqlite3.connect(database)
db = conexao.cursor()
while True:
print("Entre com a Data no formato 'dd/mm/yy'")
print("Caso deseje sair, digite 'q'")
data = input(">> ")
if data == "q":
conexao.close()
break
print("Entre com a Medida em cm, apenas numeros, exemplo '4.9'")
print("Caso deseje sair, digite 'q'")
medida = input(">> ")
if medida == "q":
conexao.close()
break
try:
db.execute("INSERT INTO " + tabela + " (data, medida) VALUES ('"+ data +"', " + repr(medida) + ")")
conexao.commit()
conexao.close()
return True
except sqlite3.Error as e:
print("Um erro ocorreu: ", e.args[0])
def visualizar_desenvolvimento(tabela):
import matplotlib.pyplot as plot
import datetime as DT
import sqlite3
conexao = sqlite3.connect(database)
db = conexao.cursor()
x = []
y = []
for row in db.execute("SELECT * FROM " + repr(tabela)):
x.append(DT.datetime.strptime(row[0], "%d/%m/%Y"))
y.append(row[1])
plot.plot_date(x, y, linestyle='-', marker=None)
ymin, ymax = plot.ylim()
plot.ylim(ymin, ymax + 0.01)
plot.gcf().autofmt_xdate()
plot.title("Desenvolvimento " + tabela + " (" + repr(y[-1]) + " cm/kg)")
plot.xlabel("Data")
plot.ylabel("Medida (cm ou kg)")
plot.show()
return True
def diferenca_simetria():
import matplotlib.pyplot as plot
import sqlite3
conexao = sqlite3.connect(database)
db = conexao.cursor()
print("==================== Simetria Perfeita ====================")
    # Arnold's ideal symmetry (for a height of 180 cm)
#simetria = {'braco': 53.6, 'peito': 138.78, 'quadril': 100.1,
# 'perna': 69.318, 'peso': 101.95}
    # Ideal symmetry according to the reference table (with the coefficient)
simetria = {'braco': 40, 'pescoco': 42.5, 'antebraco': 33.4,
'peito': 110.9, 'cintura': 83.1, 'quadril': 99.9,
'perna': 59.9, 'panturrilha': 40, 'coeficiente': 0.478,
'peso': 86.1}
tabelas = []
for tabela in db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"):
tabelas.append(tabela[0])
for tabela in tabelas:
for row in db.execute("SELECT medida FROM " + tabela + " ORDER BY id DESC LIMIT 1"):
diferenca = simetria[tabela] - row[0]
print("Diferenca de medida " + tabela + ": {:03.2f} cm \t(Atual: {:03.2f} cm, Ideal: {:03.2f} cm)".format(diferenca, row[0], simetria[tabela]))
print("============================================================")
print()
return True
def visualizar_tudo():
import sqlite3
import matplotlib.pyplot as plot
import datetime as DT
conexao = sqlite3.connect(database)
db = conexao.cursor()
x = []
y = []
plots = []
tabelas = []
fig = plot.figure()
fig.subplots_adjust(left=0.2, wspace=1)
plot_number = 230
k = 0
for tabela in db.execute("SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'"):
tabelas.append(tabela[0])
for tabela in tabelas:
for row in db.execute("SELECT * FROM " + tabela):
x.append(DT.datetime.strptime(row[0], "%d/%m/%Y"))
y.append(row[1])
plot_number = plot_number + 1
p = fig.add_subplot(plot_number)
p.plot(x, y)
ymin, ymax = plot.ylim()
plot.ylim(ymin, ymax + 0.01)
plot.gcf().autofmt_xdate()
if (tabela == "peso"):
plot.title(tabela + " (" + repr(y[-1]) + " kg)")
else:
plot.title(tabela + " (" + repr(y[-1]) + " cm)")
plot.xlabel("Data")
plot.ylabel("Medida (cm ou kg)")
x = []
y = []
k = k + 1
plot.show()
# TEST ZONE
#print(selecionar_tabela())
#visualizar_desenvolvimento("peso")
#visualizar_tudo()
#diferenca_simetria()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# code for python2
import rospy
import serial
import time
import signal
import sys
from kondo_b3mservo_rosdriver.msg import Multi_servo_command
from kondo_b3mservo_rosdriver.msg import Multi_servo_info
import Kondo_B3M_functions as Kondo_B3M
id = []
num = 0
initial_process_flag = 1
MINIMUM_STEP_OF_TARGET_TORQUE = 300
battery_voltage_warn_flag = 0
battery_voltage_fatal_flag = 0
BATTERY_VOLTAGE_WARN = 14200
BATTERY_VOLTAGE_FATAL = 13800
voltage = []
ser = serial.Serial('/dev/Kondo_USB-RS485_converter', 1500000)
time.sleep(0.1)
def set_the_num_of_servo():
global num
if rospy.has_param('num_of_servo'):
num = rospy.get_param('num_of_servo')
else:
        rospy.logwarn(
            "you haven't set the ros parameter indicating the number of servos. Please run '$ rosparam set /num_of_servo THE_NUMBER_OF_SERVOS'")
print("")
try:
        if num <= 0:
raise Exception()
except:
rospy.logerr("value error: the number of servos")
sys.exit(1)
return num
def set_servo_id():
global id
if rospy.has_param('~multi_servo_id'):
id = rospy.get_param('~multi_servo_id')
else:
        rospy.logwarn(
            "you haven't set the ros parameter indicating the IDs of servos. Please run '$ rosparam set /multi_servo_id [ID1,ID2,etc]'")
    try:
        # `id` is a list of servo IDs; reject an empty list or negative IDs
        if not id or any(i < 0 for i in id):
            raise Exception()
    except Exception:
        rospy.logerr("value error: servo_id")
        sys.exit(1)
return id
def callback_multi_velocity_control(multi_servo_command):
global num, id, initial_process_flag, voltage
# execute only single time
if initial_process_flag == 1:
num = set_the_num_of_servo()
id = set_servo_id()
for i in range(num):
Kondo_B3M.resetServo(id[i])
Kondo_B3M.enFreeServo(id[i])
Kondo_B3M.reset_encoder_total_count(id[i])
# mode : 00>positionCTRL, 04>velocityCTRL, 08>current(torque)CTRL, 12>feedforwardCTRL
Kondo_B3M.change_servocontrol_mode(id[i], 4)
print("")
rospy.logwarn("you are controlling [ " + str(num) + " ] servos whose IDs is : " + str(id) +
" with VELOCITY CONTROL MODE. If you want to change the number of servos or their IDs, abort this code and try again after execute <$ rosparam set /num_of_servo THE_NUMBER_OF_SERVOS> and <$ rosparam set /multi_servo_id [YOUR_ID#1, YOUR_ID#2 etc]> or change them via launch file")
initial_process_flag = 0
target_velocity = multi_servo_command.target_velocity
target_velocity = list(target_velocity)
for i in range(num):
Kondo_B3M.control_servo_by_Velocity(
id[i], target_velocity[i])
publish_servo_info()
def publish_servo_info():
global id, num, battery_voltage_warn_flag, battery_voltage_fatal_flag, voltage
multi_servo_info = Multi_servo_info()
for i in range(num):
multi_servo_info.encoder_count.append(
Kondo_B3M.get_encoder_total_count(id[i]))
multi_servo_info.input_voltage.append(
Kondo_B3M.get_servo_voltage(id[i]))
voltage = multi_servo_info.input_voltage
if voltage[i] < BATTERY_VOLTAGE_WARN and battery_voltage_warn_flag == 0:
print("")
rospy.logwarn('battery voltage is low !')
battery_voltage_warn_flag = 1
elif voltage[i] < BATTERY_VOLTAGE_FATAL and battery_voltage_fatal_flag == 0:
print("")
rospy.logfatal('battery voltage is fatally low !')
multi_servo_info.motor_velocity.append(
Kondo_B3M.get_servo_Velocity(id[i]))
multi_servo_info_pub.publish(multi_servo_info)
del multi_servo_info
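# On SIGINT (Ctrl-C), de-energize ("free") every servo before exiting so the
# motors do not keep holding torque after the node stops.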
def enfree_servo_after_node_ends(signal, frame):
global id
for i in range(num):
Kondo_B3M.enFreeServo(id[i])
sys.exit(0)
signal.signal(signal.SIGINT, enfree_servo_after_node_ends)
if __name__ == '__main__':
rospy.init_node('multi_torque_control')
multi_servo_info_pub = rospy.Publisher(
'multi_servo_info', Multi_servo_info, queue_size=1)
rospy.Subscriber('multi_servo_command', Multi_servo_command,
callback_multi_velocity_control, queue_size=1)
rospy.spin()
|
from task_allocation.allocation import Allocation
class RandomAllocation(Allocation):
def __init__(self, randomizer, agents, jobs):
super().__init__()
if len(agents) == len(jobs):
matching = randomizer.sample(jobs, k=len(agents))
else:
matching = randomizer.choices(jobs, k=len(agents))
self.allocation = {
agent: job
for agent, job in zip(agents, matching)
}
def get_allocation(self):
return self.allocation
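# Minimal usage sketch (the stdlib `random` module provides the
# sample/choices API the constructor expects from `randomizer`):
#
#   import random
#   alloc = RandomAllocation(random, agents=['a1', 'a2'], jobs=['j1', 'j2'])
#   alloc.get_allocation()  # e.g. {'a1': 'j2', 'a2': 'j1'}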
|
from propara.data.proglobal_dataset_reader import ProGlobalDatasetReader
from allennlp.common.testing import AllenNlpTestCase
class TestDataReader(AllenNlpTestCase):
def test_read_from_file(self):
sc_reader = ProGlobalDatasetReader()
dataset = sc_reader.read('tests/fixtures/proglobal_toy_data.tsv')
instances = dataset
assert len(instances) == 20
# read first instance
fields = instances[0].fields
assert fields["tokens_list"].sequence_length() == fields["positions_list"].sequence_length()
tokens_list_fields = fields["tokens_list"].field_list
field0 = tokens_list_fields[0]
field0_tokens = [t.text for t in field0.tokens[0:10]]
correct_field0_tokens = ["when", "water", "freeze", "it", "become", "10", "%", "bigger", ",", "or"]
assert field0_tokens == correct_field0_tokens
before_loc_start_field = fields["before_loc_start"].sequence_index
before_loc_end_field = fields["before_loc_end"].sequence_index
assert before_loc_start_field == 0
assert before_loc_end_field == 0
after_loc_start_fields = fields["after_loc_start_list"].field_list
after_loc_end_fields = fields["after_loc_end_list"].field_list
after_loc_start_fields0 = after_loc_start_fields[0].sequence_index
after_loc_end_fields0 = after_loc_end_fields[0].sequence_index
assert after_loc_start_fields0 == 0
assert after_loc_end_fields0 == 0
before_category = fields["before_category"].sequence_index
assert before_category == 1
after_category_fields = fields["after_category_list"].field_list
after_category_fields0 = after_category_fields[0].sequence_index
assert after_category_fields0 == 1
before_category_mask = fields["before_category_mask"].sequence_index
assert before_category_mask == 0
after_category_mask_fields = fields["after_category_mask_list"].field_list
after_category_mask_fields0 = after_category_mask_fields[0].sequence_index
assert after_category_mask_fields0 == 0
|
#! /usr/bin/env python3
import sys
#1
def translate_sequence(rna_sequence, genetic_code):
    """Translate an RNA sequence codon by codon from its first base,
    stopping at the first stop codon (or at a trailing partial codon)."""
    rna = rna_sequence.upper()
    protein_string = ""
    # iterate over complete codons only, so 1-2 trailing bases are ignored
    for i in range(0, len(rna) - 2, 3):
        amino_acid = genetic_code[rna[i:i + 3]]
        if amino_acid == "*":
            break
        protein_string += amino_acid
    return protein_string
#2
def get_all_translations(rna_sequence, genetic_code):
    """Return a list with one peptide per 'AUG' start codon found anywhere in
    `rna_sequence` (this covers all three forward reading frames); each
    peptide is translated until the first stop codon or the sequence end."""
    rna = rna_sequence.upper()
    peptides = []
    for start in range(len(rna) - 2):
        if rna[start:start + 3] != 'AUG':
            continue
        peptide = ""
        for i in range(start, len(rna) - 2, 3):
            amino_acid = genetic_code[rna[i:i + 3]]
            if amino_acid == "*":
                break
            peptide += amino_acid
        peptides.append(peptide)
    return peptides
#3 DONE Passed All
def get_reverse(sequence):
    """Return the reverse of `sequence`, upper-cased."""
    return sequence.upper()[::-1]
#4 DONE Passed All
def get_complement(sequence):
    """Return the complement of an RNA sequence, upper-cased; characters
    other than A/U/G/C are dropped, as in the original loop."""
    complements = {"A": "U", "U": "A", "G": "C", "C": "G"}
    return ''.join(complements.get(base, '') for base in sequence.upper())
#5 DONE Passed All
def reverse_and_complement(sequence):
    """Return the reverse complement of an RNA sequence."""
    return get_complement(get_reverse(sequence))
#6
def get_longest_peptide(rna_sequence, genetic_code):
"""Get the longest peptide encoded by an RNA sequence.
Explore six reading frames of `rna_sequence` (the three reading frames of
`rna_sequence`, and the three reading frames of the reverse and complement
of `rna_sequence`) and return (as a string) the longest sequence of amino
acids that it encodes, according to the `genetic_code`.
If no amino acids can be translated from `rna_sequence` nor its reverse and
complement, an empty string is returned.
Parameters
----------
rna_sequence : str
A string representing an RNA sequence (upper or lower-case).
genetic_code : dict
A dictionary mapping all 64 codons (strings of three RNA bases) to
amino acids (string of single-letter amino acid abbreviation). Stop
codons should be represented with asterisks ('*').
Returns
-------
str
A string of the longest sequence of amino acids encoded by
`rna_sequence`.
"""
    # Gather peptides from the three forward frames and from the three frames
    # of the reverse complement, then keep the longest one.
    peptides = get_all_translations(rna_sequence, genetic_code)
    rev_comp = reverse_and_complement(rna_sequence)
    peptides += get_all_translations(rev_comp, genetic_code)
    if not peptides:
        return ""
    return max(peptides, key=len)
if __name__ == '__main__':
genetic_code = {'GUC': 'V', 'ACC': 'T', 'GUA': 'V', 'GUG': 'V', 'ACU': 'T', 'AAC': 'N', 'CCU': 'P', 'UGG': 'W', 'AGC': 'S', 'AUC': 'I', 'CAU': 'H', 'AAU': 'N', 'AGU': 'S', 'GUU': 'V', 'CAC': 'H', 'ACG': 'T', 'CCG': 'P', 'CCA': 'P', 'ACA': 'T', 'CCC': 'P', 'UGU': 'C', 'GGU': 'G', 'UCU': 'S', 'GCG': 'A', 'UGC': 'C', 'CAG': 'Q', 'GAU': 'D', 'UAU': 'Y', 'CGG': 'R', 'UCG': 'S', 'AGG': 'R', 'GGG': 'G', 'UCC': 'S', 'UCA': 'S', 'UAA': '*', 'GGA': 'G', 'UAC': 'Y', 'GAC': 'D', 'UAG': '*', 'AUA': 'I', 'GCA': 'A', 'CUU': 'L', 'GGC': 'G', 'AUG': 'M', 'CUG': 'L', 'GAG': 'E', 'CUC': 'L', 'AGA': 'R', 'CUA': 'L', 'GCC': 'A', 'AAA': 'K', 'AAG': 'K', 'CAA': 'Q', 'UUU': 'F', 'CGU': 'R', 'CGC': 'R', 'CGA': 'R', 'GCU': 'A', 'GAA': 'E', 'AUU': 'I', 'UUG': 'L', 'UUA': 'L', 'UGA': '*', 'UUC': 'F'}
rna_seq = ("AUG"
"UAC"
"UGG"
"CAC"
"GCU"
"ACU"
"GCU"
"CCA"
"UAU"
"ACU"
"CAC"
"CAG"
"AAU"
"AUC"
"AGU"
"ACA"
"GCG")
longest_peptide = get_longest_peptide(rna_sequence = rna_seq,
genetic_code = genetic_code)
assert isinstance(longest_peptide, str), "Oops: the longest peptide is {0}, not a string".format(longest_peptide)
message = "The longest peptide encoded by\n\t'{0}'\nis\n\t'{1}'\n".format(
rna_seq,
longest_peptide)
sys.stdout.write(message)
if longest_peptide == "MYWHATAPYTHQNISTA":
sys.stdout.write("Indeed.\n")
|
'''
test dssNet generation
'''
import pipe
import sys
import time
import logging
Gen_ID = sys.argv[1]
logging.basicConfig(filename='%s.log'%Gen_ID,level=logging.DEBUG)
pipeout=pipe.setup_pipe_l(Gen_ID)
pipin = pipe.setup_pipe_w()
# send to control center
def send_netCoord(val):
update = 'update n p controllable_generator post_controllable_generator %s %s 1 %s\n' %(time.time(),Gen_ID, val)
pipe.send_sync_event(update.encode('UTF-8'), pipin)
while 1:
time.sleep(1)
send_netCoord(1.5)
|
from lxml.etree import HTML as LXHTML
from lxml.etree import XML as LXML
from xdict.jprint import pdir,pobj
from nvhtml import txt
from nvhtml import lvsrch
from nvhtml import fs
from nvhtml import engine
from nvhtml import utils
from nvhtml.consts import *
import lxml.sax
import argparse
from efdir import fs
import elist.elist as elel
import edict.edict as eded
import estring.estring as eses
import spaint.spaint as spaint
#import qtable.qtable as qtb
#from pandas.io import sql
#import sqlite3
import math
import copy
import re
def get_ansi_colors_vl():
ANSI_COLORS_VL = elel.init_range(1,231,1)
ANSI_COLORS_VL.remove(15)
ANSI_COLORS_VL.remove(16)
# vl_odds = elel.select_odds(ANSI_COLORS_VL)
# vl_evens = elel.select_evens(ANSI_COLORS_VL)
# vl_evens.reverse()
# ANSI_COLORS_VL = vl_odds + vl_evens
return(ANSI_COLORS_VL)
ANSI_COLORS_VL = get_ansi_colors_vl()
TAGS,_ = eded.d2kvlist(TAG_DESCS)
TAGS.append('<comment>')
TAGS.append('svg')
TAG_COLOR_MD = eded.kvlist2d(TAGS,ANSI_COLORS_VL)
def modi_children_s0(ele):
ele['children'] = []
return(ele)
def del_unecessary(ele):
ele = eded.sub_algo(ele,['breadth','depth','pbreadth','samepl_sibseq','samepl_breadth','tag','sibseq','width','children'])
return(ele)
def scan_s0(mat):
mat = elel.mat_mapv(mat,modi_children_s0)
mat = elel.mat_mapv(mat,del_unecessary)
mat = engine.fill_children_attr(mat)
return(mat)
# widths are not adjusted yet at this stage
# display_mat
def parr(arr,*args):
args = list(args)
if(args.__len__()==0):
pass
else:
try:
arr = elel.mat_mapv(arr,lambda ele:eded.sub_algo(ele,args))
except:
arr = elel.mapv(arr,lambda ele:eded.sub_algo(ele,args))
elel.for_each(arr,print)
def creat_display_mat(mat):
display_mat = [[{"loc":(0,0),"empty":False}]]
depth = len(mat)
for i in range(0,depth-1):
layer = display_mat[i]
breadth = len(layer)
next_display_layer = []
for j in range(breadth):
ele = layer[j]
loc = ele["loc"]
pointed_ele = mat[loc[0]][loc[1]]
children_locs = pointed_ele['children']
if(len(children_locs) == 0): # if the pointed mat-ele have no chilren
next_display_layer.append({"loc":ele['loc'],"empty":True})
else:
disp = elel.mapv(children_locs,lambda loc:{"loc":loc,"empty":False,})
next_display_layer.extend(disp)
display_mat.append(next_display_layer)
return(display_mat)
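# Sketch of the expansion (hypothetical 2-level mat whose root has children
# at (1,0) and (1,1)):
#   layer 0: [{"loc": (0, 0), "empty": False}]
#   layer 1: [{"loc": (1, 0), "empty": False}, {"loc": (1, 1), "empty": False}]
# A cell whose pointed element has no children is carried down as an "empty"
# placeholder, so every display layer spans the full breadth of the tree.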
####################################################################################################
####################################################################################################
def modi_width_s0(ele):
'''
    Initialize the width: the tag length plus one space on each side.
'''
width = len(ele['tag'])
width = width + 2
ele['width'] = width
return(ele)
####################################################################################################
####################################################################################################
def sum_children_width(children):
lngth = children.__len__()
width = 0
for i in range(lngth):
child = children[i]
width = width + child['width'] + 1
width = width - 1
return(width)
def get_children_eles(ele,mat):
children_locs = ele['children']
children_eles = elel.mapv(children_locs,lambda child_loc:mat[child_loc[0]][child_loc[1]])
return(children_eles)
# bottom-up pass: ensure every parent is at least as wide as the sum of its children
def modi_width_s1(ele,mat):
'''
    bottom-up pass: ensure every parent is at least as wide as the sum of its children
'''
children_eles = get_children_eles(ele,mat)
children_eles = elel.sortDictList(children_eles,cond_keys=['width'])
width = ele['width']
lngth = children_eles.__len__()
if(lngth == 0):
pass
else:
children_width_sum = sum_children_width(children_eles)
if(width < children_width_sum):
ele['width'] = children_width_sum
else:
pass
return(ele)
def scan_s1(mat):
depth = len(mat)
for i in range(depth-1,-1,-1):
layer = mat[i]
breadth = len(layer)
for j in range(breadth):
ele = layer[j]
ele = modi_width_s1(ele,mat)
mat[i][j] = ele
return(mat)
####################################################################################################
####################################################################################################
# top-down pass: make the children's widths sum up to the parent's width
def modi_width_s2(ele,mat):
children_eles = get_children_eles(ele,mat)
children_eles = elel.sortDictList(children_eles,cond_keys=['width'])
width = ele['width']
lngth = children_eles.__len__()
if(lngth == 0):
pass
else:
children_width_sum = sum_children_width(children_eles)
if(width > children_width_sum):
lefted = ele['width']
q = ele['width'] // lngth
c = 0
for i in range(lngth):
child_width = children_eles[i]['width']
lefted = lefted - child_width
if(child_width>=q):
pass
else:
c = c + 1
c1 = 0
r1 = lefted % c
q1 = lefted // c
for i in range(lngth):
child_width = children_eles[i]['width']
if(child_width>q):
pass
else:
children_eles[i]['width'] = children_eles[i]['width'] + q1
if(c1<r1):
children_eles[i]['width'] = children_eles[i]['width'] + 1
else:
pass
c1 = c1 + 1
else:
pass
return(ele)
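# Redistribution detail for modi_width_s2 (sketch): when a parent is wider
# than its children combined, the surplus `lefted` is split among the
# children not already wider than the per-child quota q: each gets
# q1 = lefted // c extra columns, and the first r1 = lefted % c of them one more.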
def scan_s2(mat):
depth = len(mat)
for i in range(0,depth-1):
layer = mat[i]
breadth = len(layer)
for j in range(breadth):
ele = layer[j]
ele = modi_width_s2(ele,mat)
mat[i][j] = ele
return(mat)
###############################################
def modi_display_str_s0(ele):
tag_lngth = len(ele['tag'])
width = ele['width']
lefted = width - tag_lngth
pre = math.floor(lefted /2)
post = math.ceil(lefted /2)
s = " "*pre + ele['tag'] + " "*post
ele['display_str'] = s
try:
ele['display_color'] = TAG_COLOR_MD[ele['tag']]
except:
ele['display_color'] = 15
else:
pass
return(ele)
def scan_s3(mat):
depth = len(mat)
for i in range(0,depth):
layer = mat[i]
breadth = len(layer)
for j in range(breadth):
ele = layer[j]
ele = modi_display_str_s0(ele)
mat[i][j] = ele
return(mat)
#
def scan_disp_mat_s0(disp_mat,mat,color_enable=False):
depth = len(disp_mat)
for i in range(0,depth):
layer = disp_mat[i]
breadth = len(layer)
for j in range(breadth):
ele = disp_mat[i][j]
loc = ele['loc']
s = mat[loc[0]][loc[1]]["display_str"]
color = mat[loc[0]][loc[1]]["display_color"]
if(ele['empty']):
s = " "*len(s)
ele = s
else:
ele = s
if(color_enable):
disp_mat[i][j]= spaint.slpaint(ele,color,rtrn=True)
else:
disp_mat[i][j]= ele
return(disp_mat)
#######################3
####################################################
def get_orig_length(line):
line = re.sub("\x1b\[.*?0m","",line)
return(len(line))
def show_tag_tb(disp_mat):
s = ""
layer = disp_mat[0]
line = "|" + elel.join(layer,"|") + "|"
orig_length = get_orig_length(line)
boundary = "-" * orig_length
print(boundary)
print(line)
print(boundary)
s = s + boundary +"\n" + line +"\n" + boundary +"\n"
depth = len(disp_mat)
for i in range(1,depth):
layer = disp_mat[i]
breadth = len(layer)
line = "|" + elel.join(layer,"|") + "|"
print(line)
print(boundary)
s = s + line +"\n"+ boundary + "\n"
return(s)
|
from django import forms
INTERVAL_CHOICES = [(k, k) for k in ('minutes', 'hours', 'days', 'weeks',
'months', 'years')]
class StatsFilterForm(forms.Form):
"""Form for filtering the statistics shown in the admin interface ."""
start = forms.DateTimeField()
end = forms.DateTimeField()
interval = forms.ChoiceField(choices=INTERVAL_CHOICES)
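# Minimal usage sketch (relies on Django's default DATETIME_INPUT_FORMATS):
#   form = StatsFilterForm({'start': '2021-01-01 00:00',
#                           'end': '2021-02-01 00:00',
#                           'interval': 'days'})
#   form.is_valid()  # True; parsed values end up in form.cleaned_data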
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import jax.numpy as np
def _xyz_vec2str(x):
"""Convert a vector to string.
"""
return "\t".join([str(i) for i in x])
def write_xyz(filename, *args):
"""Write arrays to xyz file format.
Args:
filename: Output filename.
args: Arguments can be 1D or 2D arrays where length (along axis=0) is equal to number of atoms.
Examples:
write_xyz('minimize.xyz', R)
write_xyz('minimize.xyz', species, R)
write_xyz('minimize.xyz', species, R, velocities, forces)
"""
    # promote 1D arrays to column vectors, then stack everything column-wise
    arrays = [arg[:, np.newaxis] if len(arg.shape) == 1 else arg for arg in args]
    data = np.hstack(arrays)
    with open(filename, "w+") as f:
        N = len(data)
        str_ = f"{N}" + "\n\n"
        f.write(str_)
        for j in range(N):
            str_ = f"{j+1}\t" + _xyz_vec2str(data[j, :]) + "\n"
            f.write(str_)
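# Example output (sketch) for two atoms passed as a (2, 3) coordinate array;
# columns are tab-separated:
#   2
#
#   1   0.0   0.0   0.0
#   2   1.0   0.0   0.0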
|
"""
ddupdate plugin updating data on changeip.com.
See: ddupdate(8)
See:
http://www.changeip.com/accounts/knowledgebase.php?action=displayarticle&id=34
"""
from ddupdate.ddplugin import ServicePlugin, ServiceError
from ddupdate.ddplugin import http_basic_auth_setup, get_response
class ChangeAddressPlugin(ServicePlugin):
"""
Update a dns entry on changeip.com.
Supports using most address plugins including default-web-ip, default-if
and ip-disabled. ipv6 addresses are not supported.
    Free accounts have limitations on both the number of hosts and the
    expiry of unused hosts. See the website for more.
netrc: Use a line like
machine nic.ChangeIP.com login <username> password <password>
Options:
none
"""
_name = 'changeip.com'
_oneliner = 'Updates on http://changeip.com/'
_url = "https://nic.ChangeIP.com/nic/update?&hostname={0}"
def register(self, log, hostname, ip, options):
"""Implement ServicePlugin.register."""
url = self._url.format(hostname)
if ip:
url += "&ip=" + ip.v4
http_basic_auth_setup(url)
html = get_response(log, url)
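        # 'uccessful' matches both 'Successful' and 'successful' in the reply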
if 'uccessful' not in html:
raise ServiceError("Bad update reply: " + html)
|