content stringlengths 5 1.05M |
|---|
# -*- coding: utf-8 -*-
""" An example *.py file.
Some examples of coding in a *.py file as opposed to a Jupyter notebook file.
These examples are meant to be viewed in the Spyder IDE.
The concept of cells as used below is specific to Spyder.
"""
#%% This is a cell similar to a Jupyter notebook cell.
# You can run this cell in the Spyder IDE in the same way (Shift-Enter) when it is selected.
# The end of the cell is determined by the start of the next cell.
favcolor = "green"  # simple module-level variables to inspect in the Variable Explorer
favshape = "triangle"
""" After running this cell check out the variable explorer tab.
"""
#%%
print('hi') # The output should show up in the console tab.
#%% A simple function.
def sayhi(name):
    """Print a greeting addressed to *name*."""
    greeting = f"Hi, {name}"
    print(greeting)
#%% Let's use the function above.
sayhi("Sam")
#%% Create an array and check it out in the Variables tab.
import numpy as np # typically all imports at top
x = np.random.random((5,5))  # 5x5 array of uniform [0, 1) samples
""" Double-click it in the Variables tab to get a nice view of it.
"""
#%% Plot the middle row of x.
import matplotlib.pyplot as plt
plt.plot(np.arange(5), x[2], '-o')  # row index 2 is the middle of the 5 rows
plt.xlabel("Column #")
plt.ylabel("x")
|
#-*- coding:utf-8 -*-
import threading
import bisect
import random
import logging
import sched
import time
try:
import queue
except ImportError:
import Queue as queue
from .base import BaseObject,EMPTY_LIST,DAY_FINALIZE_TICK
from .utils import now2next,TList
from .macro_command_queue import mcq_stub
class SyncLastMap(object):
    '''
    Thread-safe record of the last seen tick (time/msec/volume) per contract.

    Several receiver threads call check() concurrently while reset() swaps the
    whole map at day rollover, so all access goes through self._lock.
    '''
    def __init__(self):
        self._lock = threading.Lock()
        self._last_map = {}

    def reset(self, contract_names):
        """Rebuild the map with a fresh empty slot for each contract."""
        with self._lock:
            self._last_map = {cid: BaseObject(time='', msec='', volume=0)
                              for cid in contract_names}

    def check(self, cid, dtime, dmsec, dvolume):
        """Return True iff (dtime, dmsec, dvolume) is a new tick for cid, and
        record it; return False for a duplicate/out-of-order tick.

        Raises AssertionError when cid is not a subscribed contract.
        """
        with self._lock:
            # Bug fix: the membership assert used to run *outside* the lock,
            # racing with reset() replacing self._last_map from another thread.
            assert cid in self._last_map, 'contract %s 不在 订阅的合约列表中,dtime=%s,订阅合约=%s' % (cid, dtime, self._last_map.keys())
            dlast = self._last_map[cid]
            if dvolume <= dlast.volume and (dtime < dlast.time or (dtime == dlast.time and dmsec <= dlast.msec)):
                # Time alone is not enough: the night session belongs to the
                # next trading day, so a "later" night time can come before a
                # daytime time. Volume alone is not enough either: we want to
                # keep ticks where volume is unchanged but bid/ask moved.
                return False
            dlast.volume = dvolume
            dlast.time = dtime
            dlast.msec = dmsec
            return True
class Controller(object):
    '''
    Everything is handled inside the calling thread.

    Manages a number of Agents and handles the daily rollover.
    In principle one Environ owns exactly one Controller, so appends to a
    contract's ticks never race or duplicate (each contract object exists only
    once); alternatively several controllers can coexist without competing if
    the contract set is partitioned vertically between them.
    This class is the in-thread version, usable for backtesting.
    The subclass TController is a dedicated-thread, queue-driven version used
    for live trading; the two share most of their code.
    '''
    def __init__(self, mc_queue=mcq_stub):
        '''
        mc_queue: macro-command queue used to drive scheduled actions.
        last_map lives here because one controller can serve several listeners
        and the last_map must be shared between all of them.
        The rest of the new-tick decision is NOT made here because, once a
        tick is accepted as new, converting it from the CTP format to the
        standard format is platform specific and does not belong at this layer.
        Call order:
            ctl = Controller()
            ...
            ctl.register_agent(..)
            ...
            ctl.add_listener(..)
            ...
            ctl.reset()
            ...
        '''
        self._cur = BaseObject(date=0, time=0, msec=0)  # current tick instant, used by _check_next
        self._lock = threading.Lock()
        self._contract2agents = {}  # contract name -> [agents subscribed to it]
        self._contracts = {}        # contract name -> contract object
        self._last_map = SyncLastMap()
        self._agents = set()
        self._listeners = set()
        self._mc_queue = mc_queue
        self._lock = threading.Lock()  # NOTE(review): duplicate assignment of _lock; harmless but redundant
        #self.time_trigger = TimeTrigger(save_point,self.day_finalize,save_interval)

    @property
    def agents(self):
        return self._agents

    @property
    def contracts(self):
        return self._contracts.values()

    def add_listener(self, listener):
        ''' Very important: must be set immediately after initialization.
        The listener's subscribed contracts must be refreshed at day rollover.
        '''
        self._listeners.add(listener)

    def register_agent(self, agent):
        '''
        Only used to add an Agent dynamically.
        '''
        self._agents.add(agent)
        self._register_contract(agent)

    def reset(self):
        # Rebuild the contract registrations, push the contract set to the
        # listeners, and clear the per-contract dedup state.
        self._reset_contracts()
        self._update_listened_contracts()
        self._last_map.reset(self._contract2agents.keys())

    def check_last(self, cid, dtime, dmesc, dvolume):
        '''
        Called by listeners to decide whether a raw tick is new.
        '''
        return self._last_map.check(cid, dtime, dmesc, dvolume)

    def new_tick(self, ctick):
        '''
        Synchronizes by itself.
        The threaded/queued variant does not need this: the queue itself is
        the synchronization device.
        '''
        #print("in controller new_tick")
        with self._lock:
            self._new_tick(ctick)

    def _new_tick(self, ctick):
        # Append the tick to its contract's tick list.
        # For this class the call happens in-thread and is therefore ordered;
        # there is some race risk (several receiver threads), but SyncLastMap
        # resolves the vast majority of it.
        # For TController the call arrives via the queue in a single thread,
        # so it is already synchronized and needs no extra locking.
        self._contracts[ctick.instrument].ticks.append(ctick)  # must append here; for TController this is the already-synchronized point
        #print(ctick.instrument,ctick.time)
        # Drive the macro-command queue when the stream advances in time.
        if self._check_next(ctick):
            #print("before trigger mc_queue")
            self._mc_queue.trigger(ctick.time)
            #print("after trigger mc_queue")
        # Drive data processing in the agents subscribed to this contract.
        #print('TC:',time.time()-ctick.create_time)
        for agent in self._contract2agents.get(ctick.instrument, EMPTY_LIST):
            try:
                #print("new_tick arrived:",ctick.instrument,ctick.time,agent.name)
                agent.new_tick(ctick)
                #print("new_tick handled:",ctick.instrument,ctick.time,agent.name)
            except Exception as einst:
                # One failing agent must not stop delivery to the others.
                logging.error('agent new_tick handler exception:%s' % (str(einst),))
                logging.exception('agent new_tick handler exception')  # log the traceback
        #print('in controller,%d' % (ctick.time,))

    def day_finalize(self):
        ''' Must be called from outside to perform the daily settlement; it is
        not self-triggered, in order to keep simulation consistent.
        '''
        self._day_finalize()

    def _day_finalize(self):
        '''
        Call only after synchronization.
        '''
        logging.info('day_finalizing.....')
        for contract in self._contracts.values():
            contract.ticks.append(DAY_FINALIZE_TICK)
            #print("dayfinalize1:",contract.name,len(contract.ticks))
        self._cur.date = self._cur.time = self._cur.msec = -1
        for agent in self._agents:
            agent.day_finalize()
        # Stop tracking agents that are no longer active.
        # NOTE(review): this rebinds _agents from a set to a list.
        self._agents = [agent for agent in self._agents if agent.is_active]
        for contract in self._contracts.values():
            #print("dayfinalize2:",contract.name,len(contract.ticks))
            contract.ticks.remove_all()
            #print("dayfinalize3:",contract.name,len(contract.ticks))
        self.reset()
        logging.info('day_finalized')

    def _check_next2(self, ctick):
        '''
        @deprecated, the lock is not needed
        Decide whether the stream has advanced to a new tick instant.
        Because there are several contracts, data for the same instant may
        arrive more than once (once per contract).
        '''
        with self._lock:
            if ctick.time > self._cur.time or (ctick.time == self._cur.time and ctick.msec > self._cur.msec):
                self._cur.date = ctick.date
                self._cur.time = ctick.time
                self._cur.msec = ctick.msec
                return True
            return False

    def _check_next(self, ctick):
        '''
        Decide whether the stream has advanced to a new tick instant.
        Because there are several contracts, data for the same instant may
        arrive more than once (once per contract).
        '''
        if ctick.time > self._cur.time or (ctick.time == self._cur.time and ctick.msec > self._cur.msec):
            self._cur.date = ctick.date
            self._cur.time = ctick.time
            self._cur.msec = ctick.msec
            return True
        return False

    def _update_listened_contracts(self):
        # Push the (possibly changed) set of contract names to every listener.
        contracts = self._contract2agents.keys()
        #print(contracts)
        logging.info('listened contracts:%s', str(contracts))
        for listener in self._listeners:
            listener.update_instruments(contracts)

    def _reset_contracts(self):
        # Rebuild the contract -> agents mapping from the current agent set.
        self._contract2agents.clear()
        #self._contracts.clear()
        for agent in self._agents:
            self._register_contract(agent)

    def _register_contract(self, agent):
        c2as = self._contract2agents
        #print(agent.contracts)
        for contract in agent.contracts:
            #print("contract name:",contract.name,agent.name)
            c2as.setdefault(contract.name, []).append(agent)
            # NOTE(review): membership is tested with the contract *object*,
            # but the dict is keyed by contract.name, so this test is always
            # True; probably `contract.name not in self._contracts` was meant.
            # Left unchanged here (re-assignment of the same contract is
            # harmless in practice).
            if contract not in self._contracts:
                self._contracts[contract.name] = contract
        #print(c2as)
class TController(Controller, threading.Thread):
    """
    Dedicated-thread version: ticks pass through a queue and are processed
    sequentially by this thread, so internal tick handling is thread safe.
    """
    def __init__(self, mc_queue=mcq_stub):
        Controller.__init__(self, mc_queue)
        threading.Thread.__init__(self)
        # Unbounded FIFO: producers enqueue from receiver threads,
        # run() consumes in this thread.
        self._queue = queue.Queue()
        #self.stwaste = 0
        #self.nt = 0

    def new_tick(self, ctick):
        # Producer side: just enqueue; no locking needed (queue is the
        # synchronization device).
        self._queue.put(ctick)

    def day_finalize(self):
        # NOTE(review): this calls the base implementation directly in the
        # *caller's* thread, while run() also performs _day_finalize() when a
        # tick with price <= 0 arrives via the queue — confirm which path is
        # intended for live use.
        super(TController, self).day_finalize()

    def _new_tick(self, ctick):
        #twaste = time.time() - ctick.create_time
        #self.stwaste += twaste
        #self.nt += 1
        #print(twaste,self.stwaste/self.nt,self.queue.qsize())
        #print(ctick.instrument,ctick.time)
        Controller._new_tick(self, ctick)

    def run(self):
        # Consumer loop: a tick with price > 0 is market data; anything else
        # is treated as the day-finalize sentinel.
        while 1:
            ctick = self._queue.get()
            if ctick.price > 0:
                self._new_tick(ctick)
            else:
                self._day_finalize()
"""
class NullCommandManager(object):
def next_round(self):
pass
class ResumableController(Controller):
'''
用于交易agent,需要断点恢复
TODO
'''
def __init__(self,actor=NullCommandManager()):
Controller.__init__(self,actor)
"""
class Scheduler(object):
    '''
    Timer driver, used only by SaveAgent.
    Fires `target` once at the daily time point `tpoint`, then repeatedly
    every `tround` seconds. Implemented with the stdlib sched module.
    Superseded by MacroCommandQueue.
    '''
    def __init__(self, tpoint, target, tround=24 * 60 * 60):
        '''
        tpoint is HHMMSS with the hour on a 24-hour clock.
        tround is the interval between subsequent firings after the first.
        '''
        self._tpoint = tpoint
        self._target = target
        self._tround = tround
        self._scheduler = sched.scheduler(time.time, time.sleep)
        worker = threading.Thread(target=self._start)
        worker.daemon = True  # do not keep the process alive for the timer
        worker.start()
        self._thread = worker

    def _start(self):
        # Runs in the daemon thread: compute the delay until the first firing
        # and hand control to the sched loop.
        print('scheduler starting...')
        hh, rest = divmod(self._tpoint, 10000)
        mm, ss = divmod(rest, 100)
        seconds = now2next(hh, mm, ss)
        logging.info('start time trigger %s' % (seconds,))
        print(seconds)
        self._scheduler.enter(seconds, 1, self._func, ())
        self._scheduler.run()

    def _func(self):
        # Re-arm the timer first so a slow/failing target does not stop the
        # periodic firing schedule from drifting forward.
        logging.info('in time trigger....')
        self._scheduler.enter(self._tround, 1, self._func, ())
        print('next trigger:', self._tround)
        self._target()
        logging.info('time trigger end,next trig:%s' % (self._tround,))
#CS = BaseObject(REDAY=100,EXECUTING=101,SUCCESS=200,CS_CANCELD=300,FAILED=400)
|
"""
Color Directives
There are many different ways to specify color; we support all of the color formats below and will convert between the different color formats.
"""
from math import atan2, cos, exp, pi, radians, sin, sqrt
from mathics.builtin.colors.color_internals import convert_color
from mathics.builtin.base import (
Builtin,
BoxConstructError,
)
from mathics.builtin.drawing.graphics_internals import _GraphicsDirective, get_class
from mathics.core.element import ImmutableValueMixin
from mathics.core.expression import Expression, to_expression
from mathics.core.atoms import (
Integer,
Real,
MachineReal,
String,
from_python,
)
from mathics.core.symbols import Symbol, SymbolList
from mathics.core.systemsymbols import SymbolApply
from mathics.core.number import machine_epsilon
SymbolOpacity = Symbol("Opacity")
def _cie2000_distance(lab1, lab2):
    """Return the CIEDE2000 distance between two Lab colors.

    reference: https://en.wikipedia.org/wiki/Color_difference#CIEDE2000
    """
    eps = machine_epsilon
    # Weighting factors; all 1 for the common case.
    kL = kC = kH = 1
    L1, L2 = lab1[0], lab2[0]
    a1, a2 = lab1[1], lab2[1]
    b1, b2 = lab1[2], lab2[2]

    dL = L2 - L1
    Lm = (L1 + L2) / 2
    C1 = sqrt(a1 ** 2 + b1 ** 2)
    C2 = sqrt(a2 ** 2 + b2 ** 2)
    Cm = (C1 + C2) / 2

    # Scale the a components to compensate for neutral-axis nonuniformity,
    # then recompute the chroma terms.
    g = (1 - sqrt(Cm ** 7 / (Cm ** 7 + 25 ** 7))) / 2
    a1 = a1 * (1 + g)
    a2 = a2 * (1 + g)
    C1 = sqrt(a1 ** 2 + b1 ** 2)
    C2 = sqrt(a2 ** 2 + b2 ** 2)
    Cm = (C1 + C2) / 2
    dC = C2 - C1

    # Hue angles in degrees, wrapped into [0, 360); eps avoids atan2(0, 0).
    h1 = (180 * atan2(b1, a1 + eps)) / pi % 360
    h2 = (180 * atan2(b2, a2 + eps)) / pi % 360
    dh = h2 - h1
    if abs(dh) > 180:
        # Wrap the hue difference across the 0/360 boundary.
        dh = dh + 360 if h2 <= h1 else dh - 360
    dH = 2 * sqrt(C1 * C2) * sin(radians(dh) / 2)
    Hm = (h1 + h2) / 2 if abs(h2 - h1) <= 180 else (h1 + h2 + 360) / 2
    T = (
        1
        - 0.17 * cos(radians(Hm - 30))
        + 0.24 * cos(radians(2 * Hm))
        + 0.32 * cos(radians(3 * Hm + 6))
        - 0.2 * cos(radians(4 * Hm - 63))
    )
    SL = 1 + (0.015 * (Lm - 50) ** 2) / sqrt(20 + (Lm - 50) ** 2)
    SC = 1 + 0.045 * Cm
    SH = 1 + 0.015 * Cm * T
    rT = (
        -2
        * sqrt(Cm ** 7 / (Cm ** 7 + 25 ** 7))
        * sin(radians(60 * exp(-((Hm - 275) ** 2 / 25 ** 2))))
    )
    return sqrt(
        (dL / (SL * kL)) ** 2
        + (dC / (SC * kC)) ** 2
        + (dH / (SH * kH)) ** 2
        + rT * (dC / (SC * kC)) * (dH / (SH * kH))
    )
def _CMC_distance(lab1, lab2, l, c):
    """Return the CMC l:c (1984) distance between two Lab colors.

    l and c are the lightness and chroma weighting factors.
    reference https://en.wikipedia.org/wiki/Color_difference#CMC_l:c_.281984.29
    """
    L1, L2 = lab1[0], lab2[0]
    a1, a2 = lab1[1], lab2[1]
    b1, b2 = lab1[2], lab2[2]
    dL, da, db = L2 - L1, a2 - a1, b2 - b1
    eps = machine_epsilon

    C1 = sqrt(a1 ** 2 + b1 ** 2)
    C2 = sqrt(a2 ** 2 + b2 ** 2)
    # Hue angle of the reference color, in degrees; eps avoids atan2(0, 0).
    h1 = (180 * atan2(b1, a1 + eps)) / pi % 360
    dC = C2 - C1
    dH2 = da ** 2 + db ** 2 - dC ** 2
    F = C1 ** 2 / sqrt(C1 ** 4 + 1900)
    if 164 <= h1 and h1 <= 345:
        T = 0.56 + abs(0.2 * cos(radians(h1 + 168)))
    else:
        T = 0.36 + abs(0.4 * cos(radians(h1 + 35)))
    if L1 < 16:
        SL = 0.511
    else:
        SL = (0.040975 * L1) / (1 + 0.01765 * L1)
    SC = (0.0638 * C1) / (1 + 0.0131 * C1) + 0.638
    SH = SC * (F * T + 1 - F)
    return sqrt((dL / (l * SL)) ** 2 + (dC / (c * SC)) ** 2 + dH2 / SH ** 2)
def _component_distance(a, b, i):
return abs(a[i] - b[i])
def _euclidean_distance(a, b):
return sqrt(sum((x1 - x2) * (x1 - x2) for x1, x2 in zip(a, b)))
class Opacity(_GraphicsDirective):
    """
    <dl>
    <dt>'Opacity[$level$]'
    <dd> is a graphics directive that sets the opacity to $level$.
    </dl>
    >> Graphics[{Blue, Disk[{.5, 1}, 1], Opacity[.4], Red, Disk[], Opacity[.2], Green, Disk[{-.5, 1}, 1]}]
    = -Graphics-
    >> Graphics3D[{Blue, Sphere[], Opacity[.4], Red, Cuboid[]}]
    = -Graphics3D-
    Notice that 'Opacity' does not overwrite the value of the alpha channel if it is set in a color directive:
    >> Graphics[{Blue, Disk[], RGBColor[1,0,0,1],Opacity[.2], Rectangle[{0,0},{1,1}]}]
    = -Graphics-
    """

    def init(self, item=None, *args, **kwargs):
        # Accept a bare Python number as shorthand for Opacity[number].
        if isinstance(item, (int, float)):
            item = Expression(SymbolOpacity, MachineReal(item))
        super(Opacity, self).init(None, item)
        # Consistency fix: the rest of this module uses `.elements`; the
        # deprecated `.leaves` accessor was used here.
        self.opacity = item.elements[0].to_python()

    def to_css(self):
        """Return the opacity as a CSS value, or None when it is not a number
        in [0, 1]."""
        try:
            if 0.0 <= self.opacity <= 1.0:
                return self.opacity
        except TypeError:
            # Narrowed from a bare `except:`: a non-numeric opacity makes the
            # comparison raise TypeError; anything else should propagate.
            pass
        return None

    @staticmethod
    def create_as_style(klass, graphics, item):
        return klass(item)
class _ColorObject(_GraphicsDirective, ImmutableValueMixin):
    """Common base for all color directives (RGBColor, Hue, LABColor, ...).

    Subclasses set `color_space`, `components_sizes` and
    `default_components`.
    """
    formats = {
        # we are adding ImageSizeMultipliers in the rule below, because we do _not_ want color boxes to
        # diminish in size when they appear in lists or rows. we only want the display of colors this
        # way in the notebook, so we restrict the rule to StandardForm.
        (
            ("StandardForm",),
            "%(name)s[x__?(NumericQ[#] && 0 <= # <= 1&)]",
        ): "Style[Graphics[{EdgeForm[Black], %(name)s[x], Rectangle[]}, ImageSize -> 16], "
        + "ImageSizeMultipliers -> {1, 1}]"
    }
    # Allow the list form %Color[{...}] by applying the head to the list.
    rules = {"%(name)s[x_List]": "Apply[%(name)s, x]"}
    # Overridden by subclasses: accepted component counts, and the defaults
    # used to pad short component lists.
    components_sizes = []
    default_components = []

    def init(self, item=None, components=None):
        """Initialize from an expression `item` or from raw `components`.

        Raises ColorError when the expression has an unsupported number of
        components or any component is not numeric.
        """
        super(_ColorObject, self).init(None, item)
        if item is not None:
            elements = item.elements
            if len(elements) in self.components_sizes:
                # we must not clip here; we copy the components, without clipping,
                # e.g. RGBColor[-1, 0, 0] stays RGBColor[-1, 0, 0]. this is especially
                # important for color spaces like LAB that have negative components.
                components = [value.round_to_float() for value in elements]
                if None in components:
                    raise ColorError
                # the following lines always extend to the maximum available
                # default_components, so RGBColor[0, 0, 0] will _always_
                # become RGBColor[0, 0, 0, 1]. does not seem the right thing
                # to do in this general context. poke1024
                if len(components) < 3:
                    components.extend(self.default_components[len(components) :])
                self.components = components
            else:
                raise ColorError
        elif components is not None:
            self.components = components

    @staticmethod
    def create(expr):
        """Build the matching _ColorObject subclass from a color expression;
        raises ColorError for an unknown head."""
        head = expr.get_head()
        cls = get_class(head)
        if cls is None:
            raise ColorError
        return cls(expr)

    @staticmethod
    def create_as_style(klass, graphics, item):
        return klass(item)

    def to_css(self):
        """Return (css-rgb-string, alpha-or-None)."""
        rgba = self.to_rgba()
        alpha = rgba[3] if len(rgba) > 3 else None
        return (
            r"rgb(%f%%, %f%%, %f%%)" % (rgba[0] * 100, rgba[1] * 100, rgba[2] * 100),
            alpha,
        )

    def to_js(self):
        return self.to_rgba()

    def to_expr(self):
        return to_expression(self.get_name(), *self.components)

    def to_rgba(self):
        return self.to_color_space("RGB")

    def to_color_space(self, color_space):
        """Convert self.components to `color_space`; raises ValueError when
        the conversion is not supported."""
        components = convert_color(self.components, self.color_space, color_space)
        if components is None:
            raise ValueError(
                "cannot convert from color space %s to %s."
                % (self.color_space, color_space)
            )
        return components
class CMYKColor(_ColorObject):
    """
    <dl>
    <dt>'CMYKColor[$c$, $m$, $y$, $k$]'
    <dd>represents a color with the specified cyan, magenta,
    yellow and black components.
    </dl>
    >> Graphics[MapIndexed[{CMYKColor @@ #1, Disk[2*#2 ~Join~ {0}]} &, IdentityMatrix[4]], ImageSize->Small]
    = -Graphics-
    """

    color_space = "CMYK"
    components_sizes = [3, 4, 5]  # CMY, CMYK or CMYK+alpha
    default_components = [0, 0, 0, 0, 1]  # pad missing K/alpha
class ColorDistance(Builtin):
    """
    <dl>
    <dt>'ColorDistance[$c1$, $c2$]'
    <dd>returns a measure of color distance between the colors $c1$ and $c2$.
    <dt>'ColorDistance[$list$, $c2$]'
    <dd>returns a list of color distances between the colors in $list$ and $c2$.
    </dl>
    The option DistanceFunction specifies the method used to measure the color
    distance. Available options are:
    <ul>
    <li>CIE76: Euclidean distance in the LABColor space
    <li>CIE94: Euclidean distance in the LCHColor space
    <li>CIE2000 or CIEDE2000: CIE94 distance with corrections
    <li>CMC: Color Measurement Committee metric (1984)
    <li>DeltaL: difference in the L component of LCHColor
    <li>DeltaC: difference in the C component of LCHColor
    <li>DeltaH: difference in the H component of LCHColor
    </ul>
    It is also possible to specify a custom distance.
    >> ColorDistance[Magenta, Green]
    = 2.2507
    >> ColorDistance[{Red, Blue}, {Green, Yellow}, DistanceFunction -> {"CMC", "Perceptibility"}]
    = {1.0495, 1.27455}
    #> ColorDistance[Blue, Red, DistanceFunction -> "CIE2000"]
    = 0.557976
    #> ColorDistance[Red, Black, DistanceFunction -> (Abs[#1[[1]] - #2[[1]]] &)]
    = 0.542917
    """

    summary_text = "distance between two colors"
    options = {"DistanceFunction": "Automatic"}
    requires = ("numpy",)
    messages = {
        "invdist": "`1` is not Automatic or a valid distance specification.",
        "invarg": "`1` and `2` should be two colors or a color and a lists of colors or "
        + "two lists of colors of the same length.",
    }
    # If numpy is not installed, 100 * c1.to_color_space returns
    # a list of 100 x 3 elements, instead of doing elementwise multiplication
    # NOTE(review): `requires` is assigned twice (also above); the duplicate
    # is harmless but redundant.
    requires = ("numpy",)
    # the docs say LABColor's colorspace corresponds to the CIE 1976 L^* a^* b^* color space
    # with {l,a,b}={L^*,a^*,b^*}/100. Corrections factors are put accordingly.
    # Named distance functions; each maps two _ColorObject instances to a float.
    _distances = {
        "CIE76": lambda c1, c2: _euclidean_distance(
            c1.to_color_space("LAB")[:3], c2.to_color_space("LAB")[:3]
        ),
        "CIE94": lambda c1, c2: _euclidean_distance(
            c1.to_color_space("LCH")[:3], c2.to_color_space("LCH")[:3]
        ),
        "CIE2000": lambda c1, c2: _cie2000_distance(
            100 * c1.to_color_space("LAB")[:3], 100 * c2.to_color_space("LAB")[:3]
        )
        / 100,
        "CIEDE2000": lambda c1, c2: _cie2000_distance(
            100 * c1.to_color_space("LAB")[:3], 100 * c2.to_color_space("LAB")[:3]
        )
        / 100,
        "DeltaL": lambda c1, c2: _component_distance(
            c1.to_color_space("LCH"), c2.to_color_space("LCH"), 0
        ),
        "DeltaC": lambda c1, c2: _component_distance(
            c1.to_color_space("LCH"), c2.to_color_space("LCH"), 1
        ),
        "DeltaH": lambda c1, c2: _component_distance(
            c1.to_color_space("LCH"), c2.to_color_space("LCH"), 2
        ),
        "CMC": lambda c1, c2: _CMC_distance(
            100 * c1.to_color_space("LAB")[:3], 100 * c2.to_color_space("LAB")[:3], 1, 1
        )
        / 100,
    }

    def apply(self, c1, c2, evaluation, options):
        "ColorDistance[c1_, c2_, OptionsPattern[ColorDistance]]"
        distance_function = options.get("System`DistanceFunction")
        compute = None
        # Resolve the DistanceFunction option into a `compute(c1, c2)` callable.
        if isinstance(distance_function, String):
            # Named metric, e.g. "CIE76".
            compute = ColorDistance._distances.get(distance_function.get_string_value())
            if not compute:
                evaluation.message("ColorDistance", "invdist", distance_function)
                return
        elif distance_function.has_form("List", 2):
            # {"CMC", spec} forms: spec is "Acceptability", "Perceptibility"
            # or a {lightness, chroma} pair of positive integers.
            if distance_function.elements[0].get_string_value() == "CMC":
                if distance_function.elements[1].get_string_value() == "Acceptability":
                    compute = (
                        lambda c1, c2: _CMC_distance(
                            100 * c1.to_color_space("LAB")[:3],
                            100 * c2.to_color_space("LAB")[:3],
                            2,
                            1,
                        )
                        / 100
                    )
                elif (
                    distance_function.elements[1].get_string_value() == "Perceptibility"
                ):
                    compute = ColorDistance._distances.get("CMC")
                elif distance_function.elements[1].has_form("List", 2):
                    if isinstance(
                        distance_function.elements[1].elements[0], Integer
                    ) and isinstance(
                        distance_function.elements[1].elements[1], Integer
                    ):
                        if (
                            distance_function.elements[1].elements[0].get_int_value()
                            > 0
                            and distance_function.elements[1]
                            .elements[1]
                            .get_int_value()
                            > 0
                        ):
                            lightness = (
                                distance_function.elements[1]
                                .elements[0]
                                .get_int_value()
                            )
                            chroma = (
                                distance_function.elements[1]
                                .elements[1]
                                .get_int_value()
                            )
                            compute = (
                                lambda c1, c2: _CMC_distance(
                                    100 * c1.to_color_space("LAB")[:3],
                                    100 * c2.to_color_space("LAB")[:3],
                                    lightness,
                                    chroma,
                                )
                                / 100
                            )
        elif (
            isinstance(distance_function, Symbol)
            and distance_function.get_name() == "System`Automatic"
        ):
            compute = ColorDistance._distances.get("CIE76")
        else:
            # Custom distance: apply the user function to the LAB components.
            def compute(a, b):
                return Expression(
                    SymbolApply,
                    distance_function,
                    Expression(
                        SymbolList,
                        Expression(
                            SymbolList, *[Real(val) for val in a.to_color_space("LAB")]
                        ),
                        Expression(
                            SymbolList, *[Real(val) for val in b.to_color_space("LAB")]
                        ),
                    ),
                )
        if compute is None:
            # Falls through here when the {"CMC", ...} spec did not match.
            evaluation.message("ColorDistance", "invdist", distance_function)
            return

        def distance(a, b):
            # Wrap one pairwise distance; re-raises ColorError after
            # messaging so the outer try can abort the whole computation.
            try:
                py_a = _ColorObject.create(a)
                py_b = _ColorObject.create(b)
            except ColorError:
                evaluation.message("ColorDistance", "invarg", a, b)
                raise
            result = from_python(compute(py_a, py_b))
            return result

        try:
            # Dispatch on list vs single color for each argument.
            if c1.get_head_name() == "System`List":
                if c2.get_head_name() == "System`List":
                    if len(c1.elements) != len(c2.elements):
                        evaluation.message("ColorDistance", "invarg", c1, c2)
                        return
                    else:
                        return Expression(
                            SymbolList,
                            *[distance(a, b) for a, b in zip(c1.elements, c2.elements)],
                        )
                else:
                    return Expression(
                        SymbolList, *[distance(c, c2) for c in c1.elements]
                    )
            elif c2.get_head_name() == "System`List":
                return Expression(SymbolList, *[distance(c1, c) for c in c2.elements])
            else:
                return distance(c1, c2)
        except ColorError:
            # distance() already emitted the "invarg" message.
            return
        except NotImplementedError:
            evaluation.message("ColorDistance", "invdist", distance_function)
            return
class ColorError(BoxConstructError):
    """Raised when an expression cannot be interpreted as a color."""

    pass
class GrayLevel(_ColorObject):
    """
    <dl>
    <dt>'GrayLevel[$g$]'
    <dd>represents a shade of gray specified by $g$, ranging from
    0 (black) to 1 (white).
    <dt>'GrayLevel[$g$, $a$]'
    <dd>represents a shade of gray specified by $g$ with opacity $a$.
    </dl>
    """

    color_space = "Grayscale"
    components_sizes = [1, 2]  # gray, or gray + alpha
    default_components = [0, 1]  # opaque black
class Hue(_ColorObject):
    """
    <dl>
    <dt>'Hue[$h$, $s$, $l$, $a$]'
    <dd>represents the color with hue $h$, saturation $s$, lightness $l$ and opacity $a$.
    <dt>'Hue[$h$, $s$, $l$]'
    <dd>is equivalent to 'Hue[$h$, $s$, $l$, 1]'.
    <dt>'Hue[$h$, $s$]'
    <dd>is equivalent to 'Hue[$h$, $s$, 1, 1]'.
    <dt>'Hue[$h$]'
    <dd>is equivalent to 'Hue[$h$, 1, 1, 1]'.
    </dl>
    >> Graphics[Table[{EdgeForm[Gray], Hue[h, s], Disk[{12h, 8s}]}, {h, 0, 1, 1/6}, {s, 0, 1, 1/4}]]
    = -Graphics-
    >> Graphics[Table[{EdgeForm[{GrayLevel[0, 0.5]}], Hue[(-11+q+10r)/72, 1, 1, 0.6], Disk[(8-r) {Cos[2Pi q/12], Sin[2Pi q/12]}, (8-r)/3]}, {r, 6}, {q, 12}]]
    = -Graphics-
    """

    color_space = "HSB"
    components_sizes = [1, 2, 3, 4]
    default_components = [0, 1, 1, 1]

    def hsl_to_rgba(self) -> tuple:
        """Convert the first three components, read as HSL, to an RGBA tuple.

        NOTE(review): the class' color_space is "HSB", yet this helper
        implements the standard HSL -> RGB formula; confirm which model the
        stored components actually use before relying on it.
        """
        h, s, l = self.components[:3]
        if l < 0.5:
            q = l * (1 + s)
        else:
            q = l + s - l * s
        p = 2 * l - q

        # Channel hue offsets for R, G, B.
        rgb = (h + 1 / 3, h, h - 1 / 3)

        def wrap(value):
            # Wrap a hue offset back into [0, 1]. (Renamed from `map`, which
            # shadowed the builtin.)
            if value < 0:
                value += 1
            if value > 1:
                value -= 1
            return value

        def trans(t):
            # Piecewise HSL -> channel intensity transfer function.
            if t < 1 / 6:
                return p + ((q - p) * 6 * t)
            elif t < 1 / 2:
                return q
            elif t < 2 / 3:
                return p + ((q - p) * 6 * (2 / 3 - t))
            else:
                return p

        # Bug fix: the original computed trans(list(map(t))), where map() is
        # the single-value wrap helper above -- list() over its float result
        # raised TypeError on every call.
        result = tuple(trans(wrap(t)) for t in rgb) + (self.components[3],)
        return result
class LABColor(_ColorObject):
    """
    <dl>
    <dt>'LABColor[$l$, $a$, $b$]'
    <dd>represents a color with the specified lightness, red/green and yellow/blue
    components in the CIE 1976 L*a*b* (CIELAB) color space.
    </dl>
    """

    color_space = "LAB"
    components_sizes = [3, 4]  # LAB, or LAB + alpha
    default_components = [0, 0, 0, 1]  # opaque
class LCHColor(_ColorObject):
    """
    <dl>
    <dt>'LCHColor[$l$, $c$, $h$]'
    <dd>represents a color with the specified lightness, chroma and hue
    components in the CIELCh CIELab cube color space.
    </dl>
    """

    color_space = "LCH"
    components_sizes = [3, 4]  # LCH, or LCH + alpha
    default_components = [0, 0, 0, 1]  # opaque
class LUVColor(_ColorObject):
    """
    <dl>
    <dt>'LUVColor[$l$, $u$, $v$]'
    <dd>represents a color with the specified components in the CIE 1976 L*u*v* (CIELUV) color space.
    </dl>
    """

    # Doc fix: the docstring previously read 'LCHColor[$l$, $u$, $v$]',
    # copy-pasted from the LCHColor class above.
    color_space = "LUV"
    components_sizes = [3, 4]  # LUV, or LUV + alpha
    default_components = [0, 0, 0, 1]  # opaque
class RGBColor(_ColorObject):
    """
    <dl>
    <dt>'RGBColor[$r$, $g$, $b$]'
    <dd>represents a color with the specified red, green and blue
    components.
    </dl>
    >> Graphics[MapIndexed[{RGBColor @@ #1, Disk[2*#2 ~Join~ {0}]} &, IdentityMatrix[3]], ImageSize->Small]
    = -Graphics-
    >> RGBColor[0, 1, 0]
    = RGBColor[0, 1, 0]
    >> RGBColor[0, 1, 0] // ToBoxes
    = StyleBox[GraphicsBox[...], ...]
    """

    color_space = "RGB"
    components_sizes = [3, 4]  # RGB, or RGB + alpha
    default_components = [0, 0, 0, 1]  # opaque black

    def to_rgba(self):
        # Already in RGB; skip the generic color-space conversion.
        return self.components
class XYZColor(_ColorObject):
    """
    <dl>
    <dt>'XYZColor[$x$, $y$, $z$]'
    <dd>represents a color with the specified components in the CIE 1931 XYZ color space.
    </dl>
    """

    color_space = "XYZ"
    components_sizes = [3, 4]  # XYZ, or XYZ + alpha
    default_components = [0, 0, 0, 1]  # opaque
def expression_to_color(color):
    """Return the _ColorObject for `color`, or None when it is not a color."""
    try:
        result = _ColorObject.create(color)
    except ColorError:
        result = None
    return result
def color_to_expression(components, colorspace):
    """Build the color expression for `components` in `colorspace`.

    Grayscale and HSB use the special heads GrayLevel and Hue; every other
    space maps to "<space>Color".
    """
    special_heads = {"Grayscale": "GrayLevel", "HSB": "Hue"}
    converted_color_name = special_heads.get(colorspace)
    if converted_color_name is None:
        converted_color_name = colorspace + "Color"
    return to_expression(converted_color_name, *components)
|
from openmm_systems.test_systems._legacy_all import *
|
# -*- coding: utf-8 -*-
from guizero import App, Box, Text, ButtonGroup, TextBox, Combo, PushButton, info
from math import pi, sqrt
def update_txt_value():
    """Relabel the size field to match the selected duct cross-section."""
    if btn_duct_type.value == "okrągły":
        label = "Średnica:"
    else:
        label = "Długość boku:"
    txt_size_value.value = label
flow_units = ["m³/h", "m³/min", "m³/s", "L/h", "L/min", "L/s"]
speed_units = ["m/s", "cm/s"]
def update_duct_type():
    """Switch the input-unit combo between flow and speed units.

    Duct geometry only matters when a volumetric flow is given, so the
    geometry box is disabled in linear-speed mode.
    """
    if btn_input_type.value == "natężenie":
        box_duct_type.enabled = True
        units = flow_units
    else:
        box_duct_type.enabled = False
        units = speed_units
    txt_input_unit.clear()
    # enumerate() instead of the range(len(...)) anti-idiom; the branch
    # bodies were also duplicated and are now shared.
    for i, unit in enumerate(units):
        txt_input_unit.insert(i, unit)
def is_float(to_check):
    """Return True when `to_check` is a plain non-negative decimal number.

    Accepts runs of decimal digits with at most one '.' separator; rejects
    signs, exponents and empty strings — the strict format this form expects.
    """
    parts = to_check.split(".")
    if len(parts) not in (1, 2):
        return False
    for part in parts:
        # Bug fix: isdecimal() instead of isdigit(). isdigit() also accepts
        # digit-like characters such as superscripts ("²"), which float()
        # cannot parse and which would crash calculate_probe().
        if not part.isdecimal():
            return False
    return True
def check_values():
    """Validate all numeric inputs; run the calculation or show an error."""
    valid = is_float(tbox_input_value.value)
    if tbox_size_value.enabled:
        # The duct size is only validated when the geometry box is active.
        valid = is_float(tbox_size_value.value) and valid
    valid = is_float(tbox_probe_flow_value.value) and valid
    if valid:
        calculate_probe()
    else:
        txt_probe_diameter_value.value = ""
        info("Błąd", "Podano niepoprawną wartość.\nPamiętaj o używaniu kropki w ułamkach.")
def calculate_probe():
    """Compute the isokinetic probe diameter and show it in the result label.

    NOTE(review): control flow reconstructed from the flattened source — the
    duct-geometry computation is only needed (and its input only validated)
    when a volumetric flow was supplied; confirm against the original layout.
    """
    if btn_input_type.value == "prędkość liniowa":
        # Linear speed given directly.
        duct_speed = float(tbox_input_value.value)
        if txt_input_unit.value == "cm/s":
            duct_speed = duct_speed / 100  # in m/s
    else:
        # Volumetric flow given: normalize to m^3/s ...
        duct_flow = float(tbox_input_value.value)
        if txt_input_unit.value == "m³/h":
            duct_flow = duct_flow / 3600  # in m^3/s
        elif txt_input_unit.value == "m³/min":
            duct_flow = duct_flow / 60  # in m^3/s
        elif txt_input_unit.value == "L/h":
            duct_flow = duct_flow / 1000 / 3600  # in m^3/s
        elif txt_input_unit.value == "L/min":
            duct_flow = duct_flow / 1000 / 60  # in m^3/s
        elif txt_input_unit.value == "L/s":
            duct_flow = duct_flow / 1000  # in m^3/s
        # ... then derive the speed from the duct cross-section.
        duct_size = float(tbox_size_value.value) / 1000  # in m
        if btn_duct_type.value == "okrągły":
            duct_cross_area = pi * (duct_size) ** 2 / 4
        else:
            duct_cross_area = (duct_size) ** 2
        duct_speed = duct_flow / duct_cross_area
    # Probe sample flow, normalized to m^3/s.
    probe_flow = float(tbox_probe_flow_value.value)
    if txt_probe_flow_unit.value == "L/h":
        probe_flow = probe_flow / 1000 / 3600  # in m^3/s
    elif txt_probe_flow_unit.value == "L/min":
        probe_flow = probe_flow / 1000 / 60  # in m^3/s
    elif txt_probe_flow_unit.value == "L/s":
        probe_flow = probe_flow / 1000  # in m^3/s
    # Isokinetic condition: probe inlet area such that inlet speed equals
    # duct speed.
    isokinetic_area = probe_flow / duct_speed
    probe_diameter = sqrt(4 * isokinetic_area / pi)
    probe_diameter *= 1000  # in mm
    probe_diameter = round(probe_diameter, 1)
    txt_probe_diameter_value.value = str(probe_diameter) + " mm"
# ---- GUI layout -------------------------------------------------------
app = App(title="Sonda izokinetyczna", width=500, height=320)
txt_header = Text(app, text="Dobór sondy izokinetycznej")
# Top row: left = known flow parameter, right = duct geometry.
box_top_row = Box(app, align="top")
box_input_type = Box(box_top_row, align="left", width=250, height=150)
txt_choose_input = Text(box_input_type, text="Znany parametr przepływu:")
btn_input_type = ButtonGroup(box_input_type, options=["natężenie", "prędkość liniowa"], selected="natężenie", command=update_duct_type)
text_input_value = Text(box_input_type, text="Wartość:")
box_input_value = Box(box_input_type)
tbox_input_value = TextBox(box_input_value, align="left")
txt_input_unit = Combo(box_input_value, options=flow_units, selected="m³/h", align="left")
box_duct_type = Box(box_top_row, align="right", width=250, height=150, enabled=True)
txt_duct_type = Text(box_duct_type, text="Przekrój kanału:")
btn_duct_type = ButtonGroup(box_duct_type, options=["okrągły", "kwadratowy"], selected="okrągły", command=update_txt_value)
txt_size_value = Text(box_duct_type, text="Średnica:")
box_size_value = Box(box_duct_type)
tbox_size_value = TextBox(box_size_value, align="left")
txt_size_unit = Text(box_size_value, text=" mm", align="left")
# Middle row: probe sample flow.
box_probe_flow = Box(app, width=500, height=80)
txt_probe_flow = Text(box_probe_flow, text="Przepływ przez sondę:")
box_probe_flow_value = Box(box_probe_flow)
tbox_probe_flow_value = TextBox(box_probe_flow_value, align="left")
# NOTE(review): `enabled="L/h"` looks like it was meant to be
# `selected="L/h"` (compare the Combo above); guizero treats any non-False
# value as enabled, so the initial selection silently defaults instead.
txt_probe_flow_unit = Combo(box_probe_flow_value, options=["L/h", "L/min", "L/s"], enabled="L/h", align="left")
# Bottom row: calculate button and result label.
box_calculate = Box(app)
box_btn_calculate = Box(box_calculate, width=200, height=150, align="left")
btn_calculate = PushButton(box_btn_calculate, text="Oblicz", command=check_values)
box_probe_diameter = Box(box_calculate, width=200, height=150, align="right")
txt_probe_diameter = Text(box_probe_diameter, text="Średnica sondy:")
txt_probe_diameter_value = Text(box_probe_diameter, text="")
app.display()
|
# -*- encoding: utf-8 -*-
from django.contrib.auth.decorators import login_required
from djongo import models
from django.shortcuts import render, redirect, get_object_or_404, get_list_or_404
from django.utils.translation import activate
from cliente.forms import ClienteForm, ClienteTodosForm
from cliente.models import Cliente
# TODO(review): remove before release — kept only as a style reference for the
# manage.html template (original note: "ELIMINAR: Está solo de referencia de
# los estilos, no incluir en la versión final").
def mng(request):
    """Render the style-reference management page."""
    return render(request, 'cliente/manage.html', {'foo': 'bar'})
@login_required(login_url="/login/")
def cliente(request):
    """Create a Cliente from POSTed form data, or render an empty form.

    On a valid save, redirects to the list view; on a database error the form
    is re-rendered (best effort, errors are not surfaced to the user).
    """
    activate('es')
    if request.method == "POST":
        form = ClienteForm(request.POST)
        if form.is_valid():
            try:
                form.save()
                return redirect('/cliente/todos')
            except Exception:
                # Narrowed from a bare `except:` so SystemExit /
                # KeyboardInterrupt are no longer swallowed.
                # TODO(review): report the save error to the user instead of
                # silently re-rendering the form.
                pass
    else:
        form = ClienteForm()
    if form.errors:
        # Debug aid: dump field errors to the console.
        for field in form:
            for error in field.errors:
                print(error)
    return render(request, 'cliente/agregar.html', {'form': form})
@login_required(login_url="/login/")
def todos(request):
    """List every Cliente, exposing the Mongo ``_id`` as a string ``id``."""
    activate('es')
    # (Removed leftover debug print "en clientes".)
    clientes = Cliente.objects.all()
    for cliente in clientes:
        # djongo stores the primary key as an ObjectId in _id; templates need
        # a plain string.
        cliente.id = str(cliente._id)
    return render(request, "cliente/todos.html", {'form': clientes})
@login_required(login_url="/login/")
def editar(request, id):
    """Render the edit form for the Cliente with the given _id (404 if absent)."""
    activate('es')
    instancia = get_object_or_404(Cliente, _id=id)
    formulario = ClienteForm(request.POST or None, instance=instancia)
    return render(request, 'cliente/editar.html', {'form': formulario})
@login_required(login_url="/login/")
def actualizar(request, id):
    """Update the Cliente with the given _id from POSTed form data.

    Redirects to the list on success; re-renders the edit form when the
    submitted data is invalid.
    """
    activate('es')
    cliente = get_object_or_404(Cliente, _id=id)
    form = ClienteForm(request.POST or None, instance=cliente)
    # (Removed leftover `print(form.__dict__)` debug statement.)
    if form.is_valid():
        form.save()
        return redirect("/cliente/todos")
    return render(request, 'cliente/editar.html', {'form': form})
@login_required(login_url="/login/")
def eliminar(request, id):
    """Best-effort delete of the Cliente with the given _id, then back to the list."""
    activate('es')
    try:
        Cliente.objects.get(_id=id).delete()
    except Exception as e:
        # Deletion is best effort; just log the failure to the console.
        print('%s (%s)' % (e, type(e)))
    # TODO: send a "deleted" notification message (original: "Enviar mensaje
    # de eliminado").
    return redirect("/cliente/todos")
#-*- coding: utf-8 -*-
#文章类
class Article(object):
    """Plain data holder for a blog article.

    Bug fix: the original defined ``__init__`` twice; the second, zero-argument
    definition silently replaced the first, so ``Article(id, title, ...)``
    raised TypeError. Both call styles are now supported via default values
    (a bare ``Article()`` previously left the attributes unset; they are now
    initialized to None, which is strictly more usable).
    The Java-style accessors are kept for backward compatibility.
    """

    def __init__(self, id=None, title=None, content=None, author=None,
                 tags=None, catagory=None, date=None, url=None):
        self.id = id
        self.title = title
        self.content = content
        self.author = author
        self.tags = tags
        self.catagory = catagory  # NOTE: original spelling of "category" kept
        self.date = date
        self.url = url
        print('Article实体化')

    def toList(self):
        """Return the fields as a list, in constructor order."""
        return [
            self.id,
            self.title,
            self.content,
            self.author,
            self.tags,
            self.catagory,
            self.date,
            self.url,
        ]

    def getId(self):
        return self.id

    def setId(self, id):
        self.id = id

    def getTitle(self):
        return self.title

    def setTitle(self, title):
        self.title = title

    def getContent(self):
        return self.content

    def setContent(self, content):
        self.content = content

    def getAuthor(self):
        return self.author

    def setAuthor(self, author):
        self.author = author

    def getTags(self):
        return self.tags

    def setTags(self, tags):
        self.tags = tags

    def getCatagory(self):
        return self.catagory

    def setCatagory(self, catagory):
        self.catagory = catagory

    def getDate(self):
        return self.date

    def setDate(self, date):
        self.date = date

    def getUrl(self):
        return self.url

    def setUrl(self, url):
        self.url = url
import autodisc as ad
import os
def test_twodmatrixcppnneatevolution():
    """Exercise TwoDMatrixCCPNNEATEvolution across three configurations."""
    dir_path = os.path.dirname(os.path.realpath(__file__))

    def make_evolution(config_file, keep_results):
        # Build an evolution object with the shared test settings.
        config = ad.cppn.TwoDMatrixCCPNNEATEvolution.default_config()
        config.neat_config_file = os.path.join(dir_path, config_file)
        config.is_verbose = False
        config.keep_results = keep_results
        config.matrix_size = (50, 50)
        config.is_pytorch = True
        return ad.cppn.TwoDMatrixCCPNNEATEvolution(
            fitness_function=lambda mat, genome: 0, config=config)

    ################################################################
    # normal evolution: every generation's results are retained
    evo = make_evolution('test_neat.cfg', 'all_gen')
    evo.do_next_generation()  # generate first generation
    assert len(evo.results) == 1
    assert len(evo.results[0]) == evo.neat_config.pop_size
    evo.do_next_generation()  # generate second generation
    assert len(evo.results) == 2
    assert len(evo.results[0]) == evo.neat_config.pop_size
    assert len(evo.results[1]) == evo.neat_config.pop_size

    ################################################################
    # keep only the last generation
    evo = make_evolution('test_neat.cfg', 'last_gen')
    evo.do_next_generation()  # generate first generation
    assert len(evo.results) == 1
    assert len(evo.results[0]) == evo.neat_config.pop_size
    evo.do_next_generation()  # generate second generation
    assert len(evo.results) == 1
    assert len(evo.results[1]) == evo.neat_config.pop_size

    ################################################################
    # population_size == 1
    evo = make_evolution('test_neat_single.cfg', 'all_gen')
    evo.do_next_generation()  # generate first generation
    assert len(evo.results) == 1
    assert len(evo.results[0]) == evo.neat_config.pop_size
    evo.do_next_generation()  # generate second generation
    assert len(evo.results) == 2
    assert len(evo.results[0]) == evo.neat_config.pop_size
    assert len(evo.results[1]) == evo.neat_config.pop_size
def interlock(s1, s2, s3):
    """Return True iff s3 is the strict interleaving s1[0]s2[0]s1[1]s2[1]...

    s1 and s2 must be the same length and s3 exactly twice that length.
    """
    if len(s1) != len(s2) or len(s3) != 2 * len(s1):
        return False
    return all(s3[2 * k] == a and s3[2 * k + 1] == b
               for k, (a, b) in enumerate(zip(s1, s2)))
# Python 3 port: the original ``raw_input`` / print-statement syntax is
# Python 2 only and is a SyntaxError under the Python 3 used by the rest
# of this file.
s1 = input('s1:')
s2 = input('s2:')
s3 = input('s3:')
print(interlock(s1, s2, s3))
|
import argparse
import random
from PIL import ImageFont, Image, ImageDraw
# Module-level CLI parser; all -s/-f/... options below register onto it.
parser = argparse.ArgumentParser(description='Generate a desktop background using random characters and colors.')
def size(s):
    """argparse type: parse a '4k'-style preset or 'WIDTHxHEIGHT' into a tuple.

    Raises argparse.ArgumentTypeError for anything unparseable.
    """
    defaults = {
        '4k': (3840, 2160),
        '2k': (2048, 1080),
        '1440p': (2560, 1440),
        '1080p': (1920, 1080),
        '720p': (1280, 720)
    }
    if defaults.get(s.lower()) is not None:
        return defaults[s.lower()]
    try:
        width, height = map(int, s.split('x'))
        return width, height
    except ValueError:
        # Narrowed from a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit.
        raise argparse.ArgumentTypeError('Size must be "WIDTHxHEIGHT"')
parser.add_argument('-s', '--size', type=size, default=(1920, 1080), metavar='WIDTHxHEIGHT',
                    help='WIDTHxHEIGHT or 4k/2k/1440p/1080p/720p.')
parser.add_argument('-f', '--font', type=str, metavar='PATH', help='Path to font file.')
parser.add_argument('-fs', '--font-size', type=int, default=24, help='Font size in pixels.')
# Bug fixed: the VERTICAL/HORIZONTAL metavars were swapped between these two
# options, so --help described each padding flag with the wrong axis label.
parser.add_argument('-vp', '--vertical-padding', type=int, default=8, dest='v_padd', metavar='VERTICAL',
                    help='Vertical padding in pixels.')
parser.add_argument('-hp', '--horizontal-padding', type=int, default=4, dest='h_padd', metavar='HORIZONTAL',
                    help='Horizontal padding in pixels.')
def color(s):
    """argparse type: parse '#RRGGBB' or '#RRGGBBAA' hex into an RGBA tuple.

    Alpha defaults to 255 (opaque) when no AA byte is given.
    Raises argparse.ArgumentTypeError for malformed input.
    """
    try:
        s = s.lstrip('#')
        r, g, b = tuple(int(s[i:i + 2], 16) for i in (0, 2, 4))
        if len(s) == 8:
            a = int(s[-2:], 16)
        else:
            a = 255
        return r, g, b, a
    except ValueError:
        # Narrowed from a bare ``except:`` which also swallowed
        # KeyboardInterrupt/SystemExit.
        raise argparse.ArgumentTypeError('Color must be in hexadecimal format.')
parser.add_argument('-bg', '--background', type=color, default=(40, 43, 53, 255), dest='bg', metavar='BACKGROUND',
                    help='Background color in hexadecimal.')
# Default glyph set is punctuation; the backslash inside this (non-raw) string
# has no escape meaning here, so Python keeps it literally.
parser.add_argument('-ch', '--characters', type=str, default="""`~!@#$%^&*()_+=-[]\{}|;':",./<>?""", dest='chars',
                    help='Characters to use in generation.')
# Muted default palette (RGBA tuples) used when -c/--colors is not given.
default_colors = [
    (95, 80, 74, 255),
    (81, 98, 79, 255),
    (105, 55, 63, 255),
    (38, 94, 109, 255),
    (133, 100, 78, 255),
    (92, 51, 89, 255),
    (57, 99, 69, 255)
]
parser.add_argument('-c', '--colors', type=color, nargs='+', default=default_colors,
                    help='Colors to use in generation.')
parser.add_argument('-o', '--output', type=str, default='./output.png', metavar='PATH')
# Parses sys.argv at import time (this file is a standalone script).
args = parser.parse_args()
# Create the canvas pre-filled with the background color.
txt = Image.new('RGBA', args.size, args.bg)
try:
    if args.font is None:
        raise IOError
    font = ImageFont.truetype(args.font, args.font_size)
except IOError:
    # No font given, or it failed to load -> PIL's built-in bitmap font.
    # NOTE(review): load_default() is a fixed-size font, so --font-size is
    # effectively ignored in this fallback -- confirm that is acceptable.
    font = ImageFont.load_default()
draw_ctx = ImageDraw.Draw(txt)
# Tile the canvas: the step is glyph size + padding, starting one pad
# off-canvas so characters also bleed over the edges.
for x in range(-args.h_padd, txt.size[0], args.h_padd + args.font_size):
    for y in range(-args.v_padd, txt.size[1], args.v_padd + args.font_size):
        draw_ctx.text((x, y), random.choice(args.chars), random.choice(args.colors), font)
txt.save(args.output)
|
from ...error import GraphQLError
from .base import ValidationRule
class UniqueOperationNames(ValidationRule):
    """Validation rule: no two operations in a document may share a name."""

    __slots__ = 'known_operation_names',

    def __init__(self, context):
        super(UniqueOperationNames, self).__init__(context)
        # Maps an operation name string to the name node that first used it.
        self.known_operation_names = {}

    def enter_OperationDefinition(self, node, key, parent, path, ancestors):
        name_node = node.name
        if not name_node:
            # Anonymous operations are handled by a different rule.
            return
        previous = self.known_operation_names.get(name_node.value)
        if previous is not None:
            return GraphQLError(
                self.duplicate_operation_name_message(name_node.value),
                [previous, name_node]
            )
        self.known_operation_names[name_node.value] = name_node

    @staticmethod
    def duplicate_operation_name_message(operation_name):
        return 'There can only be one operation named "{}".'.format(operation_name)
|
""" spectrum_overload Setup.py
My first attempt at a setup.py file. It is based off
A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Licensed under the MIT Licence
# To use a consistent encoding
import codecs
import os
# Always prefer setuptools over distutils
from setuptools import find_packages, setup
here = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with codecs.open(os.path.join(here, "README.md")) as f:
    long_description = f.read()
# Single-source the package metadata (version/author/...) from __about__.py.
about = {}
with codecs.open(os.path.join(here, "spectrum_overload", "__about__.py")) as f:
    exec(f.read(), about)
# https://www.reddit.com/r/Python/comments/3uzl2a/setuppy_requirementstxt_or_a_combination/
# NOTE(review): ``requirements`` is read here but never passed to setup()
# below (install_requires is hard-coded there) -- confirm which is canonical.
with codecs.open(os.path.join(here, "requirements.txt")) as f:
    requirements = f.read().splitlines()
setup(
    name="spectrum_overload",
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    version=about["__version__"],
    description="Spectrum class that overloads operators.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    # The project's main homepage.
    url="https://github.com/jason-neal/spectrum_overload",
    download_url="https://github.com/jason-neal/spectrum_overload",
    # Author details
    author=about["__author__"],
    author_email=about["__email__"],
    license=about["__license__"],
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Science/Research",
        "Topic :: Scientific/Engineering :: Astronomy",
        "Topic :: Scientific/Engineering :: Physics",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Natural Language :: English",
    ],
    keywords=["astronomy", "spectra", "spectroscopy", "CRIRES"],
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=["contrib", "docs", "tests"]),
    # test_suite=[],
    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    # py_modules=["my_module"],
    # py_modules=["spectrum/Spectrum"],
    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    # NOTE(review): requirements.txt is parsed above but unused; this
    # hard-coded list is what actually ships -- confirm the two lists agree.
    # install_requires=requirements,
    install_requires=["numpy", "scipy", "astropy", "pyastronomy", "matplotlib"],
    # NOTE(review): this range still admits Python 2.7 while the classifiers
    # above advertise Python 3 only -- confirm which is intended.
    python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4",
    setup_requires=["pytest-runner"],
    tests_require=["pytest", "hypothesis", "pytest-cov"],
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
        "dev": ["check-manifest"],
        "test": ["coverage", "pytest", "pytest-cov", "python-coveralls", "hypothesis"],
        "docs": ["sphinx >= 1.4", "sphinx_rtd_theme", "pyastronomy"],
    },
    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    package_data={"spectrum_overload": ["data/*.fits"]},
    # 'sample': ['package_data.dat'],
    # },
    include_package_data=True,
    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        # 'console_scripts': [
        #     'sample=sample:main',
        "console_scripts": ["spectrum_overload=spectrum_overload:main"]
    },
)
|
# This work is based on original code developed and copyrighted by TNO 2020.
# Subsequent contributions are licensed to you by the developers of such code and are
# made available to the Project under one or several contributor license agreements.
#
# This work is licensed to you under the Apache License, Version 2.0.
# You may obtain a copy of the license at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Contributors:
# TNO - Initial implementation
# Manager:
# TNO
import logging.config
from flask import Flask, Blueprint
from application import settings
from application.api import api
from application.api.ns_statistics import ns as ns_statistics
from flask_cors import CORS
app = Flask(__name__)
CORS(app)  # allow cross-origin requests from the separately served frontend
# NOTE: resolves 'logging.conf' relative to the current working directory.
logging.config.fileConfig('logging.conf')
log = logging.getLogger(__name__)
# TEMPORARY SOLUTION TO DISABLE BROWSER CACHING DURING TESTING
@app.after_request
def add_header(r):
    """Stamp no-cache headers on every response so browsers always refetch."""
    no_cache_headers = {
        "Pragma": "no-cache",
        "Expires": "0",
        "Cache-Control": "public, max-age=0",
    }
    for header_name, header_value in no_cache_headers.items():
        r.headers[header_name] = header_value
    return r
def configure_app(flask_app):
    """Copy server and restplus settings from the settings module into Flask config."""
    # flask_app.config['SERVER_NAME'] = settings.FLASK_SERVER_NAME
    config_values = {
        'SERVER_HOST': settings.FLASK_SERVER_HOST,
        'SERVER_PORT': settings.FLASK_SERVER_PORT,
        'SWAGGER_UI_DOC_EXPANSION': settings.RESTPLUS_SWAGGER_UI_DOC_EXPANSION,
        'RESTPLUS_VALIDATE': settings.RESTPLUS_VALIDATE,
        'RESTPLUS_MASK_SWAGGER': settings.RESTPLUS_MASK_SWAGGER,
        'ERROR_404_HELP': settings.RESTPLUS_ERROR_404_HELP,
    }
    flask_app.config.update(config_values)
def initialize_app(flask_app):
    """Configure the app and mount the REST API blueprint under /api."""
    configure_app(flask_app)
    api_blueprint = Blueprint('api', __name__, url_prefix='/api')
    api.init_app(api_blueprint)
    api.add_namespace(ns_statistics)
    flask_app.register_blueprint(api_blueprint)
def main():
    """Entry point: wire up the app and serve it with Flask's dev server."""
    initialize_app(app)
    host, port = settings.FLASK_SERVER_HOST, settings.FLASK_SERVER_PORT
    log.info('>>>>> Starting development server at http://%s:%d/api/ <<<<<', host, port)
    app.run(host=host, port=port, debug=settings.FLASK_DEBUG)


if __name__ == "__main__":
    main()
#######################
#Pytest fixture test sample
#######################
import pytest, time, logging
from selenium import webdriver
#logging.basicConfig(format = u'%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s', level = logging.DEBUG)
logging.basicConfig(filename="sample.log", level=logging.INFO)
# With level=INFO the debug line below is filtered out of sample.log.
logging.debug("This is a debug message")
logging.info("Informational message")
logging.error("An error has happened!")
@pytest.fixture()
def browser(request):
    """Yield a Chrome WebDriver and quit it after the test finishes.

    ``pytest.yield_fixture`` is deprecated (and removed in pytest 6.2);
    plain ``pytest.fixture`` has supported yield fixtures since pytest 3.0.
    """
    driver = webdriver.Chrome()
    yield driver
    driver.quit()
def test_python_org(browser):
    """Smoke test: search python.org for 'pycon' and open the first result."""
    browser.get('http://python.org')
    assert 'Welcome to Python.org' in browser.title
    # NOTE(review): the find_element_by_* helpers were removed in Selenium 4;
    # this assumes Selenium 3 -- confirm the pinned selenium version.
    browser.find_element_by_css_selector("#id-search-field").send_keys("pycon")
    browser.find_element_by_name("submit").click()
    browser.find_element_by_css_selector("#content > div > section > form > ul > li:nth-child(1) > h3 > a").click()
    #time.sleep(10)
|
import scrapy
import math
import urllib.parse as urlparse
from urllib.parse import parse_qs
from mergedeep import merge
import urllib.request
import os
from opencage.geocoder import OpenCageGeocode
import subprocess
class RoutesSpider(scrapy.Spider):
    """Scrape hiking routes from walkonhill.com into the app's trail records.

    For each route page it downloads the KML track (converted to GeoJSON),
    geocodes the path waypoints via OpenCage, then fetches the English
    mirror page to merge in the *_en fields before yielding the trail dict.
    NOTE(review): every waypoint costs one OpenCage API call -- mind rate
    limits when widening start_urls.
    """
    # Remember to set it first by `export OPENCAGE_API_KEY=XXXXX`
    opencage_api_key = os.environ['OPENCAGE_API_KEY']
    geocoder = OpenCageGeocode(opencage_api_key)
    name = "routes"
    # One URL per (area, seq) route page; ranges mirror the site's per-area counts.
    start_urls = [
        *(f'http://www.walkonhill.com/route.php?area=1&seq={seq}' for seq in range(1, 14)),
        *(f'http://www.walkonhill.com/route.php?area=2&seq={seq}' for seq in range(1, 7)),
        *(f'http://www.walkonhill.com/route.php?area=3&seq={seq}' for seq in range(1, 23)),
        *(f'http://www.walkonhill.com/route.php?area=4&seq={seq}' for seq in range(1, 11)),
        *(f'http://www.walkonhill.com/route.php?area=5&seq={seq}' for seq in range(1, 11)),
        *(f'http://www.walkonhill.com/route.php?area=6&seq={seq}' for seq in range(1, 13)),
        *(f'http://www.walkonhill.com/route.php?area=7&seq={seq}' for seq in range(1, 16)),
        *(f'http://www.walkonhill.com/route.php?area=8&seq={seq}' for seq in range(1, 9)),
    ]
    # Running trail id, shared across requests; starts at 13 and is bumped
    # once per parsed route (see parse()).
    trail_id = 13  # increment by 1 every route

    def parse(self, response):
        """Parse a Chinese route page into a trail dict, then chain the
        English mirror page to fill in the *_en fields."""
        # Site 'area' query parameter -> region display name (Chinese).
        areaToRegion = {
            '0': '香港島',
            '1': '香港島',
            '2': '九龍',
            '3': '新界',
            '4': '香港島',
            '5': '香港島',
            '6': '香港島',
            '7': '離島',
            '8': '離島'
        }
        url = response.url
        self.log(f'#### Fetching {url}')
        queryString = parse_qs(urlparse.urlparse(url).query)
        area = queryString['area'][0]
        kmlFileUrl = response.urljoin(
            response.css('a.kml_btn::attr(href)').get())
        self.download_kml_and_convert_to_geojson(kmlFileUrl)
        paths = response.css('div#trackpointList ol li a::text').getall()
        generalInfo = response.css(
            'div.generalInfo td.right_td::text').getall()
        # 'XXX' / 'Coming Soon...' placeholders are overwritten later by
        # parse_english_page or left for manual editing (status DRAFT).
        trail = {
            'id': self.trail_id,
            'name': generalInfo[0],
            'name_en': 'XXX',
            'description': response.css('p.intro::text').get(),
            'description_en': 'XXX',
            'regions': [
                {
                    'name': areaToRegion[area],
                    'name_en': 'XXX'
                }
            ],
            'districts': [
                {
                    # Third segment of the breadcrumb, split on a literal backslash.
                    'name': response.css('p#indicator::text').get().split('\\')[2].strip(),
                    'name_en': 'XXX'
                }
            ],
            'height': 9999,
            'distanceInKm': float(generalInfo[1].replace(' 公里', '')),
            'difficulty': math.ceil(float(response.css('p.current_rating::text').getall()[0])),
            'durationInHour': float(generalInfo[2].replace(' 小時', '')),
            'sceneRating': math.ceil(float(response.css('p.current_rating::text').getall()[1])),
            'recommendRating': 3,
            'route': {
                'starts': [
                    {
                        'location': 'Coming Soon...',
                        'location_en': 'TODO',
                        'description': response.css('div#tab-1 p::text')[0].get().strip(),
                        'description_en': 'XXX',
                    }
                ],
                'paths': [
                    {
                        'location': location.strip(),
                        'location_en': 'XXX',
                        'description': 'Coming Soon...',
                        'description_en': 'Coming Soon...',
                    } for location in paths
                ],
                'ends': [
                    {
                        'location': 'Coming Soon...',
                        'location_en': 'Coming Soon...',
                        'description': response.css('div#tab-1 p::text')[1].get().strip(),
                        'description_en': 'XXX',
                    }
                ]
            },
            "images": [
                {
                    "url": f'assets/images/no-data.webp',
                    "credit": "Coming Soon...",
                    "sourceUrl": "Coming Soon..."
                }
            ],
            "map": {
                "geoJson": f'assets/geojson/{self.trail_id}.geojson',
                "zoom": 14.84,
            },
            "reference": [url],
            "status": "DRAFT"
        }
        self.add_markers_for_paths(trail['route']['paths'])
        # Center the map on the first geocoded waypoint.
        trail['map']['center'] = trail['route']['paths'][0]['marker']
        englishUrl = url.replace('route.php', 'route_en.php')
        englishPageRequest = scrapy.Request(
            englishUrl, callback=self.parse_english_page)
        englishPageRequest.meta['trail'] = trail
        englishPageRequest.meta['area'] = area
        self.trail_id += 1
        yield englishPageRequest

    def download_kml_and_convert_to_geojson(self, kmlUrl):
        """Fetch the route's KML track and convert it with the ``k2g`` CLI."""
        kmlFilePath = f'./kml/{self.trail_id}.kml'
        urllib.request.urlretrieve(kmlUrl, kmlFilePath)
        subprocess.run(['k2g', kmlFilePath, './geojson'])

    def add_markers_for_paths(self, paths):
        """Geocode each waypoint's location name into a lat/lng marker (in place)."""
        for path in paths:
            results = self.geocoder.geocode(path['location'])
            path['marker'] = {
                'latitude': results[0]['geometry']['lat'],
                'longitude': results[0]['geometry']['lng'],
            }

    def parse_english_page(self, response):
        """Merge the English mirror page's fields into the trail from meta
        and yield the completed record."""
        area = response.meta['area']
        # Same mapping as parse(), with English region names.
        areaToRegion = {
            '0': 'Hong Kong Island',
            '1': 'Hong Kong Island',
            '2': 'Kowloon',
            '3': 'New Territories',
            '4': 'Hong Kong Island',
            '5': 'Hong Kong Island',
            '6': 'Hong Kong Island',
            '7': 'Islands',
            '8': 'Islands'
        }
        generalInfoEn = response.css(
            'div.generalInfo td::text').getall()
        englishTrail = {
            'name_en': generalInfoEn[0],
            'description_en': response.css('p.intro::text').get(),
        }
        region = {
            'name_en': areaToRegion[area],
        }
        district = {
            'name_en': response.css('p#indicator::text').get().split('\\')[2].strip(),
        }
        startPath = {
            'location_en': 'Coming Soon...',
            'description_en': response.css('div#tab-1 p::text')[0].get().strip(),
        }
        endPath = {
            'location_en': 'Coming Soon...',
            'description_en': response.css('div#tab-1 p::text')[1].get().strip(),
        }
        paths = response.css('div#trackpointList ol li a::text').getall()
        pathList = [
            {
                'location_en': location.strip(),
                'description_en': 'Coming Soon...',
            } for location in paths
        ]
        trail = response.meta['trail']
        trail['regions'][0] = merge(trail['regions'][0], region)
        trail['districts'][0] = merge(trail['districts'][0], district)
        trail['route']['starts'][0] = merge(
            trail['route']['starts'][0], startPath)
        trail['route']['ends'][0] = merge(trail['route']['ends'][0], endPath)
        # NOTE(review): rebinding ``location`` looks like a no-op, but
        # mergedeep's merge() mutates its first argument in place -- confirm.
        for i, location in enumerate(trail['route']['paths']):
            location = merge(location, pathList[i])
        yield merge(trail, englishTrail)
|
from django.shortcuts import render_to_response, HttpResponse, render, redirect, get_list_or_404
from django.template import RequestContext
import json, httplib, urllib
from project.models import Projects
def index(request):
    """Render the homepage with every project, newest submissions first."""
    context = {
        "projects": get_list_or_404(Projects.objects.all().order_by("-submit_date"))
    }
    return render(request, "index.html", context)
|
"""
In a given grid, each cell can have one of three values:
the value 0 representing an empty cell;
the value 1 representing a fresh orange;
the value 2 representing a rotten orange.
Every minute, any fresh orange that is adjacent (4-directionally) to a rotten orange becomes rotten.
Return the minimum number of minutes that must elapse until no cell has a fresh orange. If this is impossible, return -1 instead.
EX. 1
Input: [[2,1,1],[1,1,0],[0,1,1]]
Output: 4
EX. 2
Input: [[2,1,1],[0,1,1],[1,0,1]]
Output: -1
Explanation: The orange in the bottom left corner (row 2, column 0) is never rotten, because rotting only happens 4-directionally.
EX. 3
Input: [[0,2]]
Output: 0
Explanation: Since there are already no fresh oranges at minute 0, the answer is just 0.
1st approach: breadth first search
-2D array, calculate distance
-if still 1 in the distance is not calculated, return -1
"""
class Solution(object):
    def orangesRotting(self, grid):
        """
        :type grid: List[List[int]]
        :rtype: int

        Runs a BFS from every rotten orange (2) through fresh oranges (1),
        recording the earliest minute each fresh cell can rot; the answer is
        the largest such minute, or -1 if some fresh orange is unreachable.

        Bug fixed: the original referenced ``sys.maxsize`` without importing
        ``sys`` (NameError at runtime); an infinity sentinel is used instead.
        """
        if len(grid) == 0 or len(grid[0]) == 0:
            return 0
        INF = float('inf')  # "never reached" sentinel (was sys.maxsize)
        dist = [[INF] * len(grid[0]) for _ in range(len(grid))]
        for i in range(len(grid)):
            for j in range(len(grid[0])):
                if grid[i][j] == 2:
                    self.bfs(i, j, grid, dist)
        res = 0
        for i in range(len(grid)):
            for j in range(len(grid[0])):
                if grid[i][j] == 1:
                    if dist[i][j] == INF:
                        return -1  # this fresh orange can never rot
                    res = max(res, dist[i][j])
        return res

    def bfs(self, x, y, grid, dist):
        """BFS from the rotten source (x, y); relaxes ``dist`` in place."""
        from collections import deque  # O(1) popleft vs list.pop(0)'s O(n)
        seen = set()
        q = deque([(x, y, 0)])
        while q:
            i, j, steps = q.popleft()
            if i < 0 or i + 1 > len(grid) or j < 0 or j + 1 > len(grid[0]):
                continue
            # Expand only through the source itself and fresh oranges.
            if (i == x and j == y) or grid[i][j] == 1:
                key = (i, j)
                if key in seen:
                    continue
                seen.add(key)
                dist[i][j] = min(dist[i][j], steps)
                q.append((i - 1, j, steps + 1))
                q.append((i + 1, j, steps + 1))
                q.append((i, j - 1, steps + 1))
                q.append((i, j + 1, steps + 1))
|
# Import a specific function from a module
from random import randrange
from math import fsum, remainder, sqrt
# multiple functions can be imported from module. Thi is achieved by separating functions by a comma ','
# Example:
from os import times, system
# Demonstrate the imported helpers alongside a few built-ins.
rand_num = randrange(1, 100)
print(f'Random number between 1 and 100 is: {rand_num}')

numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
# Built-in aggregates, printed in the same order as before.
for aggregate in (min(numbers), max(numbers), sum(numbers)):
    print(aggregate)
print(pow(2, 8))
print(remainder(30, 12))
print(sqrt(40))
# coding: utf-8
"""
This module contains various helpers for migrations testing.
"""
import os
from django import VERSION
# Name suffix shared by every migration these helpers generate and import.
MIGRATION_NAME = 'test'
def makemigrations():
    """Generate migrations for the ``money_app`` test app, non-interactively,
    across the Django versions supported by the test suite."""
    from django.core.management import call_command
    from django.core.management.commands.makemigrations import Command
    from django.db.migrations import questioner
    # We should answer yes for all migrations questioner questions
    questioner.input = lambda x: 'y'
    # Remove stale *.pyc files that could shadow regenerated migration modules.
    os.system('find . -name \*.pyc -delete')
    if VERSION >= (1, 11):
        call_command('makemigrations', 'money_app', name=MIGRATION_NAME)
    else:
        # In Django 1.8 first argument name clashes with command option.
        Command().execute('money_app', name=MIGRATION_NAME, verbosity=1)
def get_migration(name):
    """Import and return the generated Migration class for *name*."""
    module_name = 'money_app.migrations.%s_%s' % (name, MIGRATION_NAME)
    migration_module = __import__(module_name, fromlist=['Migration'])
    return migration_module.Migration
def get_operations(migration_name):
    """Shortcut: the ``operations`` list of the named generated migration."""
    migration = get_migration(migration_name)
    return migration.operations
def migrate():
    """Apply all pending migrations for the ``money_app`` app."""
    from django.core.management import call_command

    call_command('migrate', 'money_app')
|
import random
import asyncio
import aiohttp
import json
from discord import Game
from discord.ext.commands import Bot
# Jolt's Discord bot token.
# SECURITY(review): a live-looking bot token is hard-coded in source control;
# it should be revoked and loaded from an environment variable instead.
TOKEN = 'NTY0NjQ4Nzk2NjQyODY5MjQ4.XKrCew.ZoFF0FF_m0wDUMNmB2DZGs_pPuw'
# Prefixes that mark a message as a command.
BOT_PREFIX = ("!", "?")
# The bot client instance, responding to either prefix.
client = Bot(command_prefix=BOT_PREFIX)
# Create command ball8
@client.command()
async def ball8(ctx, context):
    """Magic 8-ball: reply with a random verdict, mentioning the requester."""
    answers = [
        'That is a resounding no',
        'It is not looking likely',
        'Too hard to tell',
        'It is quite possible',
        'Definitely',
    ]
    reply = random.choice(answers) + ", " + context.message.author.mention
    await ctx.send(reply)
# Create command for hello
@client.command()
async def hello(ctx, context):
    """Greet the requesting user with a mention.

    Bug fixed: ``ctx.mention.author`` does not exist and raised
    AttributeError; the mention lives at ``context.message.author.mention``,
    exactly as the ball8 command uses it.
    """
    await ctx.send("Hello, " + context.message.author.mention)
# Create command for square
@client.command()
async def square(ctx, number):
    """Reply with the square of the user's (integer) input."""
    value = int(number)
    squared_value = value * value
    await ctx.send(str(number) + " squared is " + str(squared_value))
@client.event
async def on_ready():
    """Set the bot's presence once connected and log the login.

    Bug fixed: the ``on_ready`` event is dispatched with no arguments, so the
    old ``on_ready(ctx)`` signature made discord.py raise a TypeError when the
    event fired; presence is changed on the client itself.
    """
    await client.change_presence(game=Game(name="with humans"))
    print("Logged in as " + client.user.name)
@client.command()
async def bitcoin(ctx):
    """Fetch the current BTC/USD price from CoinDesk and post it."""
    url = 'https://api.coindesk.com/v1/bpi/currentprice/BTC.json'
    async with aiohttp.ClientSession() as session:  # Async HTTP request
        raw_response = await session.get(url)
        body = await raw_response.text()
    data = json.loads(body)
    await ctx.send("Bitcoin price is: $" + data['bpi']['USD']['rate'])
async def list_servers(ctx):
    """Every 10 minutes, print the servers the bot is connected to.

    NOTE(review): written against an old discord.py API -- ``client.is_closed``
    is used as an attribute (it is a method in 1.x) and ``ctx.servers``
    predates ``guilds``; confirm the pinned discord.py version.
    """
    await client.wait_until_ready()
    while not client.is_closed:
        print("Current servers:")
        for server in ctx.servers:
            print(server.name)
        await asyncio.sleep(600)
# Schedule the periodic server listing, then start the bot (blocking call).
client.loop.create_task(list_servers(client))
client.run(TOKEN)
import traceback
import logging
try:
import multiprocessing
except ImportError:
multiprocessing = None
class MultiprocessHelper:
"""
An small framework for running heavy operations as multi-processes.
MultiprocessHelper uses a map-reduce architecture, where multiple read workers perform the mapping
and a single write worker performs reduce.
    The final processed results are returned as a list (may be empty).
"""
def __init__(self, num_processes=multiprocessing.cpu_count(), queue_size=1024):
"""
Initialize the helper and ready it for a multi-process batch job.
:param num_processes: Number of processes to use. Default: cpu count.
Note: The default value may depend on operating systems definitions and may be inaccurate.
:param queue_size: The buffer cache size for each worker thread. This is the max number of entries from
the workload that each worker may hold at any present time.
Normally you shouldalter this value only to prevent out of memory errors.
"""
self.num_processes = num_processes
self.queue_size = queue_size
read_workers_count = max(1, num_processes-1)
self.read_queues = [multiprocessing.Queue(queue_size) for _ in range(read_workers_count)]
self.write_queue = multiprocessing.Queue(queue_size)
self.results_queue = multiprocessing.Queue(queue_size)
logging.debug('MultiprocessHelper :: Initialized Multiprocess Helper with %r processes and queue_size=%r. '
'Main process id=%r' % (num_processes, queue_size, self.worker_id()))
@staticmethod
def worker_id():
"""
:return: Unique id of the current worker process.
"""
return multiprocessing.current_process().pid
    def execute(self, workload, read_func, write_func=None, read_args=(), write_args=(),
                aggregation_type=list):
        """
        Executes the map-reduce operation on the workload, using multiple processes.
                              [map]                        [reduce]
        ============   ->   read_func
        | Workload |   ->   read_func   ->   write_func   ->   | Results |
        ============   ->   read_func
        :param workload: An iterable object containing the data entries.
        :param read_func: A "map function", which accepts a single data entry and performs some
                          mapping / transformation operation on it.
                          The exact signature of read_func is func(entry_idx, data_entry, *args), where
                          entry_idx is a unique running index of the current entry from the workload,
                          data_entry is the content of the current entry from the workload, and args is a tuple
                          containing any number of additional optional arguments needed by this function (may be empty).
                          Note: the return value of this function may be limited by the type of objects possible to share
                          between processes. In general, those should be simple, canonical objects (i.e: str).
                          For example: objects containing c-pointers cannot be returned by this function.
        :param write_func: A "reduce function", which accepts a single data entry and performs some
                           aggregation operation on it.
                           The exact signature of write_func is func(entry_idx, data_entry, aggregation, *args), where
                           entry_idx is a unique running index of the current entry from the workload,
                           data_entry is the content of the processed entry as outputted from the mapping function,
                           aggregation is a reusable cache (list) the function may utilize to keep information between
                           repeated invocations, and args is a tuple containing any number of additional optional
                           arguments needed by this function (may be empty).
                           Note: the return value of this function may be limited by the type of objects possible to
                           share between processes. In general, those should be simple, canonical objects (i.e: str).
        :param read_args: Tuple. Optional arguments needed by read_func.
        :param write_args: Tuple. Optional arguments needed by write_func.
        :param aggregation_type: Type of iterable container to contain aggregated results, as used
                                 by the write worker.
        :return: A list containing the results of write_func.
        """
        # Initialize an inner object that helps balance the workload between multiple workers
        # NOTE(review): WorkloadAllocator is not defined in the visible part of
        # this module -- confirm it is defined/imported elsewhere in the file.
        workload_alloc = WorkloadAllocator(workload)
        # Create the reader / writer workers
        read_processes, write_process = self.prepare_processes(read_func, write_func,
                                                               read_args, write_args,
                                                               aggregation_type)
        # Start the workers. They will block until data from the workload is fed.
        self.start_processes(read_processes, write_process)
        # Feed the initial batch of data to the reader workers.
        workload_alloc.divide_workload(self.read_queues)
        # Wait for the reader / writer workers to finish processing
        results = self.async_process(read_processes, write_process)
        logging.debug('MultiprocessHelper :: Execution finished')
        return results
def prepare_processes(self, read_func, write_func, read_args, write_args, aggregation_type):
"""
Creates the reader / writer workers.
Note: The workers are not started until start_processes is called.
:param read_func: A map function to be used by the reader workers.
:param write_func: A reduce function to be used the writer worker.
:param read_args: Tuple. Optional arguments needed by read_func.
:param write_args: Tuple. Optional arguments needed by write_func.
:param aggregation_type: Type of iterable container to contain aggregated results, as used
by the write worker.
:return: The initialized read / write workers
"""
read_processes = []
for idx in range(len(self.read_queues)):
read_worker_params = (read_func, read_args,
self.read_queues[idx], self.write_queue)
read_processes.append(multiprocessing.Process(target=self.read_worker, args=read_worker_params))
write_process = multiprocessing.Process(target=self.write_worker,
args=(write_func, write_args,
self.write_queue, self.results_queue, aggregation_type))
return read_processes, write_process
@staticmethod
def start_processes(read_processes, write_process):
"""
Starts the reader and writer workers.
:param read_processes: List of multiple reader workers running the map function.
:param write_process: Single writer worker running the reduce function.
"""
for process in read_processes:
process.start()
write_process.start()
def async_process(self, read_processes, write_process):
"""
Blocks until all reader & writer workers finish processing.
Note: This function doesn't use a timeout, to support very long operations.
:param read_processes: List of multiple reader workers running the map function.
:param write_process: Single writer worker running the reduce function.
:return: A list of results (or single item), as returned by the write worker.
"""
for process in read_processes:
while process.is_alive():
process.join(timeout=1)
logging.debug('MultiprocessHelper :: Main process - waiting for reader #%r' % process.pid)
logging.debug('MultiprocessHelper :: Main process - Read Worker collected')
self.write_queue.put(None)
results = self.collect_results(self.results_queue) # Wait for writer to finish and put load on queue
while write_process.is_alive():
write_process.join(timeout=1)
logging.debug('MultiprocessHelper :: Main process - waiting for writer #%r' % process.pid)
logging.debug('MultiprocessHelper :: Main process - Write Worker collected')
return results
    @staticmethod
    def read_worker(read_func, read_func_args, read_queue, write_queue):
        """
        Read worker main function.
        Consumes workload data allocated from workload_alloc, and runs read_func(), the map function,
        on each entry.
        The transformed results are placed inside the writer queue.
        :param read_func: A map function to be used by the reader workers.
        :param read_func_args: Tuple. Optional arguments needed by read_func.
        :param read_queue: A queue for caching the current workload this worker has been allocated.
                           By the beginning of this function, read_queue is already filled with some data.
        :param write_queue: A queue for storing the processed entries of this worker.
        """
        logging.debug('MultiprocessHelper :: Process [Read Worker] #%r started..' % MultiprocessHelper.worker_id())
        while True:
            # Next entry; a None chunk is the "no more work" sentinel.
            chunk = read_queue.get()
            if chunk is None: # Finished current allocation.
                logging.debug('MultiprocessHelper :: Process [Read Worker] #%r has no more workload..' %
                              MultiprocessHelper.worker_id())
                break # Workload have been processed entirely..
            entry_idx, data_entry = chunk
            try:
                # Run the map function
                processed_entry = read_func(entry_idx, data_entry, *read_func_args)
                write_queue.put((entry_idx, processed_entry))
                logging.debug('MultiprocessHelper :: Process [Read Worker] #%r finished processing entry %r..' %
                              (MultiprocessHelper.worker_id(), entry_idx))
            except Exception:
                logging.error('MultiprocessHelper :: Process [Read Worker] #%r have '
                              'run into error during read_func()..' % MultiprocessHelper.worker_id())
                traceback.print_exc()
                # Keep the pipeline in sync: emit a None placeholder so the
                # writer still sees one entry per allocated index.
                write_queue.put((entry_idx, None))
        logging.debug('MultiprocessHelper :: Process [Read Worker] #%r exiting..' % MultiprocessHelper.worker_id())
@staticmethod
def write_worker(write_func, write_func_args, write_queue, results_queue, aggregation_type):
"""
Write worker main function.
Consumes workload data aggregated from all reader workers, and runs write_func(), the reduce function,
on each entry.
The aggregated results are placed inside the results queue.
:param write_func: A reduce function to be used by the write worker.
:param write_func_args: Tuple. Optional arguments needed by write_func.
:param write_queue: A queue for caching current workloaded this worker have accumulated from the reader
workers.
:param results_queue: A queue for storing the processed entries of this worker to the main process.
:param aggregation_type: Type of iterable container to contain aggregated results, as used
by the write worker.
"""
logging.debug('MultiprocessHelper :: Process [Write Worker] #%r started..' % MultiprocessHelper.worker_id())
results = None
aggregation = aggregation_type()
while True:
chunk = write_queue.get()
if chunk is None:
logging.debug('MultiprocessHelper :: Process [Write Worker] #%r has no more workload..' %
MultiprocessHelper.worker_id())
break
entry_idx, processed_entry = chunk
if processed_entry is not None:
if write_func is not None:
results = write_func(entry_idx, processed_entry, aggregation, *write_func_args)
logging.debug('MultiprocessHelper :: Process [Write Worker] #%r finished processing %r..' %
(MultiprocessHelper.worker_id(), entry_idx))
else:
logging.error('MultiprocessHelper :: Process [Write Worker] #%r have encountered a'
' bad entry: index=%r' % (MultiprocessHelper.worker_id(), entry_idx))
logging.debug('MultiprocessHelper :: Process [Write Worker] #%r exiting..' % MultiprocessHelper.worker_id())
# The last call to write_func determines the results
if results is not None:
results_queue.put(results)
results_queue.put(None)
@staticmethod
def collect_results(results_queue):
"""
Consumes the results queue containing the write worker aggregation results, and returns it as a list.
:param results_queue:
:return: A list of results (or single item), as returned by the write worker.
"""
collected_results = []
while True:
next_result = results_queue.get()
if next_result is None:
break
collected_results.append(next_result)
if collected_results is None:
return None
return collected_results[0] if len(collected_results) == 1 else collected_results
class WorkloadAllocator:
    """
    A helper object for allocating data from the workload to the worker processes.
    """
    def __init__(self, workload):
        """
        :param workload: Iterable containing the data.
        """
        # DOC FIX: the previous docstring documented a `queue_size` parameter
        # that this constructor never had.
        # Monotonically increasing index attached to each entry handed out.
        self.next_workload_entry_idx = 0
        self.workload_iterator = iter(workload)
    def divide_workload(self, read_queues):
        """
        Divides workload among the read workers.
        :param read_queues: List of queues to cache data from the workload
        :return: read_queues are filled with data from the workload, and finally with None to symbolize the
                 end of the queue (prevent blocking)
        """
        logging.debug('MultiprocessHelper :: Dividing workload to %r read queues' %
                      len(read_queues))
        while True:
            try:
                item = next(self.workload_iterator)
            except StopIteration:
                logging.debug('MultiprocessHelper :: Iteration finished..')
                break
            # Divide data in round robin fashion
            next_queue = self.next_workload_entry_idx % len(read_queues)
            data_entry = (self.next_workload_entry_idx, item)
            read_queues[next_queue].put(data_entry)
            self.next_workload_entry_idx += 1
        logging.debug('MultiprocessHelper :: Workload exhausted, sealing queue ends..')
        # Mark the end of queue to prevent reader workers from blocking.
        # (Renamed the loop variable: it previously shadowed the `queue` module.)
        for read_queue in read_queues:
            read_queue.put(None)
        logging.debug('MultiprocessHelper :: Workload allocator quitting')
|
import os
import time
import jwt
import requests
from cryptography.hazmat.backends import default_backend
def print_github_token():
    """Print a short-lived GitHub App installation token as ``GITHUB_TOKEN=...``.

    Reads ``DEPLOY_APP_ID`` and ``DEPLOY_APP_PRIVATE_KEY`` from the
    environment, signs a JWT with the app's private key and exchanges it for
    an installation access token via the GitHub REST API.
    """
    app_id = os.environ['DEPLOY_APP_ID']
    pem_bytes = os.environ['DEPLOY_APP_PRIVATE_KEY'].encode()
    private_key = default_backend().load_pem_private_key(pem_bytes, None)
    issued_at = int(time.time())
    claims = {
        # issued at time
        'iat': issued_at,
        # JWT expiration time (10 minute maximum)
        'exp': issued_at + (10 * 60),
        # GitHub App's identifier
        'iss': app_id
    }
    signed_jwt = jwt.encode(claims, private_key, algorithm='RS256')
    headers = {"Authorization": "Bearer {}".format(signed_jwt),
               "Accept": "application/vnd.github.machine-man-preview+json"}
    installations = requests.get('https://api.github.com/app/installations', headers=headers).json()
    installation_id = installations[0]['id']
    token_url = 'https://api.github.com/app/installations/{}/access_tokens'.format(installation_id)
    token = requests.post(token_url, headers=headers).json()['token']
    print('GITHUB_TOKEN={}'.format(token))
# Emit the token only when executed as a script, not on import.
if __name__ == '__main__':
    print_github_token()
|
#!/usr/bin/env python3
# Copyright 2018 Stefan Kroboth
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
# http://opensource.org/licenses/MIT>, at your option. This file may not be
# copied, modified, or distributed except according to those terms.
"""Command Line interface
Author: Stefan Kroboth <stefan.kroboth@gmail.com>
"""
# pylint: disable=invalid-name
# pylint: disable=relative-import
# pylint: disable=no-member
# pylint: disable=too-many-locals
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
from __future__ import print_function
import os
from artbox.parser import args, parser
from time import time
os.environ['CUDA_DEVICE'] = str(args.gpu)
import pycuda.autoinit
from artbox.reconfile import load_dataset
from artbox.operators import Operator
from artbox.cg import CG
from artbox.tgv import tgv
from artbox.tools import create_dir, gpu_info, save_matlab
def _build_operator(data):
    """Construct the encoding Operator for *data* with the CLI-selected
    options, optionally reporting the build time (``--time-operator``)."""
    if args.time_operator:
        operator_time = time()
    if args.verbose:
        print("Building Operator...")
    op = Operator(data,
                  double=args.double,
                  max_threads=args.max_threads,
                  norm_div=args.norm_div,
                  divide=args.divide,
                  hop=args.hop,
                  divide_adjoint=args.divide_adjoint,
                  divide_forward=args.divide_forward,
                  hop_adjoint=args.hop_adjoint,
                  hop_forward=args.hop_forward,
                  show_kernel_params=args.show_kernel_params,
                  verbose=args.verbose)
    if args.time_operator:
        print("    Operator build time: " + str(time() - operator_time) +
              " seconds")
    return op


def _print_runtime(dfile, tot_iters, start_time):
    """Report wall-clock runtime for *dfile* (``--time``)."""
    print("Runtime for file " + dfile + " with " + str(tot_iters) +
          " iterations: " + str(time() - start_time) + " seconds")


def _run_forward(dfile, out_dir):
    """Apply the forward model to the image stored in *dfile* and save the
    result to *out_dir* as a MATLAB file."""
    if args.verbose:
        print("Applying forward model to image")
    sub_dir = out_dir + "/forward"
    create_dir(sub_dir, args.y)
    if args.verbose:
        print("Loading Data...")
    data = load_dataset(dfile, double=args.double)
    op = _build_operator(data)
    if args.time:
        start_time = time()
    # GPU imports are deferred until a GPU operation is actually requested.
    import pycuda.gpuarray as gpuarray
    import numpy as np
    result = gpuarray.zeros([data.nT, data.nC], np.complex64,
                            order='F')
    op.apply(gpuarray.to_gpu(data.object.astype(np.complex64)), result)
    if args.time:
        print("Runtime for file " + dfile + ": " +
              str(time() - start_time) + " seconds")
    save_matlab(result.get(), out_dir, "result")


def _run_cg(dfile, out_dir):
    """Run a CG reconstruction for *dfile*, writing into ``out_dir/cg``."""
    if args.verbose:
        print("CG Reconstruction")
    cg_out_dir = out_dir + "/cg"
    create_dir(cg_out_dir, args.y)
    if args.verbose:
        print("Loading Data...")
    loading_time = time()
    data = load_dataset(dfile, double=args.double)
    print("Loading time: " + str(time() - loading_time) + " seconds")
    op = _build_operator(data)
    if args.time:
        start_time = time()
    cg = CG(op,
            data,
            cg_out_dir,
            double=args.double,
            relative_tolerance=args.relative_tolerance,
            absolute_tolerance=args.absolute_tolerance,
            iters=args.iters,
            no_progress=args.no_progress,
            time_iters=args.time_iters,
            save_images=args.save_images,
            save_mat=args.save_matlab,
            image_format=args.image_format,
            verbose=args.verbose)
    (_, tot_iters) = cg.run()
    if args.time:
        _print_runtime(dfile, tot_iters, start_time)


def _run_tgv(dfile, out_dir, cg):
    """Run a TGV (``cg=False``) or TGV-CG (``cg=True``) reconstruction for
    *dfile*; the two modes differ only in the inner solver and output dir."""
    if args.verbose:
        print("TGV CG Reconstruction" if cg else "TGV Reconstruction")
    sub_dir = out_dir + ("/tgvcg" if cg else "/tgv")
    create_dir(sub_dir, args.y)
    if args.verbose:
        print("Loading Data...")
    data = load_dataset(dfile, double=args.double)
    op = _build_operator(data)
    if args.time:
        start_time = time()
    tot_iters = tgv(op,
                    sub_dir,
                    alpha=args.alpha,
                    tau_p=args.tau_p,
                    tau_d=args.tau_d,
                    reduction=args.reduction,
                    fac=args.fac,
                    iters=args.iters,
                    relative_tolerance=args.relative_tolerance,
                    absolute_tolerance=args.absolute_tolerance,
                    cg=cg,
                    inner_iters=args.inner_iters,
                    norm_est=args.norm_est,
                    norm_est_iters=args.norm_est_iters,
                    time_iters=args.time_iters,
                    no_progress=args.no_progress,
                    save_images=args.save_images,
                    save_mat=args.save_matlab,
                    image_format=args.image_format,
                    verbose=args.verbose)
    if args.time:
        _print_runtime(dfile, tot_iters, start_time)


def _run_adjoint_test(dfile):
    """Check numerically that the operator's adjoint matches its forward model."""
    if args.verbose:
        print("Test Adjoint Operator (Encoding Matrix)")
    if args.verbose:
        print("Loading Data...")
    data = load_dataset(dfile, double=args.double)
    op = _build_operator(data)
    if args.time:
        start_time = time()
    tot_iters = op.test_adjoint(args.iters)
    if args.time:
        _print_runtime(dfile, tot_iters, start_time)


def main():
    """Artbox command line entry point.

    Validates the input files, then for every data file runs whichever of the
    forward model, CG, TGV, TGV-CG and adjoint-test stages were requested on
    the command line. (The repeated Operator construction / timing / runtime
    reporting previously duplicated across all branches now lives in the
    private helpers above.)
    """
    # set environment variable to use chosen GPU device
    # this needs to be done *before* any GPU stuff is done
    # os.environ['CUDA_DEVICE'] = str(args.gpu)
    # Print GPU information
    if args.gpu_info:
        gpu_info()
    # if time_iters is chosen, it is not useful to show the progress bar
    if args.time_iters:
        args.no_progress = True
    out = args.out
    # Check if all provided files exist
    for dfile in args.data:
        if not os.path.isfile(dfile):
            parser.error("No file " + dfile + ": exiting.")
    ###############################################################################
    #                          ITERATE OVER ALL FILES                             #
    ###############################################################################
    for dfile in args.data:
        if args.verbose:
            print("Processing file " + dfile + " ...")
        # create directory based on filename
        if dfile.endswith('.mat') or dfile.endswith('.npz'):
            out_dir = out + "/" + os.path.basename(dfile)[:-4]
        else:
            raise IOError("Wrong file format.")
        if args.forward:
            _run_forward(dfile, out_dir)
        if args.cg:
            _run_cg(dfile, out_dir)
        if args.tgv:
            _run_tgv(dfile, out_dir, cg=False)
        if args.tgvcg:
            _run_tgv(dfile, out_dir, cg=True)
        if args.test_adjoint_encoding_mat:
            _run_adjoint_test(dfile)
# Run the CLI only when executed as a script, not on import.
if __name__ == "__main__":
    main()
|
#find all movies details
from imdb_task_1 import movies
import requests,pprint
from bs4 import BeautifulSoup
url = "https://www.imdb.com/title/tt0066763/"
def scrape_movie_details(movie_url):
    """Scrape a single IMDb title page and return its details as a dict.

    :param movie_url: Full URL of an IMDb title page.
    :return: dict with keys movies_name, director, Country, language,
             image_url, bio, runtime (minutes) and genre.
    """
    page = requests.get(movie_url)
    soup = BeautifulSoup(page.text, "html.parser")

    # Title: drop the trailing 8 characters (the " (YYYY) " year suffix).
    title_text = soup.find('div', class_="titleBar").h1.get_text()
    movies_name = title_text[:len(title_text) - 8]

    # Directors: the first credit_summary_item block holds the director links.
    director_div = soup.find('div', class_="credit_summary_item")
    director_list = [a.text for a in director_div.find_all("a")]

    # Runtime: either "Xh Ymin" or "Xh"; normalise to minutes.
    sub_div = soup.find('div', class_="subtext")
    run_time = sub_div.find("time").get_text().strip().split()
    if len(run_time) == 1:
        hours = run_time[0].split("h")
        run_time_minutes = int(hours[0]) * 60
    else:
        hours = run_time[0].split("h")
        minutes = run_time[1].split("min")
        run_time_minutes = int(hours[0]) * 60 + int(minutes[0])

    poster_image_url = "www.imdb.com" + soup.find('div', class_="poster").a['href']

    plot_div = soup.find('div', class_="plot_summary")
    bio = plot_div.find('div', class_="summary_text").get_text().strip()

    details = soup.find('div', attrs={"class": "article", "id": "titleDetails"})
    # BUG FIX: country was previously left unbound (NameError) when the page
    # has no "Country:" block; default to an empty string instead.
    country = ""
    for block in details.find_all('div', class_="txt-block"):
        if block.find("h4").text == "Country:":
            for link in block.find_all("a"):
                country = link.text  # keep the last listed country link
            break

    language_list = []
    for block in details.find_all('div', class_="txt-block"):
        if block.find("h4").get_text() == "Language:":
            language_list = [link.text for link in block.find_all("a")]
            break

    story_line = soup.find('div', attrs={"class": "article", "id": "titleStoryLine"})
    genre_list = []
    for block in story_line.find_all('div', class_="see-more inline canwrap"):
        if block.find("h4").text == "Genres:":
            genre_list.extend(link.text for link in block.find_all("a"))

    # BUG FIX: removed the unreachable pprint call that followed the return
    # statement, and the Total_detail_list accumulator it relied on.
    return {"movies_name": movies_name,
            "director": director_list,
            "Country": country,
            "language": language_list,
            "image_url": poster_image_url,
            "bio": bio,
            "runtime": run_time_minutes,
            "genre": genre_list}
# BUG FIX: the page was previously scraped twice (one bare call whose result
# was discarded, then again for printing). Scrape once and pretty-print.
pprint.pprint(scrape_movie_details(url))
|
import numpy as np

n = 8

# Build the n x n checkerboard from coordinate parity: cell (i, j) holds
# (i + j) % 2, so the top-left corner is 0 and neighbours always alternate.
matrix = np.add.outer(np.arange(n), np.arange(n)) % 2

# Print the checkerboard pattern, one space-separated row per line.
for row in matrix:
    for cell in row:
        print(cell, end=" ")
    print()
|
from base_classes.base_scraper import BaseScraper
class IssuePage(BaseScraper):
    """Scraper for a single comic-issue page on Atomic Avenue."""

    url_root = 'https://atomicavenue.com/atomic'

    def __init__(self, url_to_scrape):
        super(IssuePage, self).__init__(url_to_scrape)

    def get_current_for_sale(self):
        """Return a list of dicts, one per copy currently for sale.

        Table rows are either condition headers (which set the condition for
        the seller rows that follow) or individual seller rows.
        """
        sellers = []
        condition = ""
        for row in self.dom.find("table").find_all("tr"):
            row_classes = row.get("class") or []
            if "conditionHeader" in row_classes:
                condition = row.text.strip()
            elif "issueGridRow" in row_classes:
                seller = row.find("a")
                sellers.append({
                    "condition": condition,
                    "seller_name": seller.text.strip(),
                    "seller_link": seller.get("href"),
                    "price": row.find_all("td")[2].text,
                    "extra_info": row.find("td", {"class": "issueGridNotes"}).text
                })
        return sellers

    def get_issue_info(self):
        """Return a dict of issue metadata scraped from the details panel."""
        infos = self.dom.find("div", {"id": "issueDetails"}).find_all("p")
        publish_info = self.dom.find("span", {"id": "ctl00_ContentPlaceHolder1_lblNotes"}).find_all("a")[:2]
        info = {
            "extra_info": "",
            "publish_date": publish_info[1].text,
            "publish_date_url": publish_info[1].get("href"),
            "publisher": publish_info[0].text,
            "publisher_url": publish_info[0].get("href")
        }
        for cur_info in infos:
            if cur_info.find("label"):
                # BUG FIX: split(":", 1) -- values containing a colon used to
                # make the 2-tuple unpacking raise ValueError.
                key, val = cur_info.text.split(":", 1)
                info[key] = val.strip()
            else:
                info["extra_info"] += cur_info.text + "\n"
        return info

    def next_issue(self):
        """Return an IssuePage for the next issue in the jump-to dropdown.

        Raises ValueError when the current issue is the last one (previously
        this surfaced as an opaque NameError on an unbound local).
        """
        split_url = self.url_to_scrape.split("/")
        cur_page = False
        next_page = None
        for page in self.dom.find("select", {"id": "ctl00_ContentPlaceHolder1_cboJumpToIssue"}).find_all("option"):
            if cur_page:
                next_page = (page.get("value"), page.text)
                break
            if page.get("value") == split_url[-2]:
                cur_page = True
        if next_page is None:
            raise ValueError("No next issue found for " + self.url_to_scrape)
        next_url = "/".join(split_url[:-2] + [next_page[0], split_url[-1]])
        return IssuePage(next_url)

    def previous_issue(self):
        """Return an IssuePage for the previous issue in the jump-to dropdown.

        Raises ValueError when the current issue is the first one (previously
        this surfaced as an opaque NameError on an unbound local).
        """
        split_url = self.url_to_scrape.split("/")
        prev_page = None
        for page in self.dom.find("select", {"id": "ctl00_ContentPlaceHolder1_cboJumpToIssue"}).find_all("option"):
            if page.get("value") == split_url[-2]:
                break
            prev_page = (page.get("value"), page.text)
        if prev_page is None:
            raise ValueError("No previous issue found for " + self.url_to_scrape)
        prev_url = "/".join(split_url[:-2] + [prev_page[0], split_url[-1]])
        return IssuePage(prev_url)
|
from qutip.solver.options import SolverOdeOptions
from qutip.solver.sesolve import SeSolver
from qutip.solver.mesolve import MeSolver
from qutip.solver.solver_base import Solver
import qutip
import numpy as np
from numpy.testing import assert_allclose
import pytest
class TestIntegratorCte():
    """Run every registered integrator against constant (time-independent)
    systems and compare one monitored matrix element with its closed-form
    value at each requested time.
    """
    # Stored as plain lambdas on the class, so the first parameter absorbs
    # `self` when called as a method.
    _analytical_se = lambda _, t: np.cos(t * np.pi)
    se_system = qutip.QobjEvo(-1j * qutip.sigmax() * np.pi)
    _analytical_me = lambda _, t: 1 - np.exp(-t)
    me_system = qutip.liouvillian(qutip.QobjEvo(qutip.qeye(2)),
                                  c_ops=[qutip.destroy(2)])
    # Parametrize over every integrator registered for the given solver class.
    @pytest.fixture(params=list(SeSolver.avail_integrators().keys()))
    def se_method(self, request):
        return request.param
    @pytest.fixture(params=list(MeSolver.avail_integrators().keys()))
    def me_method(self, request):
        return request.param
    # TODO: Change when the McSolver is added
    @pytest.fixture(params=list(Solver.avail_integrators().keys()))
    def mc_method(self, request):
        return request.param
    def test_se_integration(self, se_method):
        # Drive the integrator through run() over a time grid and check the
        # [0, 0] element against the analytical solution at every point.
        opt = SolverOdeOptions(method=se_method)
        evol = SeSolver.avail_integrators()[se_method](self.se_system, opt)
        # Non-square (2, 3) state also exercises shape preservation.
        state0 = qutip.core.unstack_columns(qutip.basis(6,0).data, (2, 3))
        evol.set_state(0, state0)
        for t, state in evol.run(np.linspace(0, 2, 21)):
            assert_allclose(self._analytical_se(t),
                            state.to_array()[0, 0], atol=2e-5)
            assert state.shape == (2, 3)
    def test_me_integration(self, me_method):
        # Same check via repeated integrate() calls on a vectorized density
        # matrix evolved under the Liouvillian defined above.
        opt = SolverOdeOptions(method=me_method)
        evol = MeSolver.avail_integrators()[me_method](self.me_system, opt)
        state0 = qutip.operator_to_vector(qutip.fock_dm(2,1)).data
        evol.set_state(0, state0)
        for t in np.linspace(0, 2, 21):
            t_, state = evol.integrate(t)
            assert t_ == t
            assert_allclose(self._analytical_me(t),
                            state.to_array()[0, 0], atol=2e-5)
    def test_mc_integration(self, mc_method):
        # Exercise the mcstep() interface: step towards a target time, then
        # step backwards to the midpoint, then forward again -- the pattern
        # used by Monte-Carlo collapse searches.
        opt = SolverOdeOptions(method=mc_method)
        evol = Solver.avail_integrators()[mc_method](self.se_system, opt)
        state = qutip.basis(2,0).data
        evol.set_state(0, state)
        t = 0
        for i in range(1, 21):
            t_target = i * 0.05
            while t < t_target:
                t_old, y_old = evol.get_state()
                t, state = evol.mcstep(t_target)
                assert t <= t_target
                assert t > t_old
                assert_allclose(self._analytical_se(t),
                                state.to_array()[0, 0], atol=2e-5)
                t_back = (t + t_old) / 2
                t_got, bstate = evol.mcstep(t_back)
                assert t_back == t_got
                # NOTE(review): this re-checks the forward state `state` at
                # time `t`; presumably it was meant to check `bstate` against
                # _analytical_se(t_back) -- confirm against qutip upstream.
                assert_allclose(self._analytical_se(t),
                                state.to_array()[0, 0], atol=2e-5)
                t, state = evol.mcstep(t)
class TestIntegrator(TestIntegratorCte):
    """Same checks as TestIntegratorCte but with time-dependent coefficients;
    the fixtures are restricted to integrators that declare support for time
    dependence.
    """
    # With the "t" coefficient the accumulated phase is the integral of t,
    # hence the t**2/2 argument relative to the constant-system case.
    _analytical_se = lambda _, t: np.cos(t**2/2 * np.pi)
    se_system = qutip.QobjEvo([-1j * qutip.sigmax() * np.pi, "t"])
    _analytical_me = lambda _, t: 1 - np.exp(-(t**3) / 3)
    me_system = qutip.liouvillian(
        qutip.QobjEvo(qutip.qeye(2)),
        c_ops=[qutip.QobjEvo([qutip.destroy(2), 't'])]
    )
    # Only integrators advertising support_time_dependant are parametrized.
    @pytest.fixture(
        params=[key for key, integrator in SeSolver.avail_integrators().items()
                if integrator.support_time_dependant]
    )
    def se_method(self, request):
        return request.param
    @pytest.fixture(
        params=[key for key, integrator in MeSolver.avail_integrators().items()
                if integrator.support_time_dependant]
    )
    def me_method(self, request):
        return request.param
    @pytest.fixture(
        params=[key for key, integrator in Solver.avail_integrators().items()
                if integrator.support_time_dependant]
    )
    def mc_method(self, request):
        return request.param
|
#!/usr/bin/env python
import xml.etree.ElementTree as ET
class tailf_webui(object):
"""Auto generated class.
"""
def __init__(self, **kwargs):
self._callback = kwargs.pop('callback')
def webui_schematics_panels_panel_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name = ET.SubElement(panel, "name")
name.text = kwargs.pop('name')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_title(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
title = ET.SubElement(properties, "title")
title.text = kwargs.pop('title')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_description(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
description = ET.SubElement(properties, "description")
description.text = kwargs.pop('description')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_width(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
width = ET.SubElement(properties, "width")
width.text = kwargs.pop('width')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_properties_height(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
properties = ET.SubElement(panel, "properties")
height = ET.SubElement(properties, "height")
height.text = kwargs.pop('height')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_id(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id = ET.SubElement(component, "id")
id.text = kwargs.pop('id')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_top(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id_key = ET.SubElement(component, "id")
id_key.text = kwargs.pop('id')
properties = ET.SubElement(component, "properties")
top = ET.SubElement(properties, "top")
top.text = kwargs.pop('top')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_left(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id_key = ET.SubElement(component, "id")
id_key.text = kwargs.pop('id')
properties = ET.SubElement(component, "properties")
left = ET.SubElement(properties, "left")
left.text = kwargs.pop('left')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_width(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id_key = ET.SubElement(component, "id")
id_key.text = kwargs.pop('id')
properties = ET.SubElement(component, "properties")
width = ET.SubElement(properties, "width")
width.text = kwargs.pop('width')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_height(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id_key = ET.SubElement(component, "id")
id_key.text = kwargs.pop('id')
properties = ET.SubElement(component, "properties")
height = ET.SubElement(properties, "height")
height.text = kwargs.pop('height')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_z_index(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
schematics = ET.SubElement(webui, "schematics")
panels = ET.SubElement(schematics, "panels")
panel = ET.SubElement(panels, "panel")
name_key = ET.SubElement(panel, "name")
name_key.text = kwargs.pop('name')
components = ET.SubElement(panel, "components")
component = ET.SubElement(components, "component")
id_key = ET.SubElement(component, "id")
id_key.text = kwargs.pop('id')
properties = ET.SubElement(component, "properties")
z_index = ET.SubElement(properties, "z-index")
z_index.text = kwargs.pop('z_index')
callback = kwargs.pop('callback', self._callback)
return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_image_image_image(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/image/image/image
    from kwargs ('name', 'id', 'image'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    # 'image' is deliberately rebound three times while descending the
    # generated image/image/image hierarchy; only the last one gets text.
    image = ET.SubElement(component_type, "image")
    image = ET.SubElement(image, "image")
    image = ET.SubElement(image, "image")
    image.text = kwargs.pop('image')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_text(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/link/link/text
    from kwargs ('name', 'id', 'text'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    link = ET.SubElement(component_type, "link")
    link = ET.SubElement(link, "link")
    text = ET.SubElement(link, "text")
    text.text = kwargs.pop('text')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_link(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/link/link/link
    from kwargs ('name', 'id', 'link'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    # 'link' rebound while descending link/link/link; last one holds text.
    link = ET.SubElement(component_type, "link")
    link = ET.SubElement(link, "link")
    link = ET.SubElement(link, "link")
    link.text = kwargs.pop('link')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_assets_asset_name(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset/name.

    Pops 'name' from kwargs as the leaf text and hands the finished
    tree to 'callback' (default: self._callback).
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    node = ET.SubElement(node, "schematics")
    node = ET.SubElement(node, "assets")
    node = ET.SubElement(node, "asset")
    leaf = ET.SubElement(node, "name")
    leaf.text = kwargs.pop('name')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
def webui_schematics_assets_asset_asset_type_image_base_64_image(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset[name]/
    asset-type/image/base-64-image from kwargs ('name', 'base_64_image');
    tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    base_64_image = ET.SubElement(image, "base-64-image")
    base_64_image.text = kwargs.pop('base_64_image')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_assets_asset_asset_type_image_type(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset[name]/
    asset-type/image/type.

    Pops 'name' (asset list key) and 'type' (leaf value) from kwargs and
    hands the finished tree to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    # Fix: local renamed from 'type' to avoid shadowing the builtin.
    type_leaf = ET.SubElement(image, "type")
    type_leaf.text = kwargs.pop('type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_username(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile/username
    from kwargs ('username'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username = ET.SubElement(user_profile, "username")
    username.text = kwargs.pop('username')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_profile_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    profile/key from kwargs ('username', 'key'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    profile = ET.SubElement(user_profile, "profile")
    key = ET.SubElement(profile, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_profile_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    profile[key]/value.

    Pops 'username' and 'key' (list keys) and 'value' (leaf) from
    kwargs; the tree is handed to 'callback' (default: self._callback).
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    node = ET.SubElement(node, "data-stores")
    profile_entry = ET.SubElement(node, "user-profile")
    user_leaf = ET.SubElement(profile_entry, "username")
    user_leaf.text = kwargs.pop('username')
    profile_node = ET.SubElement(profile_entry, "profile")
    key_leaf = ET.SubElement(profile_node, "key")
    key_leaf.text = kwargs.pop('key')
    value_leaf = ET.SubElement(profile_node, "value")
    value_leaf.text = kwargs.pop('value')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
def webui_data_stores_user_profile_saved_query_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    saved-query/key from kwargs ('username', 'key'); tree goes to
    'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    saved_query = ET.SubElement(user_profile, "saved-query")
    key = ET.SubElement(saved_query, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_saved_query_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    saved-query[key]/value from kwargs ('username', 'key', 'value');
    tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    saved_query = ET.SubElement(user_profile, "saved-query")
    key_key = ET.SubElement(saved_query, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(saved_query, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_data_store_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/data-store/key.

    Pops 'key' from kwargs as the leaf text; the finished tree is
    handed to 'callback' (default: self._callback).
    """
    root = ET.Element("config")
    node = ET.SubElement(root, "webui", xmlns="http://tail-f.com/ns/webui")
    node = ET.SubElement(node, "data-stores")
    node = ET.SubElement(node, "data-store")
    leaf = ET.SubElement(node, "key")
    leaf.text = kwargs.pop('key')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
def webui_data_stores_data_store_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/data-store[key]/value
    from kwargs ('key', 'value'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    data_store = ET.SubElement(data_stores, "data-store")
    key_key = ET.SubElement(data_store, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(data_store, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_saved_query_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/saved-query/key from
    kwargs ('key'); tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    saved_query = ET.SubElement(data_stores, "saved-query")
    key = ET.SubElement(saved_query, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_saved_query_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/saved-query[key]/value
    from kwargs ('key', 'value'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    saved_query = ET.SubElement(data_stores, "saved-query")
    key_key = ET.SubElement(saved_query, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(saved_query, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_name(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel/name from
    kwargs ('name'); tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name = ET.SubElement(panel, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_properties_title(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    properties/title from kwargs ('name', 'title'); tree goes to
    'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    properties = ET.SubElement(panel, "properties")
    title = ET.SubElement(properties, "title")
    title.text = kwargs.pop('title')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_properties_description(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    properties/description from kwargs ('name', 'description'); tree
    goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    properties = ET.SubElement(panel, "properties")
    description = ET.SubElement(properties, "description")
    description.text = kwargs.pop('description')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_properties_width(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    properties/width from kwargs ('name', 'width'); tree goes to
    'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    properties = ET.SubElement(panel, "properties")
    width = ET.SubElement(properties, "width")
    width.text = kwargs.pop('width')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_properties_height(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    properties/height from kwargs ('name', 'height'); tree goes to
    'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    properties = ET.SubElement(panel, "properties")
    height = ET.SubElement(properties, "height")
    height.text = kwargs.pop('height')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_id(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component/id.

    Pops 'name' (panel list key) and 'id' (component id leaf) from
    kwargs; the finished tree is handed to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    # Fix: local renamed from 'id' to avoid shadowing the builtin.
    id_leaf = ET.SubElement(component, "id")
    id_leaf.text = kwargs.pop('id')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_top(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/top from kwargs
    ('name', 'id', 'top'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    top = ET.SubElement(properties, "top")
    top.text = kwargs.pop('top')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_left(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/left from kwargs
    ('name', 'id', 'left'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    left = ET.SubElement(properties, "left")
    left.text = kwargs.pop('left')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_width(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/width from kwargs
    ('name', 'id', 'width'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    width = ET.SubElement(properties, "width")
    width.text = kwargs.pop('width')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_height(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/height from kwargs
    ('name', 'id', 'height'); tree goes to 'callback'
    (default: self._callback).

    NOTE(review): duplicate of an identically-named method earlier in
    this class; this later definition wins at class-creation time.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    height = ET.SubElement(properties, "height")
    height.text = kwargs.pop('height')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_z_index(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/z-index from kwargs
    ('name', 'id', 'z_index'); tree goes to 'callback'
    (default: self._callback).

    NOTE(review): duplicate of an identically-named method earlier in
    this class; this later definition wins at class-creation time.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    z_index = ET.SubElement(properties, "z-index")
    z_index.text = kwargs.pop('z_index')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_image_image_image(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/image/image/image
    from kwargs ('name', 'id', 'image'); tree goes to 'callback'
    (default: self._callback). Duplicate of an earlier identically-named
    method; this definition wins.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    # 'image' rebound while descending image/image/image; last holds text.
    image = ET.SubElement(component_type, "image")
    image = ET.SubElement(image, "image")
    image = ET.SubElement(image, "image")
    image.text = kwargs.pop('image')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_text(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/link/link/text
    from kwargs ('name', 'id', 'text'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    link = ET.SubElement(component_type, "link")
    link = ET.SubElement(link, "link")
    text = ET.SubElement(link, "text")
    text.text = kwargs.pop('text')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_panels_panel_components_component_properties_component_type_link_link_link(self, **kwargs):
    """Render <config> XML for webui/schematics/panels/panel[name]/
    components/component[id]/properties/component-type/link/link/link
    from kwargs ('name', 'id', 'link'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    panels = ET.SubElement(schematics, "panels")
    panel = ET.SubElement(panels, "panel")
    name_key = ET.SubElement(panel, "name")
    name_key.text = kwargs.pop('name')
    components = ET.SubElement(panel, "components")
    component = ET.SubElement(components, "component")
    id_key = ET.SubElement(component, "id")
    id_key.text = kwargs.pop('id')
    properties = ET.SubElement(component, "properties")
    component_type = ET.SubElement(properties, "component-type")
    # 'link' rebound while descending link/link/link; last holds text.
    link = ET.SubElement(component_type, "link")
    link = ET.SubElement(link, "link")
    link = ET.SubElement(link, "link")
    link.text = kwargs.pop('link')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_assets_asset_name(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset/name from
    kwargs ('name'); tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name = ET.SubElement(asset, "name")
    name.text = kwargs.pop('name')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_assets_asset_asset_type_image_base_64_image(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset[name]/
    asset-type/image/base-64-image from kwargs ('name', 'base_64_image');
    tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    base_64_image = ET.SubElement(image, "base-64-image")
    base_64_image.text = kwargs.pop('base_64_image')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_schematics_assets_asset_asset_type_image_type(self, **kwargs):
    """Render <config> XML for webui/schematics/assets/asset[name]/
    asset-type/image/type from kwargs ('name', 'type'); tree goes to
    'callback' (default: self._callback).

    NOTE(review): duplicate of an identically-named method earlier in
    this class; this later definition wins at class-creation time.
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    # Fix: local renamed from 'type' to avoid shadowing the builtin.
    type_leaf = ET.SubElement(image, "type")
    type_leaf.text = kwargs.pop('type')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_username(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile/username
    from kwargs ('username'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username = ET.SubElement(user_profile, "username")
    username.text = kwargs.pop('username')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_profile_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    profile/key from kwargs ('username', 'key'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    profile = ET.SubElement(user_profile, "profile")
    key = ET.SubElement(profile, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_profile_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    profile[key]/value from kwargs ('username', 'key', 'value'); tree
    goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    profile = ET.SubElement(user_profile, "profile")
    key_key = ET.SubElement(profile, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(profile, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_saved_query_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    saved-query/key from kwargs ('username', 'key'); tree goes to
    'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    saved_query = ET.SubElement(user_profile, "saved-query")
    key = ET.SubElement(saved_query, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_user_profile_saved_query_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/user-profile[username]/
    saved-query[key]/value from kwargs ('username', 'key', 'value');
    tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    user_profile = ET.SubElement(data_stores, "user-profile")
    username_key = ET.SubElement(user_profile, "username")
    username_key.text = kwargs.pop('username')
    saved_query = ET.SubElement(user_profile, "saved-query")
    key_key = ET.SubElement(saved_query, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(saved_query, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_data_store_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/data-store/key from
    kwargs ('key'); tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    data_store = ET.SubElement(data_stores, "data-store")
    key = ET.SubElement(data_store, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_data_store_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/data-store[key]/value
    from kwargs ('key', 'value'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    data_store = ET.SubElement(data_stores, "data-store")
    key_key = ET.SubElement(data_store, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(data_store, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_saved_query_key(self, **kwargs):
    """Render <config> XML for webui/data-stores/saved-query/key from
    kwargs ('key'); tree goes to 'callback' (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    saved_query = ET.SubElement(data_stores, "saved-query")
    key = ET.SubElement(saved_query, "key")
    key.text = kwargs.pop('key')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def webui_data_stores_saved_query_value(self, **kwargs):
    """Render <config> XML for webui/data-stores/saved-query[key]/value
    from kwargs ('key', 'value'); tree goes to 'callback'
    (default: self._callback).
    """
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    data_stores = ET.SubElement(webui, "data-stores")
    saved_query = ET.SubElement(data_stores, "saved-query")
    key_key = ET.SubElement(saved_query, "key")
    key_key.text = kwargs.pop('key')
    value = ET.SubElement(saved_query, "value")
    value.text = kwargs.pop('value')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
|
import asyncio
import hashlib
from os import linesep
from typing import Sequence
from aiofile import async_open
from aiofiles import os as aio
from ..common import run
DEFAULT_BUFFER_SIZE = 100 * 1024 * 1024
async def _getsize_by_reading(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """
    Fallback for `os.path.getsize`: stream the file and sum chunk lengths.
    """
    total = 0
    async with async_open(filepath, "rb") as stream:
        async for block in stream.iter_chunked(chunk_size):
            total += len(block)
    return total
async def getsize(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """Return the size of *filepath* in bytes.

    Prefers the cheap aiofiles os.path.getsize; any failure falls back
    to streaming the file and counting bytes.
    """
    try:
        size = await aio.path.getsize(filepath)
    except Exception:
        size = await _getsize_by_reading(filepath, chunk_size)
    return size
async def _md5_by_reading(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> str:
    """
    Compute the hex md5 digest of *filepath* by streaming its chunks.
    """
    hasher = hashlib.md5()
    async with async_open(filepath, "rb") as stream:
        async for block in stream.iter_chunked(chunk_size):
            hasher.update(block)
    return hasher.hexdigest()
async def md5(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> str:
    """Return the hex md5 digest of *filepath*.

    Tries the external `md5sum` binary first; on any failure (missing
    binary, non-zero exit, parse error) falls back to a streaming read.
    """
    try:
        proc = await run(["md5sum", filepath])
        if proc.returncode == 0:
            # md5sum prints "<digest>  <path>"; keep only the digest.
            return proc.stdout.split()[0].decode("utf-8")
    except Exception:
        pass
    return await _md5_by_reading(filepath, chunk_size)
async def _copy(
    source: str, dest: str, chunk_size: int = DEFAULT_BUFFER_SIZE, write_mode: str = "wb"
) -> bool:
    """Stream *source* into *dest* using *write_mode* ('wb' or 'ab').

    Returns True when the number of bytes written equals the number of
    bytes read.
    """
    read_total = 0
    written_total = 0
    async with async_open(source, "rb") as src, async_open(dest, write_mode) as dst:
        async for block in src.iter_chunked(chunk_size):
            read_total += len(block)
            written_total += await dst.write(block)
    return written_total == read_total
async def copy(source: str, dest: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> bool:
    """Copy *source* to *dest*; True on success.

    Prefers the external `cp` binary; any failure falls back to a
    chunked streaming copy.
    """
    try:
        proc = await run(["cp", source, dest])
        if proc.returncode == 0:
            return True
    except Exception:
        pass
    return await _copy(source, dest, chunk_size, write_mode="wb")
async def append(source: str, dest: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> bool:
    """Append the bytes of *source* onto *dest* (streaming); True on success."""
    return await _copy(source, dest, chunk_size, "ab")
async def _linecount_by_reading(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """Count occurrences of os.linesep in *filepath* by streaming chunks.

    NOTE(review): a multi-byte separator (\\r\\n on Windows) split across
    a chunk boundary would be missed — confirm callers only use this on
    platforms with a single-byte linesep.
    """
    separator = linesep.encode("utf-8")
    total = 0
    async with async_open(filepath, "rb") as stream:
        async for block in stream.iter_chunked(chunk_size):
            total += block.count(separator)
    return total
async def linecount(filepath: str, chunk_size: int = DEFAULT_BUFFER_SIZE) -> int:
    """Return the number of lines in *filepath*.

    Prefers the external `wc -l`; on any failure falls back to a
    streaming separator count.

    Fix: *chunk_size* is now forwarded to `_linecount_by_reading` —
    previously it was accepted but silently dropped on the fallback path.
    """
    try:
        result = await run(["wc", "-l", filepath])
        if result.returncode == 0:
            return int(result.stdout.split()[0])
    except Exception:
        pass
    return await _linecount_by_reading(filepath, chunk_size)
async def samecontent(source: str, target: str) -> bool:
    """Return True when both files hash to the same md5 digest."""
    digest_a, digest_b = await asyncio.gather(md5(source), md5(target))
    return digest_a == digest_b
|
"""
Collection of utilities to manipulate structured arrays.
Most of these functions were initially implemented by John Hunter for
matplotlib. They have been rewritten and extended for convenience.
"""
from __future__ import division, absolute_import, print_function
import sys
import itertools
import numpy as np
import numpy.ma as ma
from numpy import ndarray, recarray
from numpy.ma import MaskedArray
from numpy.ma.mrecords import MaskedRecords
from numpy.lib._iotools import _is_string_like
from numpy.compat import basestring
if sys.version_info[0] < 3:
from future_builtins import zip
# Re-export numpy.ma.core's private fill-value validator for use below.
_check_fill_value = np.ma.core._check_fill_value
# Public API of this module.
__all__ = [
    'append_fields', 'drop_fields', 'find_duplicates',
    'get_fieldstructure', 'join_by', 'merge_arrays',
    'rec_append_fields', 'rec_drop_fields', 'rec_join',
    'recursive_fill_fields', 'rename_fields', 'stack_arrays',
    ]
def recursive_fill_fields(input, output):
    """
    Copy every field of `input` into the matching field of `output`,
    descending recursively into nested (structured) fields.

    Parameters
    ----------
    input : ndarray
        Structured source array.
    output : ndarray
        Structured destination array; should be at least as long as
        `input`. Fields absent from `input` are left untouched.

    Returns
    -------
    output : ndarray
        The same `output` array, filled in place.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> a = np.array([(1, 10.), (2, 20.)], dtype=[('A', int), ('B', float)])
    >>> b = np.zeros((3,), dtype=a.dtype)
    >>> rfn.recursive_fill_fields(a, b)
    array([(1, 10.0), (2, 20.0), (0, 0.0)],
          dtype=[('A', '<i4'), ('B', '<f8')])
    """
    for name in output.dtype.names:
        try:
            src = input[name]
        except ValueError:
            # Field missing from the source: leave the destination as-is.
            continue
        if src.dtype.names:
            # Nested structure: fill it field by field.
            recursive_fill_fields(src, output[name])
        else:
            output[name][:len(src)] = src
    return output
def get_names(adtype):
    """
    Return the field names of `adtype` as a (possibly nested) tuple.

    A nested structured field appears as ``(name, (subnames...))``.

    Parameters
    ----------
    adtype : dtype
        Input datatype.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> adtype = np.dtype([('a', int), ('b', [('ba', int), ('bb', int)])])
    >>> rfn.get_names(adtype)
    ('a', ('b', ('ba', 'bb')))
    """
    collected = []
    for name in adtype.names:
        field = adtype[name]
        if field.names:
            collected.append((name, tuple(get_names(field))))
        else:
            collected.append(name)
    return tuple(collected) or None
def get_names_flat(adtype):
    """
    Returns the field names of the input datatype as a tuple. Nested structures
    are flattened beforehand.

    Parameters
    ----------
    adtype : dtype
        Input datatype

    Returns
    -------
    names : tuple or None
        All field names, including nested sub-field names, in a flat
        tuple.  None if `adtype` has no fields.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> rfn.get_names_flat(np.dtype(int)) is None
    True
    >>> rfn.get_names_flat(np.dtype([('A', int), ('B', float)]))
    ('A', 'B')
    >>> adtype = np.dtype([('a', int), ('b', [('ba', int), ('bb', int)])])
    >>> rfn.get_names_flat(adtype)
    ('a', 'b', 'ba', 'bb')
    """
    names = adtype.names
    # An unstructured dtype has no fields: the documented result is None.
    # (The previous implementation crashed with a TypeError iterating None.)
    if names is None:
        return None
    listnames = []
    for name in names:
        listnames.append(name)
        current = adtype[name]
        if current.names:
            # A nested field contributes its own name *and* its sub-names.
            listnames.extend(get_names_flat(current))
    return tuple(listnames) or None
def flatten_descr(ndtype):
    """
    Flatten a structured data-type description.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> ndtype = np.dtype([('a', '<i4'), ('b', [('ba', '<f8'), ('bb', '<i4')])])
    >>> rfn.flatten_descr(ndtype)
    (('a', dtype('int32')), ('ba', dtype('float64')), ('bb', dtype('int32')))

    """
    if ndtype.names is None:
        # Unstructured dtype: nothing to flatten.
        return ndtype.descr
    flat = []
    for fname in ndtype.names:
        ftype = ndtype.fields[fname][0]
        if ftype.names:
            # Splice the nested sub-fields directly into the flat result.
            flat.extend(flatten_descr(ftype))
        else:
            flat.append((fname, ftype))
    return tuple(flat)
def zip_descr(seqarrays, flatten=False):
    """
    Combine the dtype description of a series of arrays.

    Parameters
    ----------
    seqarrays : sequence of arrays
        Sequence of arrays
    flatten : {boolean}, optional
        Whether to collapse nested descriptions.
    """
    newdtype = []
    if flatten:
        # Collapse any nested structure of each input dtype.
        for arr in seqarrays:
            newdtype.extend(flatten_descr(arr.dtype))
    else:
        for arr in seqarrays:
            current = arr.dtype
            if len(current.names or ()) > 1:
                # Keep a multi-field dtype together as a single nested field.
                newdtype.append(('', current.descr))
            else:
                newdtype.extend(current.descr)
    # Round-trip through np.dtype to normalize the combined description.
    return np.dtype(newdtype).descr
def get_fieldstructure(adtype, lastname=None, parents=None,):
    """
    Returns a dictionary with fields indexing lists of their parent fields.

    This function is used to simplify access to fields nested in other fields.

    Parameters
    ----------
    adtype : np.dtype
        Input datatype
    lastname : optional
        Last processed field name (used internally during recursion).
    parents : dictionary
        Dictionary of parent fields (used internally during recursion).

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> ndtype = np.dtype([('A', int),
    ...                    ('B', [('BA', int),
    ...                           ('BB', [('BBA', int), ('BBB', int)])])])
    >>> rfn.get_fieldstructure(ndtype)
    ... # XXX: possible regression, order of BBA and BBB is swapped
    {'A': [], 'B': [], 'BA': ['B'], 'BB': ['B'], 'BBA': ['B', 'BB'], 'BBB': ['B', 'BB']}

    """
    if parents is None:
        parents = {}
    names = adtype.names
    for name in names:
        current = adtype[name]
        if current.names:
            # Structured sub-field: its parent is whatever field we are
            # currently nested inside (if any), then recurse into it.
            # NOTE: `parents` is shared (mutated) across recursive calls.
            if lastname:
                parents[name] = [lastname, ]
            else:
                parents[name] = []
            parents.update(get_fieldstructure(current, name, parents))
        else:
            # Leaf field: its ancestry is the enclosing field's ancestry
            # plus the enclosing field itself.
            lastparent = [_ for _ in (parents.get(lastname, []) or [])]
            if lastparent:
                lastparent.append(lastname)
            elif lastname:
                lastparent = [lastname, ]
            parents[name] = lastparent or []
    return parents or None
def _izip_fields_flat(iterable):
"""
Returns an iterator of concatenated fields from a sequence of arrays,
collapsing any nested structure.
"""
for element in iterable:
if isinstance(element, np.void):
for f in _izip_fields_flat(tuple(element)):
yield f
else:
yield element
def _izip_fields(iterable):
    """
    Returns an iterator of concatenated fields from a sequence of arrays.

    Unlike `_izip_fields_flat`, nested structure is preserved except for
    generic iterables and single-field void scalars, which are expanded
    in place.
    """
    for element in iterable:
        # Expand any non-string iterable in place; strings stay atomic.
        if (hasattr(element, '__iter__') and
                not isinstance(element, basestring)):
            for f in _izip_fields(element):
                yield f
        # A one-field np.void scalar is unpacked as well (iterating a void
        # yields its field values).
        elif isinstance(element, np.void) and len(tuple(element)) == 1:
            for f in _izip_fields(element):
                yield f
        else:
            yield element
def izip_records(seqarrays, fill_value=None, flatten=True):
    """
    Returns an iterator of concatenated items from a sequence of arrays.

    Parameters
    ----------
    seqarrays : sequence of arrays
        Sequence of arrays.
    fill_value : {None, integer}
        Value used to pad shorter iterables.
    flatten : {True, False},
        Whether to collapse nested structures into a flat stream of values.
    """
    # Should we flatten the items, or just use a nested approach
    if flatten:
        zipfunc = _izip_fields_flat
    else:
        zipfunc = _izip_fields
    # itertools.izip_longest was renamed zip_longest in Python 3.
    if sys.version_info[0] >= 3:
        zip_longest = itertools.zip_longest
    else:
        zip_longest = itertools.izip_longest
    for tup in zip_longest(*seqarrays, fillvalue=fill_value):
        yield tuple(zipfunc(tup))
def _fix_output(output, usemask=True, asrecarray=False):
"""
Private function: return a recarray, a ndarray, a MaskedArray
or a MaskedRecords depending on the input parameters
"""
if not isinstance(output, MaskedArray):
usemask = False
if usemask:
if asrecarray:
output = output.view(MaskedRecords)
else:
output = ma.filled(output)
if asrecarray:
output = output.view(recarray)
return output
def _fix_defaults(output, defaults=None):
"""
Update the fill_value and masked data of `output`
from the default given in a dictionary defaults.
"""
names = output.dtype.names
(data, mask, fill_value) = (output.data, output.mask, output.fill_value)
for (k, v) in (defaults or {}).items():
if k in names:
fill_value[k] = v
data[k][mask[k]] = v
return output
def merge_arrays(seqarrays, fill_value=-1, flatten=False,
                 usemask=False, asrecarray=False):
    """
    Merge arrays field by field.

    Parameters
    ----------
    seqarrays : sequence of ndarrays
        Sequence of arrays
    fill_value : {float}, optional
        Filling value used to pad missing data on the shorter arrays.
    flatten : {False, True}, optional
        Whether to collapse nested fields.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : {False, True}, optional
        Whether to return a recarray (MaskedRecords) or not.

    Returns
    -------
    output : ndarray, recarray, MaskedArray or MaskedRecords
        One structured array with one field per input array, padded to the
        length of the longest input.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> rfn.merge_arrays((np.array([1, 2]), np.array([10., 20., 30.])))
    masked_array(data = [(1, 10.0) (2, 20.0) (--, 30.0)],
                 mask = [(False, False) (False, False) (True, False)],
           fill_value = (999999, 1e+20),
                dtype = [('f0', '<i4'), ('f1', '<f8')])

    >>> rfn.merge_arrays((np.array([1, 2]), np.array([10., 20., 30.])),
    ...              usemask=False)
    array([(1, 10.0), (2, 20.0), (-1, 30.0)],
          dtype=[('f0', '<i4'), ('f1', '<f8')])
    >>> rfn.merge_arrays((np.array([1, 2]).view([('a', int)]),
    ...               np.array([10., 20., 30.])),
    ...              usemask=False, asrecarray=True)
    rec.array([(1, 10.0), (2, 20.0), (-1, 30.0)],
              dtype=[('a', '<i4'), ('f1', '<f8')])

    Notes
    -----
    * Without a mask, the missing value will be filled with something,
    * depending on what its corresponding type:
            -1      for integers
            -1.0    for floating point numbers
            '-'     for characters
            '-1'    for strings
            True    for boolean values
    * XXX: I just obtained these values empirically
    """
    # Only one item in the input sequence ?
    if (len(seqarrays) == 1):
        seqarrays = np.asanyarray(seqarrays[0])
    # Do we have a single ndarray as input ?
    if isinstance(seqarrays, (ndarray, np.void)):
        seqdtype = seqarrays.dtype
        # Fast path: a single array that needs no (re)flattening can simply
        # be viewed as the requested output type.
        if (not flatten) or \
           (zip_descr((seqarrays,), flatten=True) == seqdtype.descr):
            # Minimal processing needed: just make sure everythng's a-ok
            seqarrays = seqarrays.ravel()
            # Make sure we have named fields
            if not seqdtype.names:
                seqdtype = [('', seqdtype)]
            # Find what type of array we must return
            if usemask:
                if asrecarray:
                    seqtype = MaskedRecords
                else:
                    seqtype = MaskedArray
            elif asrecarray:
                seqtype = recarray
            else:
                seqtype = ndarray
            return seqarrays.view(dtype=seqdtype, type=seqtype)
        else:
            seqarrays = (seqarrays,)
    else:
        # Make sure we have arrays in the input sequence
        seqarrays = [np.asanyarray(_m) for _m in seqarrays]
    # Find the sizes of the inputs and their maximum
    sizes = tuple(a.size for a in seqarrays)
    maxlength = max(sizes)
    # Get the dtype of the output (flattening if needed)
    newdtype = zip_descr(seqarrays, flatten=flatten)
    # Initialize the sequences for data and mask
    seqdata = []
    seqmask = []
    # If we expect some kind of MaskedArray, make a special loop.
    if usemask:
        for (a, n) in zip(seqarrays, sizes):
            nbmissing = (maxlength - n)
            # Get the data and mask
            data = a.ravel().__array__()
            mask = ma.getmaskarray(a).ravel()
            # Get the filling value (if needed)
            if nbmissing:
                fval = _check_fill_value(fill_value, a.dtype)
                if isinstance(fval, (ndarray, np.void)):
                    if len(fval.dtype) == 1:
                        fval = fval.item()[0]
                        fmsk = True
                    else:
                        fval = np.array(fval, dtype=a.dtype, ndmin=1)
                        fmsk = np.ones((1,), dtype=mask.dtype)
            else:
                fval = None
                fmsk = True
            # Store an iterator padding the input to the expected length
            seqdata.append(itertools.chain(data, [fval] * nbmissing))
            seqmask.append(itertools.chain(mask, [fmsk] * nbmissing))
        # Create an iterator for the data
        data = tuple(izip_records(seqdata, flatten=flatten))
        output = ma.array(np.fromiter(data, dtype=newdtype, count=maxlength),
                          mask=list(izip_records(seqmask, flatten=flatten)))
        if asrecarray:
            output = output.view(MaskedRecords)
    else:
        # Same as before, without the mask we don't need...
        for (a, n) in zip(seqarrays, sizes):
            nbmissing = (maxlength - n)
            data = a.ravel().__array__()
            if nbmissing:
                fval = _check_fill_value(fill_value, a.dtype)
                if isinstance(fval, (ndarray, np.void)):
                    if len(fval.dtype) == 1:
                        fval = fval.item()[0]
                    else:
                        fval = np.array(fval, dtype=a.dtype, ndmin=1)
            else:
                fval = None
            seqdata.append(itertools.chain(data, [fval] * nbmissing))
        output = np.fromiter(tuple(izip_records(seqdata, flatten=flatten)),
                             dtype=newdtype, count=maxlength)
        if asrecarray:
            output = output.view(recarray)
    # And we're done...
    return output
def drop_fields(base, drop_names, usemask=True, asrecarray=False):
    """
    Return a new array with fields in `drop_names` dropped.

    Nested fields are supported.  Returns None if *every* field is dropped.

    Parameters
    ----------
    base : array
        Input array
    drop_names : string or sequence
        String or sequence of strings corresponding to the names of the
        fields to drop.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : string or sequence, optional
        Whether to return a recarray or a mrecarray (`asrecarray=True`) or
        a plain ndarray or masked array with flexible dtype. The default
        is False.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> a = np.array([(1, (2, 3.0)), (4, (5, 6.0))],
    ...   dtype=[('a', int), ('b', [('ba', float), ('bb', int)])])
    >>> rfn.drop_fields(a, 'a')
    array([((2.0, 3),), ((5.0, 6),)],
          dtype=[('b', [('ba', '<f8'), ('bb', '<i4')])])
    >>> rfn.drop_fields(a, 'ba')
    array([(1, (3,)), (4, (6,))],
          dtype=[('a', '<i4'), ('b', [('bb', '<i4')])])
    >>> rfn.drop_fields(a, ['ba', 'bb'])
    array([(1,), (4,)],
          dtype=[('a', '<i4')])
    """
    if _is_string_like(drop_names):
        drop_names = [drop_names]
    else:
        drop_names = set(drop_names)
    def _drop_descr(ndtype, drop_names):
        # Rebuild the dtype description without the dropped fields,
        # recursing into nested structures.
        names = ndtype.names
        newdtype = []
        for name in names:
            current = ndtype[name]
            if name in drop_names:
                continue
            if current.names:
                descr = _drop_descr(current, drop_names)
                # Keep the nested field only if it still has sub-fields left.
                if descr:
                    newdtype.append((name, descr))
            else:
                newdtype.append((name, current))
        return newdtype
    newdtype = _drop_descr(base.dtype, drop_names)
    if not newdtype:
        # Every field was dropped: there is nothing left to return.
        return None
    output = np.empty(base.shape, dtype=newdtype)
    output = recursive_fill_fields(base, output)
    return _fix_output(output, usemask=usemask, asrecarray=asrecarray)
def _keep_fields(base, keep_names, usemask=True, asrecarray=False):
    """
    Return a new array keeping only the fields in `keep_names`,
    and preserving the order of those fields.

    Parameters
    ----------
    base : array
        Input array
    keep_names : string or sequence
        String or sequence of strings corresponding to the names of the
        fields to keep. Order of the names will be preserved.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : string or sequence, optional
        Whether to return a recarray or a mrecarray (`asrecarray=True`) or
        a plain ndarray or masked array with flexible dtype. The default
        is False.
    """
    # Build the reduced dtype in exactly the order requested by keep_names.
    kept_descr = [(fname, base.dtype[fname]) for fname in keep_names]
    result = np.empty(base.shape, dtype=kept_descr)
    result = recursive_fill_fields(base, result)
    return _fix_output(result, usemask=usemask, asrecarray=asrecarray)
def rec_drop_fields(base, drop_names):
    """
    Returns a new numpy.recarray with fields in `drop_names` dropped.
    """
    # Same as drop_fields, but forcing a plain (unmasked) recarray result.
    return drop_fields(base, drop_names, asrecarray=True, usemask=False)
def rename_fields(base, namemapper):
    """
    Rename the fields from a flexible-datatype ndarray or recarray.

    Nested fields are supported.

    Parameters
    ----------
    base : ndarray
        Input array whose fields must be modified.
    namemapper : dictionary
        Dictionary mapping old field names to their new version.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> a = np.array([(1, (2, [3.0, 30.])), (4, (5, [6.0, 60.]))],
    ...   dtype=[('a', int),('b', [('ba', float), ('bb', (float, 2))])])
    >>> rfn.rename_fields(a, {'a':'A', 'bb':'BB'})
    array([(1, (2.0, [3.0, 30.0])), (4, (5.0, [6.0, 60.0]))],
          dtype=[('A', '<i4'), ('b', [('ba', '<f8'), ('BB', '<f8', 2)])])

    """
    def _rename(dtype, mapper):
        # Rebuild the dtype description, substituting new names on the fly.
        descr = []
        for oldname in dtype.names:
            fieldtype = dtype[oldname]
            newname = mapper.get(oldname, oldname)
            if fieldtype.names:
                descr.append((newname, _rename(fieldtype, mapper)))
            else:
                descr.append((newname, fieldtype))
        return descr
    # A view is enough: only the dtype (names) changes, not the data.
    return base.view(_rename(base.dtype, namemapper))
def append_fields(base, names, data, dtypes=None,
                  fill_value=-1, usemask=True, asrecarray=False):
    """
    Add new fields to an existing array.

    The names of the fields are given with the `names` arguments,
    the corresponding values with the `data` arguments.
    If a single field is appended, `names`, `data` and `dtypes` do not have
    to be lists but just values.

    Parameters
    ----------
    base : array
        Input array to extend.
    names : string, sequence
        String or sequence of strings corresponding to the names
        of the new fields.
    data : array or sequence of arrays
        Array or sequence of arrays storing the fields to add to the base.
    dtypes : sequence of datatypes, optional
        Datatype or sequence of datatypes.
        If None, the datatypes are estimated from the `data`.
    fill_value : {float}, optional
        Filling value used to pad missing data on the shorter arrays.
    usemask : {False, True}, optional
        Whether to return a masked array or not.
    asrecarray : {False, True}, optional
        Whether to return a recarray (MaskedRecords) or not.
    """
    # Check the names
    if isinstance(names, (tuple, list)):
        if len(names) != len(data):
            msg = "The number of arrays does not match the number of names"
            raise ValueError(msg)
    elif isinstance(names, basestring):
        # Single-field shorthand: promote to one-element lists.
        names = [names, ]
        data = [data, ]
    #
    if dtypes is None:
        # Estimate each new field's dtype from its data.
        data = [np.array(a, copy=False, subok=True) for a in data]
        data = [a.view([(name, a.dtype)]) for (name, a) in zip(names, data)]
    else:
        if not isinstance(dtypes, (tuple, list)):
            dtypes = [dtypes, ]
        if len(data) != len(dtypes):
            # A single dtype is broadcast to all new fields.
            if len(dtypes) == 1:
                dtypes = dtypes * len(data)
            else:
                msg = "The dtypes argument must be None, a dtype, or a list."
                raise ValueError(msg)
        data = [np.array(a, copy=False, subok=True, dtype=d).view([(n, d)])
                for (a, n, d) in zip(data, names, dtypes)]
    #
    # Normalize base and the new fields to structured arrays, then fill an
    # output sized to the longest of the two.
    base = merge_arrays(base, usemask=usemask, fill_value=fill_value)
    if len(data) > 1:
        data = merge_arrays(data, flatten=True, usemask=usemask,
                            fill_value=fill_value)
    else:
        data = data.pop()
    #
    output = ma.masked_all(max(len(base), len(data)),
                           dtype=base.dtype.descr + data.dtype.descr)
    output = recursive_fill_fields(base, output)
    output = recursive_fill_fields(data, output)
    #
    return _fix_output(output, usemask=usemask, asrecarray=asrecarray)
def rec_append_fields(base, names, data, dtypes=None):
    """
    Add new fields to an existing array.

    The names of the fields are given with the `names` arguments,
    the corresponding values with the `data` arguments.
    If a single field is appended, `names`, `data` and `dtypes` do not have
    to be lists but just values.

    Parameters
    ----------
    base : array
        Input array to extend.
    names : string, sequence
        String or sequence of strings corresponding to the names
        of the new fields.
    data : array or sequence of arrays
        Array or sequence of arrays storing the fields to add to the base.
    dtypes : sequence of datatypes, optional
        Datatype or sequence of datatypes.
        If None, the datatypes are estimated from the `data`.

    See Also
    --------
    append_fields

    Returns
    -------
    appended_array : np.recarray
    """
    # Delegate to append_fields, forcing an unmasked recarray result.
    kwargs = dict(dtypes=dtypes, usemask=False, asrecarray=True)
    return append_fields(base, names, data, **kwargs)
def stack_arrays(arrays, defaults=None, usemask=True, asrecarray=False,
                 autoconvert=False):
    """
    Superposes arrays fields by fields

    Parameters
    ----------
    arrays : array or sequence
        Sequence of input arrays.
    defaults : dictionary, optional
        Dictionary mapping field names to the corresponding default values.
    usemask : {True, False}, optional
        Whether to return a MaskedArray (or MaskedRecords is
        `asrecarray==True`) or a ndarray.
    asrecarray : {False, True}, optional
        Whether to return a recarray (or MaskedRecords if `usemask==True`)
        or just a flexible-type ndarray.
    autoconvert : {False, True}, optional
        Whether automatically cast the type of the field to the maximum.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> x = np.array([1, 2,])
    >>> rfn.stack_arrays(x) is x
    True
    >>> z = np.array([('A', 1), ('B', 2)], dtype=[('A', '|S3'), ('B', float)])
    >>> zz = np.array([('a', 10., 100.), ('b', 20., 200.), ('c', 30., 300.)],
    ...   dtype=[('A', '|S3'), ('B', float), ('C', float)])
    >>> test = rfn.stack_arrays((z,zz))
    >>> test
    masked_array(data = [('A', 1.0, --) ('B', 2.0, --) ('a', 10.0, 100.0) ('b', 20.0, 200.0)
     ('c', 30.0, 300.0)],
                 mask = [(False, False, True) (False, False, True) (False, False, False)
     (False, False, False) (False, False, False)],
           fill_value = ('N/A', 1e+20, 1e+20),
                dtype = [('A', '|S3'), ('B', '<f8'), ('C', '<f8')])

    """
    # A single array (or a one-element sequence) is returned unchanged.
    if isinstance(arrays, ndarray):
        return arrays
    elif len(arrays) == 1:
        return arrays[0]
    seqarrays = [np.asanyarray(a).ravel() for a in arrays]
    nrecords = [len(a) for a in seqarrays]
    ndtype = [a.dtype for a in seqarrays]
    fldnames = [d.names for d in ndtype]
    #
    # Build the union of all field descriptions, starting from the first
    # array's dtype and appending fields the later arrays introduce.
    dtype_l = ndtype[0]
    newdescr = dtype_l.descr
    names = [_[0] for _ in newdescr]
    for dtype_n in ndtype[1:]:
        for descr in dtype_n.descr:
            name = descr[0] or ''
            if name not in names:
                newdescr.append(descr)
                names.append(name)
            else:
                # Field already present: either upcast (autoconvert) or
                # require an exact dtype match.
                nameidx = names.index(name)
                current_descr = newdescr[nameidx]
                if autoconvert:
                    if np.dtype(descr[1]) > np.dtype(current_descr[-1]):
                        current_descr = list(current_descr)
                        current_descr[-1] = descr[1]
                        newdescr[nameidx] = tuple(current_descr)
                elif descr[1] != current_descr[-1]:
                    raise TypeError("Incompatible type '%s' <> '%s'" %
                                    (dict(newdescr)[name], descr[1]))
    # Only one field: use concatenate
    if len(newdescr) == 1:
        output = ma.concatenate(seqarrays)
    else:
        #
        # Stack the inputs vertically: each array occupies the slice
        # [offset[k], offset[k+1]) of the output; absent fields stay masked.
        output = ma.masked_all((np.sum(nrecords),), newdescr)
        offset = np.cumsum(np.r_[0, nrecords])
        seen = []
        for (a, n, i, j) in zip(seqarrays, fldnames, offset[:-1], offset[1:]):
            names = a.dtype.names
            if names is None:
                # Unstructured input: fill the next auto-named field.
                output['f%i' % len(seen)][i:j] = a
            else:
                for name in n:
                    output[name][i:j] = a[name]
                    if name not in seen:
                        seen.append(name)
    #
    return _fix_output(_fix_defaults(output, defaults),
                       usemask=usemask, asrecarray=asrecarray)
def find_duplicates(a, key=None, ignoremask=True, return_index=False):
    """
    Find the duplicates in a structured array along a given key

    Parameters
    ----------
    a : array-like
        Input array
    key : {string, None}, optional
        Name of the fields along which to check the duplicates.
        If None, the search is performed by records
    ignoremask : {True, False}, optional
        Whether masked data should be discarded or considered as duplicates.
    return_index : {False, True}, optional
        Whether to return the indices of the duplicated values.

    Examples
    --------
    >>> from numpy.lib import recfunctions as rfn
    >>> ndtype = [('a', int)]
    >>> a = np.ma.array([1, 1, 1, 2, 2, 3, 3],
    ...         mask=[0, 0, 1, 0, 0, 0, 1]).view(ndtype)
    >>> rfn.find_duplicates(a, ignoremask=True, return_index=True)
    ... # XXX: judging by the output, the ignoremask flag has no effect
    """
    a = np.asanyarray(a).ravel()
    # Get a dictionary of fields
    fields = get_fieldstructure(a.dtype)
    # Get the sorting data (by selecting the corresponding field)
    base = a
    if key:
        # Walk down through any parent fields to reach the keyed field.
        for f in fields[key]:
            base = base[f]
        base = base[key]
    # Get the sorting indices and the sorted data
    sortidx = base.argsort()
    sortedbase = base[sortidx]
    sorteddata = sortedbase.filled()
    # Compare the sorting data
    flag = (sorteddata[:-1] == sorteddata[1:])
    # If masked data must be ignored, set the flag to false where needed
    if ignoremask:
        sortedmask = sortedbase.recordmask
        flag[sortedmask[1:]] = False
    flag = np.concatenate(([False], flag))
    # We need to take the point on the left as well (else we're missing it)
    flag[:-1] = flag[:-1] + flag[1:]
    duplicates = a[sortidx][flag]
    if return_index:
        return (duplicates, sortidx[flag])
    else:
        return duplicates
def join_by(key, r1, r2, jointype='inner', r1postfix='1', r2postfix='2',
            defaults=None, usemask=True, asrecarray=False):
    """
    Join arrays `r1` and `r2` on key `key`.

    The key should be either a string or a sequence of string corresponding
    to the fields used to join the array.  An exception is raised if the
    `key` field cannot be found in the two input arrays.  Neither `r1` nor
    `r2` should have any duplicates along `key`: the presence of duplicates
    will make the output quite unreliable. Note that duplicates are not
    looked for by the algorithm.

    Parameters
    ----------
    key : {string, sequence}
        A string or a sequence of strings corresponding to the fields used
        for comparison.
    r1, r2 : arrays
        Structured arrays.
    jointype : {'inner', 'outer', 'leftouter'}, optional
        If 'inner', returns the elements common to both r1 and r2.
        If 'outer', returns the common elements as well as the elements of
        r1 not in r2 and the elements of not in r2.
        If 'leftouter', returns the common elements and the elements of r1
        not in r2.
    r1postfix : string, optional
        String appended to the names of the fields of r1 that are present
        in r2 but absent of the key.
    r2postfix : string, optional
        String appended to the names of the fields of r2 that are present
        in r1 but absent of the key.
    defaults : {dictionary}, optional
        Dictionary mapping field names to the corresponding default values.
    usemask : {True, False}, optional
        Whether to return a MaskedArray (or MaskedRecords is
        `asrecarray==True`) or a ndarray.
    asrecarray : {False, True}, optional
        Whether to return a recarray (or MaskedRecords if `usemask==True`)
        or just a flexible-type ndarray.

    Notes
    -----
    * The output is sorted along the key.
    * A temporary array is formed by dropping the fields not in the key for
      the two arrays and concatenating the result. This array is then
      sorted, and the common entries selected. The output is constructed by
      filling the fields with the selected entries. Matching is not
      preserved if there are some duplicates...
    """
    # Check jointype
    if jointype not in ('inner', 'outer', 'leftouter'):
        raise ValueError(
                "The 'jointype' argument should be in 'inner', "
                "'outer' or 'leftouter' (got '%s' instead)" % jointype
                )
    # If we have a single key, put it in a tuple
    if isinstance(key, basestring):
        key = (key,)
    # Check the keys
    if len(set(key)) != len(key):
        dup = next(x for n,x in enumerate(key) if x in key[n+1:])
        raise ValueError("duplicate join key %r" % dup)
    for name in key:
        if name not in r1.dtype.names:
            raise ValueError('r1 does not have key field %r' % name)
        if name not in r2.dtype.names:
            raise ValueError('r2 does not have key field %r' % name)
    # Make sure we work with ravelled arrays
    r1 = r1.ravel()
    r2 = r2.ravel()
    # Fixme: nb2 below is never used. Commenting out for pyflakes.
    # (nb1, nb2) = (len(r1), len(r2))
    nb1 = len(r1)
    (r1names, r2names) = (r1.dtype.names, r2.dtype.names)
    # Check the names for collision
    if (set.intersection(set(r1names), set(r2names)).difference(key) and
            not (r1postfix or r2postfix)):
        msg = "r1 and r2 contain common names, r1postfix and r2postfix "
        msg += "can't be empty"
        raise ValueError(msg)
    # Make temporary arrays of just the keys
    #  (use order of keys in `r1` for back-compatibility)
    key1 = [ n for n in r1names if n in key ]
    r1k = _keep_fields(r1, key1)
    r2k = _keep_fields(r2, key1)
    # Concatenate the two arrays for comparison
    aux = ma.concatenate((r1k, r2k))
    idx_sort = aux.argsort(order=key)
    aux = aux[idx_sort]
    #
    # Get the common keys: a key is common when it equals its sorted
    # neighbour; flag both members of each equal pair.
    flag_in = ma.concatenate(([False], aux[1:] == aux[:-1]))
    flag_in[:-1] = flag_in[1:] + flag_in[:-1]
    idx_in = idx_sort[flag_in]
    # Indices below nb1 come from r1, the rest from r2.
    idx_1 = idx_in[(idx_in < nb1)]
    idx_2 = idx_in[(idx_in >= nb1)] - nb1
    (r1cmn, r2cmn) = (len(idx_1), len(idx_2))
    if jointype == 'inner':
        (r1spc, r2spc) = (0, 0)
    elif jointype == 'outer':
        idx_out = idx_sort[~flag_in]
        idx_1 = np.concatenate((idx_1, idx_out[(idx_out < nb1)]))
        idx_2 = np.concatenate((idx_2, idx_out[(idx_out >= nb1)] - nb1))
        (r1spc, r2spc) = (len(idx_1) - r1cmn, len(idx_2) - r2cmn)
    elif jointype == 'leftouter':
        idx_out = idx_sort[~flag_in]
        idx_1 = np.concatenate((idx_1, idx_out[(idx_out < nb1)]))
        (r1spc, r2spc) = (len(idx_1) - r1cmn, 0)
    # Select the entries from each input
    (s1, s2) = (r1[idx_1], r2[idx_2])
    #
    # Build the new description of the output array .......
    # Start with the key fields
    ndtype = [list(_) for _ in r1k.dtype.descr]
    # Add the other fields
    ndtype.extend(list(_) for _ in r1.dtype.descr if _[0] not in key)
    # Find the new list of names (it may be different from r1names)
    names = list(_[0] for _ in ndtype)
    for desc in r2.dtype.descr:
        desc = list(desc)
        name = desc[0]
        # Have we seen the current name already ?
        if name in names:
            # NOTE(review): this looks up the full [name, dtype] pair, not
            # just the name — if the same-named field has a different dtype
            # in r2, .index() raises ValueError instead of matching.
            # Confirm this is the intended behavior.
            nameidx = ndtype.index(desc)
            current = ndtype[nameidx]
            # The current field is part of the key : take the largest dtype
            if name in key:
                current[-1] = max(desc[1], current[-1])
            # The current field is not part of the key: add the suffixes
            else:
                current[0] += r1postfix
                desc[0] += r2postfix
                ndtype.insert(nameidx + 1, desc)
        #... we haven't: just add the description to the current list
        else:
            names.extend(desc[0])
            ndtype.append(desc)
    # Revert the elements to tuples
    ndtype = [tuple(_) for _ in ndtype]
    # Find the largest nb of common fields :
    # r1cmn and r2cmn should be equal, but...
    cmn = max(r1cmn, r2cmn)
    # Construct an empty array
    output = ma.masked_all((cmn + r1spc + r2spc,), dtype=ndtype)
    names = output.dtype.names
    # Copy r1's entries: common rows first, then (for outer joins) the
    # rows specific to r1.
    for f in r1names:
        selected = s1[f]
        if f not in names or (f in r2names and not r2postfix and f not in key):
            f += r1postfix
        current = output[f]
        current[:r1cmn] = selected[:r1cmn]
        if jointype in ('outer', 'leftouter'):
            current[cmn:cmn + r1spc] = selected[r1cmn:]
    # Copy r2's entries: common rows, then (for 'outer') r2-specific rows
    # at the very end of the output.
    for f in r2names:
        selected = s2[f]
        if f not in names or (f in r1names and not r1postfix and f not in key):
            f += r2postfix
        current = output[f]
        current[:r2cmn] = selected[:r2cmn]
        if (jointype == 'outer') and r2spc:
            current[-r2spc:] = selected[r2cmn:]
    # Sort and finalize the output
    output.sort(order=key)
    kwargs = dict(usemask=usemask, asrecarray=asrecarray)
    return _fix_output(_fix_defaults(output, defaults), **kwargs)
def rec_join(key, r1, r2, jointype='inner', r1postfix='1', r2postfix='2',
             defaults=None):
    """
    Join arrays `r1` and `r2` on keys.

    Alternative to join_by, that always returns a np.recarray.

    See Also
    --------
    join_by : equivalent function
    """
    # Delegate to join_by, forcing a plain (unmasked) recarray result.
    return join_by(key, r1, r2, jointype=jointype, r1postfix=r1postfix,
                   r2postfix=r2postfix, defaults=defaults,
                   usemask=False, asrecarray=True)
|
import datetime
from os.path import dirname, join
import pandas as pd
from scipy.signal import savgol_filter
from bokeh.io import curdoc
from bokeh.layouts import column, row
from bokeh.models import ColumnDataSource, DataRange1d, Select
from bokeh.palettes import Blues4
from bokeh.plotting import figure
STATISTICS = ['record_min_temp', 'actual_min_temp', 'average_min_temp', 'average_max_temp', 'actual_max_temp', 'record_max_temp']
def get_dataset(src, name, distribution):
    """Return a ColumnDataSource with one airport's rows, optionally smoothed."""
    subset = src[src.airport == name].copy()
    del subset['airport']
    subset['date'] = pd.to_datetime(subset.date)
    # timedelta here instead of pd.DateOffset to avoid pandas bug < 0.18 (Pandas issue #11925)
    half_day = datetime.timedelta(days=0.5)
    subset['left'] = subset.date - half_day
    subset['right'] = subset.date + half_day
    subset = subset.set_index(['date'])
    subset.sort_index(inplace=True)
    if distribution == 'Smoothed':
        # Savitzky-Golay smoothing applied to each statistic column.
        window, order = 51, 3
        for col in STATISTICS:
            subset[col] = savgol_filter(subset[col], window, order)
    return ColumnDataSource(data=subset)
def make_plot(source, title):
    """Build the temperature figure: record/average/actual bands as quads."""
    fig = figure(x_axis_type="datetime", width=800, tools="", toolbar_location=None)
    fig.title.text = title
    # All three bands share the same horizontal extents and data source.
    band = dict(left='left', right='right', source=source)
    fig.quad(top='record_max_temp', bottom='record_min_temp',
             color=Blues4[2], legend_label="Record", **band)
    fig.quad(top='average_max_temp', bottom='average_min_temp',
             color=Blues4[1], legend_label="Average", **band)
    fig.quad(top='actual_max_temp', bottom='actual_min_temp',
             color=Blues4[0], alpha=0.5, line_color="black",
             legend_label="Actual", **band)
    # fixed attributes
    fig.xaxis.axis_label = None
    fig.yaxis.axis_label = "Temperature (F)"
    fig.axis.axis_label_text_font_style = "bold"
    fig.x_range = DataRange1d(range_padding=0.0)
    fig.grid.grid_line_alpha = 0.3
    return fig
def update_plot(attrname, old, new):
    """Select-widget callback: re-filter the data and refresh the plot."""
    selected_city = city_select.value
    plot.title.text = "Weather data for " + cities[selected_city]['title']
    refreshed = get_dataset(df, cities[selected_city]['airport'],
                            distribution_select.value)
    source.data.update(refreshed.data)
# Initial widget state.
city = 'Austin'
distribution = 'Discrete'
# Airport code and display title for each selectable city.
cities = {
    'Austin': {
        'airport': 'AUS',
        'title': 'Austin, TX',
    },
    'Boston': {
        'airport': 'BOS',
        'title': 'Boston, MA',
    },
    'Seattle': {
        'airport': 'SEA',
        'title': 'Seattle, WA',
    }
}
city_select = Select(value=city, title='City', options=sorted(cities.keys()))
distribution_select = Select(value=distribution, title='Distribution', options=['Discrete', 'Smoothed'])
# Load the raw 2015 weather data shipped alongside this script.
df = pd.read_csv(join(dirname(__file__), 'data/2015_weather.csv'))
source = get_dataset(df, cities[city]['airport'], distribution)
plot = make_plot(source, "Weather data for " + cities[city]['title'])
# Re-render whenever either widget changes.
city_select.on_change('value', update_plot)
distribution_select.on_change('value', update_plot)
controls = column(city_select, distribution_select)
curdoc().add_root(row(plot, controls))
curdoc().title = "Weather"
|
#MenuTitle: Delete Diagonal Nodes Between Extremes
# -*- coding: utf-8 -*-
from __future__ import print_function, division, unicode_literals
__doc__="""
Good for cleaning TTF curve. It removes Diagonal Node Between Extremes, after placing the current outline in the background.
"""
import GlyphsApp
import math
# Script state: operate on the frontmost font's selected layers.
f = Glyphs.font # frontmost font
sel = f.selectedLayers # active layers of selected glyphs
def deleteDiagonals( thisLayer ):
    # Remove on-curve nodes lying on a (near-)straight diagonal between
    # two extremes, but only when deleting them keeps the outline shape.
    for pathindex, p in enumerate(thisLayer.paths):
        numOfNodes = len(p.nodes)
        # Start at -1 so the wrap-around neighbourhood of node 0 is covered.
        for i in range( -1, numOfNodes):
            try:
                hNode = p.nodes[i-1]
                iNode = p.nodes[i]
                jNode = p.nodes[i+1]
                if iNode.type != GSOFFCURVE: #if thisNode is on-curve
                    if hNode.type == GSOFFCURVE and jNode.type == GSOFFCURVE:
                        # on-curve now found
                        # diagonal cleaner
                        try:
                            # Only act when a neighbouring segment is horizontal
                            # or vertical, i.e. an extreme is nearby.
                            if p.nodes[i+2].x == p.nodes[i+3].x or p.nodes[i+2].y == p.nodes[i+3].y or p.nodes[i-2].x == p.nodes[i-3].x or p.nodes[i-2].y == p.nodes[i-3].y:
                                if hNode.x == jNode.x or hNode.y == jNode.y:
                                    pass # because the node is extreme
                                else:
                                    # The node lies strictly between its handles
                                    # in both x and y, i.e. on a diagonal.
                                    boolx1 = hNode.x < iNode.x < jNode.x
                                    boolx2 = hNode.x > iNode.x > jNode.x
                                    booly1 = hNode.y < iNode.y < jNode.y
                                    booly2 = hNode.y > iNode.y > jNode.y
                                    if (boolx1 or boolx2) and (booly1 or booly2):
                                        # Near-identical incoming/outgoing angles
                                        # (< 0.1 rad) mark a redundant node.
                                        atan2hi = math.atan2(iNode.y-hNode.y,iNode.x-hNode.x)
                                        atan2ij = math.atan2(jNode.y-iNode.y,jNode.x-iNode.x)
                                        if abs(atan2ij-atan2hi) < 0.1:
                                            # Rehearse the removal on a copy: keep it
                                            # only if no inflection point appears.
                                            dupLayer = thisLayer.copy()
                                            dupPath = dupLayer.paths[pathindex]
                                            dupNode = dupPath.nodes[i]
                                            dupPath.removeNodeCheckKeepShape_(dupNode)
                                            nodesBefore = len(dupPath.nodes)
                                            dupLayer.addInflectionPoints()
                                            nodesAfter = len(dupPath.nodes)
                                            if (nodesBefore == nodesAfter):
                                                p.removeNodeCheckKeepShape_(iNode)
                        except:
                            # NOTE(review): bare except — presumably guards the
                            # i+2/i+3 index arithmetic; consider IndexError only.
                            pass
            except:
                # NOTE(review): bare except hides any scripting error; consider
                # narrowing to IndexError.
                pass
f.disableUpdateInterface() # suppresses UI updates in Font View
for l in sel:
    g = l.parent
    g.beginUndo() # begin undo grouping
    # Keep a copy of the current outline in the background for comparison.
    l.setBackground_(l)
    deleteDiagonals( l )
    deleteDiagonals( l ) # run the process again, just to make sure
    g.endUndo() # end undo grouping
f.enableUpdateInterface() # re-enables UI updates in Font View
|
import timeit
import numpy as np

# Time a single dict lookup, one million independent repetitions.
durations = timeit.repeat(
    'a["b"]',
    repeat=10 ** 6,
    number=1,
    setup="a = {'b': 3, 'c': 4, 'd': 5}"
)
# Report everything in units of 10^-7 seconds.
mul = 10 ** -7
mean_line = "mean = {:0.1f} * 10^-7, std={:0.1f} * 10^-7".format(
    np.mean(durations) / mul,
    np.std(durations) / mul
)
print(mean_line)
print("min = {:0.1f} * 10^-7".format(np.min(durations) / mul))
print("max = {:0.1f} * 10^-7".format(np.max(durations) / mul))
|
# Copyright 2019 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import re
from recipe_engine import recipe_api
class CommitPositionApi(recipe_api.RecipeApi):
  """Recipe module providing commit position parsing and formatting.

  A commit position string looks like 'refs/heads/main@{#12345}'.
  """
  RE_COMMIT_POSITION = re.compile(
      r'(?P<ref>refs/[^@]+)@{#(?P<revision>\d+)}')
  @classmethod
  def parse(cls, value):
    """Return a (ref, revision_number) tuple parsed from *value*.

    Raises ValueError when *value* is not a commit position string.
    """
    parsed = cls.RE_COMMIT_POSITION.match(value)
    if parsed is None:
      raise ValueError(
          'Commit position "%s" does not match r"%s"' %
          (value, cls.RE_COMMIT_POSITION.pattern))
    ref = parsed.group('ref')
    revision = int(parsed.group('revision'))
    return ref, revision
  @classmethod
  def format(cls, ref, revision_number):
    """Return the commit position string for *ref* at *revision_number*.

    ref must start with 'refs/'.
    """
    assert isinstance(ref, str)
    assert ref.startswith('refs/'), ref
    return '%s@{#%d}' % (ref, int(revision_number))
|
from calendar import timegm
from flask_login import UserMixin, login_user
from datetime import datetime
import flask_socketio as sio
from .Permissions import Permissions
from .Token import Token
from .database import Database
from .Room import Room, ROOMS
from .Logger import Logger
from .. import config
from .Layout import Layout
logged_users = {}
class User(UserMixin):
    """Flask-Login user backed by the `User` table.

    Persistent state (name, token, room membership) lives in the database;
    runtime state (socket rooms, chat logs) lives in the module-level
    ``ROOMS`` dict. Instances are cached per id in ``logged_users``.
    """
    # Database primary key of this user.
    _id = None
    def get_id(self):
        # Flask-Login requires the id as a string.
        return str(self.id())
    def is_authenticated(self):
        # Authenticated as long as the assigned token is still valid.
        # NOTE(review): token() can return None, which would raise here --
        # presumably every logged-in user always has a token; confirm.
        return self.token().valid()
    def id(self):
        return self._id
    def token(self):
        """Return this user's Token, or None if no token is assigned."""
        c = Database().get_cursor()
        c.execute("SELECT TokenId FROM User WHERE Id = ?;", (self.id(),))
        fetch = c.fetchone()
        if fetch[0] is None:
            return None
        return Token.from_id(fetch[0])
    def set_token(self, token: Token):
        """Assign *token* to this user and persist the change."""
        if not isinstance(token, Token):
            raise TypeError(
                f"Object of type `Token` expected, however type `{type(token)}` was passed")
        db = Database()
        db.get_cursor().execute(
            'UPDATE User SET TokenId = ? WHERE Id = ?;', (token.id(), self.id()))
        db.commit()
    def name(self):
        """Return the display name, or None when unset."""
        c = Database().get_cursor()
        c.execute("SELECT Name FROM User WHERE Id = ?;", (self.id(),))
        fetch = c.fetchone()
        return fetch[0] if fetch and fetch[0] else None
    def set_name(self, name: str):
        """Set the display name and persist the change."""
        if not isinstance(name, str):
            raise TypeError(
                f"Object of type `str` expected, however type `{type(name)}` was passed")
        db = Database()
        db.get_cursor().execute('UPDATE User SET Name = ? WHERE Id = ?;', (name, self.id()))
        db.commit()
    def sid(self):
        """Return the most recently updated socket session id, or None."""
        c = Database().get_cursor()
        c.execute(
            'SELECT SessionId FROM SessionId WHERE UserId = ? ORDER BY Updated DESC LIMIT 1;', (self.id(),))
        fetch = c.fetchone()
        return fetch[0] if fetch and fetch[0] else None
    def set_sid(self, sid: str):
        """Record *sid* as this user's current socket session id."""
        if not isinstance(sid, str):
            raise TypeError(
                f"Object of type `str` expected, however type `{type(sid)}` was passed")
        db = Database()
        db.get_cursor().execute('INSERT OR REPLACE INTO SessionId(`UserId`, `SessionId`) VALUES(?, ?);',
                                (self.id(), sid))
        db.commit()
    def latest_room(self):
        """Return the last room this user joined, or None."""
        c = Database().get_cursor()
        c.execute('SELECT LatestRoom FROM User WHERE Id = ?;', (self.id(),))
        fetch = c.fetchone()
        return Room(fetch[0]) if fetch and fetch[0] else None
    def set_latest_room(self, latest_room: Room):
        """Persist *latest_room* as the user's most recent room."""
        if not isinstance(latest_room, Room):
            raise TypeError(
                f"Object of type `Room` expected, however type `{type(latest_room)}` was passed")
        db = Database()
        db.get_cursor().execute('UPDATE User SET LatestRoom = ? WHERE Id = ?;',
                                (latest_room.id(), self.id()))
        db.commit()
    def join_room(self, room: Room):
        """Add the user to *room*: persist membership, join the socket room,
        lazily create the room's runtime state/log, and notify both the room
        ('status') and the joining client ('joined_room')."""
        if not isinstance(room, Room):
            raise TypeError(
                f"Object of type `Room` expected, however type `{type(room)}` was passed")
        db = Database()
        db.get_cursor().execute('INSERT OR REPLACE INTO UserRoom(`UserId`, `RoomId`) VALUES (?, ?);',
                                (self.id(), room.id()))
        db.commit()
        self.set_latest_room(room)
        sio.join_room(room.name(), self.sid())
        if room.id() not in ROOMS:
            # First user in this room: create runtime state and a log file
            # whose name is timestamped with a configurable date format.
            logfile_format = '%Y-%m-%d %H-%M-%S'
            if "logfile-date-format" in config["server"]:
                logfile_format = config["server"]["logfile-date-format"]
            logfile_date_format = '{:'+logfile_format+"}"
            logfile_date = logfile_date_format.format(datetime.now())
            ROOMS[room.id()] = {
                'log': Logger('log/{}-{}.log'.format(logfile_date, room.name())),
                'users': {},
                'listeners': {}
            }
        # Snapshot of users already present, taken BEFORE adding self so the
        # joining client's list does not include itself.
        users = [User.from_id(id).serialize()
                 for id in ROOMS[room.id()]['users']]
        ROOMS[room.id()]['users'][self.id()] = self
        # Replay history: public/new_image/text events addressed to everyone
        # or to this user, plus this user's own commands.
        history = []
        for event in ROOMS[room.id()]['log'].get_data():
            if (event["type"] == "new_image" or event["type"] == "text") and ('receiver' not in event or event["receiver"] == self.id()):
                history.append(event)
            if event["type"] == "command" and event["user"]['id'] == self.id():
                history.append(event)
        sio.emit('status', {
            'type': 'join',
            'user': self.serialize(),
            'room': room.serialize(),
            'timestamp': timegm(datetime.now().utctimetuple())
        }, room=room.name())
        sio.emit('joined_room', {
            'room': room.serialize(),
            'layout': Layout.from_json_file(room.layout_path()).serialize(),
            'users': users,
            'history': history,
            'self': self.serialize(),
            'permissions': Permissions(self.token(), room).serialize()
        }, room=self.sid())
        ROOMS[room.id()]['log'].append(
            {'type': "join", 'user': self.serialize(), 'room': room.serialize()})
        print(self.name(), "joined room:", room.name())
    def leave_room(self, room: Room):
        """Remove the user from *room*, notify clients, and clean up the
        room's runtime state when it becomes empty."""
        if not isinstance(room, Room):
            raise TypeError(
                f"Object of type `Room` expected, however type `{type(room)}` was passed")
        db = Database()
        db.get_cursor().execute(
            'DELETE FROM UserRoom WHERE UserId = ? AND RoomId = ?;', (self.id(), room.id()))
        db.commit()
        sio.leave_room(room.name(), self.sid())
        sio.emit('left_room', {'room': room.serialize()}, room=self.sid())
        ROOMS[room.id()]['log'].append(
            {'type': "leave", 'user': self.serialize(), 'room': room.serialize()})
        print(self.name(), "left room:", room.name())
        if room.id() in ROOMS:
            if self.id() in ROOMS[room.id()]['users']:
                del ROOMS[room.id()]['users'][self.id()]
            # NOTE(review): ROOMS[room.id()] is a dict that always contains
            # 'log'/'users'/'listeners' keys, so it is always truthy and this
            # cleanup branch can never run -- presumably the intended check
            # was `if not ROOMS[room.id()]['users']:`. Confirm and fix.
            if not ROOMS[room.id()]:
                del ROOMS[room.id()]
                sio.close_room(room.name())
        sio.emit('status', {
            'type': 'leave',
            'room': room.serialize(),
            'user': self.serialize(),
            'timestamp': timegm(datetime.now().utctimetuple())
        }, room=room.name())
    def rooms(self):
        """Return all Rooms this user is a member of."""
        return [Room(id[0]) for id in Database().get_cursor().execute('SELECT RoomId FROM UserRoom WHERE UserId = ?',
                                                                      (self.id(),))]
    def in_room(self, room: Room):
        """Return a truthy value when the user is a member of *room*.

        NOTE(review): fetch[0] here is a COUNT(*) result, so this returns
        Room(count) rather than the actual room -- only its truthiness is
        meaningful. Looks like a bug; confirm callers before changing.
        """
        if not isinstance(room, Room):
            raise TypeError(
                f"Object of type `Room` expected, however type `{type(room)}` was passed")
        c = Database().get_cursor()
        c.execute('SELECT COUNT(*) FROM UserRoom WHERE UserId = ? AND RoomId = ?',
                  (self.id(), room.id()))
        fetch = c.fetchone()
        return Room(fetch[0]) if fetch[0] else None
    def serialize(self):
        """Return a JSON-serializable dict describing this user."""
        # NOTE(review): assumes token() and latest_room() are non-None;
        # a user without either would raise here.
        return {
            'id': self.id(),
            'name': self.name(),
            'sid': self.sid(),
            'token': self.token().serialize(),
            'latest_room': self.latest_room().serialize(),
            'rooms': [room.serialize() for room in self.rooms()]
        }
    @classmethod
    def from_id(cls, id):
        """Return the cached User for *id*, or None if no such row exists."""
        if not isinstance(id, int) and not isinstance(id, str):
            raise TypeError(
                f"Object of type `int` or `str` expected, however type `{type(id)}` was passed")
        global logged_users
        if id not in logged_users:
            c = Database().get_cursor()
            c.execute('SELECT COUNT(*) FROM User WHERE Id = ?', (id,))
            logged_users[id] = cls(id) if c.fetchone()[0] != 0 else None
        return logged_users[id]
    @classmethod
    def from_sid(cls, sid: str):
        """Return the User owning socket session *sid*.

        NOTE(review): fetchone() returns None for an unknown sid, so `id[0]`
        would raise TypeError instead of returning None -- verify callers.
        """
        if not isinstance(sid, str):
            raise TypeError(
                f"Object of type `str` expected, however type `{type(sid)}` was passed")
        c = Database().get_cursor()
        c.execute('SELECT UserId FROM SessionId WHERE SessionId = ?', (sid,))
        id = c.fetchone()
        return cls(id[0]) if id[0] else None
    @classmethod
    def login(cls, name: str, token: Token):
        """Create a User row for *name*/*token* and log it in via Flask-Login.

        Returns None when *token* is falsy or invalid.
        """
        if not token:
            return None
        if not isinstance(name, str):
            raise TypeError(
                f"Object of type `str` expected, however type `{type(name)}` was passed")
        if not isinstance(token, Token):
            raise TypeError(
                f"Object of type `Token` expected, however type `{type(token)}` was passed")
        if not token.valid():
            return None
        db = Database()
        c = db.get_cursor()
        c.execute('INSERT INTO User(`TokenId`, `Name`) VALUES (?, ?);',
                  (token.id(), name))
        db.commit()
        user = cls(c.lastrowid)
        login_user(user)
        return user
    def __repr__(self):
        return str(self.serialize())
    def __init__(self, id: int):
        # Accepts int or numeric str; always stored as int.
        if not isinstance(id, int) and not isinstance(id, str):
            raise TypeError(
                f"Object of type `int` or `str` expected, however type `{type(id)}` was passed")
        self._id = int(id)
|
import os
import sys
import math
"""
Score: 1 full point. Well done!
Notes:
-------
First of all, your work is correct. I noticed you forked instead of cloning. It's actually
more efficient to clone though there is something to learn from cloning.
Docstrings:
- typically, we would write the title on the first line immediately after the first triple quote. By skipping to the next line
we inadvertently add a newline, which will show up in the parsed documentation.
- There are formal formats that one could follow. These can be parsed by, for example, PyCharm so that when you click the function
name at its point of use and press F1 you will see a bubble with the functions usage - very handy when you have functions in
different modules.
- One of the most important tools for documentation is Sphinx, primarily built to handle documentation of Python
objects (see https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#python-signatures). Sphinx can use any
markup language (Markdown, reStructureText) which it can then convert in HTML, LaTeX etc. Here is how we can improve the docstring to
use reStructuredText by placing some delimiters on the variables (it uses info field lists defined at
https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#info-field-lists):
:param float a: coefficient of x^2
:param float b: coefficient of x
:param float c: constant
:return: both solutions
:rtype: tuple(float, float)
- You correctly identified that we were missing brackets for each solution.
Another solution is using / for each dividend i.e. (-b + math.sqrt(discriminant)) / 2 / a
- From next class I'll score each exercise with 10 points for a max of 100 points so we can have some granularity.
"""
def calculate(a, b, c):
    """Solve the quadratic equation a*x^2 + b*x + c = 0.

    Parameters:
        a (float): quadratic coefficient, must not be zero
        b (float): linear coefficient
        c (float): constant term

    Returns:
        tuple(float, float): the two real roots (x1 >= x2 numerically by
        construction), or (None, None) when the discriminant is negative
        and no real solution exists.

    Raises:
        ValueError: if a == 0 (the equation is not quadratic).
    """
    # Enforce the documented precondition explicitly instead of letting a
    # ZeroDivisionError escape from the root computation below.
    if a == 0:
        raise ValueError("coefficient 'a' must not be zero")
    discriminant = b ** 2 - 4 * a * c
    if discriminant < 0:
        return None, None  # no real roots
    root = math.sqrt(discriminant)  # hoisted: computed once for both roots
    x1 = (-b + root) / (2 * a)
    x2 = (-b - root) / (2 * a)
    return x1, x2
def main():
    """Prompt for the three coefficients, solve, and print both roots."""
    coeff_a = float(input("a: "))
    coeff_b = float(input("b: "))
    coeff_c = float(input("c: "))
    root1, root2 = calculate(coeff_a, coeff_b, coeff_c)
    print(f"x1={root1}, x2={root2}")
    return os.EX_OK
if __name__ == "__main__":
    sys.exit(main())
|
import os
import json
import threading
import time
from Qt import QtCore, QtGui, QtWidgets
from pype.vendor import ftrack_api
from pypeapp import style
from pype.ftrack import FtrackServer, credentials
from . import login_dialog
from pype import api as pype
log = pype.Logger().get_logger("FtrackModule", "ftrack")
class FtrackModule:
    """Tray module managing Ftrack login state, the action-server thread and
    the timer-event thread, plus the corresponding tray menu entries."""
    def __init__(self, main_parent=None, parent=None):
        # NOTE(review): main_parent is accepted but never stored -- confirm
        # whether it is needed by the tray framework.
        self.parent = parent
        self.widget_login = login_dialog.Login_Dialog_ui(self)
        self.action_server = FtrackServer('action')
        self.thread_action_server = None
        self.thread_timer = None
        # State flags driving menu visibility.
        self.bool_logged = False
        self.bool_action_server = False
        self.bool_timer_event = False
    def show_login_widget(self):
        self.widget_login.show()
    def validate(self):
        """Check stored credentials; show the login dialog when they are
        missing or invalid. Returns the validation result."""
        validation = False
        cred = credentials._get_credentials()
        try:
            if 'username' in cred and 'apiKey' in cred:
                validation = credentials._check_credentials(
                    cred['username'],
                    cred['apiKey']
                )
                if validation is False:
                    self.show_login_widget()
            else:
                self.show_login_widget()
        except Exception as e:
            log.error("We are unable to connect to Ftrack: {0}".format(e))
            # NOTE(review): called without arguments here -- verify
            # _check_credentials supports a no-arg form.
            validation = credentials._check_credentials()
        if validation is True:
            log.info("Connected to Ftrack successfully")
            self.loginChange()
        else:
            log.warning("Please sign in to Ftrack")
            self.bool_logged = False
            self.set_menu_visibility()
        return validation
    # Necessary - login_dialog works with this method after logging in
    def loginChange(self):
        # Mark as logged in and bring up the action server.
        self.bool_logged = True
        self.set_menu_visibility()
        self.start_action_server()
    def logout(self):
        """Clear credentials, stop the action server and update the menu."""
        credentials._clear_credentials()
        self.stop_action_server()
        log.info("Logged out of Ftrack")
        self.bool_logged = False
        self.set_menu_visibility()
    # Actions part
    def start_action_server(self):
        # Launch the action server in a daemon thread (no-op if running).
        if self.thread_action_server is None:
            self.thread_action_server = threading.Thread(
                target=self.set_action_server
            )
            self.thread_action_server.daemon = True
            self.thread_action_server.start()
            log.info("Ftrack action server launched")
            self.bool_action_server = True
            self.set_menu_visibility()
    def set_action_server(self):
        # Thread body: blocks in run_server() until stopped or crashed.
        try:
            self.action_server.run_server()
        except Exception as exc:
            log.error(
                "Ftrack Action server crashed! Please try to start again.",
                exc_info=True
            )
            # TODO show message to user
            self.bool_action_server = False
            self.set_menu_visibility()
    def reset_action_server(self):
        self.stop_action_server()
        self.start_action_server()
    def stop_action_server(self):
        """Stop the action server session and join its thread."""
        try:
            self.action_server.stop_session()
            if self.thread_action_server is not None:
                self.thread_action_server.join()
                self.thread_action_server = None
            log.info("Ftrack action server stopped")
            self.bool_action_server = False
            self.set_menu_visibility()
        except Exception as e:
            log.error("During Killing action server: {0}".format(e))
    # Definition of Tray menu
    def tray_menu(self, parent_menu):
        """Build the Ftrack submenu and its actions inside *parent_menu*."""
        # Menu for Tray App
        self.menu = QtWidgets.QMenu('Ftrack', parent_menu)
        self.menu.setProperty('submenu', 'on')
        # Actions - server
        self.smActionS = self.menu.addMenu("Action server")
        self.aRunActionS = QtWidgets.QAction(
            "Run action server", self.smActionS
        )
        self.aResetActionS = QtWidgets.QAction(
            "Reset action server", self.smActionS
        )
        self.aStopActionS = QtWidgets.QAction(
            "Stop action server", self.smActionS
        )
        self.aRunActionS.triggered.connect(self.start_action_server)
        self.aResetActionS.triggered.connect(self.reset_action_server)
        self.aStopActionS.triggered.connect(self.stop_action_server)
        self.smActionS.addAction(self.aRunActionS)
        self.smActionS.addAction(self.aResetActionS)
        self.smActionS.addAction(self.aStopActionS)
        # Actions - basic
        self.aLogin = QtWidgets.QAction("Login", self.menu)
        self.aLogin.triggered.connect(self.validate)
        self.aLogout = QtWidgets.QAction("Logout", self.menu)
        self.aLogout.triggered.connect(self.logout)
        self.menu.addAction(self.aLogin)
        self.menu.addAction(self.aLogout)
        self.bool_logged = False
        self.set_menu_visibility()
        parent_menu.addMenu(self.menu)
    def tray_start(self):
        self.validate()
    # Definition of visibility of each menu actions
    def set_menu_visibility(self):
        """Sync menu-item visibility (and the timer thread) with the
        logged-in / action-server state flags."""
        self.smActionS.menuAction().setVisible(self.bool_logged)
        self.aLogin.setVisible(not self.bool_logged)
        self.aLogout.setVisible(self.bool_logged)
        if self.bool_logged is False:
            if self.bool_timer_event is True:
                self.stop_timer_thread()
            return
        self.aRunActionS.setVisible(not self.bool_action_server)
        self.aResetActionS.setVisible(self.bool_action_server)
        self.aStopActionS.setVisible(self.bool_action_server)
        if self.bool_timer_event is False:
            self.start_timer_thread()
    def start_timer_thread(self):
        # Lazily create and start the Ftrack event-listening Qt thread.
        try:
            if self.thread_timer is None:
                self.thread_timer = FtrackEventsThread(self)
                self.bool_timer_event = True
                self.thread_timer.signal_timer_started.connect(
                    self.timer_started
                )
                self.thread_timer.signal_timer_stopped.connect(
                    self.timer_stopped
                )
                self.thread_timer.start()
        except Exception:
            pass
    def stop_timer_thread(self):
        # Hard-terminate the Qt thread (it blocks in event_hub.wait()).
        try:
            if self.thread_timer is not None:
                self.thread_timer.terminate()
                self.thread_timer.wait()
                self.thread_timer = None
        except Exception as e:
            log.error("During Killing Timer event server: {0}".format(e))
    def process_modules(self, modules):
        # Hook into the TimersManager module when present.
        if 'TimersManager' in modules:
            self.timer_manager = modules['TimersManager']
            self.timer_manager.add_module(self)
    def start_timer_manager(self, data):
        if self.thread_timer is not None:
            self.thread_timer.ftrack_start_timer(data)
    def stop_timer_manager(self):
        if self.thread_timer is not None:
            self.thread_timer.ftrack_stop_timer()
    def timer_started(self, data):
        if hasattr(self, 'timer_manager'):
            self.timer_manager.start_timers(data)
    def timer_stopped(self):
        if hasattr(self, 'timer_manager'):
            self.timer_manager.stop_timers()
class FtrackEventsThread(QtCore.QThread):
    """Qt thread that listens to Ftrack timer update events for the current
    user and relays start/stop to the UI via Qt signals."""
    # Senders
    signal_timer_started = QtCore.Signal(object)
    signal_timer_stopped = QtCore.Signal()
    def __init__(self, parent):
        # NOTE(review): `parent` is accepted but not passed to QThread nor
        # stored -- confirm this is intentional.
        super(FtrackEventsThread, self).__init__()
        cred = credentials._get_credentials()
        self.username = cred['username']
        self.user = None
        self.last_task = None
    def run(self):
        """Thread body: connect a session, emit a start signal for any
        already-running timer, then block waiting for event-hub events."""
        self.timer_session = ftrack_api.Session(auto_connect_event_hub=True)
        self.timer_session.event_hub.subscribe(
            'topic=ftrack.update and source.user.username={}'.format(
                self.username
            ),
            self.event_handler)
        user_query = 'User where username is "{}"'.format(self.username)
        self.user = self.timer_session.query(user_query).one()
        timer_query = 'Timer where user.username is "{}"'.format(self.username)
        timer = self.timer_session.query(timer_query).first()
        if timer is not None:
            self.last_task = timer['context']
            self.signal_timer_started.emit(
                self.get_data_from_task(self.last_task)
            )
        # Blocks until the thread is terminated.
        self.timer_session.event_hub.wait()
    def get_data_from_task(self, task_entity):
        """Flatten a task entity into the dict payload sent with signals."""
        data = {}
        data['task_name'] = task_entity['name']
        data['task_type'] = task_entity['type']['name']
        data['project_name'] = task_entity['project']['full_name']
        data['hierarchy'] = self.get_parents(task_entity['parent'])
        return data
    def get_parents(self, entity):
        """Return ancestor names from just below the project down to
        *entity* itself (recursive)."""
        output = []
        if entity.entity_type.lower() == 'project':
            return output
        output.extend(self.get_parents(entity['parent']))
        output.append(entity['name'])
        return output
    def event_handler(self, event):
        """Translate ftrack.update events on timer objects into
        started/stopped signals."""
        try:
            if event['data']['entities'][0]['objectTypeId'] != 'timer':
                return
        except Exception:
            # Malformed/unrelated event payload: ignore.
            return
        # 'start' transitioning from None -> value means a timer started,
        # value -> None means it stopped.
        new = event['data']['entities'][0]['changes']['start']['new']
        old = event['data']['entities'][0]['changes']['start']['old']
        if old is None and new is None:
            return
        timer_query = 'Timer where user.username is "{}"'.format(self.username)
        timer = self.timer_session.query(timer_query).first()
        if timer is not None:
            self.last_task = timer['context']
        if old is None:
            self.signal_timer_started.emit(
                self.get_data_from_task(self.last_task)
            )
        elif new is None:
            self.signal_timer_stopped.emit()
    def ftrack_stop_timer(self):
        """Stop the user's running timer on Ftrack, if any."""
        actual_timer = self.timer_session.query(
            'Timer where user_id = "{0}"'.format(self.user['id'])
        ).first()
        if actual_timer is not None:
            self.user.stop_timer()
            self.timer_session.commit()
            self.signal_timer_stopped.emit()
    def ftrack_start_timer(self, input_data):
        """Start an Ftrack timer for the task described by *input_data*
        (task_name / hierarchy / project_name), unless that exact task's
        timer is already running."""
        if self.user is None:
            return
        actual_timer = self.timer_session.query(
            'Timer where user_id = "{0}"'.format(self.user['id'])
        ).first()
        if (
            actual_timer is not None and
            input_data['task_name'] == self.last_task['name'] and
            input_data['hierarchy'][-1] == self.last_task['parent']['name']
        ):
            return
        input_data['entity_name'] = input_data['hierarchy'][-1]
        task_query = (
            'Task where name is "{task_name}"'
            ' and parent.name is "{entity_name}"'
            ' and project.full_name is "{project_name}"'
        ).format(**input_data)
        task = self.timer_session.query(task_query).one()
        self.last_task = task
        self.user.start_timer(task)
        self.timer_session.commit()
        self.signal_timer_started.emit(
            self.get_data_from_task(self.last_task)
        )
|
#!/usr/bin/env python
import random
import numpy as np
import pandas as pd
import six
def mmm(data, method= "mean/median/mode"):
    """Multivariate imputation by mean, median or mode.

    Parameters
    ----------
    data : pandas.DataFrame
        Data to impute. Must have at least two rows and at least two
        non-null values in every column.
    method : str
        One of "mean", "median" or "mode". "mode" applies to every column
        (numeric and categorical); "mean"/"median" apply to numeric columns
        only, with non-numeric columns passed through unchanged (numeric
        columns come first in the result).

    Returns
    -------
    pandas.DataFrame
        The imputed data.

    Example
    -------
    >>> df = pd.DataFrame({'A': [1.0, float('nan'), 3.0]})
    >>> mmm(df, method="mean")['A'].tolist()
    [1.0, 2.0, 3.0]
    """
    if method not in ("mean","median","mode"):
        raise Exception("Only mean, median and mode can be used as methods in mmm")
    elif not isinstance(data, pd.DataFrame) :
        raise TypeError('datatype for input data can only be pandas dataframe')
    elif data.shape[0] < 2 :
        raise TypeError('dataframe should have more than one row')
    # Each column needs >= 2 non-null values so the statistic is meaningful.
    for count in data.count():
        if count < 2:
            raise ValueError('dataframe should have atleast two not-null value in each column')
    df = data
    # Public API instead of the private DataFrame._get_numeric_data().
    df_num = df.select_dtypes(include='number')
    if (method == "mode") :
        # Mode works for numeric and character columns alike.
        result = df.apply(lambda x: x.fillna(x.mode()[0]),axis=0)
    elif df_num.shape[1] == 0:
        raise Exception("No numeric columns, mean and median imputation not valid")
    else :
        if method == "mean" :
            out = df_num.apply(lambda x: x.fillna(x.mean()),axis=0)
        else :  # median
            out = df_num.apply(lambda x: x.fillna(x.median()),axis=0)
        # Identify the non-numeric columns that were left out above.
        non_numeric = [col for col in df.columns.values.tolist()
                       if col not in out.columns.values.tolist()]
        # Bug fix: `.ix` was removed in pandas 1.0 -- use label-based `.loc`.
        result = pd.concat([out, df.loc[:, non_numeric]], axis=1)
    return result # imputed dataset
|
"""Classes to track inputs/outputs of modeling protocols.
See also :class:`modelcif.protocol.Step`.
"""
class Data(object):
    """A piece of data consumed or produced by part of a modeling protocol.

    Usually a subclass is handed to :class:`modelcif.protocol.Step` to
    describe a step's input or output:

    - A database of possible template sequences/structures to construct
      or search (:class:`modelcif.ReferenceDatabase`)
    - A template structure (:class:`modelcif.Template`)
    - The sequence of the target (:class:`modelcif.Entity`)
    - A target-template alignment (:mod:`modelcif.alignment`)
    - Target structure coordinates (:class:`modelcif.model.Model`)

    The class can also be instantiated directly for any other kind of
    input/output data.

    :param str name: A short name for the data.
    :param str details: A longer description of the data.
    """
    # Class-level defaults, overridden per subclass / per instance.
    data_content_type = 'other'
    data_other_details = None
    def __init__(self, name, details=None):
        self.data_other_details = details
        self.name = name
class DataGroup(list):
    """An ordered group of :class:`Data` objects treated as a single unit.

    Use this to bundle several :class:`Data` objects when one modeling
    protocol step consumes or produces multiple pieces of data; see
    :class:`modelcif.protocol.Step`. It behaves exactly like a regular
    Python list.
    """
|
# Authors: CommPy contributors
# License: BSD 3-Clause
from __future__ import division # Python 2 compatibility
from numpy import arange, log10
from numpy.random import seed
from numpy.testing import run_module_suite, dec, assert_allclose
from commpy.channels import MIMOFlatChannel, SISOFlatChannel
from commpy.modulation import kbest
from commpy.wifi80211 import Wifi80211
@dec.slow
def test_wifi80211_siso_channel():
    """Regression-check MCS 1 BER over an AWGN SISO channel."""
    seed(17121996)  # fixed seed so the BER estimates are reproducible
    wifi = Wifi80211(1)
    channel = SISOFlatChannel(fading_param=(1 + 0j, 0))
    ber = wifi.link_performance(channel, range(0, 9, 2), 10 ** 4, 600)[0]
    reference = (0.489, 0.503, 0.446, 0.31, 0.015)  # From previous tests
    assert_allclose(ber, reference, rtol=0.3,
                    err_msg='Wrong performance for SISO QPSK and AWGN channel')
@dec.slow
def test_wifi80211_mimo_channel():
    """Regression-check MCS 3 BER over a 4x4 uncorrelated Rayleigh channel."""
    seed(17121996)  # fixed seed so the BER estimates are reproducible
    # Apply link_performance to MIMO 16QAM and 4x4 Rayleigh channel
    wifi = Wifi80211(3)
    channel = MIMOFlatChannel(4, 4)
    channel.uncorr_rayleigh_fading(complex)
    modem = wifi.get_modem()
    def receiver(y, h, constellation, noise_var):
        # K-best sphere detection followed by hard-decision demodulation.
        return modem.demodulate(kbest(y, h, constellation, 16), 'hard')
    snrs = arange(0, 21, 5) + 10 * log10(modem.num_bits_symbol)
    ber = wifi.link_performance(channel, snrs, 10 ** 4, 600,
                                receiver=receiver)[0]
    reference = (0.535, 0.508, 0.521, 0.554, 0.475)  # From previous test
    assert_allclose(ber, reference, rtol=1.25,
                    err_msg='Wrong performance for MIMO 16QAM and 4x4 Rayleigh channel')
if __name__ == "__main__":
    run_module_suite()
|
#
# 794. Valid Tic-Tac-Toe State
#
# Q: https://leetcode.com/problems/valid-tic-tac-toe-state/
# A: https://leetcode.com/problems/valid-tic-tac-toe-state/discuss/117603/Javascript-Python3-C%2B%2B-Concise-solutions
#
from typing import List
class Solution:
    def validTicTacToe(self, A: List[str], winX = False, winO = False) -> bool:
        """Return True if board A (3 strings of 'X'/'O'/' ') is reachable
        in a valid tic-tac-toe game where X moves first."""
        x_count = sum(row.count('X') for row in A)
        o_count = sum(row.count('O') for row in A)
        def has_won(mark: str) -> bool:
            # Collect all 8 winning lines: 3 rows, 3 columns, 2 diagonals.
            lines = [A[i] for i in range(3)]
            lines += [A[0][j] + A[1][j] + A[2][j] for j in range(3)]
            lines.append(A[0][0] + A[1][1] + A[2][2])
            lines.append(A[0][2] + A[1][1] + A[2][0])
            return mark * 3 in lines
        winX = has_won('X')
        winO = has_won('O')
        if winX and winO:
            return False  # both players cannot have three in a row
        if winX and x_count - o_count != 1:
            return False  # X wins only by having made the last move
        if winO and x_count != o_count:
            return False  # O wins only with equal move counts
        # No winner: X moves first, so counts differ by 0 or 1.
        return x_count - o_count in (0, 1)
|
# Python 2 launcher: imports a named simulation driver module and runs it.
import os
import sys
import time
import logging
try:
    import py_modelica as pym
    print 'Found py_modelica in virtual python environment'
except ImportError as err:
    # NOTE(review): on import failure `pym` stays undefined, so the
    # pym.instantiate_logger call below would raise NameError -- presumably
    # the script is only ever run inside the META virtual environment.
    print err.message
    print 'Use META virtual python environment'
from optparse import OptionParser
parser = OptionParser()
parser.add_option("-d", "--driver", dest="driver",
                  help="Name of the driver to be executed")
if __name__=='__main__':
    # Make sure the log directory exists before instantiating the logger.
    if not os.path.isdir('log'):
        os.mkdir('log')
    log = pym.instantiate_logger(os.path.join('log', 'debug.log'))
    (options, args) = parser.parse_args()
    driver_name = options.driver
    # Import the driver module by name (-d/--driver) and hand over control.
    driver = __import__(driver_name)
    driver.main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-24 18:47
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django (1.11.1); drops the `type` field from the
    # `chart` model. Do not hand-edit -- create a new migration instead.
    dependencies = [
        ('core', '0029_chart_slug'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='chart',
            name='type',
        ),
    ]
|
from django.conf.urls import re_path
from . import views
# URL routes for this app: index, search results, and per-location images.
# NOTE(review): the non-empty patterns are unanchored (no trailing '$'),
# so e.g. r'^search/' also matches '/search/anything/else' -- confirm this
# is intended before adding anchors.
urlpatterns = [
    re_path(r'^$', views.index, name='index'),
    re_path(r'^search/', views.search_results, name='search'),
    re_path(r'^location/(?P<location>\w+)/', views.image_location, name='location'),
]
|
#!/usr/bin/env python
"""
.. module:: workbench
:synopsis: Experiments in Python.
.. moduleauthor:: Steve Knipmeyer <steve@modelrelief.org>
"""
# https://nikolak.com/pyqt-qt-designer-getting-started/
from PyQt5 import QtWidgets # Import the PyQt4 module we'll need
import os
import sys # We need sys so that we can pass argv to QApplication
import design # This file holds our MainWindow and all design related things
# it also keeps events, etc that we defined in Qt Designer
class ExampleApp(QtWidgets.QMainWindow, design.Ui_MainWindow):
    """Main window: lets the user pick a folder and lists its files."""
    def __init__(self):
        # Explaining super is out of the scope of this article
        # So please google it if you're not familar with it
        # Simple reason why we use it here is that it allows us to
        # access variables, methods etc in the design.py file
        super().__init__()
        self.setupUi(self)  # This is defined in design.py file automatically
                            # It sets up layout and widgets that are defined
        self.btnBrowse.clicked.connect(self.browse_folder)  # When the button is pressed
                                                            # Execute browse_folder function
    def browse_folder(self):
        """Ask the user for a directory and list its entries in the widget."""
        self.listWidget.clear() # In case there are any existing elements in the list
        directory = QtWidgets.QFileDialog.getExistingDirectory(self,
                                                               "Pick a folder")
        # execute getExistingDirectory dialog and set the directory variable to be equal
        # to the user selected directory
        if directory: # if user didn't pick a directory don't continue
            for file_name in os.listdir(directory):  # for all files, if any, in the directory
                self.listWidget.addItem(file_name)   # add file to the listWidget
def main():
    """Create the Qt application, show the main window, run the event loop."""
    app = QtWidgets.QApplication(sys.argv)
    window = ExampleApp()
    window.show()
    app.exec_()
if __name__ == '__main__':
    main()
|
"""Functions for formatting messages.
This module allows for messages to be consistently formatted by
message type. Some message types are always printed, while others
are only printed if verbose mode is enabled.
By default, error, warning, and success messages are always printed
while info and debug messages are not. To modify the behavior for
a specific message in a different module, set `always` to True
or False as needed.
"""
import typer
from . import config
def _echo(icon: str, msg: str, fg: str, always: bool) -> None:
    """Render *msg* in color *fg*, optionally icon-prefixed, honoring the
    global verbosity setting (printed when *always* or VERBOSE)."""
    text = f"{icon} {msg}" if config.MESSAGE_ICONS else msg
    if always or config.VERBOSE:
        typer.echo(typer.style(text, fg=fg))
def error(msg: str, always: bool = True, icon: str = "❌") -> None:
    """Print an error message in red (shown by default)."""
    _echo(icon, msg, typer.colors.RED, always)
def warning(msg: str, always: bool = True, icon: str = "⚠️ ") -> None:
    """Print a warning message in yellow (shown by default)."""
    _echo(icon, msg, typer.colors.YELLOW, always)
def info(msg: str, always: bool = False, icon: str = "") -> None:
    """Print an info message in blue (verbose-only by default)."""
    _echo(icon, msg, typer.colors.BLUE, always)
def debug(msg: str, always: bool = False, icon: str = "🐞") -> None:
    """Print a debug message in magenta (verbose-only by default)."""
    _echo(icon, msg, typer.colors.MAGENTA, always)
def success(msg: str, always: bool = True, icon: str = "✔️ ") -> None:
    """Print a success message in green (shown by default)."""
    _echo(icon, msg, typer.colors.GREEN, always)
def section(msg: str, always: bool = False, icon: str = "📑") -> None:
    """Print a '=====' framed section header (verbose-only by default)."""
    framed = f"===== {msg} ====="
    _echo(icon, framed, typer.colors.BRIGHT_CYAN, always)
|
# coding: utf-8
"""
Healthbot APIs
API interface for Healthbot application # noqa: E501
OpenAPI spec version: 1.0.0
Contact: healthbot-hackers@juniper.net
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SshKeyProfileSchema(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'name': 'str',
'ssh_private_key_file': 'str',
'ssh_private_key_passphrase': 'str'
}
attribute_map = {
'name': 'name',
'ssh_private_key_file': 'ssh-private-key-file',
'ssh_private_key_passphrase': 'ssh-private-key-passphrase'
}
    def __init__(self, name=None, ssh_private_key_file=None, ssh_private_key_passphrase=None): # noqa: E501
        """SshKeyProfileSchema - a model defined in Swagger""" # noqa: E501
        # Backing fields for the declared swagger properties.
        self._name = None
        self._ssh_private_key_file = None
        self._ssh_private_key_passphrase = None
        self.discriminator = None
        # Assign via the property setters so None values are rejected.
        self.name = name
        self.ssh_private_key_file = ssh_private_key_file
        self.ssh_private_key_passphrase = ssh_private_key_passphrase
    @property
    def name(self):
        """Gets the name of this SshKeyProfileSchema. # noqa: E501

        SSH Key profile name # noqa: E501

        :return: The name of this SshKeyProfileSchema. # noqa: E501
        :rtype: str
        """
        return self._name
    @name.setter
    def name(self, name):
        """Sets the name of this SshKeyProfileSchema.

        SSH Key profile name # noqa: E501

        :param name: The name of this SshKeyProfileSchema. # noqa: E501
        :type: str
        """
        # Required property: reject None.
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
        self._name = name
    @property
    def ssh_private_key_file(self):
        """Gets the ssh_private_key_file of this SshKeyProfileSchema. # noqa: E501

        SSH private key file name # noqa: E501

        :return: The ssh_private_key_file of this SshKeyProfileSchema. # noqa: E501
        :rtype: str
        """
        return self._ssh_private_key_file
@ssh_private_key_file.setter
def ssh_private_key_file(self, ssh_private_key_file):
"""Sets the ssh_private_key_file of this SshKeyProfileSchema.
SSH private key file name # noqa: E501
:param ssh_private_key_file: The ssh_private_key_file of this SshKeyProfileSchema. # noqa: E501
:type: str
"""
if ssh_private_key_file is None:
raise ValueError("Invalid value for `ssh_private_key_file`, must not be `None`") # noqa: E501
self._ssh_private_key_file = ssh_private_key_file
@property
def ssh_private_key_passphrase(self):
"""Gets the ssh_private_key_passphrase of this SshKeyProfileSchema. # noqa: E501
SSH private key passphrase # noqa: E501
:return: The ssh_private_key_passphrase of this SshKeyProfileSchema. # noqa: E501
:rtype: str
"""
return self._ssh_private_key_passphrase
@ssh_private_key_passphrase.setter
def ssh_private_key_passphrase(self, ssh_private_key_passphrase):
"""Sets the ssh_private_key_passphrase of this SshKeyProfileSchema.
SSH private key passphrase # noqa: E501
:param ssh_private_key_passphrase: The ssh_private_key_passphrase of this SshKeyProfileSchema. # noqa: E501
:type: str
"""
if ssh_private_key_passphrase is None:
raise ValueError("Invalid value for `ssh_private_key_passphrase`, must not be `None`") # noqa: E501
self._ssh_private_key_passphrase = ssh_private_key_passphrase
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SshKeyProfileSchema):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
import numpy as np
import cv2
import os.path
from argparse import ArgumentParser
import torch
import torch.nn.functional as F
from torch.autograd import Variable
import sys
sys.path.append("./correlation_package/build/lib.linux-x86_64-3.6")
import cscdnet
class DataInfo:
    """Static dataset geometry and cross-validation configuration."""

    def __init__(self):
        # Input image geometry (width x height) fed to the network.
        self.width, self.height = 1024, 224
        # Index range of the evaluated samples.
        self.no_start, self.no_end = 0, 100
        # Number of cross-validation folds.
        self.num_cv = 5
class Test:
    """Runs change-detection inference over the PCD dataset and writes
    side-by-side result images.

    Attributes such as ``self.t0``, ``self.t1``, ``self.mask``, ``self.index``
    and the resize/original dimensions are set by :meth:`run` before
    :meth:`test` is called; ``self.model`` is loaded per CV fold.
    Requires a CUDA device (inputs and model are moved with ``.cuda()``).
    """

    def __init__(self, arguments):
        # Parsed CLI arguments (datadir, checkpointdir, use_corr, dataset).
        self.args = arguments
        self.di = DataInfo()

    def test(self):
        """Run the model on the current (t0, t1) pair and display/save results."""
        # Stack the two input images channel-wise into a single 6-channel tensor.
        _inputs = torch.from_numpy(np.concatenate((self.t0, self.t1), axis=0)).contiguous()
        _inputs = Variable(_inputs).view(1, -1, self.h_resize, self.w_resize)
        _inputs = _inputs.cuda()
        _outputs = self.model(_inputs)
        inputs = _inputs[0].cpu().data
        # Split back into the two 3-channel input images.
        image_t0 = inputs[0:3, :, :]
        image_t1 = inputs[3:6, :, :]
        # Undo the [-1, 1] normalization back to ~[0, 256] pixel values.
        image_t0 = (image_t0 + 1.0) * 128
        image_t1 = (image_t1 + 1.0) * 128
        # Ground-truth mask: 0 where changed (True), 255 elsewhere.
        mask_gt = np.where(self.mask.data.numpy().squeeze(axis=0) == True, 0, 255)
        outputs = _outputs[0].cpu().data
        # Channel 1 of the 2-class softmax is the "change" probability, scaled to [0, 255].
        mask_pred = F.softmax(outputs[0:2, :, :], dim=0)[1] * 255
        self.display_results(image_t0, image_t1, mask_pred, mask_gt)

    def display_results(self, t0, t1, mask_pred, mask_gt):
        """Save the predicted mask and a 2x2 composite (t0 | t1 / gt | pred)."""
        w, h = self.w_orig, self.h_orig
        # Resize everything back to the original image dimensions for display.
        t0_disp = cv2.resize(np.transpose(t0.numpy(), (1, 2, 0)).astype(np.uint8), (w, h))
        t1_disp = cv2.resize(np.transpose(t1.numpy(), (1, 2, 0)).astype(np.uint8), (w, h))
        mask_pred_disp = cv2.resize(cv2.cvtColor(mask_pred.numpy().astype(np.uint8), cv2.COLOR_GRAY2RGB), (w, h))
        mask_gt_disp = cv2.resize(cv2.cvtColor(mask_gt.astype(np.uint8), cv2.COLOR_GRAY2RGB), (w, h))
        # 2x2 grid: top row inputs, bottom row ground truth / prediction.
        img_out = np.zeros((h * 2, w * 2, 3), dtype=np.uint8)
        img_out[0:h, 0:w, :] = t0_disp
        img_out[0:h, w:w * 2, :] = t1_disp
        img_out[h:h * 2, 0:w * 1, :] = mask_gt_disp
        img_out[h:h * 2, w * 1:w * 2, :] = mask_pred_disp
        # Write the raw mask and the composite under <checkpointdir>/result/{mask,disp}/.
        for dn, img in zip(['mask', 'disp'], [mask_pred_disp, img_out]):
            dn_save = os.path.join(self.args.checkpointdir, 'result', dn)
            fn_save = os.path.join(dn_save, '{0:08d}.png'.format(self.index))
            if not os.path.exists(dn_save):
                os.makedirs(dn_save)
            print('Writing ... ' + fn_save)
            cv2.imwrite(fn_save, img)

    def run(self):
        """Loop over CV folds, load the matching checkpoint, and test each
        sample belonging to that fold's test split."""
        for i_set in range(0, self.di.num_cv):
            if self.args.use_corr:
                print('Correlated Siamese Change Detection Network (CSCDNet)')
                self.model = cscdnet.Model(inc=6, outc=2, corr=True, pretrained=True)
                fn_model = os.path.join(os.path.join(self.args.checkpointdir, 'set{}'.format(i_set), 'cscdnet-00030000.pth'))
            else:
                print('Siamese Change Detection Network (Siamese CDResNet)')
                self.model = cscdnet.Model(inc=6, outc=2, corr=False, pretrained=True)
                fn_model = os.path.join(os.path.join(self.args.checkpointdir, 'set{}'.format(i_set), 'cdnet-00030000.pth'))
            if os.path.isfile(fn_model) is False:
                print("Error: Cannot read file ... " + fn_model)
                exit(-1)
            else:
                print("Reading model ... " + fn_model)
            self.model.load_state_dict(torch.load(fn_model))
            self.model = self.model.cuda()
            if self.args.dataset == 'PCD':
                from dataset_pcd import PCD_full
                for dataset in ['TSUNAMI']:
                    loader_test = PCD_full(os.path.join(self.args.datadir, dataset), self.di.no_start, self.di.no_end, self.di.width, self.di.height)
                    for index in range(0, loader_test.__len__()):
                        # Fold membership: sample index modulo 10 selects the test slice for this fold.
                        if i_set * (10 / self.di.num_cv) <= (index % 10) < (i_set + 1) * (10 / self.di.num_cv):
                            self.index = index
                            self.t0, self.t1, self.mask, self.w_orig, self.h_orig, self.w_resize, self.h_resize = loader_test.__getitem__(index)
                            self.test()
                        else:
                            continue
            else:
                print('Error: Unexpected dataset')
                exit(-1)
if __name__ == '__main__':
    # Build the CLI, then hand the parsed arguments straight to the test driver.
    parser = ArgumentParser(description='Start testing ...')
    parser.add_argument('--datadir', required=True)
    parser.add_argument('--checkpointdir', required=True)
    parser.add_argument('--use-corr', action='store_true', help='using correlation layer')
    parser.add_argument('--dataset', required=True)
    arguments = parser.parse_args()
    Test(arguments).run()
|
import torch
from nlstruct.torch_utils import multi_dim_triu
def masked_flip(x, mask, dim_x=-2):
    """Reverse the valid entries of `x` along `dim_x`, leaving padding at zero.

    `mask` selects the valid positions (boolean, matching the leading dims of
    `x`); the values are reversed within each sample's valid span and the
    masked-out positions of the result are zero.
    """
    # Flip both tensors, then gather the flipped values of the valid positions.
    reversed_values = x.flip(dim_x)[mask.flip(-1)]
    out = torch.zeros_like(x)
    out[mask] = reversed_values
    return out
# Large negative finite score used as a stand-in for -inf when vetoing
# forbidden transitions in log-space (a finite value avoids inf-arithmetic).
IMPOSSIBLE = -100000
# Earlier exp-space implementation, superseded by the @torch.jit.script
# log-space version below.
# def logdotexp(log_A, B):
#     # log_A: 2 * N_samples * N_tags
#     # B: 2 * N_tags * N_tags
#     if 0 not in log_A.shape:
#         max_A = log_A.max(-1, keepdim=True).values
#         return torch.bmm((log_A - max_A).exp(), B).log() + max_A
#     return torch.bmm(log_A.exp(), B).log()
@torch.jit.script
def logdotexp(log_A, log_B):
    """Log-space batched matrix product: log(exp(log_A) @ exp(log_B)).

    log_A: B x N x M, log_B: B x M x O -> result: B x N x O, computed with
    logsumexp over the shared dimension for numerical stability.
    """
    pairwise_sums = log_A.unsqueeze(-1) + log_B.unsqueeze(-3)
    return pairwise_sums.logsumexp(dim=-2)
class LinearChainCRF(torch.nn.Module):
    """Linear-chain CRF with optional transition constraints.

    ``forbidden_transitions[i, j]`` (plus the start/end variants) marks
    transitions whose score is forced to ``IMPOSSIBLE`` when constraints are
    enabled.  Transition scores are learnable parameters when
    ``learnable_transitions`` is True, zero buffers otherwise; start/end
    transitions are learnable only when ``with_start_end_transitions`` is
    also True.

    NOTE(review): two ``forward`` methods are defined on this class; the
    second definition (taking ``target``) replaces the first at class
    creation time.  See the note on the second ``forward`` below.
    """

    def __init__(self, forbidden_transitions, start_forbidden_transitions=None, end_forbidden_transitions=None, learnable_transitions=True, with_start_end_transitions=True):
        super().__init__()
        num_tags = forbidden_transitions.shape[0]
        self.register_buffer('forbidden_transitions', forbidden_transitions.bool())
        # Default start/end constraint masks to "nothing forbidden".
        if start_forbidden_transitions is not None:
            self.register_buffer('start_forbidden_transitions', start_forbidden_transitions.bool())
        else:
            self.register_buffer('start_forbidden_transitions', torch.zeros(num_tags, dtype=torch.bool))
        if end_forbidden_transitions is not None:
            self.register_buffer('end_forbidden_transitions', end_forbidden_transitions.bool())
        else:
            self.register_buffer('end_forbidden_transitions', torch.zeros(num_tags, dtype=torch.bool))
        # Transition scores: trainable parameter or frozen zero buffer.
        if learnable_transitions:
            self.transitions = torch.nn.Parameter(torch.zeros_like(forbidden_transitions, dtype=torch.float))
        else:
            self.register_buffer('transitions', torch.zeros_like(forbidden_transitions, dtype=torch.float))
        if learnable_transitions and with_start_end_transitions:
            self.start_transitions = torch.nn.Parameter(torch.zeros(num_tags, dtype=torch.float))
        else:
            self.register_buffer('start_transitions', torch.zeros(num_tags, dtype=torch.float))
        if learnable_transitions and with_start_end_transitions:
            self.end_transitions = torch.nn.Parameter(torch.zeros(num_tags, dtype=torch.float))
        else:
            self.register_buffer('end_transitions', torch.zeros(num_tags, dtype=torch.float))

    def decode(self, emissions, mask):
        """Viterbi decoding: return the most likely tag sequence per sample
        (padding positions are zeroed via ``mask``)."""
        # Forward pass
        backtrack = self.propagate(emissions, mask, ring_op_name="max", use_constraints=True, way="forward")[2]
        # Best final tag for each sample (from the last max step).
        path = [backtrack[-1][0, :, 0]]
        # what is happening here ? why did i write:
        # backtrack = torch.stack(backtrack[:-1] + backtrack[-2:-1], 2).squeeze(0)
        # backtrack = torch.stack(backtrack, 2).squeeze(0)
        if len(backtrack) > 1:
            # backtrack = torch.zeros(*backtrack[-1].shape[:-1], self.num_tags, dtype=torch.long)
            backtrack = torch.stack(backtrack[:-1] + backtrack[-2:-1], 2).squeeze(0)
            # Seed each sample's last valid position with its best final tag.
            backtrack[range(len(mask)), mask.sum(1) - 1] = path[-1].unsqueeze(-1)
            # Backward max path following
            for k in range(backtrack.shape[1] - 2, -1, -1):
                path.insert(0, backtrack[:, k][range(len(path[0])), path[0]])
        path = torch.stack(path, -1).masked_fill(~mask, 0)
        return path

    def sample(self, emissions, mask, n):
        """Draw ``n`` tag sequences per sample: forward pass for the alphas,
        then ancestral sampling backwards through the chain."""
        # Forward pass
        log_alphas = self.propagate(emissions, mask, ring_op_name="logsumexp", use_constraints=True)[1].transpose(0, 1)
        # Backward sampling
        sequences = []
        bs = len(mask)
        # Apply the structural constraints to the transition scores.
        start_transitions = self.start_transitions.masked_fill(self.start_forbidden_transitions, IMPOSSIBLE) if self.start_forbidden_transitions is not None else self.start_transitions
        transitions = self.transitions.masked_fill(self.forbidden_transitions, IMPOSSIBLE) if self.forbidden_transitions is not None else self.transitions
        end_transitions = self.end_transitions.masked_fill(self.end_forbidden_transitions, IMPOSSIBLE) if self.end_forbidden_transitions is not None else self.end_transitions
        # Sample multiple tags for the last token of each sample
        next_log_prob = (
            log_alphas[range(bs), mask.sum(-1) - 1] +
            end_transitions
        )
        next_tag = torch.multinomial(next_log_prob.softmax(-1), n, replacement=True)
        sequences.insert(0, next_tag)
        seq_size = emissions.shape[1]
        for i in range(seq_size - 2, -1, -1):
            # Conditional distribution of tag i given the sampled tag i+1.
            next_log_prob = (
                log_alphas[:, i].unsqueeze(1) +
                transitions[:, next_tag].permute(1, 2, 0)
            ).softmax(-1)
            next_tag = torch.where(
                mask[:, i + 1].unsqueeze(-1),  # if next token is not a padding token
                torch.multinomial(next_log_prob.reshape(-1, next_log_prob.shape[-1]), 1).reshape(next_tag.shape),  # then put the sampled tags
                next_tag,  # otherwise replicate the tags sampled at the end
            )
            sequences.insert(0, next_tag)
        return torch.stack(sequences, 1).permute(2, 0, 1).masked_fill(~mask.unsqueeze(0), 0)

    def forward(self, emissions, mask, tags, use_constraints=False, reduction="mean"):
        """Score of the gold ``tags`` minus the log partition function.

        NOTE(review): despite the variable name ``nll``, the returned value is
        path_score - log Z, i.e. the log-likelihood; callers presumably
        negate it.  Also note this method is shadowed by the second
        ``forward`` defined further down this class.
        """
        z = self.propagate(emissions, mask, ring_op_name="logsumexp", use_constraints=use_constraints)[0]
        posterior_potential = self.propagate(emissions, mask, tags, ring_op_name="posterior", use_constraints=use_constraints)[0]
        nll = (posterior_potential - z)
        if reduction == 'none':
            return nll
        if reduction == 'sum':
            return nll.sum()
        if reduction == 'mean':
            return nll.mean()
        assert reduction == 'token_mean'
        return nll.sum() / mask.float().sum()

    def propagate(self, emissions, mask, tags=None, ring_op_name="logsumexp", use_constraints=True, way="forward"):
        """
        Each alpha is the potential for the state given all previous observations and the current one

        Generic chain recursion parameterized by the semiring operation:
        "logsumexp" (partition function), "max" (Viterbi, also returns the
        backpointers), or "posterior" (score of the provided ``tags``).
        ``way="backward"`` runs the same recursion on the reversed sequence.
        Returns (z, log_probs, backtrack); ``backtrack`` is None unless
        ring_op_name == "max".
        """
        # Work in time-major layout: (tokens, samples, tags).
        emissions = emissions.transpose(0, 1)
        mask = mask.transpose(0, 1)
        if tags is not None:
            if len(tags.shape) == 2:
                tags = tags.transpose(0, 1).unsqueeze(1)
            elif len(tags.shape) == 3:
                tags = tags.permute(2, 0, 1)
        backtrack = None
        if ring_op_name == "logsumexp":
            # ring_op = lse_ring_op
            def ring_op(last_potential, trans, loc):
                return (last_potential.unsqueeze(-1) + trans.unsqueeze(0).unsqueeze(0)).logsumexp(2)
        elif ring_op_name == "posterior":
            def ring_op(last_potential, trans, loc):
                # Follow the gold tag path: transition into tags[loc] plus the
                # accumulated potential of the previous gold tag.
                return trans[tags[loc]] + last_potential[torch.arange(tags.shape[1]).unsqueeze(1),
                                                         torch.arange(tags.shape[2]).unsqueeze(0),
                                                         tags[loc]].unsqueeze(-1)
        elif ring_op_name == "max":
            backtrack = []

            def ring_op(last_potential, trans, loc):
                # Record the argmax predecessors for Viterbi backtracking.
                res, indices = (last_potential.unsqueeze(-1) + trans.unsqueeze(0).unsqueeze(0)).max(2)
                backtrack.append(indices)
                return res
        else:
            raise NotImplementedError()
        if use_constraints:
            start_transitions = self.start_transitions.masked_fill(self.start_forbidden_transitions, IMPOSSIBLE)
            transitions = self.transitions.masked_fill(self.forbidden_transitions, IMPOSSIBLE)
            end_transitions = self.end_transitions.masked_fill(self.end_forbidden_transitions, IMPOSSIBLE)
        else:
            start_transitions = self.start_transitions
            transitions = self.transitions
            end_transitions = self.end_transitions
        if way == "backward":
            assert ring_op_name != "max", "Unsupported"
            # Reverse the chain: swap start/end, transpose transitions, flip
            # the (valid part of the) emissions along the token axis.
            start_transitions, end_transitions = end_transitions, start_transitions
            transitions = transitions.t()
            emissions = masked_flip(emissions.transpose(0, 1), mask.transpose(0, 1), -2).transpose(0, 1)
        # emissions = torch.cat([torch.zeros_like(emissions[[0]]), emissions[:-1]])
        # log_probs = [start_transitions.unsqueeze(0).unsqueeze(0).repeat_interleave(tags.shape[1] if tags is not None else 1, dim=0)]
        # if ring_op_name == "logsumexp":
        #     max_transitions = transitions.max()
        #     transitions = (transitions - max_transitions).exp()
        log_probs = [(start_transitions + emissions[0]).unsqueeze(0).repeat_interleave(tags.shape[1] if tags is not None else 1, dim=0)]
        for k in range(1, len(emissions)):
            res = ring_op(log_probs[-1], transitions, k - 1)  # - max_transitions
            # log_probs.append(res + emissions[k] + max_transitions)
            # Padding positions keep the previous potential unchanged.
            log_probs.append(torch.where(
                mask[k].unsqueeze(-1),
                res + emissions[k],
                log_probs[-1]
            ))
        # Fold in the end transitions to obtain the final score z.
        if ring_op_name == "logsumexp":
            z = ring_op(log_probs[-1], end_transitions.unsqueeze(1), 0)
        else:
            z = ring_op(log_probs[-1], end_transitions.unsqueeze(1),
                        ((mask.sum(0) - 1).unsqueeze(0), torch.arange(log_probs[-1].shape[0]).unsqueeze(1), torch.arange(mask.shape[1]).unsqueeze(0))).squeeze(-1)
        log_probs = torch.cat(log_probs, dim=0)
        if way == "backward":
            # Un-flip so the returned potentials are in original token order.
            log_probs = masked_flip(
                log_probs.transpose(0, 1),
                mask.transpose(0, 1),
                dim_x=-2,
            ).transpose(0, 1)
        return z, log_probs, backtrack

    # def marginal(self, emissions, mask):
    #     z_forward, log_alphas = self.propagate(emissions, mask, ring_op_name="logsumexp", use_constraints=True)[:2]
    #     log_betas = self.propagate(emissions, mask, ring_op_name="logsumexp", use_constraints=True, way="backward")[1]
    #     return log_alphas.transpose(0, 1) + log_betas.transpose(0, 1) - emissions - z_forward.squeeze(0).unsqueeze(-1).unsqueeze(-1)
    # This is faster
    def marginal(self, emissions, mask):
        """Per-token tag log-marginals via a batched forward/backward pass.

        Runs both directions at once by stacking the forward emissions with a
        flipped copy and using the transposed transitions for the backward
        direction.

        NOTE(review): mutates the caller's ``emissions`` tensor in place
        (start/end transitions are added to it) — confirm callers do not
        reuse it.  ``forward_z`` below is computed but unused.
        """
        device = emissions.device
        transitions = self.transitions.masked_fill(self.forbidden_transitions, IMPOSSIBLE)
        start_transitions = self.start_transitions.masked_fill(self.start_forbidden_transitions, IMPOSSIBLE)
        end_transitions = self.end_transitions.masked_fill(self.end_forbidden_transitions, IMPOSSIBLE)
        # Channel 0: forward transitions; channel 1: backward (transposed).
        bi_transitions = torch.stack([transitions, transitions.t()], dim=0)
        # add start transitions (ie cannot start with ...)
        emissions[:, 0] = emissions[:, 0] + start_transitions
        # add end transitions (ie cannot end with ...): flip the emissions along the token axis, and add the end transitions
        #emissions = masked_flip(emissions, mask, dim_x=1)
        emissions[torch.arange(mask.shape[0], device=device), mask.long().sum(1) - 1] = emissions[
            torch.arange(mask.shape[0], device=device),
            mask.long().sum(1) - 1,
        ] + end_transitions
        # stack start -> end emissions (needs to flip the previously flipped emissions), and end -> start emissions
        bi_emissions = torch.stack([emissions, masked_flip(emissions, mask, dim_x=1)], 1)
        bi_emissions = bi_emissions.transpose(0, 2)
        #bi_emissions[0, 0] = bi_emissions[0, 0] + start_transitions
        #bi_emissions[0, 1] = bi_emissions[0, 1] + end_transitions
        out = [bi_emissions[0]]
        for k in range(1, len(bi_emissions)):
            # One chain step in log-space for both directions at once.
            res = logdotexp(out[-1], bi_transitions)
            out.append(res + bi_emissions[k])
        out = torch.stack(out, dim=0).transpose(0, 2)
        forward = out[:, 0]
        backward = masked_flip(out[:, 1], mask, dim_x=1)
        backward_z = backward[:, 0].logsumexp(-1)
        forward_z = masked_flip(out[:, 0], mask, dim_x=1)[:, 0].logsumexp(-1)
        # print("forward", forward)
        # print("backward", backward)
        # print("emissions", emissions)
        # alpha + beta double-counts the local emission; subtract it and log Z.
        return forward + backward - emissions - backward_z[:, None, None]  # [:, -1].logsumexp(-1)

    def forward(self, emissions, mask, target):
        """Partial-supervision objective: log Z restricted to tag sequences
        allowed by the boolean ``target`` minus the unrestricted log Z.

        NOTE(review): this definition shadows the earlier ``forward(emissions,
        mask, tags, use_constraints, reduction)`` above — only this one is
        callable on instances; confirm which is intended.
        """
        transitions = self.transitions.masked_fill(self.forbidden_transitions, IMPOSSIBLE)
        start_transitions = self.start_transitions.masked_fill(self.start_forbidden_transitions, IMPOSSIBLE)
        end_transitions = self.end_transitions.masked_fill(self.end_forbidden_transitions, IMPOSSIBLE)
        # Channel 0: emissions restricted to allowed tags; channel 1: unrestricted.
        bi_emissions = torch.stack([emissions.masked_fill(~target, IMPOSSIBLE), emissions], 1).transpose(0, 2)
        # emissions: n_samples * n_tokens * n_tags
        # bi_emissions: n_tokens * 2 * n_samples * n_tags
        out = [bi_emissions[0] + start_transitions]
        for k in range(1, len(bi_emissions)):
            res = logdotexp(out[-1], transitions)
            out.append(res + bi_emissions[k])
        out = torch.stack(out, dim=0).transpose(0, 2)
        # n_samples * 2 * n_tokens * n_tags
        z = masked_flip(out, mask.unsqueeze(1).repeat(1, 2, 1), dim_x=2)[:, :, 0] + end_transitions
        supervised_z = z[:, 0].logsumexp(-1)
        unsupervised_z = z[:, 1].logsumexp(-1)
        return supervised_z - unsupervised_z
class BIOULDecoder(LinearChainCRF):
    """Linear-chain CRF over a BIOUL tag scheme.

    Tag layout: tag 0 is O; each label ``i`` owns the four consecutive tags
    ``1 + 4*i`` (I), ``2 + 4*i`` (B), ``3 + 4*i`` (L), ``4 + 4*i`` (U).
    The constructor builds the allowed-transition tables for that layout and
    delegates to :class:`LinearChainCRF`.
    """

    def __init__(self, num_labels, with_start_end_transitions=True, allow_overlap=False, allow_juxtaposition=True, learnable_transitions=True):
        # Tag offsets within a label's 4-tag stride (O is the shared tag 0).
        O, I, B, L, U = 0, 1, 2, 3, 4
        self.allow_overlap = allow_overlap
        num_tags = 1 + num_labels * 4
        # Start from "everything forbidden" (True) and open the legal moves.
        forbidden_transitions = torch.ones(num_tags, num_tags, dtype=torch.bool)
        forbidden_transitions[O, O] = 0  # O to O
        for i in range(num_labels):
            STRIDE = 4 * i
            # A finished mention (L/U) of any label may start a new one (B/U)
            # of any label.
            for j in range(num_labels):
                STRIDE_J = j * 4
                forbidden_transitions[L + STRIDE, B + STRIDE_J] = 0  # L-i to B-j
                forbidden_transitions[L + STRIDE, U + STRIDE_J] = 0  # L-i to U-j
                forbidden_transitions[U + STRIDE, B + STRIDE_J] = 0  # U-i to B-j
                forbidden_transitions[U + STRIDE, U + STRIDE_J] = 0  # U-i to U-j
            forbidden_transitions[O, B + STRIDE] = 0  # O to B-i
            forbidden_transitions[B + STRIDE, I + STRIDE] = 0  # B-i to I-i
            forbidden_transitions[I + STRIDE, I + STRIDE] = 0  # I-i to I-i
            forbidden_transitions[I + STRIDE, L + STRIDE] = 0  # I-i to L-i
            forbidden_transitions[B + STRIDE, L + STRIDE] = 0  # B-i to L-i
            forbidden_transitions[L + STRIDE, O] = 0  # L-i to O
            forbidden_transitions[O, U + STRIDE] = 0  # O to U-i
            forbidden_transitions[U + STRIDE, O] = 0  # U-i to O
            if not allow_juxtaposition:
                # Re-forbid back-to-back same-label mentions (no O between).
                forbidden_transitions[L + STRIDE, U + STRIDE] = 1  # L-i to U-i
                forbidden_transitions[U + STRIDE, B + STRIDE] = 1  # U-i to B-i
                forbidden_transitions[U + STRIDE, U + STRIDE] = 1  # U-i to U-i
                forbidden_transitions[L + STRIDE, B + STRIDE] = 1  # L-i to B-i
            if allow_overlap:
                # Extra moves that let same-label mentions nest/overlap.
                forbidden_transitions[L + STRIDE, I + STRIDE] = 0  # L-i to I-i
                forbidden_transitions[L + STRIDE, L + STRIDE] = 0  # L-i to L-i
                forbidden_transitions[I + STRIDE, B + STRIDE] = 0  # I-i to B-i
                forbidden_transitions[B + STRIDE, B + STRIDE] = 0  # B-i to B-i
                forbidden_transitions[B + STRIDE, U + STRIDE] = 0  # B-i to U-i
                forbidden_transitions[U + STRIDE, L + STRIDE] = 0  # U-i to L-i
                forbidden_transitions[U + STRIDE, I + STRIDE] = 0  # U-i to I-i
                forbidden_transitions[I + STRIDE, U + STRIDE] = 0  # I-i to U-i
        start_forbidden_transitions = torch.zeros(num_tags, dtype=torch.bool)
        if with_start_end_transitions:
            for i in range(num_labels):
                STRIDE = 4 * i
                start_forbidden_transitions[I + STRIDE] = 1  # forbidden to start by I-i
                start_forbidden_transitions[L + STRIDE] = 1  # forbidden to start by L-i
        end_forbidden_transitions = torch.zeros(num_tags, dtype=torch.bool)
        if with_start_end_transitions:
            for i in range(num_labels):
                STRIDE = 4 * i
                end_forbidden_transitions[I + STRIDE] = 1  # forbidden to end by I-i
                end_forbidden_transitions[B + STRIDE] = 1  # forbidden to end by B-i
        super().__init__(forbidden_transitions,
                         start_forbidden_transitions,
                         end_forbidden_transitions,
                         with_start_end_transitions=with_start_end_transitions,
                         learnable_transitions=learnable_transitions)

    @staticmethod
    def spans_to_tags(sample_ids, begins, ends, ner_labels, n_samples, n_tokens):
        """Encode mention spans as per-token integer tags via index_add_.

        Each mention contributes BEFORE (0) / AFTER (1) / INSIDE+label codes
        per position; contributions are summed per sample with ``index_add_``.
        """
        BEFORE, AFTER, INSIDE = 0, 1, 2
        positions = torch.arange(n_tokens, device=begins.device).unsqueeze(0)
        begins = begins.unsqueeze(1)
        ends = ends.unsqueeze(1)
        mention_tags = (
            # BEFORE tags
            ((positions < begins) * BEFORE) +
            # AFTER tags
            ((positions >= ends) * AFTER) +
            # INSIDE tags
            (((positions >= begins) & (positions < ends)) * (INSIDE + ner_labels.unsqueeze(1)))
        )
        return torch.zeros((n_samples, n_tokens), dtype=torch.long, device=begins.device).index_add_(0, sample_ids, mention_tags)

    def tags_to_spans(self, tag, mask=None, do_overlap_disambiguation=False):
        """Turn a BIOUL tag sequence into a boolean (begin, end) span matrix.

        ``prediction[s, b, e]`` is True when tokens b..e of sample ``s`` form a
        span starting at a B/U tag, ending at an L/U tag, with no O tag in
        between (and, without ``allow_overlap``, no other boundary inside).
        """
        # Offsets after removing the label stride: (tag - 1) % 4.
        I, B, L, U = 0, 1, 2, 3
        if mask is not None:
            tag = tag.masked_fill(~mask, 0)
        # -1 marks the O tag so it never matches I/B/L/U below.
        unstrided_tags = ((tag - 1) % 4).masked_fill(tag == 0, -1)
        is_B_or_U = (unstrided_tags == B) | (unstrided_tags == U)
        is_L_or_U = (unstrided_tags == L) | (unstrided_tags == U)
        # If allow overlapping, only prevent O tag between two bounds
        cs_no_hole = (tag == 0).long().cumsum(1)
        has_no_hole = (cs_no_hole.unsqueeze(-1) - cs_no_hole.unsqueeze(-2)) == 0
        prediction = multi_dim_triu((is_B_or_U).unsqueeze(-1) & (is_L_or_U).unsqueeze(-2) & has_no_hole)
        # If no overlapping, prevent anything other than I between two bounds
        if not self.allow_overlap:
            begin_cs = ((is_B_or_U)).cumsum(1)
            end_cs = ((is_L_or_U)).cumsum(1)
            begin_count = (begin_cs.unsqueeze(-1) - begin_cs.unsqueeze(-2))
            end_count = (end_cs.unsqueeze(-1) - end_cs.unsqueeze(-2))
            prediction &= ((begin_count + end_count) == 0) | ((begin_count + end_count == -1))
        if mask is not None:
            prediction = prediction & mask.unsqueeze(-1) & mask.unsqueeze(-2)
        return prediction
"""
This module provides WebIDE related functionality
"""
import logging
import zipfile
import os
from ..hana_ml_utils import DirectoryHandler
logger = logging.getLogger(__name__) #pylint: disable=invalid-name
class WebIDEDeployer(object):
    """
    This class provides WebIDE deployer related functionality.
    Currently the generation of a webide zip archive is provided.
    """
    def generate_webide_package(self, source_path, target_path=None, file_name='WebIDE'):
        """
        Generates a zip archive that can be used to import the HANA artifacts into WebIDE.

        Parameters
        ----------
        source_path : str
            Path to the source that needs to be packaged into zip.
        target_path : str
            Where to place the generated zip archive. Defaults to the parent
            directory of `source_path`.
        file_name : str
            Zip archive file name
        """
        self.directory_handler = DirectoryHandler()
        if not target_path:
            target_path = os.path.abspath(os.path.join(source_path, os.pardir))
        archive_path = os.path.join(target_path, '{}.zip'.format(file_name))
        # NOTE: changes the process working directory as a side effect
        # (preserved from the original implementation).
        os.chdir(source_path)
        # Context manager guarantees the archive is flushed and closed even if
        # zipping fails (the original leaked the ZipFile handle).
        with zipfile.ZipFile(archive_path, 'w') as data_zip_file:
            self.directory_handler.zip_directory(source_path, data_zip_file)
|
#! /usr/bin/env python
# _*_ coding:utf-8 _*_
from common import Event
def singleton(cls):
    """Class decorator: construct `cls` on first call, then always return
    that same instance (later constructor arguments are ignored)."""
    instances = {}

    def wrapper(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]
    return wrapper
@singleton
class CommonEvent(object):
    """Process-wide registry of shared Event instances.

    Decorated with @singleton so every caller sees the same Event objects;
    each EVENT_* constant is the type string of the Event stored next to it.
    """
    def __init__(self):
        # Periodic liveness signal.
        self.EVENT_HEARTBEAT = "Heartbeat"
        self.event_heartbeat = Event(type_=self.EVENT_HEARTBEAT)
        # Scanner (WVS) state updates received from the scanner side.
        self.EVENT_WVS_STATE_RECV = "WVSStateRecv"
        self.event_wvs_state_recv = Event(type_=self.EVENT_WVS_STATE_RECV)
        # Scan results received from the scanner.
        self.EVENT_WVS_RESULT_RECV = "ScanResultRecv"
        self.event_scan_result_recv = Event(type_=self.EVENT_WVS_RESULT_RECV)
        # Commands directed at the scanner / received from the server.
        self.EVENT_WVS_COMMAND = "WVSCommand"
        self.event_wvs_command = Event(type_=self.EVENT_WVS_COMMAND)
        self.EVENT_SERVER_COMMAND = "ServerCommand"
        self.event_server_command = Event(type_=self.EVENT_SERVER_COMMAND)
        # Agent shutdown notification.
        self.EVENT_AGENT_EXIT = "AgentExit"
        self.event_agent_exit = Event(type_=self.EVENT_AGENT_EXIT)
        # NOTE(review): the literal is "SendResultSend" while the constant name
        # suggests "ScanResultSend" — possible typo, but other components may
        # match on the literal string; confirm before changing it.
        self.EVENT_SCAN_RESULT_SEND = "SendResultSend"
        self.event_scan_result_send = Event(type_=self.EVENT_SCAN_RESULT_SEND)
|
import sys
import pandas as pd
import sqlite3
import os
def processRegionalLoad(dbName):
dirname = os.path.dirname(__file__)
regionData = pd.read_csv(os.path.join(dirname,"input/regions.csv"), sep=";")
regionIDs = regionData["ID"].values
# For progressbar
cnt = 0
length = len(regionIDs)
prog_per_it = 40 / length
conn = sqlite3.connect(dbName)
c = conn.cursor()
c.execute("DROP TABLE IF EXISTS Regions")
c.execute("""CREATE TABLE IF NOT EXISTS Regions (
Time timestamp,
Type text,
PowerMW real
) """)
for regionID in regionIDs:
c.execute("""INSERT INTO Regions (Time, PowerMW, Type)
Select Time, SUM(PowerMW), Region
From TimeSeries
INNER JOIN Agents on Agents.AgentID = TimeSeries.AgentID
Where Region = (?)
GROUP BY Time""", (regionID, ))
conn.commit()
# Progressbar
cnt += 1
prog = int(cnt * prog_per_it)
sys.stdout.write("\r[%-40s] %d%%" % ('=' * prog, 2.5 * prog))
sys.stdout.flush()
conn.close()
def processTotalLoad(dbName):
    """Rebuild the `Total` table: PowerMW summed over all agents per timestamp."""
    connection = sqlite3.connect(dbName)
    cursor = connection.cursor()
    # Recreate the aggregate table from scratch.
    cursor.execute("DROP TABLE IF EXISTS Total")
    cursor.execute("""CREATE TABLE IF NOT EXISTS Total (
        Time timestamp,
        Type text,
        PowerMW real
        ) """)
    # One aggregate row per timestamp, tagged with the fixed type 'Total'.
    cursor.execute("""INSERT INTO Total (Time, PowerMW, Type)
        Select Time, SUM(PowerMW), 'Total'
        FROM TimeSeries
        GROUP BY Time """)
    connection.commit()
    connection.close()
def getProcessed(dbName, region="Total"):
    """Return the processed power series for `region` as a DataFrame.

    Reads the simulation time window (start, end, freq) from the `Simulation`
    table, loads the matching rows from `Total` or `Regions`, reindexes onto
    the full time range and fills gaps with 0.
    """
    # Table name comes from a fixed whitelist, never from user input.
    table = 'Total' if region == "Total" else 'Regions'
    conn = sqlite3.connect(dbName)
    c = conn.cursor()
    # Get time
    c.execute("""SELECT *
        FROM Simulation""")
    t = c.fetchone()
    index = pd.date_range(start=t[0], end=t[1], freq=t[2], inclusive="left")
    # Get power — bind `region` as a parameter instead of interpolating it
    # into the SQL string (the original was open to SQL injection).
    sql = f"""SELECT Time, PowerMW
        FROM {table}
        WHERE Type = ?"""
    _df = pd.read_sql(sql, conn, index_col="Time", parse_dates="Time",
                      params=(region,)).reindex(index).fillna(0)
    conn.close()
    return _df
import re
import sys
import math
class Token:
    """A lexical token or parse node: a `type` tag plus its `value`.

    `value` holds a regex pattern in `tokenDefinitions`, the matched source
    text for lexer output, or a computed number/list for parse nodes such as
    `expr` and `params`.
    """
    def __init__(self, type, value):
        self.type = type
        self.value = value

    def __repr__(self):
        # Aid debugging of lexer/parser output.
        return "Token({!r}, {!r})".format(self.type, self.value)
# Result of the most recent evaluation, exposed to expressions via `ans()`.
ans = 0
# Built-in functions callable as `name(args...)`; each receives the list of
# evaluated parameter values.
functions = {
    "ans": lambda p: ans,
    "exit": lambda p: sys.exit(0),
    "mean": lambda numbers: float(sum(numbers)) / max(len(numbers), 1),
    "sum": lambda numbers: math.fsum(numbers)
}
# Token definitions tried in order by lex(); `value` holds the regex that must
# match at the current position.  Whitespace tokens are matched but dropped.
tokenDefinitions = [
    Token("number", r"\d+(\.\d+)?"),     # integer or decimal literal
    Token("op_add", r"\+"),
    Token("op_sub", r"\-"),
    Token("op_mul", r"\*"),
    Token("op_div", r"\/"),
    Token("op_mod", r"\%"),
    Token("function", r"[a-zA-Z]+"),     # bare identifier -> function name
    Token("paren_open", r"\("),
    Token("paren_close", r"\)"),
    Token("comma", r"\,"),
    Token("whitespace", r"\s+")
]
# Rewrite rules applied by parse(): each entry is a sequence of token types
# followed by a reducer that maps the matched tokens to their replacements.
# NOTE(review): rule ORDER (not conventional precedence) decides which
# reduction is tried first — e.g. division is tried before multiplication,
# and addition before subtraction.
grammar = [
    [
        # function '(' expr ')' -> wrap the single argument into a params node
        # (the call itself is reduced by the function/params rule below).
        "function",
        "paren_open",
        "expr",
        "paren_close",
        lambda tokens: [
            tokens[0],
            tokens[1],
            Token("params", [tokens[2].value]),
            tokens[3]
        ]
    ],
    [
        # number literal -> expr node holding its float value.
        "number",
        lambda tokens: [
            Token("expr", float(tokens[0].value))
        ]
    ],
    [
        # expr '(' expr ')' -> implicit multiplication, e.g. "2(3)" == 6.
        "expr",
        "paren_open",
        "expr",
        "paren_close",
        lambda tokens: [
            Token("expr", tokens[0].value * tokens[2].value)
        ]
    ],
    [
        # '(' expr ')' -> unwrap parentheses.
        "paren_open",
        "expr",
        "paren_close",
        lambda tokens: [
            Token("expr", tokens[1].value)
        ]
    ],
    [
        # expr '/' expr -> division.
        "expr",
        "op_div",
        "expr",
        lambda tokens: [
            Token("expr", tokens[0].value / tokens[2].value)
        ]
    ],
    [
        # expr '*' expr -> multiplication.
        "expr",
        "op_mul",
        "expr",
        lambda tokens: [
            Token("expr", tokens[0].value * tokens[2].value)
        ]
    ],
    [
        # expr '%' expr -> modulo.
        "expr",
        "op_mod",
        "expr",
        lambda tokens: [
            Token("expr", tokens[0].value % tokens[2].value)
        ]
    ],
    [
        # expr '+' expr -> addition.
        "expr",
        "op_add",
        "expr",
        lambda tokens: [
            Token("expr", tokens[0].value + tokens[2].value)
        ]
    ],
    [
        # expr '-' expr -> subtraction.
        "expr",
        "op_sub",
        "expr",
        lambda tokens: [
            Token("expr", tokens[0].value - tokens[2].value)
        ]
    ],
    [
        # expr ',' expr -> start a two-element parameter list.
        "expr",
        "comma",
        "expr",
        lambda tokens: [
            Token("params", [tokens[0].value, tokens[2].value])
        ]
    ],
    [
        # params ',' expr -> append one more argument to the list.
        "params",
        "comma",
        "expr",
        lambda tokens: [
            Token("params", tokens[0].value + [tokens[2].value])
        ]
    ],
    [
        # function '(' ')' -> empty parameter list.
        "function",
        "paren_open",
        "paren_close",
        lambda tokens: [
            tokens[0],
            tokens[1],
            Token("params", []),
            tokens[2]
        ]
    ],
    [
        # function '(' params ')' -> invoke the named builtin from `functions`.
        "function",
        "paren_open",
        "params",
        "paren_close",
        lambda tokens: [
            Token("expr", functions[tokens[0].value](tokens[2].value))
        ]
    ]
]
def lex(source):
    """Tokenize `source` into a list of Tokens using `tokenDefinitions`.

    Definitions are tried in order at each position; whitespace matches are
    dropped.  On an unrecognized character, prints a caret diagnostic and
    exits the process.
    """
    tokens = []
    position = 0
    while position < len(source):
        # for/else: the else branch fires only when no definition matched.
        for definition in tokenDefinitions:
            # re.match anchors at the start of the remaining input, which is
            # equivalent to re.search(...) with match.start() == 0.
            match = re.match(definition.value, source[position:])
            if match:
                if definition.type != "whitespace":
                    tokens.append(Token(definition.type, match.group(0)))
                position += match.end()
                break
        else:
            print(source)
            print(' ' * position + "^")
            print("Unexpected character {0}".format(source[position]))
            sys.exit(1)
    return tokens
def parse (program):
    """Repeatedly reduce the token list using `grammar` until one node is left.

    Rules are tried in grammar order; the first window of tokens whose types
    match a rule is replaced by that rule's reducer output, and matching then
    restarts from the first rule.  Returns the final float value, or False if
    the surviving node is not an `expr`.

    NOTE(review): if the input can never be reduced to a single token (e.g.
    unbalanced parentheses), the outer while loop does not terminate —
    confirm whether callers guarantee reducible input.
    """
    while len(program) > 1:
        for node in grammar:
            # Number of tokens the rule consumes, minus one (last list entry
            # is the reducer callable).
            pointerLen = len(node) - 2
            pointer = 0
            match = False
            # Slide the rule window across the token list.
            while pointer + pointerLen < len(program):
                match = True
                for i in range(0, pointerLen + 1):
                    if not program[pointer + i].type == node[i]:
                        match = False
                if match:
                    # Splice the reducer's replacement tokens into the program.
                    newTokens = node[len(node) - 1](program[pointer: pointer + pointerLen + 1])
                    program = program[:pointer] + newTokens + program[pointer + pointerLen + 1:]
                    break
                else:
                    pointer += 1
            # A successful reduction restarts matching from the first rule.
            if match:
                break
    if program[0].type == "expr":
        return float(program[0].value)
    return False
# Entry point: evaluate each CLI argument as an expression, or start a REPL.
if (len(sys.argv) > 1):
    for i in range(1, len(sys.argv)):
        program = lex(sys.argv[i])
        ans = parse(program)
        print(ans)
else:
    while True:
        # `raw_input` was Python 2 only and raises NameError on Python 3;
        # `input` is the Python 3 equivalent (this script already uses
        # Python 3 style print() calls).
        source = input("> ")
        program = lex(source)
        ans = parse(program)
        print(ans)
|
import subprocess
lines = open ("topology.txt", "r").readlines()
res = [str.strip().split("<- is \"wired\" to ->") for str in lines]
iFaceList = {}
#extract the node names and the used eth interfaces from the file
for s in res:
for x in s:
if x == "":
continue
t = x.strip().split(" ")
if t[0] in iFaceList:
iFaceList[t[0]].append(t[1])
else:
iFaceList[t[0]] = [t[1]]
#for each node connects via SSH and runs a local script passing one or more network intrface
for iFace in iFaceList:
server = "{}.Sacchetti-IntNetw.OffTech".format(iFace)
command = "sudo sh OffTech/{}.sh {} ".format(iFace, iFaceList.get(iFace)[0])
if len(iFaceList[iFace]) != 1:
command += iFaceList.get(iFace)[1]
subprocess.check_output(["ssh",server, command])
#g= subprocess.check_output(["ssh", "", "ping -c 1 1.1.1.1"])
#print(g) |
import time, heapq
import numpy as np
import numpy.ma as ma
import gdspy as gp
class GdsMap:
    """Rasterized obstacle map of a gdspy cell for routing.

    Samples a regular grid over the cell's bounding box (padded by one grid
    step on each side) and marks grid points that fall inside the buffered
    layer geometry — plus their 4-neighbours — as blocked in ``self.mask``.
    """

    def __init__(self, cell, grid_size, buffer=None, layers=None, precision=0.001):
        start_time = time.time()
        self.grid_size = grid_size
        # Default to all layers present in the cell; accept a scalar layer too.
        if layers is None:
            layers = list(cell.get_layers())
        if not isinstance(layers, list):
            layers = [layers]
        # Default clearance around geometry: half a grid cell.
        if buffer is None:
            buffer = grid_size / 2
        bounding_box = cell.get_bounding_box()
        # Grid dimensions: bounding box padded by one grid step per side.
        x_length = int((bounding_box[1][0] - bounding_box[0][0] + 2 * grid_size) / grid_size) + 1
        y_length = int((bounding_box[1][1] - bounding_box[0][1] + 2 * grid_size) / grid_size) + 1
        out_string = '-- Constructing map with ' + str(int(x_length * y_length)) + ' (' + str(int(x_length)) + ' x ' + str(int(y_length)) + ') entries'
        # Tab padding keeps the later "Finished" message column-aligned.
        print(out_string, end=(7 - len(out_string)//8) * '\t', flush=True)
        x_linspace = np.linspace(
            bounding_box[0][0] - grid_size,
            bounding_box[1][0] + grid_size,
            x_length
        )
        # y axis is flipped so row 0 is the top of the layout.
        y_linspace = np.flip(
            np.linspace(
                bounding_box[0][1] - grid_size,
                bounding_box[1][1] + grid_size,
                y_length
            )
        )
        x_array, y_array = np.meshgrid(x_linspace, y_linspace)
        # (y_length, x_length, 2) array of grid point coordinates.
        xy_array = np.array(list(zip(
            x_array.ravel(),
            y_array.ravel()
        ))).reshape(*x_array.shape, 2)
        self.mask = np.zeros((y_length, x_length))
        # Work on a deep copy restricted to the requested layers, grown by
        # `buffer` so routes keep clearance from the geometry.
        self.copy_cell = cell.copy(cell.name + "_copy", deep_copy = True)
        self.copy_cell.remove_polygons(lambda pts, layer, datatype: layer not in layers)
        self.copy_polygonset = gp.offset(
            self.copy_cell.get_polygons(),
            distance=buffer
        )
        # Point-in-polygon test for every grid point.
        map_gdspy = np.array(
            gp.inside(
                np.array(xy_array).reshape(-1, 2),
                self.copy_polygonset,
                precision = precision
            )
        ).reshape((y_length, x_length))
        for i in range(y_length):
            for j in range(x_length):
                if not map_gdspy[i][j]:
                    # Keep any 1 already written here by an inside neighbour.
                    if self.mask[i][j] != 1:
                        self.mask[i][j] = 0
                else:
                    # Block the point and its 4-neighbours.
                    # NOTE(review): the bare except only catches IndexError at
                    # the +1 edges; negative indices (i-1/j-1 at row/col 0)
                    # wrap to the opposite edge instead of raising — confirm
                    # that is acceptable given the one-cell padding.
                    try:
                        self.mask[i][j] = self.mask[i - 1][j] = self.mask[i + 1][j] = self.mask[i][j - 1] = self.mask[i][j + 1] = 1
                    except:
                        pass
        elapsed_time = round(time.time() - start_time, 3)
        print('| Finished after ' + str(elapsed_time) + ' s --')
        self.x_length = x_length
        self.y_length = y_length
        self.x_linspace = x_linspace
        self.y_linspace = y_linspace
        self.x_array = x_array
        self.y_array = y_array
        self.xy_array = xy_array

    def draw_map(self):
        """Return a PolygonSet visualizing the buffered geometry plus a small
        octagon at every blocked grid point (for debugging the mask)."""
        circles = []
        for i in range(len(self.mask)):
            for j in range(len(self.mask[i])):
                if self.mask[i][j] == 1:
                    circles.append(
                        gp.Round(
                            self.xy_array[i][j],
                            self.grid_size / 2,
                            number_of_points=8
                        ).polygons[0]
                    )
        return gp.PolygonSet(self.copy_polygonset.polygons + circles)
def router(
    GdsMap,
    end_points,
    end_directions = None,
    segment_widths = None,
    segment_distances = 0,
    segment_buffers = 0,
    reference_point = None
):
    """Route a wire between two points across a GdsMap occupancy grid.

    Parameters
    ----------
    GdsMap : map object carrying mask / x_array / y_array / xy_array /
        x_length / grid_size (built by the map-construction code above).
    end_points : pair of (x, y) endpoints in layout coordinates.
    end_directions : optional pair of angles (radians); when given, the route
        leaves each endpoint one segment-width along that direction.
    segment_widths : width per segment (scalar or list).
    segment_distances : distance thresholds (measured from reference_point)
        at which a new segment starts.
    segment_buffers : extra clearance per segment (scalar or list).
    reference_point : point used to measure segment_distances; defaults to
        the (possibly offset) target point.

    Returns
    -------
    list of point lists (one per segment), or False when no route exists.
    """
    start_time = time.time()
    out_string = '-- Routing from ' + str(end_points[0]) + ' to ' + str(end_points[1])
    print(out_string, end=(7 - len(out_string)//8) * '\t', flush=True)
    # Offset the endpoints one segment-width along the requested directions
    # so the route leaves/enters the pads straight.
    # NOTE(review): `is not None` would be the idiomatic test; also this
    # branch indexes segment_widths before it is normalised to a list below,
    # so it assumes a list is passed whenever end_directions is -- confirm.
    if end_directions != None:
        from_point = (end_points[0][0] + segment_widths[0] * np.cos(end_directions[0]), end_points[0][1] + segment_widths[0] * np.sin(end_directions[0]))
        to_point = (end_points[1][0] + segment_widths[-1] * np.cos(end_directions[1]), end_points[1][1] + segment_widths[-1] * np.sin(end_directions[1]))
    else:
        from_point = end_points[0]
        to_point = end_points[1]
    # Normalise scalar arguments to per-segment lists/arrays.
    if not isinstance(segment_widths, list):
        segment_widths = [segment_widths]
    segment_widths = np.array(segment_widths)
    if not isinstance(segment_distances, list):
        segment_distances = [segment_distances]
    if not isinstance(segment_buffers, list):
        segment_buffers = [segment_buffers] * len(segment_widths)
    segment_buffers = np.array(segment_buffers)
    reference_point = reference_point if reference_point is not None else to_point
    # Snap both endpoints to the nearest unmasked grid node: argmin of the
    # distance array with the blocked cells masked out.
    start_point_index = ma.MaskedArray.argmin(
        ma.array(
            array_heuristic(
                GdsMap.x_array,
                GdsMap.y_array,
                from_point,
                GdsMap.grid_size),
            mask=GdsMap.mask))
    start_point = (start_point_index % GdsMap.x_length,
                   start_point_index // GdsMap.x_length)
    end_point_index = ma.MaskedArray.argmin(
        ma.array(
            array_heuristic(
                GdsMap.x_array,
                GdsMap.y_array,
                to_point,
                GdsMap.grid_size),
            mask=GdsMap.mask))
    end_point = (end_point_index % GdsMap.x_length,
                 end_point_index // GdsMap.x_length)
    route = jump_point_astar(GdsMap.mask, start_point, end_point)
    if not route:
        print('| Error: No route found after ' + str(round(time.time() - start_time, 3)) + ' s --')
        return False
    # Convert grid nodes back to layout coordinates, bracketed by the exact
    # endpoints the caller asked for.
    routed_path = []
    routed_path.append(end_points[0])
    for point in route:
        routed_path.append(GdsMap.xy_array[point[1]][point[0]])
    routed_path.append(end_points[1])
    if end_directions != None:
        routed_path[1] = from_point
        routed_path[-2] = to_point
    index = 0
    neighbours = get_neighbours(GdsMap, segment_widths[index] + 2 * segment_buffers[index])
    # Remove trivial points
    remove_index_list = []
    new_path_point_list = []
    path_length = len(routed_path)
    for i in range(1, path_length - 2):
        # Mark cells around the used route as occupied for later routes.
        # NOTE(review): routed_path has two extra bracketing points, so the
        # route[i - 1] indexing is shifted relative to i -- confirm intended.
        for neighbour in neighbours:
            GdsMap.mask[route[i - 1][1] + neighbour[1]][route[i - 1][0] + neighbour[0]] = 1
        if bipoint_angle(routed_path[i - 1], routed_path[i]) == bipoint_angle(routed_path[i], routed_path[i + 1]):
            # Collinear point: keep it as a segment boundary while within the
            # current segment's distance threshold, otherwise drop it.
            if heuristic(routed_path[i], reference_point) < segment_distances[index]:
                new_path_point_list.append(routed_path[i])
                index += 1
                # NOTE(review): index can run past the end of segment_widths /
                # segment_distances here (IndexError) if the route crosses
                # more thresholds than segments were supplied -- confirm.
                neighbours = get_neighbours(GdsMap, segment_widths[index] + 2 * segment_buffers[index])
            else:
                remove_index_list.append(i)
    # Delete from the back so earlier indices stay valid.
    for index in reversed(remove_index_list):
        del routed_path[index]
    path_length = len(routed_path)
    # Split the cleaned path into segments at the recorded boundary points.
    route_segments = []
    for segment_point in new_path_point_list:
        for i, route_point in enumerate(routed_path):
            if np.array_equal(route_point, segment_point):
                route_segments.append(routed_path[:i + 1])
                routed_path = routed_path[i:]
                break
    route_segments.append(routed_path)
    print('| Route found after ' + str(round(time.time() - start_time, 3)) + ' s --')
    return route_segments
def jump_point_astar(mask, start_point, end_point, grid_size=100, sqrt_two=1.4):
    """A* search over a 2-D occupancy mask with 8-connected moves.

    Parameters
    ----------
    mask : 2-D array-like where 1 marks a blocked cell (see blocked()).
    start_point, end_point : (x, y) grid indices.
    grid_size : cost of a straight step.
    sqrt_two : diagonal step multiplier (approximation of sqrt(2)).

    Returns
    -------
    list of (x, y) grid points from start_point to end_point, or False when
    the target is unreachable.
    """
    closed_set = set()
    came_from = {}
    g_score = {start_point: 0}
    f_score = {start_point: heuristic(start_point, end_point, grid_size, sqrt_two)}
    point_queue = []
    heapq.heappush(point_queue, (f_score[start_point], start_point))
    while point_queue:
        current_pos = heapq.heappop(point_queue)[1]
        if current_pos == end_point:
            # Walk the parent chain back to the start, then reverse it.
            route = [current_pos]
            while current_pos in came_from:
                current_pos = came_from[current_pos]
                route.append(current_pos)
            route.reverse()
            return route
        if current_pos in closed_set:
            # Stale duplicate heap entry for an already-expanded node.
            continue
        closed_set.add(current_pos)
        for delta_x, delta_y in ((0, 1),
                                 (0, -1),
                                 (1, 0),
                                 (-1, 0),
                                 (1, 1),
                                 (1, -1),
                                 (-1, 1),
                                 (-1, -1)):
            if blocked(current_pos, delta_x, delta_y, mask):
                continue
            neighbour = current_pos[0] + delta_x, current_pos[1] + delta_y
            if neighbour in closed_set:
                continue
            if delta_x != 0 and delta_y != 0:
                tentative_g_score = g_score[current_pos] + sqrt_two * grid_size
            else:
                tentative_g_score = g_score[current_pos] + grid_size
            # Bug fix: the original defaulted the unseen-node g-score to 0
            # (so improvements never registered) and fell back to an O(n)
            # scan of the heap for membership.  Standard A* relaxation with
            # a +inf default covers both cases correctly.
            if tentative_g_score < g_score.get(neighbour, float('inf')):
                came_from[neighbour] = current_pos
                g_score[neighbour] = tentative_g_score
                f_score[neighbour] = tentative_g_score + heuristic(neighbour, end_point, grid_size, sqrt_two)
                heapq.heappush(point_queue, (f_score[neighbour], neighbour))
    return False
def blocked(point, delta_x, delta_y, mask):
    """Return True when stepping from *point* by (delta_x, delta_y) is illegal.

    A step is illegal when it leaves the mask bounds, when the target cell is
    occupied, or -- for a diagonal step -- when both adjacent orthogonal
    cells are occupied (no squeezing between corners).
    """
    target_x = point[0] + delta_x
    target_y = point[1] + delta_y
    rows, cols = mask.shape[0], mask.shape[1]
    if not (0 <= target_x < cols and 0 <= target_y < rows):
        return True
    if delta_x != 0 and delta_y != 0:
        # Diagonal move: blocked when cutting a fully occupied corner ...
        if mask[point[1]][target_x] == 1 and mask[target_y][point[0]] == 1:
            return True
        # ... or when the destination itself is occupied.
        return mask[target_y][target_x] == 1
    if delta_x != 0:
        return mask[point[1]][target_x] == 1
    return mask[target_y][point[0]] == 1
def heuristic(point_a, point_b, grid_size = 1, sqrt_two=1.4):
    """Octile distance between two grid points.

    Moves are costed at grid_size per straight step and sqrt_two * grid_size
    per diagonal step, matching the A* step costs in jump_point_astar.
    """
    dx = abs(point_a[0] - point_b[0])
    dy = abs(point_a[1] - point_b[1])
    edge_dist = grid_size
    diag_dist = sqrt_two * grid_size
    return edge_dist * max(dx, dy) + (diag_dist - edge_dist) * min(dx, dy)
def array_heuristic(x_array, y_array, point, grid_size, sqrt_two=1.4):
    """Vectorised octile distance from every grid node to *point*.

    x_array / y_array are the meshgrid coordinate arrays; the result has the
    same shape and is used to snap layout points to their nearest grid node.
    """
    dx = np.absolute(x_array - point[0])
    dy = np.absolute(y_array - point[1])
    edge_dist = grid_size
    diag_dist = sqrt_two * grid_size
    return edge_dist * np.maximum(dx, dy) + (diag_dist - edge_dist) * np.minimum(dx, dy)
def bipoint_angle(point_a, point_b):
    """Angle (radians, in (-pi, pi]) of the vector from point_a to point_b."""
    return np.arctan2(point_b[1] - point_a[1], point_b[0] - point_a[0])
def get_neighbours(GdsMap, distance):
    """Grid-offset stencil covering a clearance of *distance* around a cell.

    For the smallest clearance this is the centre plus the four orthogonal
    neighbours; larger clearances expand to a full square of offsets.
    """
    step = GdsMap.grid_size
    grid_distance = int((distance - distance % step) / (2 * step) + 1)
    if grid_distance == 1:
        return [(0, 0), (0, 1), (0, -1), (1, 0), (-1, 0)]
    span = range(-grid_distance, grid_distance + 1)
    return [(i, j) for i in span for j in span]
def suggest_order(routing_pairs, direction_pairs = None):
    """Suggest a routing order: shortest (heuristic) connections first.

    Parameters
    ----------
    routing_pairs : list of (start, end) point pairs.
    direction_pairs : optional list parallel to routing_pairs; reordered the
        same way when given.

    Returns
    -------
    (sorted routing_pairs, matching direction_pairs or None).
    """
    distances = [heuristic(pair[0], pair[1]) for pair in routing_pairs]
    sort_indexes = np.argsort(distances)
    routing_pairs = [routing_pairs[i] for i in sort_indexes]
    # Fix: identity comparison (`is not None`) instead of `!= None`, which
    # states the intent and avoids surprises with rich comparisons.
    if direction_pairs is not None:
        direction_pairs = [direction_pairs[i] for i in sort_indexes]
    return routing_pairs, direction_pairs
#!/usr/bin/python
# Filename: __init__.py
# Author: David Taylor (@Prooffreader)
from chorogrid.Colorbin import Colorbin
from chorogrid.Chorogrid import Chorogrid
from chorogrid.plotting import plot
from zipcodes_df import zipcodes |
from datetime import timedelta
from typing import List
from http import HTTPStatus
from cachetools import TTLCache, cached
from azure.core import MatchConditions
from azure.cosmos import ContainerProxy
from azure.cosmos.exceptions import CosmosResourceNotFoundError, \
CosmosAccessConditionFailedError, CosmosHttpResponseError
from ._cosmos_client_builder import CosmosClientBuilderFromKeyvaultSecret
from ...storage import Storage
from ...storage.models import StorageEntryModel
from ... import exceptions
class CosmosContainerHandler(Storage):
    """Storage implementation backed by an Azure Cosmos DB container.

    Container clients are built from a Key Vault secret and cached for
    ``cache_timeout``; read/write operations retry across the cached client
    list when a client comes back 401 Unauthorized (e.g. rotated key).
    """
    _database_name: str
    _container_name: str
    _client_builder: CosmosClientBuilderFromKeyvaultSecret

    def __init__(
            self,
            database_name: str,
            container_name: str,
            client_builder: CosmosClientBuilderFromKeyvaultSecret,
            cache_timeout: timedelta):
        self._database_name = database_name
        self._container_name = container_name
        self._client_builder = client_builder
        # TTL cache for the built client list: at most 10 entries, each
        # expiring cache_timeout after insertion.
        self._ttl_cache = TTLCache(10, cache_timeout.total_seconds())

    def get(self, id: str, partition_key: str) -> StorageEntryModel:
        """Read one entry by id/partition key; return None when missing.

        On 401 the next cached client is tried (retries once per client);
        otherwise the error is raised as exceptions.Unauthorized.
        NOTE(review): non-401 CosmosHttpResponseError values are also wrapped
        as Unauthorized here -- confirm this is intended.
        """
        client_list = self._get_cached_or_create_clients()
        retries = len(client_list)
        for client in client_list:
            try:
                data = client.read_item(item=id, partition_key=partition_key)
            except CosmosResourceNotFoundError:
                return None
            except CosmosHttpResponseError as e:
                if retries > 1 and e.status_code == HTTPStatus.UNAUTHORIZED:
                    retries -= 1
                    continue
                raise exceptions.Unauthorized(e)
            # Keep the raw document in `data`; '*' etag matches any version
            # on a later conditional write.
            entry = StorageEntryModel(**{
                'id': data['id'],
                'partition_key': data['partition_key'],
                'data': data,
                'etag': data.get('_etag', '*')
            })
            return entry
        raise exceptions.ShouldNotHaveReachedHereError()

    def add_or_update(self, storage_entry: StorageEntryModel):
        """Upsert an entry with optimistic concurrency.

        The entry's etag plus IfNotModified makes the write fail with
        EtagMismatchError when the stored document changed since it was read.
        401 responses retry with the next cached client (see get()).
        """
        body = storage_entry.data
        body['id'] = storage_entry.id
        body['partition_key'] = storage_entry.partition_key
        client_list = self._get_cached_or_create_clients()
        retries = len(client_list)
        for client in client_list:
            try:
                client.upsert_item(
                    body=body,
                    etag=storage_entry.etag,
                    match_condition=MatchConditions.IfNotModified)
            except CosmosAccessConditionFailedError:
                raise exceptions.EtagMismatchError()
            except CosmosHttpResponseError as e:
                if retries > 1 and e.status_code == HTTPStatus.UNAUTHORIZED:
                    retries -= 1
                    continue
                raise exceptions.Unauthorized(e)
            break

    def _get_cached_or_create_clients(self) -> List[ContainerProxy]:
        """Return the container client list, cached via the instance TTL cache."""
        @cached(self._ttl_cache)
        def wrapper():
            return self._client_builder.get_container_clients(self._database_name, self._container_name)
        return wrapper()
|
"""Graphical procedures for GPR data."""
import numpy as np
import math
import matplotlib.pyplot as plt
from matplotlib import cm
from ...gis.raster import RectifyTif
class PLT:
    """Shared plotting helpers for the GPR display classes below."""

    def show(self):
        """Display the current plot."""
        plt.show()

    def save(self, outpath):
        """Save the current plot to file."""
        # NOTE(review): left unimplemented in the original; kept as a stub so
        # subclasses share a common interface -- implement or remove.

    def jpg(self, path, array, levels=50, cmap=cm.Spectral, axis=True, dpi=150, format='jpg', bbox_inches='tight', pad_inches=0):
        """Render *array* via self.plot() and write it to *path* as a JPEG.

        Bug fix: the original signature omitted ``array`` while the body used
        it, so every call raised NameError.  The parameter is added in the
        same position TimeSlice.jpg uses, for consistency.  Set axis=False to
        strip the axes (e.g. for spatial rectification).
        """
        figure = self.plot(array, levels, cmap)
        if axis == False:
            figure.axis('off')
        figure.savefig(path, dpi=dpi, format=format, bbox_inches=bbox_inches, pad_inches=pad_inches)
        figure.close()
class RadarGram(PLT):
    """Plot a radargram (B-scan) from a 2-D GPR amplitude array.

    Parameters
    ----------
    array : 2-D amplitude array (rows = samples, columns = traces).
    x : trace positions along the profile (metres); read by x_labels().
    y : per-sample two-way travel times (ns); read by y_labels()/y2_labels().
    """
    def __init__(self, array, x=None, y=None):
        self.array = array
        # Bug fix: x and y were accepted but never stored (the assignments
        # were commented out), so plot()/x_labels()/y_labels() raised
        # AttributeError on every use.
        self.x = x
        self.y = y

    def imshow(self, cmap='Greys_r', aspect='auto', interpolation='bicubic', vmin=None, vmax=None):
        """Quick image view of the array in raw trace/sample coordinates."""
        fig = plt.figure(figsize=(29.7/2.54, 21/2/2.54))
        ax = plt.axes()
        a = self.array
        ax.imshow(a, cmap=cmap, aspect=aspect, interpolation=interpolation, vmin=vmin, vmax=vmax)
        ax.set_xlabel('Traces')
        ax.set_ylabel('Samples')
        # Robustness fix: self.path is never set by this class; fall back to
        # an empty title instead of raising AttributeError.
        ax.set_title(getattr(self, 'path', ''))

    def plot(self, xlab='Distance (m)', ylab='Time (ns)', y2lab='Depth (m)', velocity=None, vmin=None, vmax=None):
        """Labelled radargram plot; returns the pyplot module for chaining."""
        fig = plt.figure(figsize=(29.7/2.54, 21/2/2.54))
        ax1 = plt.axes()
        a = self.array
        ax1.imshow(a, cmap = 'Greys_r', aspect = 'auto', interpolation = 'bicubic', vmin=vmin, vmax=vmax)
        self.x_labels(ax1, xlab)
        self.y_labels(ax1, ylab)
        self.y2_labels(ax1, y2lab, velocity)
        return(plt)

    def x_labels(self, ax, label):
        """Label the x axis with profile distance, one tick every 3 units."""
        ax.set_xlabel(label)
        x = [i - self.x[0] for i in self.x]
        labs = [i for i in range(round(min(x)), round(max(x)) + 1, 3)]
        ticks = [(i-labs[0]) / (x[1]-x[0]) for i in labs]
        plt.xticks(ticks, labs)

    def y_labels(self, ax, label):
        """Label the y axis with two-way travel time, one tick every 10 units."""
        ax.set_ylabel(label)
        y = self.y
        labs = [i for i in range(0, round(max(y)) + 1, 10)]
        ticks = [i/(y[1]-y[0]) for i in labs]
        plt.yticks(ticks, labs)

    def y2_labels(self, ax, label, velocity):
        """Add a secondary depth axis derived from travel time and velocity.

        Relies on an `ns2mm` time-to-depth helper expected from the
        surrounding module; failures are printed (not raised) so the main
        plot still renders without it.
        """
        try:
            ax2 = ax.twinx()
            ax2.set_ylabel(label)
            y, v = self.y, velocity
            d = [ns2mm(i/2, v) / 1000 for i in y]
            mn, mx = math.floor(min(d)), math.ceil(max(d))
            labs = [i for i in range(mn, mx)]
            near = lambda x: abs(min([i-x for i in d]))
            locs = [1-(near(i)/max(d)) for i in labs]
            plt.yticks(locs, labs)
        except Exception as e:
            print(e)
class TimeSlice(RectifyTif):
    """Contour plotting of GPR time-slice arrays, with JPEG/TIFF export."""
    def __init__(self):
        """No state is set up; data is passed per call to plot()/jpg()/tif()."""
    def plot(self, array, x, y, levels=5, cmap='rainbow'):
        """Name of method changed to avoid conflict with radargram plot method; other plot method should be deleted in time."""
        #x = [i - xwee.x[0] for i in xwee]
        #y = [i - ywee.y[0] for i in ywee]
        line_spacing = abs(round(y[1] - y[0], 4))
        distance_interval = abs(x[1] - x[0])
        # create a 21 x 21 vertex mesh
        X, Y = np.meshgrid(np.linspace(min(x), max(x), len(x)), np.linspace(min(y), max(y), len(y)))
        # Figure aspect ratio follows the surveyed area's x/y extents.
        x_ratio = len(x) * distance_interval
        y_ratio = len(y) * line_spacing
        fig, ax1 = plt.subplots(figsize = (21 / 2.54 / 2, 21 / 2 / 2.54 * y_ratio / x_ratio))
        # NOTE(review): this overwrites the `levels` parameter with 5 -- the
        # argument is effectively ignored; confirm whether it should be kept.
        levels=5
        import matplotlib as mpl
        #cset = ax1.contourf(X, -Y, array, levels, norm=mpl.colors.Normalize(vmin=0, vmax=1), cmap='rainbow') # the sign of y will flip the y-axis; there is some indication that this needs manually changing in some instances
        #cset = ax1.contourf(X, -Y, array, levels, cmap='rainbow') # the sign of y will flip the y-axis; there is some indication that this needs manually changing in some instances
        cset = ax1.contourf(X, -Y, array, levels, cmap=cmap)
        # Show absolute distances on the (negated) y axis.
        y_min = min([abs(i) for i in y])
        y_max = max([abs(i) for i in y])
        yticks = [i for i in ax1.get_yticks() if abs(i) <= y_max and abs(i) >= y_min]
        plt.yticks(yticks, [abs(i) for i in yticks])
        # set axis labels
        #ax1.set_xlabel('Distance (m)')
        #ax1.set_ylabel('Distance (m)')
        plt.axis('off')
        return(plt)
    def jpg(self, path, array, levels=50, cmap=cm.Spectral, axis=True, dpi=150, format='jpg', bbox_inches='tight', pad_inches=0):
        """Remove the axes of a timeslice for spatial recification"""
        # NOTE(review): plot() takes (array, x, y, levels, cmap), so this call
        # binds levels->x and cmap->y -- looks like a bug; confirm intent and
        # where x/y should come from before changing the signature.
        plt = self.plot(array, levels, cmap)
        if axis == False:
            plt.axis('off')
        plt.savefig(path, dpi=dpi, format=format, bbox_inches=bbox_inches, pad_inches=pad_inches)
        plt.close()
    def tif(self, path, array, levels=50, cmap=cm.Spectral, axis=False, dpi=150, format='tif', bbox_inches='tight', pad_inches=0):
        """Remove the axes of a timeslice for spatial recification"""
        # NOTE(review): same plot() argument mismatch as jpg() above.
        plt = self.plot(array, levels, cmap)
        if axis == False:
            plt.axis('off')
        plt.savefig(path, dpi=dpi, format=format, bbox_inches=bbox_inches, pad_inches=pad_inches)
        plt.close()
|
from flask import current_app, g, jsonify, redirect, render_template, request

from info import db, constants
from info.libs.image_storage import storage
from info.models import Category, News
from info.modules.profile import profile_blu
from info.utils.common import user_login
from info.utils.response_code import RET
@profile_blu.route("/user_follow")
@user_login
def user_follow():
    """Render the paginated list of users the current user follows."""
    try:
        page = int(request.args.get("page", 1))
    except Exception as e:
        current_app.logger.error(e)
        page = 1
    user = g.user
    follows = []
    current_page = 1
    total_page = 1
    try:
        paginate = user.followed.paginate(page, constants.USER_FOLLOWED_MAX_COUNT, False)
        follows = paginate.items
        current_page = paginate.page
        total_page = paginate.pages
    except Exception as e:
        # Pagination failure falls back to an empty first page.
        current_app.logger.error(e)
    data = {
        "users": [followed_user.to_dict() for followed_user in follows],
        "current_page": current_page,
        "total_page": total_page,
    }
    return render_template("news/user_follow.html", data=data)
@profile_blu.route('/user_news_list')
@user_login
def user_news_list():
    """Paginated list of the news posts published by the current user.

    Query string: ``page`` (defaults to 1 on missing/invalid input).
    """
    user = g.user
    page = request.args.get("page")
    try:
        page = int(page)
    except Exception as e:
        current_app.logger.error(e)
        page = 1
    news_li = []
    current_page = 1
    total_page = 1
    try:
        paginate = News.query.filter(News.user_id == user.id).paginate(page, constants.USER_COLLECTION_MAX_NEWS, False)
        news_li = paginate.items
        current_page = paginate.page
        total_page = paginate.pages
    except Exception as e:
        # Bug fix: the original called the non-existent `logger.eror`, which
        # raised AttributeError while handling the query exception.
        current_app.logger.error(e)
    news_dict_li = [news.to_review_dict() for news in news_li]
    data = {
        "news_dict_li": news_dict_li,
        "current_page": current_page,
        "total_page": total_page
    }
    # Removed leftover debug print of news_dict_li.
    return render_template("news/user_news_list.html", data=data)
@profile_blu.route("/user_news_release", methods=["GET", "POST"])
@user_login
def user_news_release():
    """GET: render the news-release form; POST: create a news item.

    POST form fields: title, category_id, digest, index_image (file), content.
    The cover image is uploaded to Qiniu; the news item is saved with
    status 1 (presumably "pending review" -- TODO confirm against the model).
    """
    user = g.user
    if request.method == "GET":
        categories = []
        try:
            categories = Category.query.all()
        except Exception as e:
            current_app.logger.error(e)
        categories_dict_li = [category.to_dict() for category in categories]
        # Drop the first category; presumably the "latest" pseudo-category
        # that is not a valid release target -- TODO confirm.
        categories_dict_li.pop(0)
        data = {
            "categories_dict_li": categories_dict_li
        }
        return render_template("news/user_news_release.html", data=data)
    title = request.form.get("title")
    category_id = request.form.get("category_id")
    digest = request.form.get("digest")
    index_image = request.files.get("index_image")
    content = request.form.get("content")
    if not all([title, category_id, digest, index_image, content]):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不全")
    try:
        category_id = int(category_id)
        image_data = index_image.read()
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.PARAMERR, errmsg="参数错误")
    # Upload the cover image to Qiniu cloud storage.
    try:
        key = storage(image_data)
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.THIRDERR, errmsg="第三方上传失败")
    news = News()
    news.title = title
    news.source = "个人发布"
    news.digest = digest
    news.content = content
    news.index_image_url = constants.QINIU_DOMIN_PREFIX + key
    news.category_id = category_id
    news.user_id = user.id
    news.status = 1
    # Removed leftover debug print of the news object.
    try:
        db.session.add(news)
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库保存失败")
    return jsonify(errno=RET.OK, errmsg="OK")
@profile_blu.route("/user_collection")
@user_login
def user_collection():
    """Render the paginated list of news items the current user collected."""
    user = g.user
    try:
        page = int(request.args.get("page"))
    except Exception as e:
        current_app.logger.error(e)
        page = 1
    news_list = []
    current_page = 1
    total_page = 1
    try:
        paginate = user.collection_news.paginate(page, constants.USER_COLLECTION_MAX_NEWS, False)
        news_list = paginate.items
        current_page = paginate.page
        total_page = paginate.pages
    except Exception as e:
        # Pagination failure falls back to an empty first page.
        current_app.logger.error(e)
    data = {
        "news_dict_li": [item.to_dict() for item in news_list],
        "current_page": current_page,
        "total_page": total_page,
    }
    return render_template("news/user_collection.html", data=data)
@profile_blu.route("/user_pass_info", methods=["GET", "POST"])
@user_login
def user_pass_info():
    """GET: render the change-password page; POST: change the password.

    POST expects JSON {"old_password": ..., "new_password": ...}.
    """
    user = g.user
    if request.method == "GET":
        return render_template("news/user_pass_info.html")
    old_password = request.json.get("old_password")
    new_password = request.json.get("new_password")
    if not all([old_password, new_password]):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不全")
    # NOTE(review): `check_passowrd` looks like a typo for `check_password`,
    # but the User model may define it with this exact spelling -- confirm
    # against the model before renaming.
    if not user.check_passowrd(old_password):
        return jsonify(errno=RET.DATAERR, errmsg="旧密码输入错误")
    # Assigning user.password presumably hashes via a property setter -- TODO confirm.
    user.password = new_password
    try:
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库保存失败")
    return jsonify(errno=RET.OK, errmsg="密码修改成功")
@profile_blu.route("/user_pic_info", methods=["GET", "POST"])
@user_login
def user_pic_info():
    """GET: render the avatar page; POST: upload a new avatar image."""
    user = g.user
    if request.method == "GET":
        return render_template(
            "news/user_pic_info.html",
            data={"user_info": user.to_dict()})
    try:
        image_data = request.files.get("avatar").read()
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.PARAMERR, errmsg="参数错误")
    # Store the uploaded image on Qiniu cloud storage.
    try:
        key = storage(image_data)
    except Exception as e:
        current_app.logger.error(e)
        return jsonify(errno=RET.THIRDERR, errmsg="上传头像失败")
    user.avatar_url = key
    try:
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库保存失败")
    return jsonify(errno=RET.OK, errmsg="上传头像成功", data=constants.QINIU_DOMIN_PREFIX + key)
@profile_blu.route("/user_base_info", methods=["GET", "POST"])
@user_login
def user_base_info():
    """GET: render the basic-profile page; POST: update nick name, signature, gender."""
    user = g.user
    if request.method == "GET":
        return render_template(
            "news/user_base_info.html",
            data={"user_info": user.to_dict()})
    nick_name = request.json.get("nick_name")
    signature = request.json.get("signature")
    gender = request.json.get("gender")
    if not all([nick_name, signature, gender]):
        return jsonify(errno=RET.PARAMERR, errmsg="参数不全")
    if gender not in ["MAN", "WOMAN"]:
        return jsonify(errno=RET.PARAMERR, errmsg="参数错误")
    # Apply the profile changes.
    user.nick_name = nick_name
    user.signature = signature
    user.gender = gender
    try:
        db.session.commit()
    except Exception as e:
        db.session.rollback()
        current_app.logger.error(e)
        return jsonify(errno=RET.DBERR, errmsg="数据库保存失败")
    return jsonify(errno=RET.OK, errmsg="OK", data=user.to_dict())
@profile_blu.route("/info")
@user_login
def user_info():
    """Personal centre page; anonymous visitors are sent to the index."""
    user = g.user
    if not user:
        # Bug fix: the original called the builtin `credits("/")` (an
        # interpreter easter egg) instead of Flask's redirect().
        return redirect("/")
    data = {
        "user_info": user.to_dict()
    }
    return render_template("/news/user.html", data=data)
from odoo import tools, models, fields
class AccountArVatLine(models.Model):
    """Base model for the Argentinian VAT reports.

    The idea is that these lines carry all the data the reports need, so
    that future Odoo changes are absorbed by this cube and the reports built
    on top of these lines do not need to change.

    One line is generated per journal entry affected by VAT; it basically
    turns the entry's journal items into columns according to their tax
    information, plus a few extra descriptive fields.
    """
    _name = "account.ar.vat.line"
    _description = "Línea de IVA para análisis en localización argentina"
    # Backed by the SQL view created in init(), not a regular table.
    _auto = False
    _order = 'invoice_date asc, move_name asc, id asc'

    # Descriptive / document fields.
    document_type_id = fields.Many2one('l10n_latam.document.type', 'Document Type', readonly=True)
    date = fields.Date(readonly=True)
    invoice_date = fields.Date(readonly=True)
    cuit = fields.Char(readonly=True)
    afip_responsibility_type_name = fields.Char(readonly=True)
    partner_name = fields.Char(readonly=True)
    move_name = fields.Char(readonly=True)
    type = fields.Selection(selection=[
        ('entry', 'Journal Entry'),
        ('out_invoice', 'Customer Invoice'),
        ('out_refund', 'Customer Credit Note'),
        ('in_invoice', 'Vendor Bill'),
        ('in_refund', 'Vendor Credit Note'),
        ('out_receipt', 'Sales Receipt'),
        ('in_receipt', 'Purchase Receipt'),
    ], readonly=True)
    # Taxed base / VAT amount per AFIP rate (21%, 27%, 10.5%, 2.5%, 5%).
    base_21 = fields.Monetary(readonly=True, string='Grav. 21%', currency_field='company_currency_id')
    vat_21 = fields.Monetary(readonly=True, string='VAT 21%', currency_field='company_currency_id')
    base_27 = fields.Monetary(readonly=True, string='Grav. 27%', currency_field='company_currency_id')
    vat_27 = fields.Monetary(readonly=True, string='VAT 27%', currency_field='company_currency_id')
    base_10 = fields.Monetary(readonly=True, string='Grav. 10,5%', currency_field='company_currency_id')
    vat_10 = fields.Monetary(readonly=True, string='VAT 10,5%', currency_field='company_currency_id')
    base_25 = fields.Monetary(readonly=True, string='Grav. 2,5%', currency_field='company_currency_id')
    vat_25 = fields.Monetary(readonly=True, string='VAT 2,5%', currency_field='company_currency_id')
    base_5 = fields.Monetary(readonly=True, string='Grav. 5%', currency_field='company_currency_id')
    vat_5 = fields.Monetary(readonly=True, string='VAT 5%', currency_field='company_currency_id')
    # Perceptions and other taxes.
    vat_per = fields.Monetary(
        readonly=True, string='VAT Perc.', currency_field='company_currency_id')
    iibb_per = fields.Monetary(
        readonly=True, string='IIBB Perc.', currency_field='company_currency_id')
    municipal_per = fields.Monetary(
        readonly=True, string='Municipal Taxes Perc.', currency_field='company_currency_id')
    internal_tax = fields.Monetary(
        readonly=True, string='Internal Taxes.', currency_field='company_currency_id')
    not_taxed = fields.Monetary(
        readonly=True, string='Not taxed/ex', currency_field='company_currency_id')
    other_taxes = fields.Monetary(
        readonly=True, string='Other Taxes', currency_field='company_currency_id')
    total = fields.Monetary(readonly=True, currency_field='company_currency_id')
    state = fields.Selection([('draft', 'Unposted'), ('posted', 'Posted')], 'Status', readonly=True)
    journal_id = fields.Many2one('account.journal', 'Journal', readonly=True, auto_join=True)
    partner_id = fields.Many2one('res.partner', 'Partner', readonly=True, auto_join=True)
    afip_responsibility_type_id = fields.Many2one(
        'l10n_ar.afip.responsibility.type', string='AFIP Responsibility Type', readonly=True, auto_join=True)
    company_id = fields.Many2one('res.company', 'Company', readonly=True, auto_join=True)
    company_currency_id = fields.Many2one(related='company_id.currency_id', readonly=True)
    move_id = fields.Many2one('account.move', string='Entry', auto_join=True)

    def open_journal_entry(self):
        """Open the form view of the journal entry behind this line."""
        self.ensure_one()
        return self.move_id.get_formview_action()

    def init(self):
        """(Re)create the SQL view backing this model.

        Aggregates journal items per move, pivoting tax bases and tax amounts
        into one column per AFIP VAT/tribute code.
        """
        cr = self._cr
        tools.drop_view_if_exists(cr, self._table)
        # pylint: disable=sql-injection
        query = """
            SELECT
                am.id,
                (CASE WHEN lit.l10n_ar_afip_code = '80' THEN rp.vat ELSE null END) as cuit,
                art.name as afip_responsibility_type_name,
                am.name as move_name,
                rp.name as partner_name,
                am.id as move_id,
                am.type,
                am.date,
                am.invoice_date,
                am.partner_id,
                am.journal_id,
                am.name,
                am.l10n_ar_afip_responsibility_type_id as afip_responsibility_type_id,
                am.l10n_latam_document_type_id as document_type_id,
                am.state,
                am.company_id,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code = '5' THEN aml.balance ELSE Null END) as base_21,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code = '5' THEN aml.balance ELSE Null END) as vat_21,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code = '4' THEN aml.balance ELSE Null END) as base_10,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code = '4' THEN aml.balance ELSE Null END) as vat_10,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code = '6' THEN aml.balance ELSE Null END) as base_27,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code = '6' THEN aml.balance ELSE Null END) as vat_27,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code = '9' THEN aml.balance ELSE Null END) as base_25,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code = '9' THEN aml.balance ELSE Null END) as vat_25,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code = '8' THEN aml.balance ELSE Null END) as base_5,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code = '8' THEN aml.balance ELSE Null END) as vat_5,
                sum(CASE WHEN btg.l10n_ar_vat_afip_code in ('0', '1', '2', '3', '7') THEN aml.balance ELSE Null END) as not_taxed,
                sum(CASE WHEN ntg.l10n_ar_tribute_afip_code = '04' THEN aml.balance ELSE Null END) as internal_tax,
                sum(CASE WHEN ntg.l10n_ar_tribute_afip_code = '06' THEN aml.balance ELSE Null END) as vat_per,
                sum(CASE WHEN ntg.l10n_ar_tribute_afip_code = '07' THEN aml.balance ELSE Null END) as iibb_per,
                sum(CASE WHEN ntg.l10n_ar_tribute_afip_code = '08' THEN aml.balance ELSE Null END) as municipal_per,
                sum(CASE WHEN ntg.l10n_ar_vat_afip_code is null and ntg.l10n_ar_tribute_afip_code != '06'
                    THEN aml.balance ELSE Null END) as other_taxes,
                sum(aml.balance) as total
            FROM
                account_move_line aml
            LEFT JOIN
                account_move as am
                ON aml.move_id = am.id
            LEFT JOIN
                -- nt = net tax
                account_tax AS nt
                ON aml.tax_line_id = nt.id
            LEFT JOIN
                account_move_line_account_tax_rel AS amltr
                ON aml.id = amltr.account_move_line_id
            LEFT JOIN
                -- bt = base tax
                account_tax AS bt
                ON amltr.account_tax_id = bt.id
            LEFT JOIN
                account_tax_group AS btg
                ON btg.id = bt.tax_group_id
            LEFT JOIN
                account_tax_group AS ntg
                ON ntg.id = nt.tax_group_id
            LEFT JOIN
                res_partner AS rp
                ON rp.id = am.partner_id
            LEFT JOIN
                l10n_latam_identification_type AS lit
                ON rp.l10n_latam_identification_type_id = lit.id
            LEFT JOIN
                l10n_ar_afip_responsibility_type AS art
                ON am.l10n_ar_afip_responsibility_type_id = art.id
            WHERE
                (aml.tax_line_id is not null or btg.l10n_ar_vat_afip_code is not null)
                and am.type in ('out_invoice', 'in_invoice', 'out_refund', 'in_refund')
            GROUP BY
                am.id, art.name, rp.id, lit.id
            ORDER BY
                am.date, am.name
        """
        sql = """CREATE or REPLACE VIEW %s as (%s)""" % (self._table, query)
        cr.execute(sql)
|
from IPython.core.display import Markdown, display
import numpy as np
def printmd(string: str):
    '''
    Markdown printout in Jupyter
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    Renders ``string`` (which may contain Markdown markup) in the current
    Jupyter cell output.
    '''
    display(Markdown(string))
def print_matrix(matrix:np.ndarray, decimals:int=None, name:str=None, maxSize:int=20):
    '''
    Matrix Markdown in Jupyter
    ==========================
    prints a ``matrix:numpy.ndarray`` as a LaTeX matrix in Jupyter cell output. \
    ``decimals`` decimal places for each element. None = use unformatted output. \
    ``name`` can be specified to print name= before the matrix.
    ``maxSize`` limits the columns/rows printed to maxSize elements; additional rows/columns are elided and indicated with dots.

    List use: Both ``matrix`` and ``name`` can be a python list, preferably with the same number of elements, to print multiple matrices at once.

    Raises:
    ~~~~~~~
    * Exception if the matrix to print has more than two dimensions!
    * TypeError if the matrix is not a numpy.ndarray
    '''
    def oneMatrix(matr, name):
        """Build the LaTeX bmatrix markup for a single 1-D or 2-D array."""
        if isinstance(matr, np.ndarray):
            if name!=None:
                mdheader = f'$ {name} = \\begin{{bmatrix}}'
            else:
                mdheader = '$ \\begin{bmatrix}'
            mstr = ''
            # A 1-D vector is rendered as a single-column matrix.
            if len(matr.shape)==1:
                matr = matr.reshape(matr.shape[0],1)
            if len(matr.shape)>2:
                raise Exception('cannot print more than two dimensions on a flat screen')
            # skip-at / skip-to bounds; defaults beyond the shape mean "never skip".
            cskipat = matr.shape[1]+1
            cskipto = matr.shape[1]+1
            rskipat = matr.shape[0]+1
            rskipto = matr.shape[0]+1
            # When too large, keep the first maxSize-2 rows/cols and the last one,
            # eliding everything in between.
            if matr.shape[1]>maxSize:
                cskipat = maxSize-2
                cskipto = matr.shape[1]-1
            if matr.shape[0]>maxSize:
                rskipat = maxSize-2
                rskipto = matr.shape[0]-1
            rskip=False
            for row in range(matr.shape[0]):
                if row>=rskipat and row<rskipto:
                    if not rskip:
                        # row to skip
                        if cskipat!=cskipto: # there are columns to skip, too: Use diagonal dots
                            mstr += "\\vdots & " * (maxSize-2) + ' \\ddots & \\vdots \\\\ '
                        else:
                            mstr += "\\vdots & " * (min(maxSize, matr.shape[1])-1) + ' \\vdots \\\\ '
                        rskip=True
                else:
                    cskip=False
                    for col in range(matr.shape[1]):
                        # debug {
                        #mstr += '[' + str(col) +'<=>' + str(cskipat) + ']'
                        #debug }
                        if col>=cskipat and col<cskipto:
                            # column to skip
                            if not cskip:
                                mstr += "\cdots & "
                                cskip = True
                        else:
                            if decimals!=None:
                                mstr += "{{:.{}f}}".format(decimals).format(matr[row][col])
                            else:
                                # round to 15 places to hide float representation noise
                                mstr += str(round(matr[row][col],15))
                            if col<matr.shape[1]-1:
                                mstr += ' & '
                if row<matr.shape[0]-1:
                    mstr += ' \\\\ '
            mdfooter = f' \end{{bmatrix}}_{{Shape{matr.shape}}} $'
            return mdheader+mstr+mdfooter
        else:
            # return (type(matr) + ' is not supported')
            raise TypeError('Wrong type of matrix: only numpy.ndarray is supported.')
    if isinstance(matrix, list):
        coll = ''
        # Pad the name list with None so zip() covers every matrix.
        if isinstance(name, list):
            if len(name)<len(matrix):
                name = name + [None]*(len(matrix)-len(name))
        else:
            name = [name] + [None]*(len(matrix)-1)
        for m,n in zip(matrix,name):
            coll += oneMatrix(m,n)
    else:
        coll = oneMatrix(matrix,name)
    printmd(coll)
    #print(coll)
def matrixInfo(matrix:np.ndarray, name:str='A', verbose:bool=False, decimals:int=None, maxSize:int=20, surfaceGraph=False):
    '''
    Matrix quick analysis in Jupyter
    ================================
    Prints some short analysis of the matrix passed, such as determinant, eigenvectors and -values, inverse.

    ``matrix`` the numpy array to analyse.
    ``name`` can be specified to print name= before the matrix.
    ``verbose`` True will print more hints to the analyses, e.g. Wikipedia links.
    ``decimals`` decimal places for each element. None = use unformatted output.
    ``maxSize`` can be specified to limit the columns/rows printed to maxSize elements. More rows/columns will be skipped indicated with ...
    ``surfaceGraph`` True additionally renders the 2-D matrix values as a plotly surface plot.
    '''
    # The headline is printed for every dimensionality; only 1-D/2-D input can
    # actually be rendered by print_matrix (it raises beyond two dimensions),
    # so the matrix body itself is guarded. (Previously the identical headline
    # call was duplicated in both branches.)
    printmd(f'## Overview for the {len(matrix.shape)}-dimensional matrix {name}')
    if len(matrix.shape) in [1, 2]:
        print_matrix(matrix, name=name, decimals=decimals)
    # NOTE(review): np.linalg.eig/det/inv below require a square 2-D matrix;
    # other shapes raise LinAlgError here, exactly as before.
    eigval, eigvec = np.linalg.eig(matrix)
    printmd('### Eigenvalues and corresponding eigenvectors')
    if verbose:
        printmd('Eigenvectors are the vectors (different from the nullvector) that are only _scaled_ by a transformation matrix operation _but not rotated_. ')
        printmd('The eigenvalues are the measure of scaling. Eigenvectors by numpy are normalized in length. ')
        printmd('There might not be a solution in real space, so the eigenvectors and eigenvalues can be complex vectors and numbers respectively. ')
        printmd('[Wikipedia link.](https://en.wikipedia.org/wiki/Eigenvalues_and_eigenvectors) ')
    # One eigenvector column per eigenvalue; the eigenvalue becomes the printed name.
    print_matrix([eigvec[:, x] for x in range(eigvec.shape[0])], name=['v_{{{}}}'.format(x) for x in list(eigval)], decimals=decimals, maxSize=maxSize)
    printmd('## Euclidian Norm (2nd)')
    if verbose:
        # https://en.wikipedia.org/wiki/Matrix_norm
        printmd('[Wikipedia link.](https://en.wikipedia.org/wiki/Matrix_norm) ')
    printmd(f'$ {{\|{name}\|_2}} =' + str(np.linalg.norm(matrix)) + '$')
    printmd('### Determinant')
    if verbose:
        printmd('[Wikipedia link.](https://en.wikipedia.org/wiki/Determinant) ')
    printmd(f'${{det}}_{{{name}}} = $' + str(np.linalg.det(matrix)))
    printmd('### Rank')
    if verbose:
        printmd('[Wikipedia link](https://en.wikipedia.org/wiki/Rank_(linear_algebra))')
    r = np.linalg.matrix_rank(matrix)
    printmd(f'$rank({name}) = $' + str(r))
    if r == min(matrix.shape):
        printmd('Matrix is FULL RANK')
    printmd('### Inverse')
    try:
        i = np.linalg.inv(matrix)
        print_matrix(i, name=f'{{{name}}}^{{-1}}', decimals=decimals, maxSize=maxSize)
    except Exception as exc:
        # Singular / non-invertible matrices are reported, not propagated.
        printmd('_there is no inverse to that matrix, or at least it could not be computed._')
        print(exc)
    if surfaceGraph and len(matrix.shape) == 2:
        import plotly.graph_objects as go
        # Contour markers placed at mean +/- one standard deviation of all values.
        fig = go.Figure(go.Surface(
            contours={
                "z": {"show": True, "start": np.mean(matrix.flatten()) - np.std(matrix.flatten()), "end": np.mean(matrix.flatten()) + np.std(matrix.flatten()) * 1.01, "size": np.std(matrix.flatten())}
            },
            x=list(range(matrix.shape[0])),
            y=list(range(matrix.shape[1])),
            z=matrix))
        fig.layout.title.text = "Surface approximation (with +/- one std deviation markers"
        fig.update_layout(xaxis_title='column',
                          yaxis_title='row')
        fig.show()
|
import os
import sys
import json
import requests
class OpenWeatherAPI(object):
    """
    Performs requests to the OpenWeather API Service.
    Documentation : https://openweathermap.org/api
    # NOTE: - Add more features to this API
    #       - Publish to PyPi
    """

    def __init__(self, key, headers=None, debug_mode=False, *args, **kwargs):
        """
        :param key: OpenWeather API key; sent as the 'appid' query parameter.
        :param headers: optional HTTP headers dict; a curl-like default
            (including a 'User-key' header) is used when None.
        :param debug_mode: when True, each final request URL is printed.
        Extra *args/**kwargs are stored untouched for forward compatibility.
        """
        self.key = key
        self.debug = debug_mode
        self.args = args
        self.kwargs = kwargs
        self.baseurl = 'https://api.openweathermap.org/data/2.5/'
        if headers is not None:  # identity test per PEP 8 (was: headers != None)
            self.headers = headers
        else:
            self.headers = {
                "User-agent": "curl/7.43.0",
                "Content-type": "application/json",
                "User-key": self.key
            }

    def current_weather(self, **option):
        """ Get current weather data by city name """
        # The API key always rides along as the 'appid' query parameter.
        option['appid'] = self.key
        if 'coordinate' in option:
            # Accept either a "lat lon" string or a (lat, lon) pair and
            # translate it into the separate 'lat'/'lon' params the API expects.
            if isinstance(option['coordinate'], str):
                lat, lon = option['coordinate'].split()
            else:
                lat, lon = option['coordinate']
            option['lat'] = lat
            option['lon'] = lon
            del option['coordinate']
        weather_url = self.baseurl + 'weather'
        response = requests.get(url=weather_url, params=option, headers=self.headers)
        if self.debug:
            print(response.url)
        return response.json()
|
import warnings
from unittest import TestCase
import numpy as np
import tensorflow as tf
import uncertainty_wizard as uwiz
from uncertainty_wizard.internal_utils import UncertaintyWizardWarning
class EnsembleFunctionalTest(TestCase):
    """Functional tests for quantifier handling in StochasticSequential.predict_quantified."""

    @staticmethod
    def _dummy_stochastic_classifier():
        """Build and briefly fit a small stochastic softmax classifier."""
        model = uwiz.models.StochasticSequential(
            layers=[
                tf.keras.layers.Input(shape=1000),
                tf.keras.layers.Dense(1000),
                tf.keras.layers.Dropout(0.3),
                tf.keras.layers.Dense(1000),
                tf.keras.layers.Dense(10),
                tf.keras.layers.Softmax(),
            ]
        )
        model.compile(loss="mse")
        # The labels make no sense for a softmax output layer, but this does not matter
        model.fit(x=np.ones((10, 1000)), y=np.ones((10, 10)), epochs=2)
        return model

    def test_error_if_invalid_quantifier_type(self):
        model = self._dummy_stochastic_classifier()
        # Only strings and quantifier objects are accepted as quantifiers.
        with self.assertRaises(TypeError):
            model.predict_quantified(np.ones((10, 1000)), quantifier=5)

    def test_error_if_point_predictors(self):
        model = self._dummy_stochastic_classifier()
        # NOTE(review): this expects a ValueError for the same call that the
        # method below expects to merely warn. Previously both methods shared
        # one name, so this one was silently shadowed and never executed —
        # confirm which behaviour is actually intended.
        with self.assertRaises(ValueError):
            model.predict_quantified(
                np.ones((10, 1000)), quantifier=["PCS", "SoftmaxEntropy"]
            )

    def test_warns_on_multiple_quantifiers(self):
        """Renamed from a duplicate 'test_error_if_point_predictors' definition
        which overwrote the method above and prevented it from running."""
        model = self._dummy_stochastic_classifier()
        with self.assertWarns(UncertaintyWizardWarning):
            model.predict_quantified(
                np.ones((10, 1000)), quantifier=["PCS", "SoftmaxEntropy"]
            )
        # Test that no warning is printed if passed individually
        with warnings.catch_warnings(record=True) as w:
            model.predict_quantified(np.ones((10, 1000)), quantifier=["SoftmaxEntropy"])
            model.predict_quantified(np.ones((10, 1000)), quantifier=["PCS"])
        self.assertEqual(len(w), 0, w)
|
import discord
import os
import asyncio
from datetime import datetime
from discord.ext import tasks, commands
from discord.utils import get
# Bot setup: '.'-prefixed commands; the built-in help command is removed.
client = commands.Bot(command_prefix='.')
client.remove_command('help')
# Bot token comes from the environment, never hard-coded.
TOKEN = os.getenv("TOKEN")
@client.event
async def on_ready():
    """Log startup, set the idle 'watching' presence and refresh the date channel."""
    print('Started {0.user}'.format(client))
    await client.change_presence(activity=discord.Activity(status=discord.Status.idle, type=discord.ActivityType.watching, name="Around the Clock"))
    await updateDate()
# True while a bump-cooldown countdown is running (prevents duplicate reminders).
timer = False
async def _bump_countdown(channel):
    """Wait out Disboard's ~2h bump cooldown one second at a time, then post
    the bump-reminder embed to *channel*. (Extracted: this block was
    duplicated verbatim in both branches of on_message.)"""
    cd = 7201  # cooldown in seconds (two hours, plus one second of margin)
    while cd > 0:
        cd -= 1
        await asyncio.sleep(1)
        print(cd)  # NOTE(review): very chatty; kept to preserve original behaviour
    embed = discord.Embed(title = "Disboard is off cooldown!", description = "Time to bump! 🍌", color = discord.Color.dark_blue())
    embed.set_thumbnail(url="https://i.pinimg.com/originals/ee/b0/e6/eeb0e632af64b76830c5777e07770202.png")
    await channel.send(embed=embed)
    print('Reminder Sent')


@client.event
async def on_message(message):
    """Watch for Disboard bump confirmations (or a manual `.bump`) and start
    a reminder countdown for the two-hour cooldown."""
    global timer
    await updateDate()
    guild = client.get_guild(802565984602423367)
    channel = guild.get_channel(802577298267963412)
    try:
        # Disboard's success embed contains 'Check it out'; messages without
        # an embed raise IndexError and are ignored.
        if 'Check it out' in message.embeds[0].description:
            print('Bump Detected :D')
            print('Countdown Started')
            timer = True
            await _bump_countdown(channel)
            timer = False
    except IndexError:
        pass
    # Manual trigger: '.bump' sent by the expected user id, only when no
    # countdown is already running.
    if message.author.id == 594352318464524289 and '.bump' in message.content:
        if not timer:
            print('Reminder Manually Started')
            timer = True
            await _bump_countdown(channel)
            timer = False
    await client.process_commands(message)
@tasks.loop(minutes=10.0)
async def time_check():
    """Every 10 minutes: if the latest message in the bump channel is a
    Disboard confirmation older than two hours and no countdown is running,
    post the bump reminder immediately."""
    global timer
    await updateDate()
    guild = client.get_guild(802565984602423367)
    channel = guild.get_channel(802577298267963412)
    message = await channel.fetch_message(channel.last_message_id)
    # 302050872383242240 is the id of the Disboard bot account being watched.
    if message.author.id == 302050872383242240 and 'Check it on DISBOARD:' in message.embeds[0].description:
        if not timer:
            # Bug fix: timedelta.seconds ignores whole days (it wraps at 24h),
            # so a bump older than a day could appear "fresh" and never be
            # re-announced. total_seconds() counts the full elapsed time.
            diff = datetime.utcnow() - message.created_at  # NOTE(review): assumes naive-UTC created_at
            m = int(diff.total_seconds()) // 60
            if m > 120:
                print('Need to bump')
                embed = discord.Embed(title = "Disboard is off cooldown!", description = "Time to bump! 🍌", color = discord.Color.dark_blue())
                embed.set_thumbnail(url="https://i.pinimg.com/originals/ee/b0/e6/eeb0e632af64b76830c5777e07770202.png")
                await channel.send(embed=embed)
                print('Reminder Sent')
# Three-letter channel-name abbreviations, indexed by (month number - 1).
# Replaces a 12-branch elif chain.
_MONTH_ABBR = ('jan', 'feb', 'mar', 'apr', 'may', 'jun',
               'jul', 'aug', 'sep', 'oct', 'nov', 'dec')


async def updateDate():
    """Rename the daily channel to reflect the current local date,
    e.g. '﹕📋﹕daily﹕mar-14'."""
    guild = client.get_guild(802565984602423367)
    channel = guild.get_channel(806150413773963275)
    date = datetime.now()  # the original called datetime.now() twice; once is enough
    month = _MONTH_ABBR[date.month - 1]
    await channel.edit(name=f'﹕📋﹕daily﹕{month}-{date.day}')
@time_check.before_loop
async def before_time_check():
    # Delay the first time_check iteration until the bot is fully connected.
    print('waiting...')
    await client.wait_until_ready()
@client.command(name='dateUpdate', help='Manual Date Update')
@commands.has_permissions(administrator=True)
async def manualDateUpdate(ctx):
    """Admin-only command: force-refresh the daily date channel name."""
    await updateDate()
    await ctx.send('```Channel Updated```')
# Start the periodic checker, then hand control to the discord client
# (client.run blocks until the bot shuts down).
time_check.start()
client.run(TOKEN)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------
# test_spaceAfterComma.py
#
# test for spaceAfterComma rule
# ----------------------------------------------------------------
# copyright (c) 2014 - Domen Ipavec
# Distributed under The MIT License, see LICENSE
# ----------------------------------------------------------------
import unittest
from cssqc.parser import CSSQC
from cssqc.qualityWarning import QualityWarning
class Test_spaceAfterComma(unittest.TestCase):
    """Checks that the 'spaceAfterComma' rule flags commas not followed by a space."""

    def parse(self, data):
        """Run CSSQC over *data* with only the spaceAfterComma rule enabled."""
        checker = CSSQC({"spaceAfterComma": "on"})
        checker.parse(data)
        return checker

    def test_sac(self):
        # Lines 1 and 2 each contain three tight commas; lines 3 and 4 are clean.
        sample = '''@include box-shadow(0 2px 2px rgba(0,0,0,.2));
color: rgba(0,0,0,.1);
@include box-shadow(0 2px 2px rgba(0, 0, 0, .2));
color: rgba(0, 0, 0, .1);'''
        result = self.parse(sample)
        expected = [QualityWarning('spaceAfterComma', line) for line in (1, 1, 1, 2, 2, 2)]
        self.assertEqual(result.warnings, expected)
|
import unittest
import os
from exm.conf import settings
class TestGetSetting(unittest.TestCase):
"""
Test get config
"""
def test_getenv_setting_path(self):
self.assertEqual(os.getenv('SETTING_PATH') != '', True)
def test_get_CELERY_PATH(self):
self.assertEqual(settings.CELERY_PATH.startswith('redis://'), True)
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
# Read wall dimensions, then report the wall area and the paint required
# (the script divides the area by 2 to obtain litres of paint).
width = float(input('largura da parede:'))
height = float(input('altura da parede:'))
wall_area = width * height
print('sua parede tem a dimensão de {} x {} e sua Área é de {}m². '.format(width, height, wall_area))
paint_litres = wall_area / 2
print('para pintar essa area de tinta voce precisará de {}L de tinta.'.format(paint_litres))
from django.urls import path
from .views import UserRegisterView, UserPostView, UserListView, UserUpdateView
from django.contrib.auth import views
from evileg_core.decorators import recaptcha
# URL namespace used for reversing, e.g. 'users:users-list'.
app_name = 'users'
urlpatterns = [
    path('', UserListView.as_view(), name='users-list'),
    # Registration is wrapped in the reCAPTCHA decorator.
    path('register/', recaptcha(UserRegisterView.as_view()), name='users-register'),
    # Built-in auth views, rendered with project-specific templates.
    path('login/', views.LoginView.as_view(template_name='users/login.html'), name="users-login"),
    path('logout/', views.LogoutView.as_view(template_name='users/logout.html'), name="users-logout"),
    # Catch-all <username> routes come last so the fixed prefixes above win.
    path('<username>/', UserPostView.as_view(), name='users-single'),
    path('<username>/update', UserUpdateView.as_view(), name='users-update'),
]
from flask import Flask
from flask_restful import Resource, Api
from firebase import firebase
import json
import os.path
import urllib.parse
import requests
app = Flask(__name__)
api = Api(app)
# In-memory cache of Google Places responses, keyed by 'language+city'.
google_places = {}
# NOTE(review): firebase_url / firebase_name are empty placeholders — they
# must be filled in before the Firebase calls below can succeed.
firebase_url = ""
firebase_name = ""
firebase = firebase.FirebaseApplication(firebase_url, None)
class GooglePlaces(Resource):
    """REST resource returning Google Places text-search results for a city,
    served from the in-memory cache when possible and persisted to Firebase
    on every fresh fetch."""

    def get(self, language, city_name):
        cache_key = language + '+' + city_name
        if cache_key in google_places:
            return google_places[cache_key]
        # Cache miss: query the Google Places text-search API.
        base_url = 'https://maps.googleapis.com/maps/api/place/textsearch/json?'
        api_key = ''  # NOTE(review): empty placeholder key, as in the original
        query = urllib.parse.urlencode({'query': city_name + ' point of interest'})
        query = query + '&' + urllib.parse.urlencode({'language': language})
        query = query + '&' + urllib.parse.urlencode({'key': api_key})
        payload = requests.get(base_url + query).json()
        google_places[cache_key] = payload
        firebase.post('/' + firebase_name + '/googleplaces', {cache_key: google_places[cache_key]})
        return google_places[cache_key]
# Route shape: /<language>/googleplaces/<city name>
api.add_resource(GooglePlaces, '/<string:language>/googleplaces/<string:city_name>')
def main():
    """Warm the in-memory google_places cache from Firebase before serving.

    Each stored record maps a 'language+city' key to a cached Places payload.
    """
    result = firebase.get('/' + firebase_name + '/googleplaces', '')
    if result is not None:  # identity test per PEP 8 (was: result != None)
        for place in result.values():
            for place_name, value in place.items():
                google_places[place_name] = value
    #app.run(host='0.0.0.0', debug=True)
# NOTE(review): runs at import time; app.run above is commented out, so the
# Flask server itself is presumably started elsewhere — confirm.
main()
import datetime
from django.test import TestCase
from django.contrib.auth.models import User
from addressbook.models import Address, Country
from .models import Invoice
class InvoiceTestCase(TestCase):
    """Exercises Invoice id generation and the Invoice.objects.get_due() queryset.

    Uses assertEqual throughout; assertEquals is a deprecated alias.
    """

    def setUp(self):
        """Create a user with a complete address and one invoice for them."""
        usr = User.objects.create(username='test',
                                  first_name='John',
                                  last_name='Doe',
                                  email='example@example.com')
        country = Country.objects.create(name='TestCountry')
        address = Address.objects.create(contact_name='John Doe',
                                         address_one='Street',
                                         town='Town',
                                         postcode='PostCode',
                                         country=country)
        self.inv = Invoice.objects.create(user=usr, address=address)

    def testInvoiceId(self):
        """invoice_id is deterministic and regenerated identically when falsy."""
        inv = self.inv
        self.assertEqual(inv.invoice_id, u'TTH9R')
        inv.invoice_id = False
        inv.save()
        # Saving with a falsy id must regenerate the same deterministic value.
        self.assertEqual(inv.invoice_id, u'TTH9R')

    def testGetDue(self):
        """get_due() excludes drafts, already-invoiced and future-dated invoices."""
        inv = self.inv
        inv.draft = True
        inv.save()
        self.assertEqual(len(Invoice.objects.get_due()), 0)
        inv.draft = False
        inv.save()
        self.assertEqual(len(Invoice.objects.get_due()), 1)
        inv.invoiced = True
        inv.save()
        self.assertEqual(len(Invoice.objects.get_due()), 0)
        today = datetime.date.today()
        yesterday = today - datetime.timedelta(1)
        tomorrow = today + datetime.timedelta(1)
        inv.invoiced = False
        inv.invoice_date = yesterday
        inv.save()
        # Past-dated, not yet invoiced -> due.
        self.assertEqual(len(Invoice.objects.get_due()), 1)
        inv.invoice_date = tomorrow
        inv.save()
        # Future-dated -> not due yet.
        self.assertEqual(len(Invoice.objects.get_due()), 0)
|
#
# PySNMP MIB module HUAWEI-NAT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/HUAWEI-NAT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:35:44 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint")
hwDatacomm, = mibBuilder.importSymbols("HUAWEI-MIB", "hwDatacomm")
NotificationGroup, ObjectGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ObjectGroup", "ModuleCompliance")
ObjectIdentity, TimeTicks, iso, Counter32, MibIdentifier, ModuleIdentity, NotificationType, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Integer32, Unsigned32, Counter64, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "TimeTicks", "iso", "Counter32", "MibIdentifier", "ModuleIdentity", "NotificationType", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Integer32", "Unsigned32", "Counter64", "IpAddress")
TruthValue, TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "TextualConvention", "RowStatus", "DisplayString")
# Module identity of the generated HUAWEI-NAT-MIB (pysmi output; edit the
# ASN.1 source rather than this file where possible).
hwNATCommon = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1))
if mibBuilder.loadTexts: hwNATCommon.setLastUpdated('200303180900Z')
if mibBuilder.loadTexts: hwNATCommon.setOrganization('Huawei Technologies Co., Ltd. ')
class SessionType(TextualConvention, Integer32):
    # Enumeration (1..34) of NAT session types; used as the index column of
    # hwNatTimeoutTable further below.
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34))
    namedValues = NamedValues(("dnsSession", 1), ("finrstSession", 2), ("fragSession", 3), ("ftpSession", 4), ("ftpDataSession", 5), ("h225Session", 6), ("h245Session", 7), ("h323rtcpSession", 8), ("h323rtpSession", 9), ("h323t120Session", 10), ("httpSession", 11), ("hwccSession", 12), ("icmpSession", 13), ("ilsSession", 14), ("netbiosdataSession", 15), ("netbiosnameSession", 16), ("netbiosSession", 17), ("rasSession", 18), ("rtcpSession", 19), ("rtpSession", 20), ("rtspSession", 21), ("smtpSession", 22), ("synSession", 23), ("tcpSession", 24), ("telnetSession", 25), ("pptpSession", 26), ("udpSession", 27), ("qqSession", 28), ("msnSession", 29), ("userdefineSession", 30), ("sipSession", 31), ("siprtpSession", 32), ("siptrcpSession", 33), ("greSesion", 34))
class AlgType(TextualConvention, Integer32):
    # Enumeration (1..11) of ALG protocols; used as the index column of
    # hwNatAlgEnableTable further below.
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
    namedValues = NamedValues(("h323Alg", 1), ("dnsAlg", 2), ("netbiosAlg", 3), ("ilsAlg", 4), ("ftpAlg", 5), ("icmpAlg", 6), ("pptpAlg", 7), ("hwccAlg", 8), ("qqAlg", 9), ("msnAlg", 10), ("userdefineAlg", 11))
class NatType(TextualConvention, Integer32):
    # Enumeration of NAT modes; not referenced by the visible tables, but
    # exported at the bottom of the module for importing MIB modules.
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
    namedValues = NamedValues(("nat", 1), ("pat", 2), ("easyIP", 3))
# --- NAT address-group configuration table (index 1..128) ----------------
hwNAT = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7))
hwNatMibObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1))
hwNatAddressGroupInfoTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1), )
if mibBuilder.loadTexts: hwNatAddressGroupInfoTable.setStatus('current')
hwNatAddressGroupInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1), ).setIndexNames((0, "HUAWEI-NAT-MIB", "hwNatAddrGrpIndex"))
if mibBuilder.loadTexts: hwNatAddressGroupInfoEntry.setStatus('current')
hwNatAddrGrpIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 128)))
if mibBuilder.loadTexts: hwNatAddrGrpIndex.setStatus('current')
# Beginning/ending IP address columns of the group (read-create).
hwNatAddrGrpBeginningIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 2), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatAddrGrpBeginningIpAddr.setStatus('current')
hwNatAddrGrpEndingIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 3), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatAddrGrpEndingIpAddr.setStatus('current')
hwNatAddrGrpRefCount = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatAddrGrpRefCount.setStatus('current')
hwNatAddrGrpRowstatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatAddrGrpRowstatus.setStatus('current')
hwNatAddrGrpVrrpID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatAddrGrpVrrpID.setStatus('current')
hwNatAddrGrpVrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 1, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32)).clone('--')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatAddrGrpVrfName.setStatus('current')
# --- Internal NAT server mapping table (index 1..256) --------------------
hwNatInternalServerTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2), )
if mibBuilder.loadTexts: hwNatInternalServerTable.setStatus('current')
hwNatInternalServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1), ).setIndexNames((0, "HUAWEI-NAT-MIB", "hwNatServerIndex"))
if mibBuilder.loadTexts: hwNatInternalServerEntry.setStatus('current')
hwNatServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256)))
if mibBuilder.loadTexts: hwNatServerIndex.setStatus('current')
hwNatServerProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerProtocol.setStatus('current')
# Inside-address range and inside port columns.
hwNatServerInsideBeginIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 3), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerInsideBeginIpAddr.setStatus('current')
hwNatServerInsideEndIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerInsideEndIpAddr.setStatus('current')
hwNatServerInsidePort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerInsidePort.setStatus('current')
# Outside address and outside port-range columns.
hwNatServerOutsideIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 6), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerOutsideIpAddr.setStatus('current')
hwNatServerOutsideBeginPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerOutsideBeginPort.setStatus('current')
hwNatServerOutsideEndPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerOutsideEndPort.setStatus('current')
hwNatServerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerRowStatus.setStatus('current')
hwNatServerVrrpID = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerVrrpID.setStatus('current')
hwNatServerVrfName = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 2, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32)).clone('--')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: hwNatServerVrfName.setStatus('current')
# --- Per-session-type timeout table and per-protocol ALG enable table ----
hwNatTimeoutTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 3), )
if mibBuilder.loadTexts: hwNatTimeoutTable.setStatus('current')
hwNatTimeoutEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 3, 1), ).setIndexNames((0, "HUAWEI-NAT-MIB", "hwNatTimeoutSessionType"))
if mibBuilder.loadTexts: hwNatTimeoutEntry.setStatus('current')
hwNatTimeoutSessionType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 3, 1, 1), SessionType())
if mibBuilder.loadTexts: hwNatTimeoutSessionType.setStatus('current')
# Writable timeout value column (1..65535).
hwNatTimeoutValue = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwNatTimeoutValue.setStatus('current')
hwNatAlgEnableTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 4), )
if mibBuilder.loadTexts: hwNatAlgEnableTable.setStatus('current')
hwNatAlgEnableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 4, 1), ).setIndexNames((0, "HUAWEI-NAT-MIB", "hwNatAlgEnableProtocol"))
if mibBuilder.loadTexts: hwNatAlgEnableEntry.setStatus('current')
hwNatAlgEnableProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 4, 1, 1), AlgType())
if mibBuilder.loadTexts: hwNatAlgEnableProtocol.setStatus('current')
# Writable on/off flag column for the ALG of each protocol.
hwNatAlgEnableFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 1, 4, 1, 2), TruthValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hwNatAlgEnableFlag.setStatus('current')
# --- Read-only monitoring scalars: hash statistics and packet counters ---
hwNatMonitorObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2))
hwNatMonitorGlobalHash = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 1))
hwNatHashStatPatCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatHashStatPatCount.setStatus('current')
hwNatHashStatNoPatCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatHashStatNoPatCount.setStatus('current')
hwNatHashStatServerHashCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatHashStatServerHashCount.setStatus('current')
hwNatHashStatFragHashCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatHashStatFragHashCount.setStatus('current')
hwNatMonitorGlobalPkts = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2))
hwNatStatPktsCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatStatPktsCount.setStatus('current')
hwNatStatFailedPktsCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatStatFailedPktsCount.setStatus('current')
hwNatStatTCPPktsCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatStatTCPPktsCount.setStatus('current')
hwNatStatUDPPktsCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2, 4), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatStatUDPPktsCount.setStatus('current')
hwNatStatICMPPktsCount = MibScalar((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 2, 2, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hwNatStatICMPPktsCount.setStatus('current')
# --- Conformance groups and module symbol export -------------------------
hwNatConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 3))
hwNatMibGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 3, 1))
hwNatCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 3, 1, 1)).setObjects(("HUAWEI-NAT-MIB", "hwNatAddrGrpBeginningIpAddr"), ("HUAWEI-NAT-MIB", "hwNatAddrGrpEndingIpAddr"), ("HUAWEI-NAT-MIB", "hwNatAddrGrpRowstatus"), ("HUAWEI-NAT-MIB", "hwNatAddrGrpVrrpID"), ("HUAWEI-NAT-MIB", "hwNatAddrGrpVrfName"), ("HUAWEI-NAT-MIB", "hwNatServerProtocol"), ("HUAWEI-NAT-MIB", "hwNatServerInsideBeginIpAddr"), ("HUAWEI-NAT-MIB", "hwNatServerInsideEndIpAddr"), ("HUAWEI-NAT-MIB", "hwNatServerInsidePort"), ("HUAWEI-NAT-MIB", "hwNatServerRowStatus"), ("HUAWEI-NAT-MIB", "hwNatTimeoutValue"), ("HUAWEI-NAT-MIB", "hwNatAlgEnableFlag"), ("HUAWEI-NAT-MIB", "hwNatAddrGrpRefCount"), ("HUAWEI-NAT-MIB", "hwNatServerOutsideIpAddr"), ("HUAWEI-NAT-MIB", "hwNatServerOutsideBeginPort"), ("HUAWEI-NAT-MIB", "hwNatServerOutsideEndPort"), ("HUAWEI-NAT-MIB", "hwNatServerVrrpID"), ("HUAWEI-NAT-MIB", "hwNatServerVrfName"))
# setStatus on groups is only available on newer pysnmp builders.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwNatCfgGroup = hwNatCfgGroup.setStatus('current')
hwNatMonitorGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 5, 25, 7, 1, 3, 1, 2)).setObjects(("HUAWEI-NAT-MIB", "hwNatHashStatPatCount"), ("HUAWEI-NAT-MIB", "hwNatHashStatNoPatCount"), ("HUAWEI-NAT-MIB", "hwNatHashStatServerHashCount"), ("HUAWEI-NAT-MIB", "hwNatHashStatFragHashCount"), ("HUAWEI-NAT-MIB", "hwNatStatPktsCount"), ("HUAWEI-NAT-MIB", "hwNatStatFailedPktsCount"), ("HUAWEI-NAT-MIB", "hwNatStatTCPPktsCount"), ("HUAWEI-NAT-MIB", "hwNatStatUDPPktsCount"), ("HUAWEI-NAT-MIB", "hwNatStatICMPPktsCount"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    hwNatMonitorGroup = hwNatMonitorGroup.setStatus('current')
# Make every defined symbol importable by other MIB modules.
mibBuilder.exportSymbols("HUAWEI-NAT-MIB", hwNatConformance=hwNatConformance, hwNatCfgGroup=hwNatCfgGroup, hwNatAddrGrpBeginningIpAddr=hwNatAddrGrpBeginningIpAddr, hwNAT=hwNAT, hwNatStatUDPPktsCount=hwNatStatUDPPktsCount, hwNatServerOutsideIpAddr=hwNatServerOutsideIpAddr, hwNatMonitorObjects=hwNatMonitorObjects, hwNatServerOutsideBeginPort=hwNatServerOutsideBeginPort, hwNatStatICMPPktsCount=hwNatStatICMPPktsCount, hwNatAddressGroupInfoTable=hwNatAddressGroupInfoTable, hwNatMonitorGlobalPkts=hwNatMonitorGlobalPkts, hwNatAlgEnableProtocol=hwNatAlgEnableProtocol, hwNatAlgEnableEntry=hwNatAlgEnableEntry, hwNatServerIndex=hwNatServerIndex, hwNATCommon=hwNATCommon, hwNatAddrGrpRefCount=hwNatAddrGrpRefCount, hwNatServerVrfName=hwNatServerVrfName, hwNatStatTCPPktsCount=hwNatStatTCPPktsCount, hwNatHashStatNoPatCount=hwNatHashStatNoPatCount, hwNatMibObjects=hwNatMibObjects, hwNatMonitorGroup=hwNatMonitorGroup, AlgType=AlgType, hwNatInternalServerTable=hwNatInternalServerTable, SessionType=SessionType, hwNatStatFailedPktsCount=hwNatStatFailedPktsCount, hwNatHashStatServerHashCount=hwNatHashStatServerHashCount, hwNatMibGroups=hwNatMibGroups, hwNatHashStatFragHashCount=hwNatHashStatFragHashCount, hwNatServerOutsideEndPort=hwNatServerOutsideEndPort, NatType=NatType, hwNatMonitorGlobalHash=hwNatMonitorGlobalHash, hwNatTimeoutSessionType=hwNatTimeoutSessionType, hwNatAlgEnableFlag=hwNatAlgEnableFlag, hwNatAddressGroupInfoEntry=hwNatAddressGroupInfoEntry, hwNatAddrGrpVrrpID=hwNatAddrGrpVrrpID, hwNatServerRowStatus=hwNatServerRowStatus, hwNatHashStatPatCount=hwNatHashStatPatCount, hwNatServerInsideEndIpAddr=hwNatServerInsideEndIpAddr, hwNatTimeoutTable=hwNatTimeoutTable, hwNatAlgEnableTable=hwNatAlgEnableTable, hwNatServerProtocol=hwNatServerProtocol, hwNatTimeoutValue=hwNatTimeoutValue, hwNatAddrGrpEndingIpAddr=hwNatAddrGrpEndingIpAddr, hwNatAddrGrpRowstatus=hwNatAddrGrpRowstatus, hwNatServerInsideBeginIpAddr=hwNatServerInsideBeginIpAddr, hwNatTimeoutEntry=hwNatTimeoutEntry, 
hwNatServerVrrpID=hwNatServerVrrpID, hwNatStatPktsCount=hwNatStatPktsCount, hwNatAddrGrpIndex=hwNatAddrGrpIndex, hwNatInternalServerEntry=hwNatInternalServerEntry, hwNatAddrGrpVrfName=hwNatAddrGrpVrfName, hwNatServerInsidePort=hwNatServerInsidePort, PYSNMP_MODULE_ID=hwNATCommon)
|
import unittest
from hwt.interfaces.utils import addClkRstn, propagateClkRstn
from hwt.simulator.simTestCase import SimTestCase
from hwt.synthesizer.param import Param
from hwt.synthesizer.unit import Unit
from hwtLib.amba.axis import AxiStream, axis_recieve_bytes, axis_send_bytes
from hwtLib.amba.axis_comp.strformat_fn import axiS_strFormat
from hwtLib.types.ctypes import uint8_t
from hwtSimApi.constants import CLK_PERIOD
class _example_AxiS_strFormat_no_args(Unit):
    # Minimal formatter unit: streams the constant text "test 1234" out of an
    # AxiStream master; no format arguments involved.
    def _config(self):
        self.DATA_WIDTH = Param(8)

    def _declr(self):
        addClkRstn(self)
        with self._paramsShared():
            self.out = AxiStream()._m()  # master: formatted character stream

    def _impl(self):
        o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "test 1234")
        self.out(o)
        propagateClkRstn(self)
class _example_AxiS_strFormat_args_numbers(_example_AxiS_strFormat_no_args):
    # Formats one 8-bit signal through positional '{0:...}' placeholders in
    # binary/octal/decimal/hex (lower- and upper-case) forms.
    def _config(self):
        _example_AxiS_strFormat_no_args._config(self)
        self.FORMAT = Param("0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}")

    def _impl(self):
        # Constant 13 driven onto an 8-bit signal used as positional arg 0.
        n = self._sig("n", dtype=uint8_t)
        n(13)
        o = axiS_strFormat(
            self, "f0", self.DATA_WIDTH,
            self.FORMAT,
            n)
        self.out(o)
        propagateClkRstn(self)
class _example_AxiS_strFormat_kwargs_numbers(_example_AxiS_strFormat_no_args):
    # Same output as the positional variant above, but the signal is passed
    # as the keyword argument 'arg0'.
    def _config(self):
        _example_AxiS_strFormat_no_args._config(self)
        self.FORMAT = "0b{arg0:08b}, 0o{arg0:04o}, {arg0:03d}, 0x{arg0:02x}, 0x{arg0:02X}"

    def _impl(self):
        n = self._sig("n", dtype=uint8_t)
        n(13)
        o = axiS_strFormat(
            self, "f0", self.DATA_WIDTH,
            self.FORMAT,
            arg0=n)
        self.out(o)
        propagateClkRstn(self)
class _example_AxiS_strFormat_1x_str(Unit):
    """Formats one input AxiStream string into 'str0:<value>' on `out`."""

    def _config(self):
        self.DATA_WIDTH = Param(8)

    def _declr(self):
        addClkRstn(self)
        with self._paramsShared():
            self.out = AxiStream()._m()
            # Slave stream carrying the string to substitute into the format.
            self.str0 = AxiStream()

    def _impl(self):
        o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "str0:{0:s}", self.str0)
        self.out(o)
        propagateClkRstn(self)
class _example_AxiS_strFormat_3x_str(_example_AxiS_strFormat_1x_str):
    """Mixes two positional and one keyword AxiStream string arguments."""

    def _declr(self):
        super(_example_AxiS_strFormat_3x_str, self)._declr()
        with self._paramsShared():
            self.str1 = AxiStream()
            self.str2 = AxiStream()

    def _impl(self):
        # '{0}{1}' are str0/str1 positionally, '{str2}' is passed by keyword.
        o = axiS_strFormat(self, "f0", self.DATA_WIDTH, "{0:s}{1:s}xyz{str2:s}",
                           self.str0, self.str1, str2=self.str2)
        self.out(o)
        propagateClkRstn(self)
class AxiS_strFormat_TC(SimTestCase):
    """Simulation tests for axiS_strFormat.

    Note: 'axis_recieve_bytes' / 'axis_send_bytes' spellings are the hwtLib
    public API and must not be changed here.
    """

    def tearDown(self):
        self.rmSim()
        SimTestCase.tearDown(self)

    def test_args_numbers(self):
        u = self.compileSimAndStart(_example_AxiS_strFormat_args_numbers())
        self.runSim(200 * CLK_PERIOD)
        # The formatter re-emits the string continuously; check three frames.
        for _ in range(3):
            frame = axis_recieve_bytes(u.out)
            s = bytes(frame[1]).decode("utf-8")
            self.assertEqual(s, "0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}".format(13))

    def test_kwargs_numbers(self):
        u = self.compileSimAndStart(_example_AxiS_strFormat_kwargs_numbers())
        self.runSim(200 * CLK_PERIOD)
        # Keyword formatting in hardware must match positional str.format here.
        for _ in range(3):
            frame = axis_recieve_bytes(u.out)
            s = bytes(frame[1]).decode("utf-8")
            self.assertEqual(s, "0b{0:08b}, 0o{0:04o}, {0:03d}, 0x{0:02x}, 0x{0:02X}".format(13))

    def test_no_args(self):
        u = self.compileSimAndStart(_example_AxiS_strFormat_no_args())
        self.randomize(u.out)
        self.runSim(50 * CLK_PERIOD)
        for _ in range(3):
            frame = axis_recieve_bytes(u.out)
            s = bytes(frame[1]).decode("utf-8")
            self.assertEqual(s, 'test 1234')

    def test_1x_str(self):
        u = self.compileSimAndStart(_example_AxiS_strFormat_1x_str())
        self.randomize(u.out)
        self.randomize(u.str0)
        strings = ["test0", "x", "1234567890"]
        for s in strings:
            axis_send_bytes(u.str0, s.encode("utf-8"))
        self.runSim(200 * CLK_PERIOD)
        for s_ref in strings:
            frame = axis_recieve_bytes(u.out)
            s = bytes(frame[1]).decode("utf-8")
            self.assertEqual(s, "str0:{0:s}".format(s_ref))

    def test_3x_str(self):
        u = self.compileSimAndStart(_example_AxiS_strFormat_3x_str())
        self.randomize(u.out)
        self.randomize(u.str0)
        self.randomize(u.str1)
        self.randomize(u.str2)
        strings = [("test0", "str1", "str3"),
                   ("x", "y", "z"),
                   ("1234567890", "abc", "\t\n")]
        for s0, s1, s2 in strings:
            axis_send_bytes(u.str0, s0.encode("utf-8"))
            axis_send_bytes(u.str1, s1.encode("utf-8"))
            axis_send_bytes(u.str2, s2.encode("utf-8"))
        self.runSim(200 * CLK_PERIOD)
        for s_ref in strings:
            frame = axis_recieve_bytes(u.out)
            s = bytes(frame[1]).decode("utf-8")
            self.assertEqual(s, "{0:s}{1:s}xyz{2:s}".format(*s_ref))
if __name__ == "__main__":
    suite = unittest.TestSuite()
    # suite.addTest(AxiS_strFormat_TC('test_args_numbers'))
    # FIX: unittest.makeSuite() is deprecated and removed in Python 3.13;
    # TestLoader.loadTestsFromTestCase is the supported equivalent.
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(AxiS_strFormat_TC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
    # from hwt.synthesizer.utils import to_rtl_str
    # u = _example_AxiS_strFormat_1x_str()
    # print(to_rtl_str(u))
|
import math
import logging
from datetime import date
# Nordic-format line-type codes mapped to human-readable names; used in the
# validation error messages below. (Codes 4 and 7 are not present.)
nTypes = {0: "Nordic Event",
          1: "Nordic Main Header",
          2: "Nordic Macroseismic Header",
          3: "Nordic Comment Header",
          5: "Nordic Error Header",
          6: "Nordic Waveform Header",
          8: "Nordic Phase Data"}
class values():
    """Container for numeric constants shared by the validators."""
    # Largest signed 64-bit integer (2**63 - 1).
    maxInt = 9223372036854775807
def validateInteger(val, valueName, low, high, limits, nType):
    """Validate that *val* parses as an integer and, when *limits* is truthy,
    lies within [low, high].

    An empty string is considered valid (missing optional field). Logs an
    error and returns False on any violation; returns True otherwise.
    """
    if val == "":
        return True
    try:
        # Convert once instead of re-parsing on every comparison.
        value = int(val)
    except (TypeError, ValueError):
        # Narrowed from a bare `except`, which also swallowed KeyboardInterrupt.
        msg = "Validation Error - {0}: {1} is not an integer! ({2})"
        logging.error(msg.format(nTypes[nType], valueName, val))
        return False
    if limits and value < low:
        msg = "Validation Error - {0}: {1} is smaller than {2}! ({3})"
        logging.error(msg.format(nTypes[nType], valueName, low, val))
        return False
    if limits and value > high:
        msg = "Validation Error - {0}: {1} is larger than {2}! ({3})"
        logging.error(msg.format(nTypes[nType], valueName, high, val))
        return False
    return True
def validateFloat(val, valueName, low, high, limits, nType):
    """Validate that *val* parses as a finite float and, when *limits* is
    truthy, lies within [low, high].

    An empty string is considered valid (missing optional field). NaN and
    infinity are always rejected. Logs an error and returns False on any
    violation; returns True otherwise.
    """
    if val == "":
        return True
    try:
        # Convert once instead of re-parsing on every comparison.
        value = float(val)
    except (TypeError, ValueError):
        # Narrowed from a bare `except`; also fixed "an float" -> "a float".
        msg = "Validation Error - {0}: {1} is not a float! ({2})"
        logging.error(msg.format(nTypes[nType], valueName, val))
        return False
    if math.isnan(value):
        msg = "Validation Error - {0}: {1} is {2} which is not allowed!"
        logging.error(msg.format(nTypes[nType], valueName, val))
        return False
    if math.isinf(value):
        msg = "Validation Error - {0}: {1} is {2} which is not allowed!"
        logging.error(msg.format(nTypes[nType], valueName, val))
        return False
    if limits and value < low:
        msg = "Validation Error - {0}: {1} is smaller than {2}! ({3})"
        logging.error(msg.format(nTypes[nType], valueName, low, val))
        return False
    if limits and value > high:
        msg = "Validation Error - {0}: {1} is larger than {2}! ({3})"
        logging.error(msg.format(nTypes[nType], valueName, high, val))
        return False
    return True
def validateString(string, stringName, minlen, maxlen, listOfAllowed, isList, nType):
    """Validate a string against an allow-list and min/max length limits.

    An empty string is considered valid. When *isList* is truthy the string
    must be a member of *listOfAllowed*. A limit of -1 disables the
    corresponding length check. Logs an error and returns False on any
    violation; returns True otherwise.
    """
    # FIX: was `string is ""` — identity comparison with a literal is
    # implementation-dependent; use equality.
    if string == "":
        return True
    if isList and string not in listOfAllowed:
        msg = "Validation Error - {0}: {1} not in the list of allowed strings! ({2})\nAllowed:\n"
        for allowed in listOfAllowed:
            msg += " -" + allowed + "\n"
        logging.error(msg.format(nTypes[nType], stringName, string))
        return False
    if minlen > -1 and len(string) < minlen:
        msg = "Validation Error - {0}: {1} is shorter than the minimum allowed length {2}! ({3})"
        logging.error(msg.format(nTypes[nType], stringName, minlen, string))
        return False
    # BUG FIX: the maximum-length check was guarded by `minlen > -1`
    # (copy-paste error), so maxlen was ignored whenever minlen was disabled.
    if maxlen > -1 and len(string) > maxlen:
        msg = "Validation Error - {0}: {1} is longer than the maximum allowed length {2}! ({3})"
        logging.error(msg.format(nTypes[nType], stringName, maxlen, string))
        return False
    return True
def validateDate(dateS, dateName, nType):
    """Validate that *dateS* ('YYYY-MM-DD', possibly space-padded) parses
    into a calendar date.

    An empty string is considered valid. Logs an error and returns False
    when the string cannot be parsed; returns True otherwise.
    """
    if dateS == "":
        return True
    try:
        # Fixed-column slices: [:4] year, [5:7] month, [8:] day.
        date(year=int(dateS[:4].strip()), month=int(dateS[5:7].strip()), day=int(dateS[8:].strip()))
    except (TypeError, ValueError):
        # Narrowed from a bare `except`, which also swallowed KeyboardInterrupt.
        msg = "Validation Error - {0}: {1} is not parsable into date!({2})"
        logging.error(msg.format(nTypes[nType], dateName, dateS))
        return False
    return True
def fixDate(nordic):
    """Zero-pad the month and day fields of *nordic.date* in place.

    Positions 5 (month tens) and 8 (day tens) of a 'YYYY-MM-DD' string are
    replaced with '0' when they contain a space.
    """
    chars = list(nordic.date)
    for pos in (5, 8):
        if chars[pos] == " ":
            chars[pos] = "0"
    nordic.date = "".join(chars)
|
import streamlit as st
import pandas as pd
import base64
import os
import datetime
import sqlalchemy as sa
from pathlib import Path
import psycopg2
#creating sql alchemy engine
# SECURITY: database credentials are hard-coded in this URL and committed to
# source control — rotate this password and load the URL from an environment
# variable or secrets store instead.
engine = sa.create_engine('postgresql://xiamtznyktfwmk:c4218e192fc96997efcc66e19d80352dfed962907d9a19e76b297fce47197227@ec2-54-224-120-186.compute-1.amazonaws.com:5432/dekfhtva5ndr6b',echo=False)
def check_if_weekend(today):
    """Return True when *today* (a datetime) falls on Saturday or Sunday.

    The original built upper/lower weekend bounds with timedelta arithmetic
    and a chained comparison that reduced to exactly this weekday test
    (Monday is 0, so Saturday/Sunday are 5 and 6); it also called
    isinstance() and discarded the result.
    """
    return today.weekday() >= 5
# On weekends the weekly log is wiped; on weekdays it is loaded (or created).
today_date = datetime.datetime.today()
weekend = check_if_weekend(today_date)
if weekend:
    # BUG FIX: os.remove crashed when week_log.csv did not exist yet.
    try:
        os.remove('week_log.csv')
    except FileNotFoundError:
        pass
    try:
        engine.execute('DROP TABLE table2')
    except Exception:
        # Dropping is best-effort: the table may not exist yet.
        pass
    new_week_log = pd.DataFrame(columns=['Name', 'Time', 'Days', 'Hours', 'Reason', 'Team'],index=None)
    new_week_log.to_csv('week_log.csv', mode='w', header=True,index=None)
    new_week_log.to_sql('table2',con=engine,index=False,index_label=None,if_exists='replace')
else:
    try:
        new_week_log=pd.read_sql('table2',con=engine,index_col=None)
    except Exception:
        # table2 is missing: create it empty, then read it back.
        new_week_log = pd.DataFrame(columns=['Name', 'Time', 'Days', 'Hours', 'Reason', 'Team'])
        new_week_log.to_sql('table2', con=engine, index=False, index_label=None, if_exists='replace')
        new_week_log = pd.read_sql('table2', con=engine, index_col=None)
# Page chrome and the all-time check-in log (record.csv mirrored to table1).
st.title('Work Checkin System')
st.sidebar.image('logo.jpg')
st.sidebar.markdown("""
***XYZ Team***
""")
# data.csv holds the member roster (and, per input_values(), passphrases).
data=pd.read_csv('data.csv',header=[0])
if os.path.exists('record.csv'):
    try:
        record=pd.read_sql('table1',con=engine,index_col=None)
    except:
        # DB table missing: seed it from the local CSV copy.
        record=pd.read_csv('record.csv',index_col=None)
        record.to_sql('table1',con=engine,index=False,index_label=None,if_exists='append')
else:
    # First run: create an empty log locally and in the database.
    record = pd.DataFrame(columns=['Name', 'Time', 'Days', 'Hours', 'Reason', 'Team'],index=None)
    record.to_csv('record.csv', mode='w', header=True,index=None)
    record.to_sql('table1',con=engine,index=False,index_label=None,if_exists='replace')
st.write(record)
#st.write(pd.read_sql('table1',con=engine,index_col=None))
# Options for the sidebar form widgets.
days=['mon','tue','wed','thurs','fri','sat','sun']
teams=['Development','PR','management']
st.warning('Avoid duplication, ignore if not applicable')
st.error('During the time of weekend it will reset itself and you wont be able to do any changes , dont checkin during the weekends')
def input_values():
    """Render the sidebar check-in form and append submissions to both logs.

    Two mutually exclusive flows: 'Work for this week' collects availability,
    'Cannot Work this week' collects a reason. Both verify a per-member
    passphrase (last column of data.csv) before appending the entry to
    record.csv/table1 (all-time) and week_log.csv/table2 (weekly).
    """
    data2 = pd.read_csv('data.csv', header=[0])
    if st.sidebar.checkbox('Work for this week'):
        selected_name = st.sidebar.selectbox('Name', options=data['Members'])
        days_selected=st.sidebar.multiselect('Days free to work',options=days)
        hours=st.sidebar.slider('No.of hours per week will be able to work',1.0,1.0,8.0)
        team_willing=st.sidebar.multiselect('Team willing to work in',options=teams)
        password=str(st.sidebar.text_input('enter the passphrase')).lower()
        if st.sidebar.button('Submit details'):
            # Look up the member's row; the passphrase is the last column.
            y=data2.loc[data2.Members == str(selected_name)]
            z=y.iloc[:,-1].values
            if password==str(z[0]):
                st.balloons()
                input_data={
                    'Name':[str(selected_name)],
                    'Time':[str(datetime.datetime.today())],
                    'Days':[str(days_selected)],
                    'Hours':[str(hours)],
                    'Reason':['None'],
                    'Team':[str(team_willing)]
                }
                input_df=pd.DataFrame(input_data)
                # Append to the all-time log (CSV + table1) and show it.
                input_df.to_csv('record.csv', mode='a', header=False,index=None)
                input_df.to_sql('table1',if_exists='append',con=engine,index=False,index_label=None)
                record_changed = pd.read_sql('table1',con=engine,index_col=None)
                record_reverse = record_changed.iloc[::-1]
                st.subheader('Continous Log')
                st.write(record_reverse.head())
                # Append to the weekly log (CSV + table2) and show it.
                input_df.to_csv('week_log.csv', mode='a', header=False,index=None)
                input_df.to_sql('table2', if_exists='append', con=engine, index=False, index_label=None)
                record_changed_wl = pd.read_sql('table2',con=engine,index_col=None)
                record_reverse_wl = record_changed_wl.iloc[::-1]
                st.subheader('Weekly Log')
                st.write(record_reverse_wl.head())
            else:
                st.sidebar.warning('Wrong passphrase')
    elif st.sidebar.checkbox('Cannot Work this week'):
        selected_name = st.sidebar.selectbox('Name', options=data['Members'])
        reason=st.sidebar.text_input('Reason')
        password = str(st.sidebar.text_input('enter the passphrase')).lower()
        if st.sidebar.button('Submit details'):
            y = data2.loc[data2.Members == str(selected_name)]
            z = y.iloc[:, -1].values
            if password == str(z[0]):
                st.balloons()
                # NOTE(review): 'Hours' is the int 0 here but a string in the
                # other branch — confirm the intended column dtype.
                input_data={
                    'Name':[str(selected_name)],
                    'Time':[str(datetime.datetime.today())],
                    'Days':['None'],
                    'Hours':0,
                    'Reason':[str(reason)],
                    'Team':['None']
                }
                input_df=pd.DataFrame(input_data)
                input_df.to_csv('record.csv', mode='a', header=False,index=None)
                input_df.to_sql('table1', if_exists='append', con=engine, index=False, index_label=None)
                record_changed = pd.read_sql('table1',con=engine,index_col=None)
                record_reverse=record_changed.iloc[::-1]
                st.subheader('Continous Log')
                st.write(record_reverse.head())
                input_df.to_csv('week_log.csv', mode='a', header=False,index=None)
                input_df.to_sql('table2', if_exists='append', con=engine, index=False, index_label=None)
                record_changed_wl = pd.read_sql('table2',con=engine,index_col=None)
                record_reverse_wl = record_changed_wl.iloc[::-1]
                st.subheader('Weekly Log')
                st.write(record_reverse_wl.head())
            else:
                st.sidebar.warning('Wrong passphrase')


input_values() # input values function
def filedownload():
    """Return an HTML anchor that downloads the full team log (table1) as CSV."""
    full_log = pd.read_sql('table1', con=engine, index_col=None)
    csv_text = full_log.to_csv(index=False)
    encoded = base64.b64encode(csv_text.encode()).decode()  # strings <-> bytes
    return f'<a href="data:file/csv;base64,{encoded}" download="teamlog.csv">Download Team entire Log File</a>'
def filedownload_week():
    """Return an HTML anchor that downloads the weekly log (table2) as CSV."""
    weekly_log = pd.read_sql('table2', con=engine, index_col=None)
    csv_text = weekly_log.to_csv(index=False)
    encoded = base64.b64encode(csv_text.encode()).decode()  # strings <-> bytes
    return f'<a href="data:file/csv;base64,{encoded}" download="teamlog_week.csv">Download Team week Log File</a>'
# Weekly summary dashboard built from the current week's log (table2).
new_log_df=pd.read_sql('table2',con=engine,index_col=None)
people_data=data.copy()
# NOTE(review): 'Hours' is written as a string by input_values(), so this
# sum may concatenate rather than add — verify the column dtype.
st.write('Total no.of work hours reported {}'.format(new_log_df['Hours'].sum()))
col1,col2,col3=st.beta_columns(3)
with col1:
    st.header('Team updated')
    unique_names=new_log_df['Name'].unique()
    st.write(unique_names)
with col2:
    st.header('Team Not updated')
    # Members present in the roster but absent from this week's log.
    name1 = set(new_log_df['Name'])
    name2 = set(people_data['Members'])
    diff = sorted(name2 - name1)
    st.write(pd.DataFrame(diff))
with col3:
    data={
        'Updated':new_log_df['Name'].nunique(),
        'Not-Updated':people_data['Members'].nunique()-new_log_df['Name'].nunique()
    }
    st.header('Comparision between updation for current week')
    st.bar_chart(data=pd.DataFrame(data,index=[0]),use_container_width=True)
    bar_df=pd.DataFrame(data,index=[0])
st.markdown(filedownload_week(), unsafe_allow_html=True)
st.markdown(filedownload(), unsafe_allow_html=True)
|
from sys import platform
from cffi import FFI
def initialize_dynamic_lib():
    """Open the compiled `structbox` shared library for the current OS.

    Declares the library's C ABI via cffi and returns the opened handle.
    Raises RuntimeError on an unsupported platform.
    """
    # Shared-library naming differs per OS: lib*.dylib / *.dll / lib*.so.
    if platform.startswith('darwin'):
        prefix, ext = 'lib', 'dylib'
    elif platform.startswith('win32'):
        prefix, ext = '', 'dll'
    elif platform.startswith('linux'):
        prefix, ext = 'lib', 'so'
    else:
        raise RuntimeError("OS Platform not supported.")
    ffi = FFI()
    ffi.cdef('''
    size_t lru_cache(size_t);
    bool LruCache_contains_key(int);
    void* LruCache_new(size_t);
    size_t LruCache_len(void*);
    ''')
    lib_path = "../../target/release/{}structbox.{}".format(prefix, ext)
    return ffi.dlopen(lib_path)


structbox = initialize_dynamic_lib()
|
# Author: Eric Bezzam
# Date: July 15, 2016
import numpy as np
from .doa import DOA
class MUSIC(DOA):
    """
    Class to apply MUltiple SIgnal Classication (MUSIC) direction-of-arrival
    (DoA) for a particular microphone array.
    .. note:: Run locate_source() to apply the MUSIC algorithm.
    Parameters
    ----------
    L: numpy array
        Microphone array positions. Each column should correspond to the
        cartesian coordinates of a single microphone.
    fs: float
        Sampling frequency.
    nfft: int
        FFT length.
    c: float
        Speed of sound. Default: 343 m/s
    num_src: int
        Number of sources to detect. Default: 1
    mode: str
        'far' or 'near' for far-field or near-field detection
        respectively. Default: 'far'
    r: numpy array
        Candidate distances from the origin. Default: np.ones(1)
    azimuth: numpy array
        Candidate azimuth angles (in radians) with respect to x-axis.
        Default: np.linspace(-180.,180.,30)*np.pi/180
    colatitude: numpy array
        Candidate elevation angles (in radians) with respect to z-axis.
        Default is x-y plane search: np.pi/2*np.ones(1)
    """

    def __init__(self, L, fs, nfft, c=343.0, num_src=1, mode='far', r=None,
                 azimuth=None, colatitude=None, **kwargs):
        # All grid/geometry setup is delegated to the DOA base class.
        DOA.__init__(self, L=L, fs=fs, nfft=nfft, c=c, num_src=num_src,
                     mode=mode, r=r, azimuth=azimuth, colatitude=colatitude, **kwargs)
        # Per-frequency spatial spectrum; filled in by _process().
        self.Pssl = None

    def _process(self, X):
        """
        Perform MUSIC for given frame in order to estimate steered response
        spectrum.
        """
        # compute steered response
        self.Pssl = np.zeros((self.num_freq,self.grid.n_points))
        C_hat = self._compute_correlation_matricesvec(X)
        # subspace decomposition
        Es, En, ws, wn = self._subspace_decompositionvec(C_hat[None,...])
        # compute spatial spectrum
        identity = np.zeros((self.num_freq,self.M,self.M))
        identity[:,list(np.arange(self.M)),list(np.arange(self.M))] = 1
        # Projector onto the noise subspace: I - Es Es^H.
        cross = identity - np.matmul(Es,np.moveaxis(np.conjugate(Es),-1,-2))
        self.Pssl = self._compute_spatial_spectrumvec(cross)
        # Average the pseudo-spectrum over frequency bins onto the grid.
        self.grid.set_values(np.squeeze(np.sum(self.Pssl, axis=1)/self.num_freq))

    def plot_individual_spectrum(self):
        """
        Plot the steered response for each frequency.
        """
        # check if matplotlib imported
        try:
            import matplotlib.pyplot as plt
        except ImportError:
            import warnings
            warnings.warn('Matplotlib is required for plotting')
            return
        # only for 2D
        # NOTE(review): this guard proceeds when grid.dim == 3 and warns
        # otherwise, which looks inverted relative to the 'Only for 2D.'
        # message — confirm the semantics of grid.dim before changing it.
        if self.grid.dim == 3:
            pass
        else:
            import warnings
            warnings.warn('Only for 2D.')
            return
        # plot
        for k in range(self.num_freq):
            # Convert the FFT bin index to a frequency in Hz.
            freq = float(self.freq_bins[k])/self.nfft*self.fs
            azimuth = self.grid.azimuth * 180 / np.pi
            plt.plot(azimuth, self.Pssl[k,0:len(azimuth)])
            plt.ylabel('Magnitude')
            plt.xlabel('Azimuth [degrees]')
            plt.xlim(min(azimuth),max(azimuth))
            plt.title('Steering Response Spectrum - ' + str(freq) + ' Hz')
            plt.grid(True)

    def _compute_spatial_spectrumvec(self,cross):
        """Vectorized pseudo-spectrum: 1/|a^H (I - Es Es^H) a| per grid point."""
        mod_vec = np.transpose(np.array(self.mode_vec[self.freq_bins,:,:]),axes=[2,0,1])
        # timeframe, frequ, no idea
        denom = np.matmul(np.conjugate(mod_vec[...,None,:]),np.matmul(cross,mod_vec[...,None]))
        return np.squeeze(1/abs(denom))

    def _compute_spatial_spectrum(self,cross,k):
        """Non-vectorized pseudo-spectrum for a single frequency bin *k*."""
        P = np.zeros(self.grid.n_points)
        for n in range(self.grid.n_points):
            Dc = np.array(self.mode_vec[k,:,n],ndmin=2).T
            Dc_H = np.conjugate(np.array(self.mode_vec[k,:,n],ndmin=2))
            denom = np.dot(np.dot(Dc_H,cross),Dc)
            P[n] = 1/abs(denom)
        return P

    # non-vectorized version
    def _compute_correlation_matrices(self, X):
        """Accumulate per-frequency sample covariances over all snapshots."""
        C_hat = np.zeros([self.num_freq,self.M,self.M], dtype=complex)
        for i in range(self.num_freq):
            k = self.freq_bins[i]
            for s in range(self.num_snap):
                C_hat[i,:,:] = C_hat[i,:,:] + np.outer(X[:,k,s],
                                                       np.conjugate(X[:,k,s]))
        return C_hat/self.num_snap

    # vectorized version
    def _compute_correlation_matricesvec(self, X):
        """Time-averaged covariance matrices for the selected frequency bins."""
        # change X such that time frames, frequency microphones is the result
        X = np.transpose(X,axes=[2,1,0])
        # select frequency bins
        X = X[...,list(self.freq_bins),:]
        # Compute PSD and average over time frame
        C_hat = np.matmul(X[...,None],np.conjugate(X[...,None,:]))
        # Average over time-frames
        C_hat = np.mean(C_hat,axis=0)
        return C_hat

    # non-vectorized version
    def _subspace_decomposition(self, R):
        """Split R into signal/noise eigen-pairs, largest |eigenvalue| first."""
        # eigenvalue decomposition!
        w,v = np.linalg.eig(R)
        # sort out signal and noise subspace
        # Signal comprises the leading eigenvalues
        # Noise takes the rest
        eig_order = np.flipud(np.argsort(abs(w)))
        sig_space = eig_order[:self.num_src]
        noise_space = eig_order[self.num_src:]
        # eigenvalues
        ws = w[sig_space]
        wn = w[noise_space]
        # eigenvectors
        Es = v[:,sig_space]
        En = v[:,noise_space]
        return Es, En, ws, wn

    # vectorized version
    def _subspace_decompositionvec(self,R):
        """Batched eigendecomposition into signal and noise subspaces."""
        # eigenvalue decomposition!
        w,v = np.linalg.eig(R)
        # sort out signal and noise subspace
        # Signal comprises the leading eigenvalues
        # Noise takes the rest
        eig_order = np.argsort(abs(w),axis=-1)[...,::-1]
        sig_space = eig_order[...,:self.num_src]
        noise_space = eig_order[...,self.num_src:]
        # eigenvalues
        # broadcasting for fancy indexing
        b = np.asarray(np.arange(w.shape[0]))[:,None,None]
        c = np.asarray(np.arange(w.shape[1]))[None,:,None]
        d = np.asarray(np.arange(w.shape[2]))[None,None,:,None]
        ws = w[b,c,sig_space]
        wn = w[b,c,noise_space]
        # eigenvectors
        Es = v[b[...,None],c[...,None],d,sig_space[...,None,:]]
        En = v[b[...,None],c[...,None],d,noise_space[...,None,:]]
        return (Es, En, ws, wn)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import gunicorn.util
from gunicorn.app.wsgiapp import WSGIApplication
class TWSGIAPP(WSGIApplication):
    """Gunicorn WSGI application wrapper that always serves ``.app:app``.

    Overrides WSGIApplication so the application URI does not need to be
    passed on the command line.
    """

    def init(self, parser, opts, args):
        # Force the application URI regardless of positional arguments.
        # FIX: removed a leftover debug `print(self.cfg)`.
        self.app_uri = ".app:app"
        args = [self.app_uri]
        super(TWSGIAPP, self).init(parser, opts, args)

    def load_wsgiapp(self):
        """Import and return the WSGI callable after chdir-ing to the app dir."""
        self.chdir()
        return gunicorn.util.import_app(self.app_uri)

    def run(self):
        # Kept for explicitness; simply delegates to the base implementation.
        super(TWSGIAPP, self).run()
def run_app():
    """Entry point: construct and run the gunicorn application wrapper."""
    TWSGIAPP().run()
|
import datetime
import webbrowser
import dateutil.rrule
from .compat import quote_plus
from .utils import indent
# What.
name = 'Fresno.py'
description = 'The Fresno Python User Group'
# Where.
location = 'Bitwise Industries'
address = '''
700 Van Ness Ave
Fresno, CA
'''
map = 'https://www.google.com/maps?q={0}'.format(
quote_plus(' '.join(address.split())))
# When.
schedule = dateutil.rrule.rrule(
freq=dateutil.rrule.MONTHLY,
byweekday=dateutil.rrule.TU,
bysetpos=4
)
# On the web.
website = 'http://FresnoPython.com'
twitter = 'http://twitter.com/FresnoPython'
def open_website():
    """Open the group website in the default browser."""
    return webbrowser.open(website)
def open_twitter():
    """Open the group's Twitter page in the default browser."""
    return webbrowser.open(twitter)
def open_map():
    """Open the meeting-location map in the default browser."""
    return webbrowser.open(map)
def next_meeting_date(date=None):
    """Return the next scheduled meeting as a datetime at 6:30 PM.

    *date* defaults to today; a plain date (or datetime) is normalized to
    midnight before querying the recurrence rule.
    """
    if date is None:
        date = datetime.date.today()
    if isinstance(date, datetime.date):
        date = datetime.datetime.combine(date, datetime.datetime.min.time())
    upcoming = schedule.after(date)
    return datetime.datetime.combine(upcoming.date(), datetime.time(18, 30))
def message():
    """Return the indented announcement text for the next meeting."""
    template = '''
    {name} -- {description}
    {website}
    Fresno.py's mission is to foster a welcoming and diverse community
    of Python developers and promote the use of the Python programming
    language in the Central Valley. People of all skill levels are welcome;
    if you are interested in Python, we'd love to have you join us!
    Next meeting: {next_meeting}
    {location}
    {address}
    Map: {map}
    Follow us on Twitter!
    {twitter}
    '''
    # %e and %l are space-padded day/hour (glibc strftime extensions).
    content = indent(template.format(
        name=name,
        description=description,
        website=website,
        twitter=twitter,
        location=location,
        address=address.strip(),
        map=map,
        next_meeting=next_meeting_date().strftime('%A %B %e, %Y at%l:%M%P'),
    ))
    return content
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
@File : test_wangge.py
Description :
@Author : pchaos
date: 18-4-1
-------------------------------------------------
Change Activity:
18-4-1:
@Contact : p19992003#gmail.com
-------------------------------------------------
"""
from unittest import TestCase
from WANGGE.wangge import *
__author__ = 'pchaos'
class TestWangge(TestCase):
    """Unit tests for the simpleWange / ROEWangge grid calculators."""

    def setUp(self):
        self.high = 100.0
        self.low = 0.0
        self.n = 20  # grid has 20 cells
        # self.wg = wangGebase(self.high, self.low, self.n)
        self.wg = simpleWange(self.high, self.low, self.n)

    def tearDown(self):
        self.wg = None

    def test_doCaculateSimpleWangge(self):
        """Arithmetic grid: first/last rows must hit the high/low bounds."""
        wangge = self.wg()
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.high = 1.323
        self.low = 0.414
        wangge = self.wg(self.high, self.low, self.n)
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.n = 30
        wangge = self.wg(self.high, self.low, self.n)
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.n = 40
        wangge = self.wg(self.high, self.low, self.n)
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.assertTrue(self.isequal(wangge[0][1], self.high), "计算错误,计算结果和实际网格最大值不相等: {0} != {1}".format(wangge[0][1], self.high))

    def test_doCaculateROExWangge(self):
        """
        Geometric (equal-ratio) price grid.
        :return:
        """
        self.high = 20.0
        self.low = 10.0
        self.n = 20  # grid has 20 cells
        # self.wg = wangGebase(self.high, self.low, self.n)
        self.wg = ROEWangge(self.high, self.low, self.n)
        wangge = self.wg()
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.assertTrue(self.isequal(wangge[0][1], self.high), "计算错误,计算结果和实际网格最大值不相等: {0} != {1}".format(wangge[0][1], self.high))
        # A geometric grid's midpoint lies below the arithmetic mean.
        self.assertTrue(wangge[self.n // 2][1] < (self.high + self.low) / 2,
                        "计算错误,计算结果和实际网格中间值: {0} < {1}".format(wangge[self.n // 2][1], (self.high + self.low) / 2))
        self.high = 1.323
        self.low = 0.414
        self.n = 39  # grid has 39 cells
        # self.wg = wangGebase(self.high, self.low, self.n)
        self.wg = ROEWangge(self.high, self.low, self.n)
        wangge = self.wg()
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.assertTrue(self.isequal(wangge[0][1], self.high), "计算错误,计算结果和实际网格最大值不相等: {0} != {1}".format(wangge[0][1], self.high))
        self.high = 1.323
        self.low = 0.414
        self.n = 40  # grid has 40 cells
        # self.wg = wangGebase(self.high, self.low, self.n)
        self.wg = ROEWangge(self.high, self.low, self.n)
        wangge = self.wg()
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.assertTrue(self.isequal(wangge[0][1], self.high), "计算错误,计算结果和实际网格最大值不相等: {0} != {1}".format(wangge[0][1], self.high))
        # 603180 2018 05 05
        self.high = np.round(179.9*1.015, 2)
        self.low = np.round(133.53*0.985, 2)
        # self.low = np.round(self.high * 0.75, 2)
        self.n = 11  # grid has 11 cells
        adp = 3  # keep 3 decimal places in the results
        # self.wg = wangGebase(self.high, self.low, self.n)
        self.wg = ROEWangge(self.high, self.low, self.n, adp)
        wangge = self.wg()
        print(wangge)
        self.assertTrue(self.isequal(wangge[-1][1], self.low, diff=0.001), "计算错误,计算结果和实际网格最小值不相等: {0} != {1}".format(wangge[-1][1], self.low))
        self.assertTrue(self.isequal(wangge[0][1], self.high, diff= 0.001), "计算错误,计算结果和实际网格最大值不相等: {0} != {1}".format(wangge[0][1], self.high))

    def isequal(self, value1, value2, diff = 0.0001):
        """
        Check whether two values differ by less than *diff*.
        :param value1: first value
        :param value2: second value
        :param diff: allowed absolute difference
        :return: True when the difference is within *diff*,
                 False otherwise
        """
        return np.abs(value1 - value2) < diff
|
# BUG FIX: removed a stray bare `w` expression that raised NameError on import.
#import stuff
#little bit about general stuff
# A jagged 2-D list: three numeric rows plus one longer row of strings.
grid = [
    [1, 2, 3],
    [4, 5, 6],
    [7, 8, 9],
    ['a', 'b', 'c', 'd'],
]
'''
try:
asdasd
except dividedbyzero
except invalidvalue
'''
##########
'''
mode = 'r' # read
# mode = 'w' # write, can create files
# mode = 'a' # append only
# mode = 'r+'# read & write
f = open("file",mode)
f.readline()
f.readlines()
for line in f.readlines():
print(line)
f.close()
f.write("\nasdasd")
'''
#classes
#str,num,boolean
class Building:
    """A building with a number of levels and a name."""
    #lvl = None

    def __init__(self, levels, name):
        self.lvl = levels
        # BUG FIX: `name` was accepted but silently discarded.
        self.name = name


a = Building(10, "aa")
a.lvl  # no-op attribute access; evaluates to 10 and is discarded
import numpy as np
def mask_np(array, null_val):
    """Return a float32 mask over *array*: 1.0 where the entry is valid,
    0.0 where it equals the null marker *null_val* (NaN-aware).
    """
    if np.isnan(null_val):
        # BUG FIX: the original tested np.isnan(null_val) instead of the
        # array, yielding a scalar 0.0 rather than a per-element mask.
        return (~np.isnan(array)).astype('float32')
    else:
        return np.not_equal(array, null_val).astype('float32')
def masked_mape_np(y_true, y_pred, null_val=np.nan):
    """Mean absolute percentage error (in %) over entries where y_true is valid."""
    with np.errstate(divide='ignore', invalid='ignore'):
        weights = mask_np(y_true, null_val)
        weights /= weights.mean()
        pct_err = np.abs((y_pred - y_true) / y_true)
        weighted = np.nan_to_num(weights * pct_err)
        return np.mean(weighted) * 100
def masked_rmse_np(y_true, y_pred, null_val=np.nan):
    """Root mean squared error over entries where y_true is valid."""
    weights = mask_np(y_true, null_val)
    weights /= weights.mean()
    sq_err = np.square(y_true - y_pred)
    return np.sqrt(np.mean(np.nan_to_num(weights * sq_err)))
def masked_mae_np(y_true, y_pred, null_val=np.nan):
    """Mean absolute error over entries where y_true is valid."""
    weights = mask_np(y_true, null_val)
    weights /= weights.mean()
    abs_err = np.abs(y_true - y_pred)
    return np.mean(np.nan_to_num(weights * abs_err))
|
"""empty message
Revision ID: 130e75eac0f7
Revises: None
Create Date: 2017-04-14 17:51:12.533272
"""
# revision identifiers, used by Alembic.
revision = '130e75eac0f7'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the blog_post table (initial schema)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('blog_post',
        sa.Column('pk', sa.Integer(), nullable=False),
        sa.Column('title', sa.String(length=120), nullable=True),
        sa.Column('markdown', sa.Text(), nullable=True),
        sa.Column('html', sa.Text(), nullable=True),
        sa.Column('author_id', sa.Integer(), nullable=True),
        sa.Column('published', sa.Boolean(), nullable=True),
        sa.Column('published_at', sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint('pk'),
        sa.UniqueConstraint('title')
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop the blog_post table (reverses upgrade())."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('blog_post')
    # ### end Alembic commands ###
|
import zarrdump
from zarrdump.core import dump, _open_with_xarray_or_zarr
from click.testing import CliRunner
import fsspec
import pytest
import numpy as np
import xarray as xr
import zarr
def test_version():
    # Pin the package version; bump this assertion when cutting a release.
    assert zarrdump.__version__ == "0.2.2"
@pytest.fixture()
def tmp_xarray_ds(tmpdir):
    """Factory fixture: write a tiny xarray Dataset to a temp zarr store.

    Returns a callable so tests can choose consolidated metadata or not.
    """
    def write_ds_to_zarr(consolidated=False):
        ds = xr.Dataset({"var1": xr.DataArray(range(3))})
        path = str(tmpdir.join("test.zarr"))
        ds.to_zarr(path, consolidated=consolidated)
        return ds, path
    return write_ds_to_zarr
@pytest.fixture()
def tmp_zarr_group(tmpdir):
    """Factory fixture: write a raw (non-xarray) zarr group to a temp store."""
    def write_group_to_zarr(consolidated=False):
        path = str(tmpdir.join("test.zarr"))
        z = zarr.open_group(path)
        arr = z.create_dataset("var1", shape=(3, 5))
        arr[:] = 1.0
        if consolidated:
            zarr.consolidate_metadata(path)
        return z, path
    return write_group_to_zarr
@pytest.mark.parametrize("consolidated", [True, False])
def test__open_with_xarray_or_zarr_on_zarr_group(tmp_zarr_group, consolidated):
    """A raw zarr group round-trips and is flagged as not an xarray dataset."""
    group, path = tmp_zarr_group(consolidated=consolidated)
    m = fsspec.get_mapper(path)
    opened_group, is_xarray_dataset = _open_with_xarray_or_zarr(m, consolidated)
    np.testing.assert_allclose(group["var1"], opened_group["var1"])
    assert not is_xarray_dataset
@pytest.mark.parametrize("consolidated", [True, False])
def test__open_with_xarray_or_zarr_on_xarray_ds(tmp_xarray_ds, consolidated):
    """An xarray-written store round-trips and is flagged as an xarray dataset."""
    ds, path = tmp_xarray_ds(consolidated=consolidated)
    m = fsspec.get_mapper(path)
    opened_ds, is_xarray_dataset = _open_with_xarray_or_zarr(m, consolidated)
    np.testing.assert_allclose(ds["var1"], opened_ds["var1"])
    assert is_xarray_dataset
def test_dump_non_existent_url():
    """dump exits with code 1 and a clear message for a missing path."""
    runner = CliRunner()
    result = runner.invoke(dump, ["non/existent/path"])
    assert result.exit_code == 1
    assert result.output == "Error: No file or directory at non/existent/path\n"
@pytest.mark.parametrize("options", [[], ["-v", "var1"]])
def test_dump_executes_on_zarr_group(tmp_zarr_group, options):
    """dump succeeds on a raw zarr group, with and without variable selection."""
    runner = CliRunner()
    _, path = tmp_zarr_group()
    result = runner.invoke(dump, [path] + options)
    assert result.exit_code == 0
@pytest.mark.parametrize("options", [[], ["-v", "var1"]])
def test_dump_executes_on_xarray_dataset(tmp_xarray_ds, options):
    """dump succeeds on an xarray store, with and without variable selection."""
    runner = CliRunner()
    _, path = tmp_xarray_ds()
    result = runner.invoke(dump, [path] + options)
    assert result.exit_code == 0
|
# Read a mass in pounds and print it converted to kilograms.
# Conversion formula: K = L * 0.45, where K is the mass in kilograms
# and L is the mass in pounds.
massa_libras = float(input("Informe a massa em libras: "))
massa_kg = massa_libras * 0.45
print(f"O peso convertido em KG eh {massa_kg}")
from absl import app
import jax
from jax._src.numpy.lax_numpy import argsort
import jax.numpy as jnp
from jaxopt import implicit_diff
from jaxopt import linear_solve
from jaxopt import OptaxSolver, GradientDescent
from matplotlib.pyplot import vlines
import optax
from sklearn import datasets
from sklearn import model_selection
from sklearn import preprocessing
import matplotlib.pylab as plt
import numpy as np
import tqdm
import cvgutils.Image as cvgim
# Hyper-parameter recovery setup: start the smoothness weight at lmbda_init
# and try to recover lmbda_gt from a synthetic image.
lmbda_init = 0.1
lmbda_gt = 4.
# Image dimensions; the flat h*w vectors below are reshaped with these.
h,w = 100,100
key = jax.random.PRNGKey(42)
# inpt = jnp.array([[0.,10.,0.],[10.,0.,10.],[0.,10.,0.]])
inpt = jax.random.uniform(key,(h*w,))
init_inpt = jnp.zeros_like(inpt)
@jax.jit
def screen_poisson_residual(params, lmbda, data):
    """Objective function."""
    # Data term: difference to the observation, normalized by sqrt(pixels).
    r1 = 1 / len(data.reshape(-1)) ** 0.5 * (params.reshape(h,w) - data.reshape(h,w))
    # Smoothness terms: lmbda-weighted vertical and horizontal differences.
    r2 = 1 / len(data.reshape(-1)) ** 0.5 * lmbda ** 0.5 * (params.reshape(h,w)[1:,:] - params.reshape(h,w)[:-1,:])
    r3 = 1 / len(data.reshape(-1)) ** 0.5 * lmbda ** 0.5 * (params.reshape(h,w)[:,1:] - params.reshape(h,w)[:,:-1])
    # Stack all residuals into one flat vector (0.5**0.5 halves the squares).
    return jnp.concatenate((0.5 ** 0.5 * r1.reshape(-1),0.5 ** 0.5 * r2.reshape(-1),0.5 ** 0.5 * r3.reshape(-1)),axis=0)
@jax.jit
def screen_poisson_objective(params, lmbda, data):
    """Objective function."""
    # Sum of squares of the stacked residual vector.
    return (screen_poisson_residual(params, lmbda, data) ** 2).sum()
@implicit_diff.custom_root(jax.grad(screen_poisson_objective))
def screen_poisson_solver(init_params,lmbda, data):
    """Conjugate-gradient solver; implicit differentiation through the root.

    NOTE(review): this matvec smooths along the single flattened axis only,
    while the residual above penalizes both image axes — confirm which
    formulation is intended (screen_poisson_solver2 uses the full residual).
    """
    def matvec(u):
        # (I + lmbda * L) u with L a 1-D difference operator.
        a = u
        a = a.at[1:].set(a[1:] + lmbda * (u[1:] - u[:-1]))
        a = a.at[:-1].set(a[:-1] + lmbda * (u[:-1] - u[1:]))
        return a
    return linear_solve.solve_cg(matvec=matvec,
                                 b=data,
                                 init=init_params,
                                 maxiter=100)
@implicit_diff.custom_root(jax.grad(screen_poisson_objective))
def screen_poisson_solver2(init_params,lmbda, data):
    """Gauss-Newton solver for the least-squares problem; implicit-diff root."""
    # grad_f = jax.grad(screen_poisson_objective,argnums=0)
    f = lambda u:screen_poisson_residual(u,lmbda,data)
    def matvec2(u):
        # Normal-equation product J^T J u via forward- then reverse-mode AD.
        jtd = jax.jvp(f,(init_params,),(u,))[1]
        return jax.vjp(f,init_params)[1](jtd)[0]
    # J^T f evaluated at the initial point.
    # NOTE(review): the Jacobian is linearized at init_params for every
    # Gauss-Newton iteration rather than at the current x — confirm intended.
    jtf = jax.vjp(f,init_params)[1](f(init_params))[0]
    #gauss newton loop
    gn_iters = 3
    x = init_params
    for i in range(gn_iters):
        x += linear_solve.solve_cg(matvec=matvec2,
                                   b=-jtf,
                                   init=x,
                                   maxiter=100)
    return x
@jax.jit
def outer_objective(lmbda, init_inner, data):
    """Validation loss."""
    inpt, gt = data
    # We use the bijective mapping l2reg = jnp.exp(theta)
    # both to optimize in log-space and to ensure positivity.
    # NOTE(review): the comment above appears inherited from a jaxopt
    # example; no exp() mapping is actually applied here.
    f = lambda u: screen_poisson_solver2(init_inner, u, inpt)
    # Mean squared error between the solver output and the ground truth.
    f_v = (f(lmbda) - gt) ** 2
    return f_v.mean()
def plot_tangent():
    """Plot the validation loss over a sweep of smoothness weights, with
    tangent arrows from autodiff (green) and finite differences (blue)."""
    # Ground-truth reconstruction at the true smoothness weight.
    im_gt = screen_poisson_solver2(init_inpt,lmbda_gt,inpt)
    data = [inpt, im_gt]
    count = 20
    delta = 0.0001  # central finite-difference step
    lmbdas = jnp.linspace(0.5,4,count)
    valid_loss = jnp.array([outer_objective(lmbdas[i], init_inpt, data) for i in range(count)])
    # Hyper-gradient d(loss)/d(lambda) via implicit differentiation.
    grad_lmbdas = jnp.array([jax.grad(outer_objective,argnums=0)(lmbdas[i], init_inpt, data) for i in range(count)])
    # Central finite-difference estimate of the same derivative, for comparison.
    fd_lmbdas = jnp.array([(outer_objective(lmbdas[i] + delta/2, init_inpt, data) - outer_objective(lmbdas[i] - delta/2, init_inpt, data)) / delta for i in range(count)])
    plt.figure(figsize=(24,7))
    # Panel 1: autodiff tangents.
    plt.subplot(1,3,1)
    plt.plot(lmbdas,valid_loss,'r')
    for i in tqdm.trange(valid_loss.shape[0]):
        plt.arrow(lmbdas[i],valid_loss[i],1,grad_lmbdas[i],color='g')
    plt.ylabel('Validation loss')
    plt.xlabel('Smoothness weight')
    # Panel 2: finite-difference tangents.
    plt.subplot(1,3,2)
    plt.plot(lmbdas,valid_loss,'r')
    for i in tqdm.trange(valid_loss.shape[0]):
        plt.arrow(lmbdas[i],valid_loss[i],1,fd_lmbdas[i],color='b')
    plt.xlabel('Smoothness weight')
    # Panel 3: both overlaid; they should coincide if the hyper-gradient is correct.
    # NOTE(review): legend order ('Finite difference','Autodiff') appears
    # swapped relative to draw order (green=autodiff drawn first) — verify.
    plt.subplot(1,3,3)
    plt.plot(lmbdas,valid_loss,'r')
    for i in tqdm.trange(valid_loss.shape[0]):
        plt.arrow(lmbdas[i],valid_loss[i],1,grad_lmbdas[i],color='g')
        plt.arrow(lmbdas[i],valid_loss[i],1,fd_lmbdas[i],color='b')
    plt.legend(['Finite difference','Autodiff'])
    plt.xlabel('Smoothness weight')
    plt.savefig('out/plot.pdf')
    plt.close()
    # jax.check_grads(outer_objective,(lmbdas[0],init_inpt, data),order=1)
def hyper_optimization_jaxopt():
    """Recover the smoothness weight with jaxopt's GradientDescent.

    Fix: the solver was run twice — once from ``lmbda`` for the print-out
    and again from 0.1 for the return value — so the printed and returned
    solutions could disagree (and the work was doubled).  It now runs once
    and the printed solution is the one returned.
    """
    lr = 0.05
    # NOTE(review): ``lmbda_init`` is not defined in this chunk; it must be
    # declared earlier in the file — verify before running.
    im_gt = screen_poisson_solver2(init_inpt, lmbda_gt, inpt)  # synthetic ground truth
    data = [inpt, im_gt]
    f_v = lambda u: outer_objective(u, init_inpt, data)
    gd = GradientDescent(f_v, stepsize=lr, maxiter=50)
    solution = gd.run(lmbda_init).params
    print('solution ', solution)
    return solution
def hyper_optimization_jaxopt_adam():
    """Recover the smoothness weight with jaxopt's OptaxSolver (Adam).

    Fix: the solver was run twice with different starting points (``lmbda``
    for the print, 0.1 for the return); it now runs once.  The unused
    ``lr`` local (Adam's step size is given directly to optax.adam) is
    removed.
    """
    im_gt = screen_poisson_solver2(init_inpt, lmbda_gt, inpt)  # synthetic ground truth
    data = [inpt, im_gt]
    f_v = lambda u: outer_objective(u, init_inpt, data)
    gd = OptaxSolver(opt=optax.adam(0.1), fun=f_v)
    solution = gd.run(lmbda_init).params
    print('solution ', solution)
    return solution
def hyper_optimization():
    """Gradient-descent recovery of the smoothness weight, dumping a progress
    frame every 100 iterations and assembling the frames into an AVI."""
    im_gt = screen_poisson_solver2(init_inpt,lmbda_gt,inpt)
    count = 20
    # NOTE(review): ``lmbda_init`` is not visible in this chunk — it must be
    # defined earlier in the file.
    lmbda = lmbda_init
    data = [inpt, im_gt]
    # Loss curve sampled between initial and ground-truth weights, used as
    # the backdrop of the progress plots.
    lmbdas = jnp.linspace(lmbda_init,lmbda_gt,count)
    valid_loss = jnp.array([outer_objective(lmbdas[i], init_inpt, data) for i in range(count)])
    lr = .9  # learning rate of the outer (hyper-parameter) descent
    import cv2
    for i in tqdm.trange(2000):
        # Hyper-gradient of the validation loss w.r.t. lambda.
        g = jax.grad(outer_objective,argnums=0)(lmbda, init_inpt, data)
        if(i%100 == 0):
            # Progress frame: prediction, ground truth, loss curve + tangent.
            plt.figure(figsize=(12*1.3,3.5*1.3))
            im = screen_poisson_solver2(init_inpt, lmbda, inpt)
            imshow = jnp.concatenate((im.reshape(h,w),im_gt.reshape(h,w)),axis=1)
            imshow = np.array(imshow * 255).astype(np.uint8)
            # cv2.imwrite('./out/%05i.png' % i,)
            plt.subplot(1,3,1)
            plt.imshow((im.reshape(h,w) * 255))
            plt.title('Prediction')
            plt.xlabel('Iteration %04i, lambda %05d'%(i,lmbda))
            plt.subplot(1,3,2)
            plt.imshow((im_gt.reshape(h,w) * 255))
            plt.title('Ground truth')
            plt.xlabel('lambda = ' + '%05d'%lmbda_gt)
            plt.subplot(1,3,3)
            plt.plot(lmbdas,valid_loss,'r')
            loss = outer_objective(lmbda, init_inpt, data)
            loss_gt = outer_objective(lmbda_gt, init_inpt, data)
            # Normalize the tangent arrow to unit length for display.
            nrm = jnp.linalg.norm(jnp.array([1,g]))
            plt.arrow(lmbda,loss,1/nrm,g/nrm,color='b')
            plt.scatter(lmbda,loss,color='r',marker='o')
            plt.scatter(lmbda_gt,loss_gt,color='g',marker='o')
            plt.legend(['Validation loss','Tangent vector','Prediction loss','Ground truth loss'])
            plt.ylabel('Validation loss')
            plt.xlabel('Smoothness weight')
            plt.savefig('./out/plt_%04i.png'%i)
            plt.close()
        # Gradient-descent update on the smoothness weight.
        lmbda -= lr * g
        print('g ',g,' lmbda ', lmbda, ' loss ', outer_objective(lmbda, init_inpt, data))
    # Stitch the dumped frames into a video.
    import glob
    imfns = sorted(glob.glob('./out/plt_*.png'))
    fn = './out/plt.avi'
    im = cv2.imread(imfns[0])
    out = cv2.VideoWriter(fn,cv2.VideoWriter_fourcc(*'DIVX'), 5, (im.shape[1],im.shape[0]))
    for imfn in imfns:
        im = cv2.imread(imfn)
        out.write(im)
    out.release()
# cvgim.imageseq2avi(fn,cvgim.loadImageSeq(sorted(imfns)))
# plot_tangent()
hyper_optimization()
# hyper_optimization_jaxopt()
# hyper_optimization_jaxopt_adam()
import matplotlib.pyplot as plt
# Fix: plt.figure((24,10)) passed the tuple as the ``num`` argument, which
# raises TypeError — the size must be given via ``figsize``.
plt.figure(figsize=(24, 10))
plt.subplot()
from typing import Union
from inject import autoparams
from loaders.database import DatabaseProvider
from models.student.StudentModel import StudentModel, UpdateStudentModel
from infrastructure.students.repo.StudentRepository import StudentRepository
class MongoDBStudentRepository(StudentRepository):
    """MongoDB-backed implementation of StudentRepository."""

    @autoparams()
    def __init__(self, provider: DatabaseProvider):
        # "students" collection of the injected Mongo database.
        self.dbTable = provider.getMongo_DB()["students"]

    # all of the following should be dtos on return
    def get_all(self) -> list[StudentModel]:
        """Return every student document in the collection."""
        return [document for document in self.dbTable.find()]

    def create(self, student: StudentModel) -> StudentModel:
        """Insert a student and return the freshly stored document."""
        inserted = self.dbTable.insert_one(student)
        return self.dbTable.find_one({"_id": inserted.inserted_id})

    def get_one(self, id: Union[str, int]):
        """Fetch one student by primary key (None when absent)."""
        return self.dbTable.find_one({"_id": id})

    def update(self, id, student: UpdateStudentModel):
        """Apply the non-null fields of ``student`` to the stored document.

        Returns the updated document, or None when nothing was modified.
        """
        changes = {key: value for key, value in student.dict().items() if value is not None}
        if len(changes) >= 1:
            outcome = self.dbTable.update_one({"_id": id}, {"$set": changes})
            if outcome.modified_count == 1:
                refreshed = self.dbTable.find_one({"_id": id})
                if refreshed is not None:
                    return refreshed

    def delete(self, id):
        """Delete by id; True when a document was actually removed."""
        outcome = self.dbTable.delete_one({"_id": id})
        return outcome.deleted_count != 0
|
import re
import dotenv
import requests
import os
from model import utils
# sets cities and times for weather report
# return true if time_input is valid 24hr time, else return false
def valid_time(time_input) -> bool:
    """Return True iff ``time_input`` is a valid 24-hour HH:MM time.

    "24:00" is accepted, matching the original intent.

    Fix: the previous check used ``re.match`` without an end anchor, so any
    string with a valid time *prefix* (e.g. "12:345" or "12:34pm") passed
    validation.  ``fullmatch`` requires the whole string to be a time.
    """
    pattern = re.compile(r'(([01]\d|2[0-3]):[0-5]\d)|24:00')
    return pattern.fullmatch(time_input) is not None
# return true if city_input is valid city name, else return false
def valid_city(city_input) -> bool:
    """Return True iff OpenWeatherMap recognizes ``city_input`` as a city."""
    key = os.getenv('WEATHER_TOKEN')  # API token from the environment
    url = r'http://api.openweathermap.org/data/2.5/weather?q=' + city_input + '&appid=' + key
    payload = requests.get(url).json()
    # The API reports an unknown city with string code '404'.
    return payload['cod'] != '404'
if __name__ == "__main__":
    dotenv.load_dotenv(utils.ENV_PATH)
    list_of_cities = ''
    list_of_times = ''
    # Collect '/'-prefixed city names until the user types 'done'.
    while True:
        city_name = input("enter a city name, type 'done' when done")
        if city_name == 'done':
            break
        if not valid_city(city_name):
            print("Not a valid city, try again")
            continue
        list_of_cities += '/' + city_name
    # Collect ','-prefixed 24h times until the user types 'done'.
    while True:
        entered_time = input("enter a time in 24h, type 'done' when done")
        if entered_time == 'done':
            break
        if not valid_time(entered_time):
            print("Not a valid time, try again")
            continue
        list_of_times += ',' + entered_time
    # Persist both lists for the weather reporter to pick up.
    with open(utils.CITY_DIR, 'w') as city_file, open(utils.TIMES_DIR, 'w') as times_file:
        city_file.write(list_of_cities)
        times_file.write(list_of_times)
    print("Cities and times successfully set")
|
from django.conf.urls import url,include
from Weatherstack import views
app_name = 'Weatherstack'

urlpatterns = [
    url(r'^batch/$', views.batch, name='batch'),
    url(r'^time-series/$', views.time_series, name='time-series'),
    # Fix: the pattern contained a stray space ("^ current/$"), which made
    # the /current/ endpoint unreachable.
    url(r'^current/$', views.current, name='current'),
]
|
# Generated by Django 3.1.7 on 2021-03-21 12:34
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: replaces the old ``Picture`` model with ``Image``."""

    dependencies = [
        ('pic', '0003_auto_20210321_1429'),
    ]

    operations = [
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=60)),
                ('pic', models.ImageField(upload_to='uploads/')),
                ('description', models.TextField()),
                ('image_category', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pic.category')),
                ('image_location', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='pic.location')),
            ],
            options={
                'ordering': ['name'],
            },
        ),
        migrations.DeleteModel(
            name='Picture',
        ),
    ]
|
# MODELS GESTION TOMA DATOS
from django.db import models
#from matrix_field import MatrixField
# Create your models here.
# CREAR UNA CLASE POR CADA TABLA DE NUESTRA BASE DE DATOS
class Participantes(models.Model):
    """Study participant: personal and contact data."""
    nombre=models.CharField(max_length=30)
    apellido=models.CharField(max_length=30)
    fechaNac=models.DateField(verbose_name="Fecha de Nacimiento")
    DNI=models.IntegerField()
    direccion=models.CharField(max_length=50)
    email=models.EmailField(blank=True, null= True) # accepts an email or null
    tfno=models.CharField(max_length=15, verbose_name="Telefono")
    def __str__(self):
        # Fix: the previous implementation returned a tuple
        # ('El participante es ', nombre, apellido), which makes Django raise
        # TypeError("__str__ returned non-string") whenever the object is
        # rendered (admin, shell, templates).
        return 'El participante es %s %s' % (self.nombre, self.apellido)
class Investigador_Clinico(models.Model):
    """Clinical investigator: personal data plus professional license (MP)."""
    nombre=models.CharField(max_length=30)
    apellido=models.CharField(max_length=30)
    fechaNac=models.DateField(verbose_name="Fecha de Nacimiento")
    DNI=models.IntegerField()
    MP=models.IntegerField(verbose_name="Matricula Profesional")  # professional license number
    direccion=models.CharField(max_length=50)
    email=models.EmailField()
    tfno=models.CharField(max_length=15, verbose_name="Telefono")
    # NOTE(review): the commented-out __str__ below uses '%'-formatting with
    # no format specifiers and would raise TypeError if re-enabled as written.
    #def __str__(self):
    #return 'El investigador es: ' %(self.nombre, self.apellido)
class Estudio(models.Model):
    """A study session linking participant, investigator and protocol.

    NOTE(review): the links are plain integer/char fields rather than
    ForeignKeys, so referential integrity is not enforced by the database.
    """
    nombre=models.CharField(max_length=30, verbose_name="Nombre del Estudio")
    categoriaEstudio=models.CharField(max_length=30, verbose_name="Categoria")
    fechaEstudio=models.DateField(verbose_name="Fecha del Estudio")
    clave_Estudio=models.IntegerField(verbose_name= "ID de Estudio")
    num_muestra=models.IntegerField(verbose_name="Numero de Muestra")
    invResp=models.CharField(max_length=30, verbose_name="Investigador Responsable")
    protocoloEstudio=models.CharField(max_length=30, verbose_name="Nombre del Protocolo")
    clave_Protocolo=models.IntegerField(null= True,verbose_name= "ID de Protocolo")
    participanteEstudio=models.IntegerField(verbose_name="ID del Participante")
    efectuado=models.BooleanField() # whether the study has been carried out
class ProtocoloClinico(models.Model):
    """Clinical research protocol: approvals, dates and up to three
    responsible investigators (2 and 3 optional)."""
    protocoloEstudio=models.CharField(max_length=300, verbose_name="Nombre del Protocolo")
    clave_Protocolo=models.IntegerField(null= True,verbose_name= "ID de Protocolo")
    tipoInvestigacion=models.CharField(max_length=150, verbose_name="Tipo de Investigacion")
    aceptadoDiseño_Metodológico=models.BooleanField(verbose_name="Aceptacion Diseño Metodológico") # methodological design approved?
    fechaAceptacionDM=models.DateField(verbose_name="Fecha Diseño Metodológico")
    aceptadoConsetimiento_Informado=models.BooleanField(verbose_name="Aceptacion Consentimiento Informado") # informed consent approved?
    fechaAceptacionCI=models.DateField(verbose_name="Fecha Consentimiento Informado")
    fechaInicioInvestigacion=models.DateField(verbose_name="Fecha Inicion de Investigacion")
    investigadorResponsable1=models.CharField(max_length=30, verbose_name="Investigador Responsable 1- IR_1")
    MP_IR1=models.IntegerField(verbose_name="Matricula Profesional IR_1")
    investigadorResponsable2=models.CharField(blank=True, null= True, max_length=30, verbose_name="Investigador Responsable 2- IR_2")
    MP_IR2=models.IntegerField(blank=True, null= True, verbose_name="Matricula Profesional IR_2")
    investigadorResponsable3=models.CharField(blank=True, null= True, max_length=30, verbose_name="Investigador Responsable 3- IR_3")
    MP_IR3=models.IntegerField(blank=True, null= True, verbose_name="Matricula Profesional IR_3")
class AdqData_EEG_Protocolo(models.Model):
    """EEG acquisition parameters for a study (side comments list the
    expected value domains of each field)."""
    nombre=models.CharField(max_length=50, verbose_name="Nombre del Estudio")
    estadoConciencia=models.CharField(max_length=50, verbose_name="Estado de Conciencia") # Vigilia-Dormido-Coma-Sedado
    condicionFisica=models.CharField(max_length=50, verbose_name="Condicion Fisica") # Reposo - Esfuerzo - Relajado
    estadoElectrofisiologico=models.CharField(max_length=150, verbose_name="Estado Electrofisiológico") # Reposo Ojos Cerrado- Reposo Ojos Abiertos - Hiperventilacion - Optoestimulacio -Suelo REM -Sueño No REM etc
    cantidad_EstadosElectrofisio=models.IntegerField(null= True,verbose_name= "N° de Estados Electrofisiologicos")
    duracion_Registro=models.IntegerField(null= True,verbose_name= "Duracion del Registro: min.")
    numero_Canales=models.IntegerField(null= True,verbose_name= "Numero de Canales") #20 - 32 - 64
    sample_canales=models.IntegerField(null= True,verbose_name= "Tasa de Muestreo: Hz") # 65Hz - 128Hz - 256Hz 512Hz
    eegData=models.CharField(null= True, max_length=50, verbose_name="Raw Signals") # NOTE(review): stores a reference to the raw data as a short string — verify the intended format
# class EEG_Data(models.Model):
# eegValue = MatrixField(datatype='float', dimensions=(3, 2))
# eegChannel = MatrixField(datatype='str', dimensions=(2,))
|
import unittest
import mock
import uiza
from uiza.api_resources.storage import Storage
from uiza.exceptions import (
BadRequestError,
UnauthorizedError,
NotFoundError,
UnprocessableError,
InternalServerError,
ServiceUnavailableError,
ClientError,
ServerError,
ClientException
)
class TestStorageBaseTestCase(unittest.TestCase):
    """Shared fixtures for the Storage resource test cases."""

    def __init__(self, *args, **kwargs):
        super(TestStorageBaseTestCase, self).__init__(*args, **kwargs)
        # Credentials are fake; every HTTP call is mocked in the subclasses.
        uiza.authorization = 'test api key'
        uiza.app_id = 'test app id'
        self.storage_id = '37d6706e-be91-463e-b3b3-b69451dd4752'
        self.storage_data_create = dict(
            name='FTP Uiza test 4',
            description='FTP of Uiza, use for transcode',
            storageType='ftp',
            host='ftp-example.uiza.io',
        )
        self.storage_data_update = dict(
            id='37d6706e-be91-463e-b3b3-b69451dd4752',
            name='FTP Uiza test 5',
            description='FTP of Uiza, use for transcode',
            storageType='ftp',
            host='ftp-example.uiza.io',
        )
class TestAddStorage(TestStorageBaseTestCase):
    """Tests for Storage().add().

    Fix: the assertions used ``assertTrue(x, 'Name')``, where the second
    argument is the failure *message* — such assertions always pass.  They
    are now real ``assertEqual`` checks on the raised exception class name.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_valid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        data = Storage().add(**self.storage_data_create)
        self.assertEqual(data[1], 200)

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_400(self, mock_request_http):
        mock_request_http.return_value = True, 400
        with self.assertRaises(BadRequestError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'BadRequestError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_401(self, mock_request_http):
        mock_request_http.return_value = True, 401
        with self.assertRaises(UnauthorizedError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'UnauthorizedError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_404(self, mock_request_http):
        mock_request_http.return_value = True, 404
        with self.assertRaises(NotFoundError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'NotFoundError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_422(self, mock_request_http):
        mock_request_http.return_value = True, 422
        with self.assertRaises(UnprocessableError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'UnprocessableError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_500(self, mock_request_http):
        mock_request_http.return_value = True, 500
        with self.assertRaises(InternalServerError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'InternalServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_503(self, mock_request_http):
        mock_request_http.return_value = True, 503
        with self.assertRaises(ServiceUnavailableError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'ServiceUnavailableError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_4xx(self, mock_request_http):
        mock_request_http.return_value = True, 412
        with self.assertRaises(ClientError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'ClientError')

    @mock.patch('uiza.Connection._request_http')
    def test_add_storage_invalid_with_status_code_5xx(self, mock_request_http):
        mock_request_http.return_value = True, 512
        with self.assertRaises(ServerError) as context:
            Storage().add(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'ServerError')
class TestRetrieveStorage(TestStorageBaseTestCase):
    """Tests for Storage().retrieve().

    Fix: ``assertTrue(x, 'Name')`` (always-passing — second arg is the
    failure message) replaced with ``assertEqual`` on the exception name.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_valid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        data = Storage().retrieve(id=self.storage_id)
        self.assertEqual(data[1], 200)

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_400(self, mock_request_http):
        mock_request_http.return_value = True, 400
        with self.assertRaises(BadRequestError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'BadRequestError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_401(self, mock_request_http):
        mock_request_http.return_value = True, 401
        with self.assertRaises(UnauthorizedError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'UnauthorizedError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_404(self, mock_request_http):
        mock_request_http.return_value = True, 404
        with self.assertRaises(NotFoundError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'NotFoundError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_422(self, mock_request_http):
        mock_request_http.return_value = True, 422
        with self.assertRaises(UnprocessableError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'UnprocessableError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_500(self, mock_request_http):
        mock_request_http.return_value = True, 500
        with self.assertRaises(InternalServerError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'InternalServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_503(self, mock_request_http):
        mock_request_http.return_value = True, 503
        with self.assertRaises(ServiceUnavailableError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ServiceUnavailableError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_4xx(self, mock_request_http):
        mock_request_http.return_value = True, 412
        with self.assertRaises(ClientError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ClientError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_status_code_5xx(self, mock_request_http):
        mock_request_http.return_value = True, 512
        with self.assertRaises(ServerError) as context:
            Storage().retrieve(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_retrieve_storage_invalid_with_not_storage_id(self, mock_request_http):
        mock_request_http.return_value = True, 200
        with self.assertRaises(Exception) as context:
            Storage().retrieve()
        # NOTE(review): assumes the missing-id failure is a TypeError —
        # confirm against Storage.retrieve's signature.
        self.assertEqual(context.exception.__class__.__name__, 'TypeError')
class TestUpdateStorage(TestStorageBaseTestCase):
    """Tests for Storage().update().

    Fix: ``assertTrue(x, 'Name')`` (always-passing — second arg is the
    failure message) replaced with ``assertEqual`` on the exception name.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_valid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        data = Storage().update(**self.storage_data_update)
        self.assertEqual(data[1], 200)

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_400(self, mock_request_http):
        mock_request_http.return_value = True, 400
        with self.assertRaises(BadRequestError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'BadRequestError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_401(self, mock_request_http):
        mock_request_http.return_value = True, 401
        with self.assertRaises(UnauthorizedError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'UnauthorizedError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_404(self, mock_request_http):
        mock_request_http.return_value = True, 404
        with self.assertRaises(NotFoundError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'NotFoundError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_422(self, mock_request_http):
        mock_request_http.return_value = True, 422
        with self.assertRaises(UnprocessableError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'UnprocessableError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_500(self, mock_request_http):
        mock_request_http.return_value = True, 500
        with self.assertRaises(InternalServerError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'InternalServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_503(self, mock_request_http):
        mock_request_http.return_value = True, 503
        with self.assertRaises(ServiceUnavailableError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'ServiceUnavailableError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_4xx(self, mock_request_http):
        mock_request_http.return_value = True, 412
        with self.assertRaises(ClientError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'ClientError')

    @mock.patch('uiza.Connection._request_http')
    def test_update_storage_invalid_with_status_code_5xx(self, mock_request_http):
        mock_request_http.return_value = True, 512
        with self.assertRaises(ServerError) as context:
            Storage().update(**self.storage_data_update)
        self.assertEqual(context.exception.__class__.__name__, 'ServerError')
class TestRemoveStorage(TestStorageBaseTestCase):
    """Tests for Storage().remove().

    Fix: ``assertTrue(x, 'Name')`` (always-passing — second arg is the
    failure message) replaced with ``assertEqual`` on the exception name.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_valid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        data = Storage().remove(id=self.storage_id)
        self.assertEqual(data[1], 200)

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_400(self, mock_request_http):
        mock_request_http.return_value = True, 400
        with self.assertRaises(BadRequestError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'BadRequestError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_401(self, mock_request_http):
        mock_request_http.return_value = True, 401
        with self.assertRaises(UnauthorizedError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'UnauthorizedError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_404(self, mock_request_http):
        mock_request_http.return_value = True, 404
        with self.assertRaises(NotFoundError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'NotFoundError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_422(self, mock_request_http):
        mock_request_http.return_value = True, 422
        with self.assertRaises(UnprocessableError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'UnprocessableError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_500(self, mock_request_http):
        mock_request_http.return_value = True, 500
        with self.assertRaises(InternalServerError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'InternalServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_503(self, mock_request_http):
        mock_request_http.return_value = True, 503
        with self.assertRaises(ServiceUnavailableError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ServiceUnavailableError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_4xx(self, mock_request_http):
        mock_request_http.return_value = True, 412
        with self.assertRaises(ClientError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ClientError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid_with_status_code_5xx(self, mock_request_http):
        mock_request_http.return_value = True, 512
        with self.assertRaises(ServerError) as context:
            Storage().remove(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ServerError')

    @mock.patch('uiza.Connection._request_http')
    def test_remove_storage_invalid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        with self.assertRaises(TypeError) as context:
            Storage().remove()
        self.assertEqual(context.exception.__class__.__name__, 'TypeError')
class TestListStorage(TestStorageBaseTestCase):
    """Storage().list() is unsupported and must raise ClientException.

    Fix: always-passing ``assertTrue(x, msg)`` replaced with ``assertEqual``.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_list_storage_invalid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        with self.assertRaises(ClientException) as context:
            Storage().list()
        self.assertEqual(context.exception.__class__.__name__, 'ClientException')
class TestCreateStorage(TestStorageBaseTestCase):
    """Storage().create() is unsupported and must raise ClientException.

    Fix: always-passing ``assertTrue(x, msg)`` replaced with ``assertEqual``.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_create_storage_invalid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        with self.assertRaises(ClientException) as context:
            Storage().create(**self.storage_data_create)
        self.assertEqual(context.exception.__class__.__name__, 'ClientException')
class TestDeleteStorage(TestStorageBaseTestCase):
    """Storage().delete() is unsupported and must raise ClientException.

    Fix: always-passing ``assertTrue(x, msg)`` replaced with ``assertEqual``.
    """

    @mock.patch('uiza.Connection._request_http')
    def test_delete_storage_invalid(self, mock_request_http):
        mock_request_http.return_value = True, 200
        with self.assertRaises(ClientException) as context:
            Storage().delete(id=self.storage_id)
        self.assertEqual(context.exception.__class__.__name__, 'ClientException')
|
import sqlite3
import db_init
def insert_dummy_data(conn):
    """Insert two sample product rows, commit, and close the connection.

    Parameters
    ----------
    conn : sqlite3.Connection
        Open connection to a database that already contains a five-column
        ``products`` table (id, name, price, description, image filename).

    Note: the connection is committed and *closed* here, so the caller
    cannot reuse it afterwards.  (Idiom cleanup only: removed stray
    statement-terminating semicolons; behavior is unchanged.)
    """
    cur = conn.cursor()
    # Rows to insert: (id, name, price, description, image filename).
    records = [
        (1, 'chair', 100, 'living room chairs', "1.jpg"),
        (2, 'table', 200, 'living room table', "2.jpg"),
    ]
    # Parameterized executemany runs the INSERT once per record.
    cur.executemany('INSERT INTO products VALUES(?,?,?,?,?);', records)
    print('We have inserted', cur.rowcount, 'records to the table.')
    # Commit the changes, then close the connection.
    conn.commit()
    conn.close()
#execute all functions
# Script entry: open (or create) the database via db_init and seed it.
if __name__ == '__main__':
    conn = db_init.create_connection()
    insert_dummy_data(conn)  # also commits and closes the connection
|
from manimlib.imports import *
from old_projects.eoc.chapter2 import Car, MoveCar
class CircleScene(PiCreatureScene):
    """Base scene: a circle with a labeled radius R, plus helpers to build
    concentric rings and unwrap them into thin rectangles.

    Fixes: ``np.linspace(..., n_anchors/2)`` used true division, which
    passes a float sample count and raises TypeError under Python 3 /
    modern NumPy — now integer division.  ``introduce_circle`` no longer
    uses a mutable list as a default argument.
    """
    CONFIG = {
        "radius": 1.5,
        "stroke_color": WHITE,
        "fill_color": BLUE_E,
        "fill_opacity": 0.75,
        "radial_line_color": MAROON_B,
        "outer_ring_color": GREEN_E,
        "ring_colors": [BLUE, GREEN],
        "dR": 0.1,
        "dR_color": YELLOW,
        "unwrapped_tip": ORIGIN,
        "include_pi_creature": False,
        "circle_corner": UP+LEFT,
    }

    def setup(self):
        """Build the circle and the radius line/brace/label, add to scene."""
        PiCreatureScene.setup(self)
        self.circle = Circle(
            radius=self.radius,
            stroke_color=self.stroke_color,
            fill_color=self.fill_color,
            fill_opacity=self.fill_opacity,
        )
        self.circle.to_corner(self.circle_corner, buff=MED_LARGE_BUFF)
        self.radius_line = Line(
            self.circle.get_center(),
            self.circle.get_right(),
            color=self.radial_line_color
        )
        self.radius_brace = Brace(self.radius_line, buff=SMALL_BUFF)
        self.radius_label = self.radius_brace.get_text("$R$", buff=SMALL_BUFF)
        self.radius_group = VGroup(
            self.radius_line, self.radius_brace, self.radius_label
        )
        self.add(self.circle, *self.radius_group)
        if not self.include_pi_creature:
            self.remove(self.get_primary_pi_creature())

    def introduce_circle(self, added_anims=None):
        """Animate drawing the circle by sweeping the radius line around."""
        if added_anims is None:  # fix: avoid mutable default argument
            added_anims = []
        self.remove(self.circle)
        self.play(
            ShowCreation(self.radius_line),
            GrowFromCenter(self.radius_brace),
            Write(self.radius_label),
        )
        self.circle.set_fill(opacity=0)
        self.play(
            Rotate(
                self.radius_line, 2*np.pi-0.001,
                about_point=self.circle.get_center(),
            ),
            ShowCreation(self.circle),
            *added_anims,
            run_time=2
        )
        self.play(
            self.circle.set_fill, self.fill_color, self.fill_opacity,
            Animation(self.radius_line),
            Animation(self.radius_brace),
            Animation(self.radius_label),
        )

    def increase_radius(self, numerical_dr=True, run_time=2):
        """Grow the radius by dR, showing the new outer ring and a labeled
        nudge arrow; returns the outer ring mobject."""
        radius_mobs = VGroup(
            self.radius_line, self.radius_brace, self.radius_label
        )
        nudge_line = Line(
            self.radius_line.get_right(),
            self.radius_line.get_right() + self.dR*RIGHT,
            color=self.dR_color
        )
        nudge_arrow = Arrow(
            nudge_line.get_center() + 0.5*RIGHT+DOWN,
            nudge_line.get_center(),
            color=YELLOW,
            buff=SMALL_BUFF,
            tip_length=0.2,
        )
        # Label the nudge either with the numeric dR or symbolically as "dr".
        if numerical_dr:
            nudge_label = TexMobject("%.01f" % self.dR)
        else:
            nudge_label = TexMobject("dr")
        nudge_label.set_color(self.dR_color)
        nudge_label.scale(0.75)
        nudge_label.next_to(nudge_arrow.get_start(), DOWN)
        radius_mobs.add(nudge_line, nudge_arrow, nudge_label)
        outer_ring = self.get_outer_ring()
        self.play(
            FadeIn(outer_ring),
            ShowCreation(nudge_line),
            ShowCreation(nudge_arrow),
            Write(nudge_label),
            run_time=run_time/2.
        )
        self.wait(run_time/2.)
        self.nudge_line = nudge_line
        self.nudge_arrow = nudge_arrow
        self.nudge_label = nudge_label
        self.outer_ring = outer_ring
        return outer_ring

    def get_ring(self, radius, dR, color=GREEN):
        """Return an annulus mobject of inner ``radius`` and width ``dR``."""
        ring = Circle(radius=radius + dR).center()
        inner_ring = Circle(radius=radius)
        inner_ring.rotate(np.pi, RIGHT)  # reverse orientation to cut the hole
        ring.append_vectorized_mobject(inner_ring)
        ring.set_stroke(width=0)
        ring.set_fill(color)
        ring.move_to(self.circle)
        ring.R = radius
        ring.dR = dR
        return ring

    def get_rings(self, **kwargs):
        """Return a VGroup of concentric rings covering the whole circle."""
        dR = kwargs.get("dR", self.dR)
        colors = kwargs.get("colors", self.ring_colors)
        radii = np.arange(0, self.radius, dR)
        colors = color_gradient(colors, len(radii))
        rings = VGroup(*[
            self.get_ring(radius, dR=dR, color=color)
            for radius, color in zip(radii, colors)
        ])
        return rings

    def get_outer_ring(self):
        """Return the single ring sitting just outside the circle."""
        return self.get_ring(
            radius=self.radius, dR=self.dR,
            color=self.outer_ring_color
        )

    def unwrap_ring(self, ring, **kwargs):
        """Convenience wrapper around unwrap_rings for a single ring."""
        self.unwrap_rings(ring, **kwargs)

    def unwrap_rings(self, *rings, **kwargs):
        """Animate flattening rings into their rectangular unwrapped form."""
        added_anims = kwargs.get("added_anims", [])
        rings = VGroup(*rings)
        unwrapped = VGroup(*[
            self.get_unwrapped(ring, **kwargs)
            for ring in rings
        ])
        self.play(
            rings.rotate, np.pi/2,
            rings.next_to, unwrapped.get_bottom(), UP,
            run_time=2,
            path_arc=np.pi/2,
        )
        self.play(
            Transform(rings, unwrapped, run_time=3),
            *added_anims
        )

    def get_unwrapped(self, ring, to_edge=LEFT, **kwargs):
        """Return the trapezoid a ring becomes when cut and flattened."""
        R = ring.R
        R_plus_dr = ring.R + ring.dR
        n_anchors = ring.get_num_curves()
        result = VMobject()
        # Fix: linspace's sample count must be an integer — use // not /.
        result.set_points_as_corners([
            interpolate(np.pi*R_plus_dr*LEFT, np.pi*R_plus_dr*RIGHT, a)
            for a in np.linspace(0, 1, n_anchors // 2)
        ]+[
            interpolate(np.pi*R*RIGHT+ring.dR*UP, np.pi*R*LEFT+ring.dR*UP, a)
            for a in np.linspace(0, 1, n_anchors // 2)
        ])
        result.set_style_data(
            stroke_color=ring.get_stroke_color(),
            stroke_width=ring.get_stroke_width(),
            fill_color=ring.get_fill_color(),
            fill_opacity=ring.get_fill_opacity(),
        )
        result.move_to(self.unwrapped_tip, aligned_edge=DOWN)
        result.shift(R_plus_dr*DOWN)
        if to_edge is not None:
            result.to_edge(to_edge)
        return result

    def create_pi_creature(self):
        """Place a single Randolph pi creature in the lower-left corner."""
        self.pi_creature = Randolph(color=BLUE_C)
        self.pi_creature.to_corner(DOWN+LEFT)
        return self.pi_creature
#############
class Chapter1OpeningQuote(OpeningQuote):
    """Hilbert quote card for chapter 1; 'special case' highlighted in blue."""
    CONFIG = {
        "quote": [
            """The art of doing mathematics is finding
            that """, "special case",
            """that contains all the
            germs of generality."""
        ],
        "quote_arg_separator": " ",
        "highlighted_quote_terms": {
            "special case": BLUE
        },
        "author": "David Hilbert",
    }
class Introduction(TeacherStudentsScene):
    """Series opener: reveal the video list, flash sample formulas from
    later chapters, then pose the challenge of inventing calculus."""
    def construct(self):
        self.show_series()
        self.show_many_facts()
        self.invent_calculus()
    def show_series(self):
        """Shrink this chapter's video out of fullscreen into the series bar."""
        series = VideoSeries()
        series.to_edge(UP)
        this_video = series[0]
        this_video.set_color(YELLOW)
        this_video.save_state()
        this_video.set_fill(opacity=0)
        this_video.center()
        this_video.set_height(FRAME_HEIGHT)
        self.this_video = this_video
        words = TextMobject(
            "Welcome to \\\\",
            "Essence of calculus"
        )
        words.set_color_by_tex("Essence of calculus", YELLOW)
        self.teacher.change_mode("happy")
        self.play(
            FadeIn(
                series,
                lag_ratio=0.5,
                run_time=2
            ),
            Blink(self.get_teacher())
        )
        self.teacher_says(words, target_mode="hooray")
        self.change_student_modes(
            *["hooray"]*3,
            look_at_arg=series[1].get_left(),
            added_anims=[
                ApplyMethod(this_video.restore, run_time=3),
            ]
        )
        # Ripple the videos one after another while keeping the
        # teacher's speech bubble drawn on top.
        self.play(*[
            ApplyMethod(
                video.shift, 0.5*video.get_height()*DOWN,
                run_time=3,
                rate_func=squish_rate_func(
                    there_and_back, alpha, alpha+0.3
                )
            )
            for video, alpha in zip(series, np.linspace(0, 0.7, len(series)))
        ]+[
            Animation(self.teacher.bubble),
            Animation(self.teacher.bubble.content),
        ])
        essence_words = words.get_part_by_tex("Essence").copy()
        self.play(
            FadeOut(self.teacher.bubble),
            FadeOut(self.teacher.bubble.content),
            essence_words.next_to, series, DOWN,
            *[
                ApplyMethod(pi.change_mode, "pondering")
                for pi in self.get_pi_creatures()
            ]
        )
        self.wait(3)
        self.series = series
        self.essence_words = essence_words
    def show_many_facts(self):
        """Cycle through sample formulas, filing each under its video."""
        rules = list(it.starmap(TexMobject, [
            ("{d(", "x", "^2)", "\\over \\,", "dx}", "=", "2", "x"),
            (
                "d(", "f", "g", ")", "=",
                "f", "dg", "+", "g", "df",
            ),
            (
                "F(x)", "=", "\\int_0^x",
                "\\frac{dF}{dg}(t)\\,", "dt"
            ),
            (
                "f(x)", "=", "\\sum_{n = 0}^\\infty",
                "f^{(n)}(a)", "\\frac{(x-a)^n}{n!}"
            ),
        ]))
        video_indices = [2, 3, 7, 10]
        tex_to_color = [
            ("x", BLUE),
            ("f", BLUE),
            ("df", BLUE),
            ("g", YELLOW),
            ("dg", YELLOW),
            ("f(x)", BLUE),
            ("f^{(n)}(a)", BLUE),
        ]
        for rule in rules:
            for tex, color in tex_to_color:
                rule.set_color_by_tex(tex, color, substring=False)
            rule.next_to(self.teacher.get_corner(UP+LEFT), UP)
            rule.shift_onto_screen()
        student_index = 1
        student = self.get_students()[student_index]
        self.change_student_modes(
            "pondering", "sassy", "pondering",
            look_at_arg=self.teacher.eyes,
            added_anims=[
                self.teacher.change_mode, "plain"
            ]
        )
        self.wait(2)
        self.play(
            Write(rules[0]),
            self.teacher.change_mode, "raise_right_hand",
        )
        self.wait()
        # Pair each rule with its successor (ending on a point at the
        # teacher's eyes) so each morphs into its video as the next appears.
        alt_rules_list = list(
            rules[1:]) + [VectorizedPoint(self.teacher.eyes.get_top())]
        for last_rule, rule, video_index in zip(rules, alt_rules_list, video_indices):
            video = self.series[video_index]
            self.play(
                last_rule.replace, video,
                FadeIn(rule),
            )
            self.play(Animation(rule))
            self.wait()
        self.play(
            self.teacher.change_mode, "happy",
            self.teacher.look_at, student.eyes
        )
    def invent_calculus(self):
        """Dim everything but one student and pose 'Invent calculus'."""
        student = self.get_students()[1]
        creatures = self.get_pi_creatures()
        creatures.remove(student)
        creature_copies = creatures.copy()
        self.remove(creatures)
        self.add(creature_copies)
        calculus = VGroup(*self.essence_words[-len("calculus"):])
        calculus.generate_target()
        invent = TextMobject("Invent")
        invent_calculus = VGroup(invent, calculus.target)
        invent_calculus.arrange(RIGHT, buff=MED_SMALL_BUFF)
        invent_calculus.next_to(student, UP, 1.5*LARGE_BUFF)
        invent_calculus.shift(RIGHT)
        arrow = Arrow(invent_calculus, student)
        fader = Rectangle(
            width=FRAME_WIDTH,
            height=FRAME_HEIGHT,
            stroke_width=0,
            fill_color=BLACK,
            fill_opacity=0.5,
        )
        self.play(
            FadeIn(fader),
            Animation(student),
            Animation(calculus)
        )
        self.play(
            Write(invent),
            MoveToTarget(calculus),
            student.change_mode, "erm",
            student.look_at, calculus
        )
        self.play(ShowCreation(arrow))
        self.wait(2)
class PreviewFrame(Scene):
    """Show a 16:9 frame beneath a rotating sequence of chapter titles."""
    def construct(self):
        frame = Rectangle(height=9, width=16, color=WHITE)
        frame.set_height(1.5 * FRAME_Y_RADIUS)
        chapter_info = [
            (3, "Derivative formulas through geometry"),
            (4, "Chain rule, product rule, etc."),
            (7, "Limits"),
        ]
        shades = color_gradient([BLUE, YELLOW], 3)
        titles = []
        for (num, subtitle), shade in zip(chapter_info, shades):
            label = TextMobject("Chapter %d:" % num, subtitle)
            label.to_edge(UP)
            label.set_color(shade)
            titles.append(label)
        title = titles[0]
        frame.next_to(title, DOWN)
        self.add(frame, title)
        self.wait(3)
        for upcoming in titles[1:]:
            self.play(Transform(title, upcoming))
            self.wait(3)
class ProductRuleDiagram(Scene):
    """Geometric product-rule picture: an f-by-g rectangle grows by df
    and dg, and the three added slivers assemble d(fg) = f dg + g df."""
    def construct(self):
        df = 0.4
        dg = 0.2
        rect_kwargs = {
            "stroke_width": 0,
            "fill_color": BLUE,
            "fill_opacity": 0.6,
        }
        rect = Rectangle(width=4, height=3, **rect_kwargs)
        rect.shift(DOWN)
        df_rect = Rectangle(
            height=rect.get_height(),
            width=df,
            **rect_kwargs
        )
        dg_rect = Rectangle(
            height=dg,
            width=rect.get_width(),
            **rect_kwargs
        )
        corner_rect = Rectangle(
            height=dg,
            width=df,
            **rect_kwargs
        )
        d_rects = VGroup(df_rect, dg_rect, corner_rect)
        for d_rect, direction in zip(d_rects, [RIGHT, DOWN, RIGHT+DOWN]):
            d_rect.next_to(rect, direction, buff=0)
            d_rect.set_fill(YELLOW, 0.75)
        # Each sliver will grow out of the matching edge (or corner) of
        # the main rectangle; stash that flattened copy on the sliver.
        corner_pairs = [
            (DOWN+RIGHT, UP+RIGHT),
            (DOWN+RIGHT, DOWN+LEFT),
            (DOWN+RIGHT, DOWN+RIGHT),
        ]
        for d_rect, corner_pair in zip(d_rects, corner_pairs):
            line = Line(*[
                rect.get_corner(corner)
                for corner in corner_pair
            ])
            d_rect.line = d_rect.copy().replace(line, stretch=True)
            d_rect.line.set_color(d_rect.get_color())
        f_brace = Brace(rect, UP)
        g_brace = Brace(rect, LEFT)
        df_brace = Brace(df_rect, UP)
        dg_brace = Brace(dg_rect, LEFT)
        f_label = f_brace.get_text("$f$")
        g_label = g_brace.get_text("$g$")
        df_label = df_brace.get_text("$df$")
        dg_label = dg_brace.get_text("$dg$")
        VGroup(f_label, df_label).set_color(GREEN)
        VGroup(g_label, dg_label).set_color(RED)
        f_label.generate_target()
        g_label.generate_target()
        fg_group = VGroup(f_label.target, g_label.target)
        fg_group.generate_target()
        fg_group.target.arrange(RIGHT, buff=SMALL_BUFF)
        fg_group.target.move_to(rect.get_center())
        # Shrink the d-labels into the rectangle's corners so they can
        # later "grow back" via restore().
        for mob in df_brace, df_label, dg_brace, dg_label:
            mob.save_state()
            mob.scale(0.01, about_point=rect.get_corner(
                mob.get_center() - rect.get_center()
            ))
        self.add(rect)
        self.play(
            GrowFromCenter(f_brace),
            GrowFromCenter(g_brace),
            Write(f_label),
            Write(g_label),
        )
        self.play(MoveToTarget(fg_group))
        self.play(*[
            mob.restore
            for mob in (df_brace, df_label, dg_brace, dg_label)
        ] + [
            ReplacementTransform(d_rect.line, d_rect)
            for d_rect in d_rects
        ])
        self.wait()
        self.play(
            d_rects.space_out_submobjects, 1.2,
            MaintainPositionRelativeTo(
                VGroup(df_brace, df_label),
                df_rect
            ),
            MaintainPositionRelativeTo(
                VGroup(dg_brace, dg_label),
                dg_rect
            ),
        )
        self.wait()
        deriv = TexMobject(
            "d(", "fg", ")", "=",
            "f", "\\cdot", "dg", "+", "g", "\\cdot", "df"
        )
        deriv.to_edge(UP)
        # Stagger the label copies as they fly into the formula.
        alpha_iter = iter(np.linspace(0, 0.5, 5))
        self.play(*[
            ApplyMethod(
                mob.copy().move_to,
                deriv.get_part_by_tex(tex, substring=False),
                rate_func=squish_rate_func(smooth, alpha, alpha+0.5)
            )
            for mob, tex in [
                (fg_group, "fg"),
                (f_label, "f"),
                (dg_label, "dg"),
                (g_label, "g"),
                (df_label, "df"),
            ]
            for alpha in [next(alpha_iter)]
        ]+[
            Write(VGroup(*it.chain(*[
                deriv.get_parts_by_tex(tex, substring=False)
                for tex in ("d(", ")", "=", "\\cdot", "+")
            ])))
        ], run_time=3)
        self.wait()
class IntroduceCircle(CircleScene):
    """Introduce Area = pi R^2, hint at slicing the circle into rings,
    and preview the calculus notation that will explain it."""
    CONFIG = {
        "include_pi_creature": True,
        "unwrapped_tip": 2*RIGHT
    }
    def construct(self):
        # NOTE(review): force_skipping is never reverted in this scene,
        # so everything renders in skip (jump-to-end) mode — presumably
        # intentional for producing stills; confirm before re-rendering.
        self.force_skipping()
        self.introduce_area()
        self.question_area()
        self.show_calculus_symbols()
    def introduce_area(self):
        """State Area = pi R^2 beside the pondering pi creature."""
        area = TexMobject("\\text{Area}", "=", "\\pi", "R", "^2")
        area.next_to(self.pi_creature.get_corner(UP+RIGHT), UP+RIGHT)
        self.remove(self.circle, self.radius_group)
        self.play(
            self.pi_creature.change_mode, "pondering",
            self.pi_creature.look_at, self.circle
        )
        self.introduce_circle()
        self.wait()
        R_copy = self.radius_label.copy()
        self.play(
            self.pi_creature.change_mode, "raise_right_hand",
            self.pi_creature.look_at, area,
            Transform(R_copy, area.get_part_by_tex("R"))
        )
        self.play(Write(area))
        self.remove(R_copy)
        self.wait()
        self.area = area
    def question_area(self):
        """Slice the circle into rings and unwrap them into strips."""
        q_marks = TexMobject("???")
        q_marks.next_to(self.pi_creature, UP)
        rings = VGroup(*reversed(self.get_rings()))
        unwrapped_rings = VGroup(*[
            self.get_unwrapped(ring, to_edge=None)
            for ring in rings
        ])
        unwrapped_rings.arrange(UP, buff=SMALL_BUFF)
        unwrapped_rings.move_to(self.unwrapped_tip, UP)
        ring_anim_kwargs = {
            "run_time": 3,
            "lag_ratio": 0.5
        }
        self.play(
            Animation(self.area),
            Write(q_marks),
            self.pi_creature.change_mode, "confused",
            self.pi_creature.look_at, self.area,
        )
        self.wait()
        self.play(
            FadeIn(rings, **ring_anim_kwargs),
            Animation(self.radius_group),
            FadeOut(q_marks),
            self.pi_creature.change_mode, "thinking"
        )
        self.wait()
        self.play(
            rings.rotate, np.pi/2,
            rings.move_to, unwrapped_rings.get_top(),
            Animation(self.radius_group),
            path_arc=np.pi/2,
            **ring_anim_kwargs
        )
        self.play(
            Transform(rings, unwrapped_rings, **ring_anim_kwargs),
        )
        self.wait()
    def show_calculus_symbols(self):
        """Morph pieces of the area formula into the FTC statement."""
        ftc = TexMobject(
            "\\int_0^R", "\\frac{dA}{dr}", "\\,dr",
            "=", "A(R)"
        )
        ftc.shift(2*UP)
        self.play(
            ReplacementTransform(
                self.area.get_part_by_tex("R").copy(),
                ftc.get_part_by_tex("int")
            ),
            self.pi_creature.change_mode, "plain"
        )
        self.wait()
        self.play(
            ReplacementTransform(
                self.area.get_part_by_tex("Area").copy(),
                ftc.get_part_by_tex("frac")
            ),
            ReplacementTransform(
                self.area.get_part_by_tex("R").copy(),
                ftc.get_part_by_tex("\\,dr")
            )
        )
        self.wait()
        self.play(Write(VGroup(*ftc[-2:])))
        self.wait(2)
class ApproximateOneRing(CircleScene, ReconfigurableScene):
    """Pull one ring out of the sliced circle, unwrap it, and argue its
    area is approximately 2*pi*r*dr (improving as dr shrinks)."""
    CONFIG = {
        "num_lines": 24,
        "ring_index_proportion": 0.6,
        "ring_shift_val": 6*RIGHT,
        "ring_colors": [BLUE, GREEN_E],
        "unwrapped_tip": 2*RIGHT+0.5*UP,
    }
    def setup(self):
        # Both parent setups must run; neither calls the other.
        CircleScene.setup(self)
        ReconfigurableScene.setup(self)
    def construct(self):
        # Skip ahead through the intro, render straighten_ring_out at
        # full speed, then skip again for the remainder.
        self.force_skipping()
        self.write_radius_three()
        self.try_to_understand_area()
        self.slice_into_rings()
        self.isolate_one_ring()
        self.revert_to_original_skipping_status()
        self.straighten_ring_out()
        self.force_skipping()
        self.approximate_as_rectangle()
    def write_radius_three(self):
        """Replace the radius label with the concrete value 3."""
        three = TexMobject("3")
        three.move_to(self.radius_label)
        self.look_at(self.circle)
        self.play(Transform(
            self.radius_label, three,
            path_arc=np.pi
        ))
        self.wait()
    def try_to_understand_area(self):
        """Show several chord patterns that fail to explain the area."""
        line_sets = [
            VGroup(*[
                Line(
                    self.circle.point_from_proportion(alpha),
                    self.circle.point_from_proportion(func(alpha)),
                )
                for alpha in np.linspace(0, 1, self.num_lines)
            ])
            for func in [
                lambda alpha: 1-alpha,
                lambda alpha: (0.5-alpha) % 1,
                lambda alpha: (alpha + 0.4) % 1,
                lambda alpha: (alpha + 0.5) % 1,
            ]
        ]
        for lines in line_sets:
            lines.set_stroke(BLACK, 2)
        lines = line_sets[0]
        self.play(
            ShowCreation(
                lines,
                run_time=2,
                lag_ratio=0.5
            ),
            Animation(self.radius_group),
            self.pi_creature.change_mode, "maybe"
        )
        self.wait(2)
        for new_lines in line_sets[1:]:
            self.play(
                Transform(lines, new_lines),
                Animation(self.radius_group)
            )
            self.wait()
        self.wait()
        self.play(FadeOut(lines), Animation(self.radius_group))
    def slice_into_rings(self):
        """Fade in the concentric rings and spin them around."""
        rings = self.get_rings()
        rings.set_stroke(BLACK, 1)
        self.play(
            FadeIn(
                rings,
                lag_ratio=0.5,
                run_time=3
            ),
            Animation(self.radius_group),
            self.pi_creature.change_mode, "pondering",
            self.pi_creature.look_at, self.circle
        )
        self.wait(2)
        for x in range(2):
            self.play(
                Rotate(rings, np.pi, in_place=True, run_time=2),
                Animation(self.radius_group),
                self.pi_creature.change_mode, "happy"
            )
        self.wait(2)
        self.rings = rings
    def isolate_one_ring(self):
        """Copy one ring to the side, label its radius r, ask its area."""
        rings = self.rings
        index = int(self.ring_index_proportion*len(rings))
        original_ring = rings[index]
        ring = original_ring.copy()
        radius = Line(ORIGIN, ring.R*RIGHT, color=WHITE)
        radius.rotate(np.pi/4)
        r_label = TexMobject("r")
        r_label.next_to(radius.get_center(), UP+LEFT, SMALL_BUFF)
        area_q = TextMobject("Area", "?", arg_separator="")
        area_q.set_color(YELLOW)
        self.play(
            ring.shift, self.ring_shift_val,
            original_ring.set_fill, None, 0.25,
            Animation(self.radius_group),
        )
        VGroup(radius, r_label).shift(ring.get_center())
        area_q.next_to(ring, RIGHT)
        self.play(ShowCreation(radius))
        self.play(Write(r_label))
        self.wait()
        self.play(Write(area_q))
        self.wait()
        # Flash each ring yellow in sequence, inner to outer.
        self.play(*[
            ApplyMethod(
                r.set_fill, YELLOW,
                rate_func=squish_rate_func(there_and_back, alpha, alpha+0.15),
                run_time=3
            )
            for r, alpha in zip(rings, np.linspace(0, 0.85, len(rings)))
        ]+[
            Animation(self.radius_group)
        ])
        self.wait()
        self.change_mode("thinking")
        self.wait()
        self.original_ring = original_ring
        self.ring = ring
        self.ring_radius_group = VGroup(radius, r_label)
        self.area_q = area_q
    def straighten_ring_out(self):
        """Unwrap the isolated ring; reject 'trapezoid', accept 'rectangle-ish'."""
        ring = self.ring.copy()
        trapezoid = TextMobject("Trapezoid?")
        rectangle_ish = TextMobject("Rectangle-ish")
        for text in trapezoid, rectangle_ish:
            text.next_to(
                self.pi_creature.get_corner(UP+RIGHT),
                DOWN+RIGHT, buff=MED_LARGE_BUFF
            )
        self.unwrap_ring(ring, to_edge=RIGHT)
        self.change_mode("pondering")
        self.wait()
        self.play(Write(trapezoid))
        self.wait()
        self.play(trapezoid.shift, DOWN)
        strike = Line(
            trapezoid.get_left(), trapezoid.get_right(),
            stroke_color=RED,
            stroke_width=8
        )
        self.play(
            Write(rectangle_ish),
            ShowCreation(strike),
            self.pi_creature.change_mode, "happy"
        )
        self.wait()
        self.play(*list(map(FadeOut, [trapezoid, strike])))
        self.unwrapped_ring = ring
    def approximate_as_rectangle(self):
        """Brace the strip as 2*pi*r by dr and conclude Area ~ 2*pi*r*dr."""
        top_brace, side_brace = [
            Brace(
                self.unwrapped_ring, vect, buff=SMALL_BUFF,
                min_num_quads=2,
            )
            for vect in (UP, LEFT)
        ]
        # Shrink the top brace to span the strip's (shorter) top edge.
        top_brace.scale_in_place(self.ring.R/(self.ring.R+self.dR))
        side_brace.set_stroke(WHITE, 0.5)
        width_label = TexMobject("2\\pi", "r")
        width_label.next_to(top_brace, UP, SMALL_BUFF)
        dr_label = TexMobject("dr")
        q_marks = TexMobject("???")
        concrete_dr = TexMobject("=0.1")
        concrete_dr.submobjects.reverse()
        for mob in dr_label, q_marks, concrete_dr:
            mob.next_to(side_brace, LEFT, SMALL_BUFF)
        dr_label.save_state()
        alt_side_brace = side_brace.copy()
        alt_side_brace.move_to(ORIGIN, UP+RIGHT)
        alt_side_brace.rotate(-np.pi/2)
        alt_side_brace.shift(
            self.original_ring.get_boundary_point(RIGHT)
        )
        alt_dr_label = dr_label.copy()
        alt_dr_label.next_to(alt_side_brace, UP, SMALL_BUFF)
        approx = TexMobject("\\approx")
        approx.next_to(
            self.area_q.get_part_by_tex("Area"),
            RIGHT,
            align_using_submobjects=True,
        )
        two_pi_r_dr = VGroup(width_label, dr_label).copy()
        two_pi_r_dr.generate_target()
        two_pi_r_dr.target.arrange(
            RIGHT, buff=SMALL_BUFF, aligned_edge=DOWN
        )
        two_pi_r_dr.target.next_to(approx, RIGHT, aligned_edge=DOWN)
        self.play(GrowFromCenter(top_brace))
        self.play(
            Write(width_label.get_part_by_tex("pi")),
            ReplacementTransform(
                self.ring_radius_group[1].copy(),
                width_label.get_part_by_tex("r")
            )
        )
        self.wait()
        self.play(
            GrowFromCenter(side_brace),
            Write(q_marks)
        )
        self.change_mode("confused")
        self.wait()
        for num_rings in 20, 7:
            self.show_alternate_width(num_rings)
        self.play(ReplacementTransform(q_marks, dr_label))
        self.play(
            ReplacementTransform(side_brace.copy(), alt_side_brace),
            ReplacementTransform(dr_label.copy(), alt_dr_label),
            run_time=2
        )
        self.wait()
        self.play(
            dr_label.next_to, concrete_dr.copy(), LEFT, SMALL_BUFF, DOWN,
            Write(concrete_dr, run_time=2),
            self.pi_creature.change_mode, "pondering"
        )
        self.wait(2)
        self.play(
            MoveToTarget(two_pi_r_dr),
            FadeIn(approx),
            self.area_q.get_part_by_tex("?").fade, 1,
        )
        self.wait()
        self.play(
            FadeOut(concrete_dr),
            dr_label.restore
        )
        self.show_alternate_width(
            40,
            transformation_kwargs={"run_time": 4},
            return_to_original_configuration=False,
        )
        self.wait(2)
        self.look_at(self.circle)
        self.play(
            ApplyWave(self.rings, amplitude=0.1),
            Animation(self.radius_group),
            Animation(alt_side_brace),
            Animation(alt_dr_label),
            run_time=3,
            lag_ratio=0.5
        )
        self.wait(2)
    def show_alternate_width(self, num_rings, **kwargs):
        """Replay the scene with the circle cut into num_rings rings."""
        self.transition_to_alt_config(
            dR=self.radius/num_rings, **kwargs
        )
class MoveForwardWithApproximation(TeacherStudentsScene):
    """Teacher reassures skeptical students about the dr approximation."""
    def construct(self):
        self.teacher_says(
            "Move forward with \\\\", "the", "approximation"
        )
        self.change_student_modes("hesitant", "erm", "sassy")
        self.wait()
        reassurance = TextMobject(
            "It gets better", "\\\\ for smaller ", "$dr$"
        )
        reassurance.set_color_by_tex("dr", BLUE)
        self.teacher_says(reassurance, target_mode="shruggie")
        self.wait(3)
class GraphRectangles(CircleScene, GraphScene):
    """Carry the unwrapped rings onto an (r, 2*pi*r) graph, refine the
    Riemann rectangles, and recover Area = pi R^2 as the triangle area
    under the graph."""
    CONFIG = {
        "graph_origin": 3.25*LEFT+2.5*DOWN,
        "x_min": 0,
        "x_max": 4,
        "x_axis_width": 7,
        "x_labeled_nums": list(range(5)),
        "x_axis_label": "$r$",
        "y_min": 0,
        "y_max": 20,
        "y_tick_frequency": 2.5,
        "y_labeled_nums": list(range(5, 25, 5)),
        "y_axis_label": "",
        "exclude_zero_label": False,
        "num_rings_in_ring_sum_start": 3,
        "tick_height": 0.2,
    }
    def setup(self):
        CircleScene.setup(self)
        GraphScene.setup(self)
        self.setup_axes()
        self.remove(self.axes)
        # self.pi_creature.change_mode("pondering")
        # self.pi_creature.look_at(self.circle)
        # self.add(self.pi_creature)
        # Instantly relabel the radius as "3": update(1) applies the
        # transform's end state without animating it.
        three = TexMobject("3")
        three.move_to(self.radius_label)
        self.radius_label.save_state()
        Transform(self.radius_label, three).update(1)
    def construct(self):
        self.draw_ring_sum()
        self.draw_r_values()
        self.unwrap_rings_onto_graph()
        self.draw_graph()
        self.point_out_approximation()
        self.let_dr_approah_zero()
        self.compute_area_under_graph()
        self.show_circle_unwrapping()
    def draw_ring_sum(self):
        """Break the circle into a vertical 'ring + ring + ...' sum."""
        rings = self.get_rings()
        rings.set_stroke(BLACK, 1)
        ring_sum, draw_ring_sum_anims = self.get_ring_sum(rings)
        area_label = TexMobject(
            "\\text{Area}", "\\approx",
            "2\\pi", "r", "\\,dr"
        )
        area_label.set_color_by_tex("r", YELLOW, substring=False)
        area_label.next_to(ring_sum, RIGHT, aligned_edge=UP)
        area = area_label.get_part_by_tex("Area")
        arrow_start = area.get_corner(DOWN+LEFT)
        arrows = VGroup(*[
            Arrow(
                arrow_start,
                ring.target.get_boundary_point(
                    arrow_start - ring.target.get_center()
                ),
                color=ring.get_color()
            )
            for ring in rings
            if ring.target.get_fill_opacity() > 0
        ])
        self.add(rings, self.radius_group)
        self.remove(self.circle)
        self.wait()
        self.play(*draw_ring_sum_anims)
        self.play(Write(area_label, run_time=2))
        self.play(ShowCreation(arrows))
        self.wait()
        self.ring_sum = ring_sum
        area_label.add(arrows)
        self.area_label = area_label
        self.rings = rings
    def draw_r_values(self):
        """Mark the sampled r values as ticks on the x axis."""
        values_of_r = TextMobject("Values of ", "$r$")
        values_of_r.set_color_by_tex("r", YELLOW)
        values_of_r.next_to(
            self.x_axis, UP,
            buff=2*LARGE_BUFF,
            aligned_edge=LEFT
        )
        r_ticks = VGroup(*[
            Line(
                self.coords_to_point(r, -self.tick_height),
                self.coords_to_point(r, self.tick_height),
                color=YELLOW
            )
            for r in np.arange(0, 3, 0.1)
        ])
        arrows = VGroup(*[
            Arrow(
                values_of_r.get_part_by_tex("r").get_bottom(),
                tick.get_top(),
                buff=SMALL_BUFF,
                color=YELLOW,
                tip_length=0.15
            )
            for tick in (r_ticks[0], r_ticks[-1])
        ])
        first_tick = r_ticks[0].copy()
        moving_arrow = arrows[0].copy()
        index = 2
        dr_brace = Brace(
            VGroup(*r_ticks[index:index+2]),
            DOWN, buff=SMALL_BUFF
        )
        dr_label = TexMobject("dr")
        dr_label.next_to(
            dr_brace, DOWN,
            buff=SMALL_BUFF,
            aligned_edge=LEFT
        )
        dr_group = VGroup(dr_brace, dr_label)
        self.play(
            FadeIn(values_of_r),
            FadeIn(self.x_axis),
        )
        self.play(
            ShowCreation(moving_arrow),
            ShowCreation(first_tick),
        )
        self.play(Indicate(self.rings[0]))
        self.wait()
        self.play(
            Transform(moving_arrow, arrows[-1]),
            ShowCreation(r_ticks, lag_ratio=0.5),
            run_time=2
        )
        self.play(Indicate(self.rings[-1]))
        self.wait()
        self.play(FadeIn(dr_group))
        self.wait()
        self.play(*list(map(FadeOut, [moving_arrow, values_of_r])))
        self.x_axis.add(r_ticks)
        self.r_ticks = r_ticks
        self.dr_group = dr_group
    def unwrap_rings_onto_graph(self):
        """Unwrap each ring into a vertical bar standing on the x axis."""
        rings = self.rings
        graph = self.get_graph(lambda r: 2*np.pi*r)
        flat_graph = self.get_graph(lambda r: 0)
        rects, flat_rects = [
            self.get_riemann_rectangles(
                g, x_min=0, x_max=3, dx=self.dR,
                start_color=self.rings[0].get_fill_color(),
                end_color=self.rings[-1].get_fill_color(),
            )
            for g in (graph, flat_graph)
        ]
        self.graph, self.flat_rects = graph, flat_rects
        transformed_rings = VGroup()
        self.ghost_rings = VGroup()
        for index, rect, r in zip(it.count(), rects, np.arange(0, 3, 0.1)):
            proportion = float(index)/len(rects)
            ring_index = int(len(rings)*proportion**0.6)
            ring = rings[ring_index]
            if ring in transformed_rings:
                ring = ring.copy()
            transformed_rings.add(ring)
            if ring.get_fill_opacity() > 0:
                ghost_ring = ring.copy()
                ghost_ring.set_fill(opacity=0.25)
                self.add(ghost_ring, ring)
                self.ghost_rings.add(ghost_ring)
            ring.rect = rect
            n_anchors = ring.get_num_curves()
            target = VMobject()
            # Floor division: np.linspace needs an integer count
            # (n_anchors/2 is a float under Python 3 and modern numpy
            # raises TypeError for a non-integer num).
            target.set_points_as_corners([
                interpolate(ORIGIN, DOWN, a)
                for a in np.linspace(0, 1, n_anchors // 2)
            ]+[
                interpolate(DOWN+RIGHT, RIGHT, a)
                for a in np.linspace(0, 1, n_anchors // 2)
            ])
            target.replace(rect, stretch=True)
            target.stretch_to_fit_height(2*np.pi*r)
            target.move_to(rect, DOWN)
            target.set_stroke(BLACK, 1)
            target.set_fill(ring.get_fill_color(), 1)
            ring.target = target
            ring.original_ring = ring.copy()
        foreground_animations = list(
            map(Animation, [self.x_axis, self.area_label]))
        example_ring = transformed_rings[2]
        self.play(
            MoveToTarget(
                example_ring,
                path_arc=-np.pi/2,
                run_time=2
            ),
            Animation(self.x_axis),
        )
        self.wait(2)
        self.play(*[
            MoveToTarget(
                ring,
                path_arc=-np.pi/2,
                run_time=4,
                rate_func=squish_rate_func(smooth, alpha, alpha+0.25)
            )
            for ring, alpha in zip(
                transformed_rings,
                np.linspace(0, 0.75, len(transformed_rings))
            )
        ] + foreground_animations)
        self.wait()
        # Demonstrate height of one rect
        highlighted_ring = transformed_rings[6].copy()
        original_ring = transformed_rings[6].original_ring
        original_ring.move_to(highlighted_ring, RIGHT)
        original_ring.set_fill(opacity=1)
        highlighted_ring.save_state()
        side_brace = Brace(highlighted_ring, RIGHT)
        height_label = side_brace.get_text("2\\pi", "r")
        height_label.set_color_by_tex("r", YELLOW)
        self.play(
            transformed_rings.set_fill, None, 0.2,
            Animation(highlighted_ring),
            *foreground_animations
        )
        self.play(
            self.dr_group.arrange, DOWN,
            self.dr_group.next_to, highlighted_ring,
            DOWN, SMALL_BUFF
        )
        self.wait()
        self.play(
            GrowFromCenter(side_brace),
            Write(height_label)
        )
        self.wait()
        self.play(Transform(highlighted_ring, original_ring))
        self.wait()
        self.play(highlighted_ring.restore)
        self.wait()
        self.play(
            transformed_rings.set_fill, None, 1,
            FadeOut(side_brace),
            FadeOut(height_label),
            *foreground_animations
        )
        self.remove(highlighted_ring)
        self.wait()
        # Rescale the bars to the graph's coordinate system, then swap
        # in the true Riemann rectangles.
        self.play(*[
            ApplyMethod(
                ring.replace, ring.rect,
                method_kwargs={"stretch": True}
            )
            for ring in transformed_rings
        ] + [
            Write(self.y_axis),
            FadeOut(self.area_label),
        ] + foreground_animations)
        self.remove(transformed_rings)
        self.add(rects)
        self.wait()
        self.rects = rects
    def draw_graph(self):
        """Draw y = 2*pi*r and wiggle the rectangles beneath it."""
        graph_label = self.get_graph_label(
            self.graph, "2\\pi r",
            direction=UP+LEFT,
            x_val=2.5,
            buff=SMALL_BUFF
        )
        self.play(ShowCreation(self.graph))
        self.play(Write(graph_label))
        self.wait()
        self.play(*[
            Transform(
                rect, flat_rect,
                run_time=2,
                rate_func=squish_rate_func(
                    lambda t: 0.1*there_and_back(t),
                    alpha, alpha+0.5
                ),
                lag_ratio=0.5
            )
            for rect, flat_rect, alpha in zip(
                self.rects, self.flat_rects,
                np.linspace(0, 0.5, len(self.rects))
            )
        ] + list(map(Animation, [self.x_axis, self.graph]))
        )
        self.wait(2)
    def point_out_approximation(self):
        """Pull one rectangle aside and label it an approximation."""
        rect = self.rects[10]
        rect.generate_target()
        rect.save_state()
        approximation = TextMobject("= Approximation")
        approximation.scale(0.8)
        group = VGroup(rect.target, approximation)
        group.arrange(RIGHT)
        group.to_edge(RIGHT)
        self.play(
            MoveToTarget(rect),
            Write(approximation),
        )
        self.wait(2)
        self.play(
            rect.restore,
            FadeOut(approximation)
        )
        self.wait()
    def let_dr_approah_zero(self):
        """Refine the rectangles with successively smaller dx.

        NOTE(review): method name misspells "approach"; kept as-is since
        renaming could break callers outside this file.
        """
        thinner_rects_list = [
            self.get_riemann_rectangles(
                self.graph,
                x_min=0,
                x_max=3,
                dx=1./(10*2**n),
                stroke_width=1./(2**n),
                start_color=self.rects[0].get_fill_color(),
                end_color=self.rects[-1].get_fill_color(),
            )
            for n in range(1, 5)
        ]
        self.play(*list(map(FadeOut, [self.r_ticks, self.dr_group])))
        self.x_axis.remove(self.r_ticks, *self.r_ticks)
        for new_rects in thinner_rects_list:
            self.play(
                Transform(
                    self.rects, new_rects,
                    lag_ratio=0.5,
                    run_time=2
                ),
                Animation(self.axes),
                Animation(self.graph),
            )
            self.wait()
        self.play(ApplyWave(
            self.rects,
            direction=RIGHT,
            run_time=2,
            lag_ratio=0.5,
        ))
        self.wait()
    def compute_area_under_graph(self):
        """Work through Area = (1/2) b h for the triangle under the graph."""
        formula, formula_with_R = formulas = [
            self.get_area_formula(R)
            for R in ("3", "R")
        ]
        for mob in formulas:
            mob.to_corner(UP+RIGHT, buff=MED_SMALL_BUFF)
        brace = Brace(self.rects, RIGHT)
        height_label = brace.get_text("$2\\pi \\cdot 3$")
        height_label_with_R = brace.get_text("$2\\pi \\cdot R$")
        base_line = Line(
            self.coords_to_point(0, 0),
            self.coords_to_point(3, 0),
            color=YELLOW
        )
        fresh_rings = self.get_rings(dR=0.025)
        fresh_rings.set_stroke(width=0)
        self.radius_label.restore()
        VGroup(
            fresh_rings, self.radius_group
        ).to_corner(UP+LEFT, buff=SMALL_BUFF)
        self.play(Write(formula.top_line, run_time=2))
        self.play(FocusOn(base_line))
        self.play(ShowCreation(base_line))
        self.wait()
        self.play(
            GrowFromCenter(brace),
            Write(height_label)
        )
        self.wait()
        self.play(FocusOn(formula))
        self.play(Write(formula.mid_line))
        self.wait()
        self.play(Write(formula.bottom_line))
        self.wait(2)
        self.play(*list(map(FadeOut, [
            self.ghost_rings,
            self.ring_sum.tex_mobs
        ])))
        self.play(*list(map(FadeIn, [fresh_rings, self.radius_group])))
        self.wait()
        # Generalize from the concrete radius 3 to R.
        self.play(
            Transform(formula, formula_with_R),
            Transform(height_label, height_label_with_R),
        )
        self.wait(2)
        self.fresh_rings = fresh_rings
    def show_circle_unwrapping(self):
        """Unwrap the fine-ringed circle directly into the graph triangle."""
        rings = self.fresh_rings
        rings.rotate_in_place(np.pi)
        rings.submobjects.reverse()
        ghost_rings = rings.copy()
        ghost_rings.set_fill(opacity=0.25)
        self.add(ghost_rings, rings, self.radius_group)
        unwrapped = VGroup(*[
            self.get_unwrapped(ring, to_edge=None)
            for ring in rings
        ])
        unwrapped.stretch_to_fit_height(1)
        unwrapped.stretch_to_fit_width(2)
        unwrapped.move_to(ORIGIN, DOWN)
        # Shear the stack of strips into a right triangle before fitting
        # it over the rectangles.
        unwrapped.apply_function(
            lambda p: np.dot(p,
                             np.array([[1, 0, 0], [-1, 1, 0], [0, 0, 1]])
                             ),
            maintain_smoothness=False
        )
        unwrapped.rotate(np.pi/2)
        unwrapped.replace(self.rects, stretch=True)
        self.play(self.rects.fade, 0.8)
        self.play(
            Transform(
                rings, unwrapped,
                run_time=5,
                lag_ratio=0.5,
            ),
            Animation(self.radius_group)
        )
        self.wait()
    #####
    def get_ring_sum(self, rings):
        """Arrange a few rings plus '+ ... +' into a vertical sum; return
        (ring_sum_group, animations that draw it)."""
        arranged_group = VGroup()
        tex_mobs = VGroup()
        for ring in rings:
            ring.generate_target()
            ring.target.set_stroke(width=0)
        for ring in rings[:self.num_rings_in_ring_sum_start]:
            plus = TexMobject("+")
            arranged_group.add(ring.target)
            arranged_group.add(plus)
            tex_mobs.add(plus)
        dots = TexMobject("\\vdots")
        plus = TexMobject("+")
        arranged_group.add(dots, plus)
        tex_mobs.add(dots, plus)
        last_ring = rings[-1]
        arranged_group.add(last_ring.target)
        arranged_group.arrange(DOWN, buff=SMALL_BUFF)
        arranged_group.set_height(FRAME_HEIGHT-1)
        arranged_group.to_corner(DOWN+LEFT, buff=MED_SMALL_BUFF)
        for mob in tex_mobs:
            mob.scale_in_place(0.7)
        # Middle rings are elided: fade them out along the dots.
        middle_rings = rings[self.num_rings_in_ring_sum_start:-1]
        alphas = np.linspace(0, 1, len(middle_rings))
        for ring, alpha in zip(middle_rings, alphas):
            ring.target.set_fill(opacity=0)
            ring.target.move_to(interpolate(
                dots.get_left(), last_ring.target.get_center(), alpha
            ))
        draw_ring_sum_anims = [Write(tex_mobs)]
        draw_ring_sum_anims += [
            MoveToTarget(
                ring,
                run_time=3,
                path_arc=-np.pi/3,
                rate_func=squish_rate_func(smooth, alpha, alpha+0.8)
            )
            for ring, alpha in zip(rings, np.linspace(0, 0.2, len(rings)))
        ]
        draw_ring_sum_anims.append(FadeOut(self.radius_group))
        ring_sum = VGroup(rings, tex_mobs)
        ring_sum.rings = VGroup(*[r.target for r in rings])
        ring_sum.tex_mobs = tex_mobs
        return ring_sum, draw_ring_sum_anims
    def get_area_formula(self, R):
        """Triangle-area derivation (1/2)*b*h = pi R^2 for the given R string."""
        formula = TexMobject(
            "\\text{Area}", "&= \\frac{1}{2}", "b", "h",
            "\\\\ &=", "\\frac{1}{2}", "(%s)" % R, "(2\\pi \\cdot %s)" % R,
            "\\\\ &=", "\\pi ", "%s" % R, "^2"
        )
        formula.set_color_by_tex("b", GREEN, substring=False)
        formula.set_color_by_tex("h", RED, substring=False)
        formula.set_color_by_tex("%s" % R, GREEN)
        # (A duplicate of the following call was removed; it was a no-op.)
        formula.set_color_by_tex("(2\\pi ", RED)
        formula.scale(0.8)
        formula.top_line = VGroup(*formula[:4])
        formula.mid_line = VGroup(*formula[4:8])
        formula.bottom_line = VGroup(*formula[8:])
        return formula
class ThinkLikeAMathematician(TeacherStudentsScene):
    """Students celebrate pi R^2; the teacher asks why it worked."""
    def construct(self):
        area_formula = TexMobject("\\pi", "R", "^2")
        area_formula.set_color_by_tex("R", YELLOW)
        area_formula.move_to(self.get_students(), UP)
        area_formula.set_fill(opacity=0)
        self.play(
            area_formula.shift, 2 * UP,
            area_formula.set_fill, None, 1
        )
        self.change_student_modes(*3 * ["hooray"])
        self.wait(2)
        self.change_student_modes(
            *3 * ["pondering"],
            look_at_arg=self.teacher.eyes,
            added_anims=[PiCreatureSays(
                self.teacher, "But why did \\\\ that work?"
            )]
        )
        self.play(FadeOut(area_formula))
        self.look_at(2 * UP + 4 * LEFT)
        self.wait(5)
class TwoThingsToNotice(TeacherStudentsScene):
    """Teacher flags two properties of dr worth noticing."""
    def construct(self):
        message = TextMobject(
            "Two things to \\\\ note about", "$dr$",
        )
        message.set_color_by_tex("dr", GREEN)
        self.teacher_says(message, run_time=1)
        self.wait(3)
class RecapCircleSolution(GraphRectangles, ReconfigurableScene):
def setup(self):
GraphRectangles.setup(self)
ReconfigurableScene.setup(self)
def construct(self):
self.break_up_circle()
self.show_sum()
self.dr_indicates_spacing()
self.smaller_dr()
self.show_riemann_sum()
self.limiting_riemann_sum()
self.full_precision()
def break_up_circle(self):
self.remove(self.circle)
rings = self.get_rings()
rings.set_stroke(BLACK, 1)
ring_sum, draw_ring_sum_anims = self.get_ring_sum(rings)
hard_problem = TextMobject("Hard problem")
down_arrow = TexMobject("\\Downarrow")
sum_words = TextMobject("Sum of many \\\\ small values")
integral_condition = VGroup(hard_problem, down_arrow, sum_words)
integral_condition.arrange(DOWN)
integral_condition.scale(0.8)
integral_condition.to_corner(UP+RIGHT)
self.add(rings, self.radius_group)
self.play(FadeIn(
integral_condition,
lag_ratio=0.5
))
self.wait()
self.play(*draw_ring_sum_anims)
self.rings = rings
self.integral_condition = integral_condition
def show_sum(self):
visible_rings = [
ring for ring in self.rings if ring.get_fill_opacity() > 0]
radii = self.dR*np.arange(len(visible_rings))
radii[-1] = 3-self.dR
radial_lines = VGroup()
for ring in visible_rings:
radius_line = Line(ORIGIN, ring.R*RIGHT, color=YELLOW)
radius_line.rotate(np.pi/4)
radius_line.shift(ring.get_center())
radial_lines.add(radius_line)
approximations = VGroup()
for ring, radius in zip(visible_rings, radii):
label = TexMobject(
"\\approx", "2\\pi",
"(%s)" % str(radius), "(%s)" % str(self.dR)
)
label[2].set_color(YELLOW)
label[3].set_color(GREEN)
label.scale(0.75)
label.next_to(ring, RIGHT)
approximations.add(label)
approximations[-1].shift(UP+0.5*LEFT)
area_label = TexMobject("2\\pi", "r", "\\, dr")
area_label.set_color_by_tex("r", YELLOW)
area_label.set_color_by_tex("dr", GREEN)
area_label.next_to(approximations, RIGHT, buff=2*LARGE_BUFF)
arrows = VGroup(*[
Arrow(
area_label.get_left(),
approximation.get_right(),
color=WHITE
)
for approximation in approximations
])
self.play(Write(area_label))
self.play(
ShowCreation(arrows, lag_ratio=0),
FadeIn(radial_lines),
*[
ReplacementTransform(
area_label.copy(),
VGroup(*approximation[1:])
)
for approximation in approximations
]
)
self.wait()
self.play(Write(VGroup(*[
approximation[0]
for approximation in approximations
])))
self.wait()
self.area_label = area_label
self.area_arrows = arrows
self.approximations = approximations
def dr_indicates_spacing(self):
r_ticks = VGroup(*[
Line(
self.coords_to_point(r, -self.tick_height),
self.coords_to_point(r, self.tick_height),
color=YELLOW
)
for r in np.arange(0, 3, self.dR)
])
index = int(0.75*len(r_ticks))
brace_ticks = VGroup(*r_ticks[index:index+2])
dr_brace = Brace(brace_ticks, UP, buff=SMALL_BUFF)
dr = self.area_label.get_part_by_tex("dr")
dr_copy = dr.copy()
circle = Circle().replace(dr)
circle.scale_in_place(1.3)
dr_num = self.approximations[0][-1]
self.play(ShowCreation(circle))
self.play(FadeOut(circle))
self.play(ReplacementTransform(
dr.copy(), dr_num,
run_time=2,
path_arc=np.pi/2,
))
self.wait()
self.play(FadeIn(self.x_axis))
self.play(Write(r_ticks, run_time=1))
self.wait()
self.play(
GrowFromCenter(dr_brace),
dr_copy.next_to, dr_brace.copy(), UP
)
self.wait()
self.r_ticks = r_ticks
self.dr_brace_group = VGroup(dr_brace, dr_copy)
def smaller_dr(self):
self.transition_to_alt_config(dR=0.05)
    def show_riemann_sum(self):
        """Morph the circle's rings into Riemann rectangles under 2*pi*r."""
        graph = self.get_graph(lambda r: 2*np.pi*r)
        graph_label = self.get_graph_label(
            graph, "2\\pi r",
            x_val=2.5,
            direction=UP+LEFT
        )
        rects = self.get_riemann_rectangles(
            graph,
            x_min=0,
            x_max=3,
            dx=self.dR
        )
        # Bring in the y-axis while clearing the labels from earlier parts.
        self.play(
            Write(self.y_axis, run_time=2),
            *list(map(FadeOut, [
                self.approximations,
                self.area_label,
                self.area_arrows,
                self.dr_brace_group,
                self.r_ticks,
            ]))
        )
        self.play(
            ReplacementTransform(
                self.rings.copy(), rects,
                run_time=2,
                lag_ratio=0.5
            ),
            Animation(self.x_axis),
        )
        self.play(ShowCreation(graph))
        self.play(Write(graph_label))
        self.wait()
        # Stored for limiting_riemann_sum.
        self.graph = graph
        self.graph_label = graph_label
        self.rects = rects
def limiting_riemann_sum(self):
thinner_rects_list = [
self.get_riemann_rectangles(
self.graph,
x_min=0,
x_max=3,
dx=1./(10*2**n),
stroke_width=1./(2**n),
start_color=self.rects[0].get_fill_color(),
end_color=self.rects[-1].get_fill_color(),
)
for n in range(1, 4)
]
for new_rects in thinner_rects_list:
self.play(
Transform(
self.rects, new_rects,
lag_ratio=0.5,
run_time=2
),
Animation(self.axes),
Animation(self.graph),
)
self.wait()
    def full_precision(self):
        """State that the limit of the sum is exactly the area under a graph."""
        words = TextMobject("Area under \\\\ a graph")
        group = VGroup(TexMobject("\\Downarrow"), words)
        group.arrange(DOWN)
        group.set_color(YELLOW)
        group.scale(0.8)
        # self.integral_condition is created in an earlier scene part
        # (outside this view).
        group.next_to(self.integral_condition, DOWN)
        arc = Arc(start_angle=2*np.pi/3, angle=2*np.pi/3)
        arc.scale(2)
        arc.add_tip()
        # Mirror the tip to make the arc double-headed.
        arc.add(arc[1].copy().rotate(np.pi, RIGHT))
        arc_next_to_group = VGroup(
            self.integral_condition[0][0],
            words[0]
        )
        arc.set_height(
            arc_next_to_group.get_height()-MED_LARGE_BUFF
        )
        arc.next_to(arc_next_to_group, LEFT, SMALL_BUFF)
        self.play(Write(group))
        self.wait()
        self.play(ShowCreation(arc))
        self.wait()
class ExampleIntegralProblems(PiCreatureScene, GraphScene):
    """Distance traveled as a sum of many small v(t)*dt slivers.

    A car drives along a line leaving tick marks every dt; the slivers
    between ticks become Riemann rectangles under the velocity graph,
    and dt is then taken toward zero.
    """
    CONFIG = {
        "dt": 0.2,
        "t_max": 7,
        "x_max": 8,
        "y_axis_height": 5.5,
        "x_axis_label": "$t$",
        "y_axis_label": "",
        "graph_origin": 3*DOWN + 4.5*LEFT
    }

    def construct(self):
        self.write_integral_condition()
        self.show_car()
        self.show_graph()
        self.let_dt_approach_zero()
        self.show_confusion()

    def write_integral_condition(self):
        """Write the guiding slogan at the top of the screen."""
        words = TextMobject(
            "Hard problem $\\Rightarrow$ Sum of many small values"
        )
        words.to_edge(UP)
        self.play(
            Write(words),
            self.pi_creature.change_mode, "raise_right_hand"
        )
        self.wait()
        self.words = words

    def show_car(self):
        """Drive a car across the screen, leaving a tick every dt seconds."""
        car = Car()
        start, end = 3*LEFT+UP, 5*RIGHT+UP
        car.move_to(start)
        line = Line(start, end)
        tick_height = MED_SMALL_BUFF
        # One tick per time step; positions follow the smoothed motion so
        # tick spacing reflects the car's (non-constant) speed.
        ticks = VGroup(*[
            Line(
                p+tick_height*UP/2,
                p+tick_height*DOWN/2,
                color=YELLOW,
                stroke_width=2
            )
            for t in np.arange(0, self.t_max, self.dt)
            for p in [
                line.point_from_proportion(smooth(t/self.t_max))
            ]
        ])
        index = int(len(ticks)/2)
        brace_ticks = VGroup(*ticks[index:index+2])
        brace = Brace(brace_ticks, UP)
        v_dt = TexMobject("v(t)", "dt")
        v_dt.next_to(brace, UP, SMALL_BUFF)
        v_dt.set_color(YELLOW)
        v_dt_brace_group = VGroup(brace, v_dt)
        self.play(
            FadeIn(car),
            self.pi_creature.change_mode, "plain"
        )
        self.play(
            MoveCar(car, end),
            FadeIn(
                ticks,
                lag_ratio=1,
                rate_func=linear,
            ),
            ShowCreation(line),
            FadeIn(
                v_dt_brace_group,
                rate_func=squish_rate_func(smooth, 0.6, 0.8)
            ),
            run_time=self.t_max
        )
        self.wait()
        for mob in v_dt:
            self.play(Indicate(mob))
        self.wait(2)
        self.v_dt_brace_group = v_dt_brace_group
        self.line = line
        self.ticks = ticks
        self.car = car

    def show_graph(self):
        """Turn the inter-tick slivers into rectangles under the v(t) graph."""
        self.setup_axes()
        self.remove(self.axes)
        s_graph = self.get_graph(
            lambda t: 1.8*self.y_max*smooth(t/self.t_max)
        )
        v_graph = self.get_derivative_graph(s_graph)
        rects = self.get_riemann_rectangles(
            v_graph,
            x_min=0,
            x_max=self.t_max,
            dx=self.dt
        )
        rects.set_fill(opacity=0.5)
        # Pre-position copies of the rectangles along the car's line, each
        # squeezed between a pair of adjacent ticks.
        pre_rects = rects.copy()
        pre_rects.rotate(-np.pi/2)
        for index, pre_rect in enumerate(pre_rects):
            # Integer division: these are indices into self.ticks.  True
            # division (`/`) would produce a float and crash when indexing.
            ti1 = len(self.ticks)*index//len(pre_rects)
            ti2 = min(ti1+1, len(self.ticks)-1)
            tick_pair = VGroup(self.ticks[ti1], self.ticks[ti2])
            pre_rect.stretch_to_fit_width(tick_pair.get_width())
            pre_rect.move_to(tick_pair)
        special_rect = rects[int(0.6*len(rects))]
        brace = Brace(special_rect, LEFT, buff=0)
        v_dt_brace_group_copy = self.v_dt_brace_group.copy()
        start_brace, (v_t, dt) = v_dt_brace_group_copy
        self.play(
            FadeIn(
                pre_rects,
                run_time=2,
                lag_ratio=0.5
            ),
            Animation(self.ticks)
        )
        self.play(
            ReplacementTransform(
                pre_rects, rects,
                run_time=3,
                lag_ratio=0.5
            ),
            Animation(self.ticks),
            Write(self.axes, run_time=1)
        )
        self.play(ShowCreation(v_graph))
        self.change_mode("pondering")
        self.wait()
        self.play(
            v_t.next_to, brace, LEFT, SMALL_BUFF,
            dt.next_to, special_rect, DOWN,
            special_rect.set_fill, None, 1,
            ReplacementTransform(start_brace, brace),
        )
        self.wait(3)
        self.v_graph = v_graph
        self.rects = rects
        self.v_dt_brace_group_copy = v_dt_brace_group_copy

    def let_dt_approach_zero(self):
        """Refine the rectangles with successively halved dt values."""
        thinner_rects_list = [
            self.get_riemann_rectangles(
                self.v_graph,
                x_min=0,
                x_max=self.t_max,
                dx=self.dt/(2**n),
                stroke_width=1./(2**n)
            )
            for n in range(1, 4)
        ]
        self.play(
            self.rects.set_fill, None, 1,
            Animation(self.x_axis),
            FadeOut(self.v_dt_brace_group_copy),
        )
        self.change_mode("thinking")
        self.wait()
        for thinner_rects in thinner_rects_list:
            self.play(
                Transform(
                    self.rects, thinner_rects,
                    run_time=2,
                    lag_ratio=0.5
                )
            )
        self.wait()

    def show_confusion(self):
        """A second pi creature appears, and both end up confused."""
        randy = Randolph(color=BLUE_C)
        randy.to_corner(DOWN+LEFT)
        randy.to_edge(LEFT, buff=MED_SMALL_BUFF)
        self.play(FadeIn(randy))
        self.play(
            randy.change_mode, "confused",
            randy.look_at, self.rects
        )
        self.play(
            self.pi_creature.change_mode, "confused",
            self.pi_creature.look_at, randy.eyes
        )
        self.play(Blink(randy))
        self.wait()
class MathematicianPonderingAreaUnderDifferentCurves(PiCreatureScene):
    """A lone pi creature gestures left and right, then ponders."""

    def construct(self):
        # (mode, gaze direction, wait time) for each beat of the scene.
        beats = [
            ("raise_left_hand", UP+LEFT, 4),
            ("raise_right_hand", UP+RIGHT, 4),
            ("pondering", UP+LEFT, 2),
        ]
        for mode, direction, pause in beats:
            self.play(
                self.pi_creature.change_mode, mode,
                self.pi_creature.look, direction
            )
            self.wait(pause)

    def create_pi_creature(self):
        """Use a blue Randolph at the bottom edge as the scene's creature."""
        randy = Randolph(color=BLUE_C)
        randy.to_edge(DOWN)
        self.pi_creature = randy
        return self.pi_creature
class AreaUnderParabola(GraphScene):
    """Introduce A(x), the area under x^2 from 0 to a movable endpoint x."""
    CONFIG = {
        "x_max": 4,
        "x_labeled_nums": list(range(-1, 5)),
        "y_min": 0,
        "y_max": 15,
        "y_tick_frequency": 2.5,
        "y_labeled_nums": list(range(5, 20, 5)),
        "n_rect_iterations": 6,
        "default_right_x": 3,
        "func": lambda x: x**2,
        "graph_label_tex": "x^2",
        "graph_label_x_val": 3.8,
    }

    def construct(self):
        self.setup_axes()
        self.show_graph()
        self.show_area()
        self.ask_about_area()
        self.show_confusion()
        self.show_variable_endpoint()
        self.name_integral()

    def show_graph(self):
        """Draw the graph of self.func with its label."""
        graph = self.get_graph(self.func)
        graph_label = self.get_graph_label(
            graph, self.graph_label_tex,
            direction=LEFT,
            x_val=self.graph_label_x_val,
        )
        self.play(ShowCreation(graph))
        self.play(Write(graph_label))
        self.wait()
        self.graph = graph
        self.graph_label = graph_label

    def show_area(self):
        """Fill the region under the graph with ever-thinner rectangles."""
        dx_list = [0.25/(2**n) for n in range(self.n_rect_iterations)]
        rect_lists = [
            self.get_riemann_rectangles(
                self.graph,
                x_min=0,
                x_max=self.default_right_x,
                dx=dx,
                stroke_width=4*dx,
            )
            for dx in dx_list
        ]
        rects = rect_lists[0]
        foreground_mobjects = [self.axes, self.graph]
        self.play(
            DrawBorderThenFill(
                rects,
                run_time=2,
                rate_func=smooth,
                lag_ratio=0.5,
            ),
            *list(map(Animation, foreground_mobjects))
        )
        self.wait()
        for new_rects in rect_lists[1:]:
            self.play(
                Transform(
                    rects, new_rects,
                    lag_ratio=0.5,
                ),
                *list(map(Animation, foreground_mobjects))
            )
        self.wait()
        self.rects = rects
        self.dx = dx_list[-1]
        self.foreground_mobjects = foreground_mobjects

    def ask_about_area(self):
        """Pose the question "Area?" with dashed lines bounding the region."""
        rects = self.rects
        question = TextMobject("Area?")
        question.move_to(rects.get_top(), DOWN)
        # Integer division: rectangle index must be an int.  True division
        # (`/`) would produce a float and raise on indexing in Python 3.
        mid_rect = rects[2*len(rects)//3]
        arrow = Arrow(question.get_bottom(), mid_rect.get_center())
        v_lines = VGroup(*[
            DashedLine(
                FRAME_HEIGHT*UP, ORIGIN,
                color=RED
            ).move_to(self.coords_to_point(x, 0), DOWN)
            for x in (0, self.default_right_x)
        ])
        self.play(
            Write(question),
            ShowCreation(arrow)
        )
        self.wait()
        self.play(ShowCreation(v_lines, run_time=2))
        self.wait()
        self.foreground_mobjects += [question, arrow]
        self.question = question
        self.question_arrow = arrow
        self.v_lines = v_lines

    def show_confusion(self):
        """Mortimer appears briefly, confused about how to find the area."""
        morty = Mortimer()
        morty.to_corner(DOWN+RIGHT)
        self.play(FadeIn(morty))
        self.play(
            morty.change_mode, "confused",
            morty.look_at, self.question,
        )
        self.play(morty.look_at, self.rects.get_bottom())
        self.play(Blink(morty))
        self.play(morty.look_at, self.question)
        self.wait()
        self.play(Blink(morty))
        self.play(FadeOut(morty))

    def show_variable_endpoint(self):
        """Make the right endpoint movable and rename the question to A(x)."""
        triangle = RegularPolygon(
            n=3,
            start_angle=np.pi/2,
            stroke_width=0,
            fill_color=WHITE,
            fill_opacity=1,
        )
        triangle.set_height(0.25)
        triangle.move_to(self.v_lines[1].get_bottom(), UP)
        x_label = TexMobject("x")
        x_label.next_to(triangle, DOWN)
        self.right_point_slider = VGroup(triangle, x_label)
        A_func = TexMobject("A(x)")
        A_func.move_to(self.question, DOWN)
        # Drop the axis numbers so they don't clash with the slider's "x".
        self.play(FadeOut(self.x_axis.numbers))
        self.x_axis.remove(*self.x_axis.numbers)
        self.foreground_mobjects.remove(self.axes)
        self.play(DrawBorderThenFill(self.right_point_slider))
        self.move_right_point_to(2)
        self.wait()
        self.move_right_point_to(self.default_right_x)
        self.wait()
        self.play(ReplacementTransform(self.question, A_func))
        self.wait()
        self.A_func = A_func

    def name_integral(self):
        """Label A(x) as the "integral" of the graphed function."""
        f_tex = "$%s$" % self.graph_label_tex
        words = TextMobject("``Integral'' of ", f_tex)
        words.set_color_by_tex(f_tex, self.graph_label.get_color())
        brace = Brace(self.A_func, UP)
        words.next_to(brace, UP)
        self.play(
            Write(words),
            GrowFromCenter(brace)
        )
        self.wait()
        for x in 4, 2, self.default_right_x:
            self.move_right_point_to(x, run_time=2)
        self.integral_words_group = VGroup(brace, words)

    ####
    def move_right_point_to(self, target_x, **kwargs):
        """Animate the endpoint, slider, and rectangles to x = target_x."""
        v_line = self.v_lines[1]
        slider = self.right_point_slider
        rects = self.rects
        curr_x = self.x_axis.point_to_number(v_line.get_bottom())
        group = VGroup(rects, v_line, slider)

        def update_group(group, alpha):
            # Rebuild the rectangles for the interpolated endpoint; dx
            # scales with x so the rectangle count stays constant.
            rects, v_line, slider = group
            new_x = interpolate(curr_x, target_x, alpha)
            new_rects = self.get_riemann_rectangles(
                self.graph,
                x_min=0,
                x_max=new_x,
                dx=self.dx*new_x/3.0,
                stroke_width=rects[0].get_stroke_width(),
            )
            point = self.coords_to_point(new_x, 0)
            v_line.move_to(point, DOWN)
            slider.move_to(point, UP)
            Transform(rects, new_rects).update(1)
            return VGroup(rects, v_line, slider)

        self.play(
            UpdateFromAlphaFunc(
                group, update_group,
                **kwargs
            ),
            *list(map(Animation, self.foreground_mobjects))
        )
class WhoCaresAboutArea(TeacherStudentsScene):
    """A student scoffs at area; the teacher redirects everyone's attention."""

    def construct(self):
        self.student_says(
            "Who cares!?!", target_mode="angry",
        )
        self.play(self.teacher.change_mode, "guilty")
        self.wait()
        # Off-screen spot everyone ends up looking toward.
        focus = 2*RIGHT+3*UP
        self.play(
            RemovePiCreatureBubble(self.students[1]),
            self.teacher.change_mode, "raise_right_hand",
            self.teacher.look_at, focus
        )
        self.change_student_modes(
            "pondering", "pondering", "pondering",
            look_at_arg=focus,
            added_anims=[self.teacher.look_at, focus]
        )
        self.wait(3)
class PlayWithThisIdea(TeacherStudentsScene):
    """Teacher encourages toying with the A(x) <-> x^2 relationship."""

    def construct(self):
        self.teacher_says(
            "Play with", "the", "thought!",
            target_mode="hooray"
        )
        self.change_student_modes("happy", "happy", "happy")
        self.wait()
        relation = TexMobject("A(x)", "\\leftrightarrow", "x^2")
        relation.set_color_by_tex("x^2", BLUE)
        self.teacher_says(relation, target_mode="sassy")
        self.change_student_modes("thinking", "thinking", "thinking")
        self.wait(2)
class PlayingTowardsDADX(AreaUnderParabola, ReconfigurableScene):
    """Nudge the endpoint of A(x) by dx and build up to dA/dx ~ x^2.

    Continues AreaUnderParabola (replayed in fast-forward), then examines
    the thin sliver of new area dA, writes dA ~ x^2 dx, rearranges it to
    dA/dx ~ x^2, and works a concrete x = 3 vs x = 3.001 example.
    """
    CONFIG = {
        "n_rect_iterations": 6,
        "deriv_dx": 0.2,
        "graph_origin": 2.5*DOWN + 6*LEFT,
    }

    def setup(self):
        # Both parent scenes need their setup hooks run.
        AreaUnderParabola.setup(self)
        ReconfigurableScene.setup(self)

    def construct(self):
        self.fast_forward_to_end_of_previous_scene()
        self.nudge_x()
        self.describe_sliver()
        self.shrink_dx()
        self.write_dA_dx()
        self.dA_remains_a_mystery()
        self.write_example_inputs()
        self.show_dA_dx_in_detail()
        self.show_smaller_x()

    def fast_forward_to_end_of_previous_scene(self):
        """Replay AreaUnderParabola instantly so its end state is on screen."""
        self.force_skipping()
        AreaUnderParabola.construct(self)
        self.revert_to_original_skipping_status()

    def nudge_x(self):
        """Move the right endpoint from x to x + deriv_dx, darkening old area."""
        shadow_rects = self.rects.copy()
        shadow_rects.set_fill(BLACK, opacity=0.5)
        # Keep a frozen copy of the endpoint line; together with the live
        # line it bounds the new sliver.
        original_v_line = self.v_lines[1].copy()
        right_v_lines = VGroup(original_v_line, self.v_lines[1])
        curr_x = self.x_axis.point_to_number(original_v_line.get_bottom())
        self.add(original_v_line)
        self.foreground_mobjects.append(original_v_line)
        self.move_right_point_to(curr_x + self.deriv_dx)
        self.play(
            FadeIn(shadow_rects),
            *list(map(Animation, self.foreground_mobjects))
        )
        self.shadow_rects = shadow_rects
        self.right_v_lines = right_v_lines

    def describe_sliver(self):
        """Label the sliver's width dx, its area dA, and its height x^2."""
        dx_brace = Brace(self.right_v_lines, DOWN, buff=0)
        dx_label = dx_brace.get_text("$dx$")
        dx_group = VGroup(dx_brace, dx_label)
        # Approximate the sliver as a rectangle of the neighboring height.
        dA_rect = Rectangle(
            width=self.right_v_lines.get_width(),
            height=self.shadow_rects[-1].get_height(),
            stroke_width=0,
            fill_color=YELLOW,
            fill_opacity=0.5,
        ).move_to(self.right_v_lines, DOWN)
        dA_label = TexMobject("d", "A")
        dA_label.next_to(dA_rect, RIGHT, MED_LARGE_BUFF, UP)
        dA_label.set_color(GREEN)
        dA_arrow = Arrow(
            dA_label.get_bottom()+MED_SMALL_BUFF*DOWN,
            dA_rect.get_center(),
            buff=0,
            color=WHITE
        )
        # "d" and "A" expand into "difference in Area".
        difference_in_area = TextMobject(
            "d", "ifference in ", "A", "rea",
            arg_separator=""
        )
        difference_in_area.set_color_by_tex("d", GREEN)
        difference_in_area.set_color_by_tex("A", GREEN)
        difference_in_area.scale(0.7)
        difference_in_area.next_to(dA_label, UP, MED_SMALL_BUFF, LEFT)
        side_brace = Brace(dA_rect, LEFT, buff=0)
        graph_label_copy = self.graph_label.copy()
        self.play(
            FadeOut(self.right_point_slider),
            FadeIn(dx_group)
        )
        self.play(Indicate(dx_label))
        self.wait()
        self.play(ShowCreation(dA_arrow))
        self.wait()
        self.play(Write(dA_label, run_time=2))
        self.wait()
        self.play(
            ReplacementTransform(dA_label[0].copy(), difference_in_area[0]),
            ReplacementTransform(dA_label[1].copy(), difference_in_area[2]),
            *list(map(FadeIn, [difference_in_area[1], difference_in_area[3]]))
        )
        self.wait(2)
        self.play(FadeIn(dA_rect), Animation(dA_arrow))
        self.play(GrowFromCenter(side_brace))
        self.play(
            graph_label_copy.set_color, WHITE,
            graph_label_copy.next_to, side_brace, LEFT, SMALL_BUFF
        )
        self.wait()
        self.play(Indicate(dx_group))
        self.wait()
        self.play(FadeOut(difference_in_area))
        self.dx_group = dx_group
        self.dA_rect = dA_rect
        self.dA_label = dA_label
        self.graph_label_copy = graph_label_copy

    def shrink_dx(self, **kwargs):
        """Transition to the same scene with a much smaller deriv_dx."""
        self.transition_to_alt_config(
            deriv_dx=0.05,
            transformation_kwargs={"run_time": 2},
            **kwargs
        )

    def write_dA_dx(self):
        """Assemble dA ~ x^2 dx, then rearrange it into dA/dx ~ x^2."""
        f_tex = self.graph_label_tex
        equation = TexMobject("dA", "\\approx", f_tex, "dx")
        equation.to_edge(RIGHT).shift(3*UP)
        deriv_equation = TexMobject(
            "{dA", "\\over \\,", "dx}", "\\approx", f_tex
        )
        deriv_equation.move_to(equation, UP+LEFT)
        for tex_mob in equation, deriv_equation:
            tex_mob.set_color_by_tex(
                "dA", self.dA_label.get_color()
            )
        dA = VGroup(self.dA_label[0][0], self.dA_label[1][0])
        x_squared = self.graph_label_copy
        dx = self.dx_group[1]
        # Pull each factor of the equation from its on-screen counterpart.
        self.play(*[
            ReplacementTransform(
                mob.copy(),
                equation.get_part_by_tex(tex),
                run_time=2
            )
            for mob, tex in [(x_squared, f_tex), (dx, "dx"), (dA, "dA")]
        ])
        self.play(Write(equation.get_part_by_tex("approx")))
        self.wait()
        for tex, mob in (f_tex, x_squared), ("dx", dx):
            self.play(*list(map(Indicate, [
                equation.get_part_by_tex(tex),
                mob
            ])))
        self.wait(2)
        self.play(*[
            ReplacementTransform(
                equation.get_part_by_tex(tex),
                deriv_equation.get_part_by_tex(tex),
                run_time=2,
            )
            for tex in ("dA", "approx", f_tex, "dx")
        ] + [
            Write(deriv_equation.get_part_by_tex("over"))
        ])
        self.wait(2)
        self.shrink_dx(return_to_original_configuration=False)
        self.wait()
        self.deriv_equation = deriv_equation

    def dA_remains_a_mystery(self):
        """Randolph is puzzled by A(x) but intrigued by dA/dx."""
        randy = Randolph(color=BLUE_C)
        randy.to_corner(DOWN+LEFT)
        randy.look_at(self.A_func)
        A_circle, dA_circle = [
            Circle(color=color).replace(
                mob, stretch=True
            ).scale_in_place(1.5)
            for mob, color in [(self.A_func, RED), (self.deriv_equation, GREEN)]
        ]
        q_marks = TexMobject("???")
        q_marks.next_to(A_circle, UP)
        self.play(
            FadeOut(self.integral_words_group),
            FadeIn(randy)
        )
        self.play(
            ShowCreation(A_circle),
            randy.change_mode, "confused"
        )
        self.play(Write(q_marks, run_time=2))
        self.play(Blink(randy))
        self.wait()
        self.play(
            randy.change_mode, "surprised",
            randy.look_at, dA_circle,
            ReplacementTransform(A_circle, dA_circle)
        )
        self.play(Blink(randy))
        self.wait()
        self.play(*list(map(FadeOut, [randy, q_marks, dA_circle])))

    def write_example_inputs(self):
        """Label the two endpoint lines with x = 3 and x = 3.001."""
        d = self.default_right_x
        three = TexMobject("x =", "%d" % d)
        three_plus_dx = TexMobject("x = ", "%d.001" % d)
        labels_lines_vects = list(zip(
            [three, three_plus_dx],
            self.right_v_lines,
            [LEFT, RIGHT]
        ))
        for label, line, vect in labels_lines_vects:
            point = line.get_bottom()
            label.next_to(point, DOWN+vect, MED_SMALL_BUFF)
            label.shift(LARGE_BUFF*vect)
            label.arrow = Arrow(
                label, point,
                buff=SMALL_BUFF,
                color=WHITE,
                tip_length=0.15
            )
            line_copy = line.copy()
            line_copy.set_color(YELLOW)
            self.play(
                FadeIn(label),
                FadeIn(label.arrow),
                ShowCreation(line_copy)
            )
            self.play(FadeOut(line_copy))
        self.wait()
        self.three = three
        self.three_plus_dx = three_plus_dx

    def show_dA_dx_in_detail(self):
        """Write the concrete quotient (A(3.001)-A(3))/0.001 ~ 3^2."""
        d = self.default_right_x
        expression = TexMobject(
            "{A(", "%d.001" % d, ") ", "-A(", "%d" % d, ")",
            "\\over \\,", "0.001}",
            "\\approx", "%d" % d, "^2"
        )
        expression.scale(0.9)
        expression.next_to(
            self.deriv_equation, DOWN, MED_LARGE_BUFF
        )
        expression.to_edge(RIGHT)
        self.play(
            ReplacementTransform(
                self.three_plus_dx.get_part_by_tex("%d.001" % d).copy(),
                expression.get_part_by_tex("%d.001" % d)
            ),
            Write(VGroup(
                expression.get_part_by_tex("A("),
                expression.get_part_by_tex(")"),
            )),
        )
        self.wait()
        self.play(
            ReplacementTransform(
                self.three.get_part_by_tex("%d" % d).copy(),
                expression.get_part_by_tex("%d" % d, substring=False)
            ),
            Write(VGroup(
                expression.get_part_by_tex("-A("),
                expression.get_parts_by_tex(")")[1],
            )),
        )
        self.wait(2)
        self.play(
            Write(expression.get_part_by_tex("over")),
            ReplacementTransform(
                expression.get_part_by_tex("%d.001" % d).copy(),
                expression.get_part_by_tex("0.001"),
            )
        )
        self.wait()
        self.play(
            Write(expression.get_part_by_tex("approx")),
            ReplacementTransform(
                self.graph_label_copy.copy(),
                VGroup(*expression[-2:]),
                run_time=2
            )
        )
        self.wait()

    def show_smaller_x(self):
        """Replay the whole construction with a smaller endpoint and dx."""
        self.transition_to_alt_config(
            default_right_x=2,
            deriv_dx=0.04,
            transformation_kwargs={"run_time": 2}
        )
class AlternateAreaUnderCurve(PlayingTowardsDADX):
    """Rerun the dA/dx story for a generic curve f(x) instead of x^2,
    then name dA/dx the "derivative" of A.
    """
    CONFIG = {
        "func": lambda x: (x-2)**3 - 3*(x-2) + 6,
        "graph_label_tex": "f(x)",
        "deriv_dx": 0.1,
        "x_max": 5,
        "x_axis_width": 11,
        "graph_label_x_val": 4.5,
    }

    def construct(self):
        # Superclass parts to skip
        self.force_skipping()
        self.setup_axes()
        self.show_graph()
        self.show_area()
        self.ask_about_area()
        self.show_confusion()
        # Superclass parts to show
        self.revert_to_original_skipping_status()
        self.show_variable_endpoint()
        self.name_integral()
        self.nudge_x()
        self.describe_sliver()
        self.write_dA_dx()
        # New animations
        self.approximation_improves_for_smaller_dx()
        self.name_derivative()

    def approximation_improves_for_smaller_dx(self):
        """Note that dA/dx ~ f(x) tightens as dx -> 0, then shrink dx."""
        color = YELLOW
        approx = self.deriv_equation.get_part_by_tex("approx")
        dx_to_zero_words = TextMobject(
            "Gets better \\\\ as",
            "$dx \\to 0$"
        )
        dx_to_zero_words.set_color_by_tex("dx", color)
        dx_to_zero_words.next_to(approx, DOWN, 1.5*LARGE_BUFF)
        arrow = Arrow(dx_to_zero_words, approx, color=color)
        self.play(
            approx.set_color, color,
            ShowCreation(arrow),
            FadeIn(dx_to_zero_words),
        )
        self.wait()
        self.transition_to_alt_config(
            deriv_dx=self.deriv_dx/4.0,
            transformation_kwargs={"run_time": 2}
        )
        self.dx_to_zero_words = dx_to_zero_words
        self.dx_to_zero_words_arrow = arrow

    def name_derivative(self):
        """Box dA/dx and caption it as the "derivative" of A."""
        deriv_words = TextMobject("``Derivative'' of $A$")
        deriv_words.scale(0.9)
        deriv_words.to_edge(UP+RIGHT)
        moving_group = VGroup(
            self.deriv_equation,
            self.dx_to_zero_words,
            self.dx_to_zero_words_arrow,
        )
        moving_group.generate_target()
        moving_group.target.next_to(deriv_words, DOWN, LARGE_BUFF)
        moving_group.target.to_edge(RIGHT)
        self.play(
            FadeIn(deriv_words),
            MoveToTarget(moving_group)
        )
        # The first three tex parts form the dA/dx fraction.
        dA_dx = VGroup(*self.deriv_equation[:3])
        box = Rectangle(color=GREEN)
        box.replace(dA_dx, stretch=True)
        box.scale_in_place(1.3)
        brace = Brace(box, UP)
        faders = VGroup(
            self.dx_to_zero_words[0],
            self.dx_to_zero_words_arrow
        )
        dx_to_zero = self.dx_to_zero_words[1]
        self.play(*list(map(FadeIn, [box, brace])))
        self.wait()
        self.play(
            FadeOut(faders),
            dx_to_zero.next_to, box, DOWN
        )
        self.wait()

    ########
    def show_smaller_x(self):
        # Deliberate no-op: disables the parent-class step for this scene.
        return

    def shrink_dx(self, **kwargs):
        # Deliberate no-op: dx shrinking is handled by
        # approximation_improves_for_smaller_dx instead.
        return
class NextVideoWrapper(Scene):
    """Frame a 16:9 screen rectangle under chapter titles, then swap titles."""

    def construct(self):
        screen = Rectangle(height=9, width=16)
        screen.set_height(1.5*FRAME_Y_RADIUS)
        chapters = [
            (2, "The paradox of the derivative"),
            (3, "Derivative formulas through geometry"),
        ]
        titles = []
        for number, subtitle in chapters:
            title = TextMobject("Chapter %d:" % number, subtitle)
            title.to_edge(UP)
            titles.append(title)
        screen.next_to(VGroup(*titles), DOWN)
        self.add(titles[0])
        self.play(ShowCreation(screen))
        self.wait(3)
        self.play(Transform(*titles))
        self.wait(3)
class ProblemSolvingTool(TeacherStudentsScene):
    """The teacher pitches the derivative as a problem-solving tool."""

    def construct(self):
        message = """
            The derivative is a
            problem-solving tool
        """
        self.teacher_says(message)
        self.wait(3)
class FundamentalTheorem(Scene):
    """Tease the fundamental theorem: dA/dx = x^2 beside a double arrow."""

    def construct(self):
        title = TextMobject("""
            Fundamental theorem of calculus
        """)
        title.to_edge(UP)
        arrow = DoubleArrow(LEFT, RIGHT)
        arrow.shift(2*RIGHT)
        derivative_eq = TexMobject(
            "{dA", "\\over \\,", "dx}", "=", "x^2"
        )
        derivative_eq.set_color_by_tex("dA", GREEN)
        derivative_eq.next_to(arrow, RIGHT)
        self.play(ShowCreation(arrow))
        self.wait()
        self.play(Write(derivative_eq))
        self.wait()
        self.play(Write(title))
        self.wait()
class NextVideos(TeacherStudentsScene):
    """Preview the video series, then tell a student "you could have
    invented this" while everything else fades.
    """
    def construct(self):
        series = VideoSeries()
        series.to_edge(UP)
        this_video = series[0]
        this_video.set_color(YELLOW)
        self.add(series)
        self.teacher_says(
            "That's a high-level view"
        )
        self.wait()
        self.play(
            RemovePiCreatureBubble(
                self.teacher,
                target_mode="raise_right_hand",
                look_at_arg=this_video,
            ),
            *it.chain(*[
                [pi.change_mode, "pondering", pi.look_at, this_video]
                for pi in self.get_students()
            ])
        )
        self.play(*[
            ApplyMethod(pi.look_at, series)
            for pi in self.get_pi_creatures()
        ])
        # Ripple each video icon downward with staggered timing.
        self.play(*[
            ApplyMethod(
                video.shift, 0.5*video.get_height()*DOWN,
                run_time=3,
                rate_func=squish_rate_func(
                    there_and_back, alpha, alpha+0.3
                )
            )
            for video, alpha in zip(series, np.linspace(0, 0.7, len(series)))
        ])
        self.wait()
        student = self.get_students()[1]
        # Temporarily remove the student so the "everything" group (and
        # thus the upcoming fade) excludes them, then add them back.
        self.remove(student)
        everything = VGroup(*self.get_top_level_mobjects())
        self.add(student)
        words = TextMobject("""
            You could have
            invented this.
        """)
        words.next_to(student, UP, LARGE_BUFF)
        self.play(self.teacher.change_mode, "plain")
        self.play(
            everything.fade, 0.75,
            student.change_mode, "plain"
        )
        self.play(
            Write(words),
            student.look_at, words,
        )
        self.play(
            student.change_mode, "confused",
            student.look_at, words
        )
        self.wait(3)
        self.play(student.change_mode, "thinking")
        self.wait(4)
class Chapter1PatreonThanks(PatreonThanks):
    """Patron credits for chapter 1; layout and animation come from
    the PatreonThanks base class."""
    CONFIG = {
        "specific_patrons": [
            "Ali Yahya",
            "CrypticSwarm",
            "Juan Benet",
            "Yu Jun",
            "Othman Alikhan",
            "Markus Persson",
            "Joseph John Cox",
            "Luc Ritchie",
            "Einar Johansen",
            "Rish Kundalia",
            "Achille Brighton",
            "Kirk Werklund",
            "Ripta Pasay",
            "Felipe Diniz",
        ],
        # Slightly shrink names to fit this list on screen.
        "patron_scale_val": 0.9
    }
class EndScreen(PiCreatureScene):
    """End card: "Clicky stuffs" title while the pi creature gestures at
    the (overlaid) links around the screen."""
    CONFIG = {
        "seconds_to_blink": 3,
    }
    def construct(self):
        words = TextMobject("Clicky stuffs")
        words.scale(1.5)
        words.next_to(self.pi_creature, UP)
        words.to_edge(UP)
        self.play(
            FadeIn(
                words,
                run_time=2,
                lag_ratio=0.5
            ),
            self.pi_creature.change_mode, "hooray"
        )
        self.wait()
        # Poses and gaze targets, one per end-card element.
        # NOTE(review): the ("thinking", 5*RIGHT+2*DOWN) entry is repeated —
        # presumably an intentional hold on that corner; confirm.
        mode_point_pairs = [
            ("raise_left_hand", 5*LEFT+3*UP),
            ("raise_right_hand", 5*RIGHT+3*UP),
            ("thinking", 5*LEFT+2*DOWN),
            ("thinking", 5*RIGHT+2*DOWN),
            ("thinking", 5*RIGHT+2*DOWN),
            ("happy", 5*LEFT+3*UP),
            ("raise_right_hand", 5*RIGHT+3*UP),
        ]
        for mode, point in mode_point_pairs:
            self.play(self.pi_creature.change, mode, point)
            self.wait()
        self.wait(3)
    def create_pi_creature(self):
        """Place a default Randolph low and slightly left of center."""
        self.pi_creature = Randolph()
        self.pi_creature.shift(2*DOWN + 1.5*LEFT)
        return self.pi_creature
class Thumbnail(AlternateAreaUnderCurve):
    """Static thumbnail: unlabeled axes, Riemann rectangles, big title."""

    CONFIG = {
        "x_axis_label": "",
        "y_axis_label": "",
        "graph_origin": 2.4*DOWN + 3*LEFT,
    }

    def construct(self):
        self.setup_axes()
        # Strip tick labels; the thumbnail should be purely pictorial.
        self.remove(*self.x_axis.numbers)
        self.remove(*self.y_axis.numbers)
        curve = self.get_graph(self.func)
        area_rects = self.get_riemann_rectangles(
            curve,
            x_min=0,
            x_max=4,
            dx=0.25,
            start_color=BLUE_E,
        )
        title = TextMobject("""
            Essence of
            calculus
        """)
        title.set_width(9)
        title.to_edge(UP)
        self.add(curve, area_rects, title)
|
#!/usr/bin/env python3
"""
Sums the contributions for all candidates in each race by election year
(mayor, city council, city attorney) and writes it to
/src/assets/candidates/{year}/campaign_race_totals.json as a JSON object.
The keys are "mayor", "city council", and "city attorney" respectively.
The key "last update" is the string date of when it was last updated,
in MM/DD/YYYY format.
The sum is rounded to the nearest whole number.
"""
import datetime
import glob
import json
import os
import pathlib
from typing import NamedTuple
from shared_calculations import DIRECTORY
class CandidateRaceContributionSums(NamedTuple):
    """
    The sums of the candidates' contributions for each race.
    This also includes a last updated date.
    """
    # Total raised (whole dollars) across all mayoral candidates.
    mayor: int
    # Total raised across all city council candidates.
    city_council: int
    # Total raised across all city attorney candidates.
    city_attorney: int
    # Date these sums were computed.
    last_update: datetime.date
def directories_under(path):
    """Yield the names of the directories directly under `path`."""
    for entry in os.scandir(path):
        if entry.is_dir():
            yield entry.name
def sum_race_contributions(
    base_directory=DIRECTORY,
    mayor_directory="mayor",
    city_council_directory="city_council*",
    city_attorney_directory="city_attorney",
):
    """
    Sums the contributions for each race by election year.

    Each race directory parameter may be a glob pattern matching several
    directories; they are looked up under `base_directory` as
    "base_directory/{year}/{pattern}".

    :param base_directory: The directory containing folders for each election year
    :param mayor_directory: The directory the mayoral race candidate JSON
        files are in.
    :param city_council_directory: The directory the city council race
        candidate JSON files are in.
    :param city_attorney_directory: The directory the city attorney race
        candidate JSON files are in.
    :returns: A dictionary with the string election year as the key and
        a `CandidateRaceContributionSums` object as the value.
    """
    race_directories = (
        mayor_directory,
        city_council_directory,
        city_attorney_directory,
    )
    year_sums = {}
    for year in directories_under(base_directory):
        race_totals = []
        for race_directory in race_directories:
            total = 0
            pattern = f"{base_directory}/{year}/{race_directory}"
            for expanded_path in glob.iglob(pattern):
                for json_file_path in pathlib.Path(expanded_path).rglob("*.json"):
                    with open(json_file_path) as file:
                        candidate_dict = json.load(file)
                    # Candidates without a "raised vs spent" chart
                    # contribute nothing to the total.
                    if "raised vs spent" in candidate_dict:
                        total += int(
                            candidate_dict["raised vs spent"][0]["Raised"]
                        )
            race_totals.append(total)
        year_sums[year] = CandidateRaceContributionSums(
            *race_totals, datetime.date.today()
        )
    return year_sums
def to_json(contribution_sums, path=DIRECTORY, file_name="campaign_race_totals.json"):
    """
    Writes the race contributions to `campaign_race_totals.json`

    One file is written per election year, under `path/{year}/`.

    :param contribution_sums: A dictionary with keys of the election year
        and values of the corresponding `CandidateRaceContributionSums` object.
    :param path: The path that contains folders for each election year.
    :param file_name: The name of the output file
    :returns: None.
    """
    # The loop variable previously shadowed the `contribution_sums`
    # parameter while iterating it; `year_sums` is one year's entry.
    for year, year_sums in contribution_sums.items():
        race_sums = {
            "mayor": str(year_sums.mayor),
            "city council": str(year_sums.city_council),
            "city attorney": str(year_sums.city_attorney),
            # MM/DD/YYYY, e.g. "08/20/2021".
            "last update": year_sums.last_update.strftime("%m/%d/%Y"),
        }
        with open(f"{path}/{year}/{file_name}", "w") as file:
            json.dump(race_sums, file, indent=2)
            file.write("\n")
if __name__ == "__main__":
    # Compute per-race totals for every election year and write them to disk.
    to_json(sum_race_contributions())
|
# TODO: To be removed, replaced by CurrencyUnitsCalculation
import locale
from calculate_anything.calculation.base import _Calculation
from calculate_anything.query.result import QueryResult
from calculate_anything.constants import FLAGS
class CurrencyCalculation(_Calculation):
    """One currency-conversion result, renderable as a QueryResult."""

    def __init__(self, value=None, error=None, order=0, rate=None, date=None, currency_from=None, currency_to=None):
        super().__init__(value=value, error=error, order=order)
        # Conversion metadata used only for the result's description line.
        self.rate = rate
        self.date = date
        self.currency_from = currency_from
        self.currency_to = currency_to

    @_Calculation.Decorators.handle_error_results
    def to_query_result(self):
        """Build the display item: formatted amount, rate text, flag icon."""
        source = self.currency_from
        target = self.currency_to
        if source == target:
            description = ''
        elif self.date:
            description = '1 {} = {:f} {} as of {}'.format(
                source, self.rate, target, self.date)
        else:
            description = '1 {} = {:f} {}'.format(
                source, self.rate, target)
        if target in FLAGS:
            icon = 'images/flags/{}'.format(FLAGS[target])
        else:
            icon = 'images/currency.svg'
        amount = locale.currency(
            self.value, symbol='', grouping=True)
        name = '{} {}'.format(amount, target)
        return QueryResult(
            icon=icon,
            name=name,
            description=description,
            clipboard=name,
            value=amount,
            order=self.order
        )
|
import pytest, requests_mock, subprocess, tarfile, os, json, shutil
from components.update import update
from components.cron import cron
from components.movies import movies
from components.updateip import updateip
#~@pytest.mark.skip(reason="this is how you skip tests")
# Load the shared test configuration once at import time so every test
# below can reference paths like `config['moviesDir']`.
with open('config.json', 'r') as config_file:
    data=config_file.read()
config = json.loads(data)
print(config)
def test_ota(requests_mock):
    """OTA path: serve a tarball of the current tree; update() unpacks it."""
    # Build a tarball of the working tree to use as the fake OTA payload.
    # The `with` block closes the archive; the old explicit tar.close()
    # was redundant.
    with tarfile.open('test/vanComputer.tar.gz', "w:gz") as tar:
        tar.add('.')
    with open('test/vanComputer.tar.gz', 'rb') as f:
        requests_mock.get('https://www.always-onward.com/api/device/update', content=f.read(), status_code=200)
    result = update()
    assert result == 'Update Complete'
    os.remove("test/vanComputer.tar.gz")
def test_ota_not_needed(requests_mock):
    """A 204 from the update endpoint means no new software is available."""
    requests_mock.get('https://www.always-onward.com/api/device/update', text='', status_code=204)
    result = update()
    assert result == 'Software update not needed'
def test_cron_update(requests_mock):
    """cron() installs the hourly crontab entry, and is idempotent."""
    expected = '@hourly bash '+os.getcwd()+'/cron.sh >> '+os.getcwd()+'/cron.log 2>&1 # vanComputer'
    # First call installs the entry; the second must not duplicate it.
    assert cron() == expected
    assert cron() == expected
    # cleanup
    subprocess.run(['crontab', '-r'])
def test_update_ip(requests_mock):
    """updateip() posts the public IP and returns the DNS-change response."""
    requests_mock.post('https://www.always-onward.com/api/device/updateip', text='{"Id":"/change/C03973962VN4BRB0MCVPA","Status":"PENDING","SubmittedAt":"2021-08-20T14:06:06.160Z","Comment":"Routing for vanComputer"}')
    requests_mock.get('https://api.ipify.org/', text='8.8.8.8')
    result = updateip()
    assert result == '{"Id":"/change/C03973962VN4BRB0MCVPA","Status":"PENDING","SubmittedAt":"2021-08-20T14:06:06.160Z","Comment":"Routing for vanComputer"}'
    # cleanup
    subprocess.run(['crontab', '-r'])
def test_movies(requests_mock):
    """movies() downloads a new movie when the server offers one."""
    # create files as prep-work
    os.mkdir(config['moviesDir'])
    open(config['moviesDir'] + '/movie1.mp4', 'a').close()
    requests_mock.get('https://www.always-onward.com/api/device/getmovie',
        text='{"status":"movieToDownload","name":"movie3.mp4","url":"https://www.signedUrl.com"}')
    requests_mock.get('https://www.signedUrl.com', content=b'Hello', status_code=200)
    result = movies()
    assert result == 'movie3.mp4 downloaded'
    assert os.path.exists(config['moviesDir'] + '/movie3.mp4')
    # cleanup
    shutil.rmtree(config['moviesDir'])
def test_movie_delete(requests_mock):
    """movies() deletes a movie the server says to remove."""
    # create files as prep-work
    os.mkdir(config['moviesDir'])
    open(config['moviesDir'] + '/movie1.mp4', 'a').close()
    open(config['moviesDir'] + '/movie2.mp4', 'a').close()
    requests_mock.get('https://www.always-onward.com/api/device/getmovie',
        text='{"status":"movieToDelete","name":"movie2.mp4"}')
    result = movies()
    assert result == 'movie2.mp4 deleted'
    assert not os.path.exists(config['moviesDir'] + '/movie2.mp4')
    # cleanup
    shutil.rmtree(config['moviesDir'])
def test_no_movie(requests_mock):
    """A 204 from the getmovie endpoint means the library is up to date."""
    # create files as prep-work
    os.mkdir(config['moviesDir'])
    open(config['moviesDir'] + '/movie1.mp4', 'a').close()
    requests_mock.get('https://www.always-onward.com/api/device/getmovie',
        text='', status_code=204)
    result = movies()
    assert result == 'Movies are up to Date'
    # cleanup
    shutil.rmtree(config['moviesDir'])
def test_movie_error(requests_mock):
    """Test that an HTTP error response is surfaced as an error result."""
    # create files as prep-work
    os.mkdir(config['moviesDir'])
    open(config['moviesDir'] + '/movie1.mp4', 'a').close()
    try:
        requests_mock.get('https://www.always-onward.com/api/device/getmovie',
                          text='bad request', status_code=400)
        result = movies()
        assert result == 'error - bad request'
    finally:
        # cleanup runs even when an assertion fails, so later tests start clean
        shutil.rmtree(config['moviesDir'])
|
import BaseHTTPServer
import threading
import requests
from nose.plugins.attrib import attr
from tests.checks.common import AgentCheckTest
class HttpServerThread(threading.Thread):
    """Background HTTP server serving *data* at /metrics on localhost:1337."""

    def __init__(self, data):
        super(HttpServerThread, self).__init__()
        self.done = False
        self.hostname = 'localhost'
        self.port = 1337

        class MockPortworx(BaseHTTPServer.BaseHTTPRequestHandler):
            def do_GET(self):
                # only the Prometheus endpoint is served; everything else is 404
                if self.path != '/metrics':
                    self.send_response(404)
                    return
                self.send_response(200)
                self.send_header('Content-type', 'text/plain')
                self.end_headers()
                # `data` is captured from the enclosing __init__ scope
                self.wfile.write(data)

        # NOTE: the port is bound here, as soon as the thread object is created
        self.http = BaseHTTPServer.HTTPServer((self.hostname, self.port), MockPortworx)

    def run(self):
        # serve one request at a time until end_http() flips the flag
        while not self.done:
            self.http.handle_request()
        # release the listening socket when the loop exits (was leaked before)
        self.http.server_close()

    def end_http(self):
        self.done = True
        # just a dummy get to wake it up
        requests.get("http://%s:%d" % (self.hostname, self.port))
@attr(requires='portworx')
class TestPortworx(AgentCheckTest):
    """Basic Test for portworx integration, run against a local mock exporter."""

    CHECK_NAME = 'portworx'

    # Sample Prometheus text-exposition payload served by HttpServerThread.
    # NOTE(review): the stray '"' after px_cluster_memory_utilized_percent looks
    # like a typo that would make that line unparseable as exposition format --
    # confirm whether it is deliberate (coverage_report may depend on which
    # lines actually parse) before changing the fixture.
    data = ("""px_cluster_cpu_percent{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 0.76
px_cluster_disk_available_bytes{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 1.3470091182e+11
px_cluster_disk_total_bytes{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 1.37438953472e+11
px_cluster_disk_utilized_bytes{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 2.738041652e+09
px_cluster_memory_utilized_percent"{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 24
px_cluster_pendingio{cluster="clusterpaul",node="devbox",node_id="f6c68c67-7c4f-4b3b-ab50-f5a046be5c3d"} 0""")

    def setUp(self):
        # spin up the mock /metrics endpoint before each test
        self.http = HttpServerThread(self.data)
        self.instance = {'prometheus_endpoint': 'http://localhost:1337/metrics'}
        self.check_config = {'instances': [self.instance]}
        self.http.start()

    def tearDown(self):
        # stop and join the server thread so the port is freed between tests
        self.http.end_http()
        self.http.join()

    def test_check_all_metrics(self):
        """
        Testing Portworx check.
        """
        self.run_check(self.check_config)
        self.assertMetric("px.cluster_cpu_percent", count=1, value=0.76)
        self.coverage_report()
from typing import Tuple
import torch
import torch.nn as nn
from colossalai.amp.naive_amp import NaiveAMPModel
from colossalai.logging import get_dist_logger
from colossalai.zero.sharded_model.sharded_model_v2 import ShardedModelV2
from colossalai.zero.sharded_optim.sharded_optim_v2 import ShardedOptimizerV2
from torch.optim import Optimizer
from .sharded_model import ShardedModel
from .sharded_optim import ShardedOptimizer
def convert_to_zero_v2(model: nn.Module, optimizer: torch.optim.Optimizer, model_config,
                       optimizer_config) -> Tuple[ShardedModelV2, ShardedOptimizerV2]:
    """
    A helper function to integrate the model and optimizer with ZeRO optimizer and off-loading

    :param model: Your model object
    :type model: :class:`torch.nn.Module`
    :param optimizer: Your optimizer object
    :type optimizer: :class:`torch.optim.Optimizer`
    :param model_config: Keyword arguments forwarded to :class:`ShardedModelV2`; ``None`` is treated as ``{}``
    :type model_config: :class:`dict`
    :param optimizer_config: Keyword arguments forwarded to :class:`ShardedOptimizerV2`; ``None`` is treated as ``{}``
    :type optimizer_config: :class:`dict`
    :return: (model, optimizer) wrapped as (ShardedModelV2, ShardedOptimizerV2)
    :rtype: Tuple
    """
    logger = get_dist_logger('convert_to_zero_v2')
    logger.info(f'optimizer_config is {optimizer_config}')
    # normalize None configs to empty dicts so they can be splatted below
    if optimizer_config is None:
        optimizer_config = dict()
    logger.info(f'model_config is {model_config}')
    if model_config is None:
        model_config = dict()
    zero_model = ShardedModelV2(model, **model_config)
    # the sharded optimizer wraps both the sharded model and the raw optimizer
    zero_optimizer = ShardedOptimizerV2(zero_model, optimizer, **optimizer_config)
    return zero_model, zero_optimizer
def convert_to_zero(model: nn.Module, optimizer: Optimizer, level: int, zero_config: dict):
    """
    A helper function to integrate the model and optimizer with ZeRO optimizer and off-loading

    :param model: Your model object
    :type model: :class:`torch.nn.Module`
    :param optimizer: Your optimizer object
    :type optimizer: :class:`torch.optim.Optimizer`
    :param level: Optimizer level, can be 1, 2 or 3
    :type level: int
    :param zero_config: Configuration for zero; mutated in place for level 2
    :type zero_config: dict
    :return: (model, optimizer)
    :rtype: Tuple
    """
    assert 1 <= level <= 3, 'Only ZERO Optimizer Level 1-3 are provided'
    if level in [1, 2]:
        if level == 2:
            if 'partition_grad' in zero_config:
                assert zero_config['partition_grad'], \
                    'Sharded Optimizer requires partition_grad to be True'
            else:
                # BUGFIX: the key was misspelled 'partiton_grad', so
                # ShardedOptimizer never received partition_grad=True
                zero_config['partition_grad'] = True
        model = NaiveAMPModel(model, output_to_fp32=True)
        optimizer = ShardedOptimizer(optimizer, **zero_config)
    else:
        # level 3: shard the model itself; the optimizer is returned unchanged
        model = ShardedModel(module=model, **zero_config)
    return model, optimizer
# convert_to_zero_v2 is a public entry point defined above; export it too so
# star-imports stay consistent with the module's public API
__all__ = ['convert_to_zero', 'convert_to_zero_v2', 'ShardedModel', 'ShardedOptimizer']
|
from collections.abc import Iterable
from typing import Any
from django.db.models.query import QuerySet
def is_query_set(value: Any) -> bool:
    """Return whether the given value is a Django :see:QuerySet instance."""
    return isinstance(value, QuerySet)
def is_sql(value: Any) -> bool:
    """Return whether the given value could be a raw SQL query, i.e. a plain string."""
    return isinstance(value, str)
def is_sql_with_params(value: Any) -> bool:
    """Return whether the given value is a 2-tuple of a SQL query string and
    an iterable of bind parameters (string-like iterables do not count)."""
    if not isinstance(value, tuple) or len(value) != 2:
        return False
    query, params = value
    if not is_sql(query):
        return False
    if isinstance(params, (str, bytes, bytearray)):
        return False
    return isinstance(params, Iterable)
|
import uuid
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Job(models.Model):
    """A single Reddit-content deletion job, from OAuth handshake to completion."""

    # OAuth2 bearer token used to act as the user against the Reddit API.
    access_token = models.CharField(
        max_length=255,
        help_text=_(
            "Used to authenticate as the user on Reddit",
        ),
    )
    # OAuth2 token used to obtain a new access_token when it expires.
    refresh_token = models.CharField(
        max_length=255,
        help_text=_(
            "Used to refresh the access_token",
        ),
    )
    # One-time authorization code from Reddit's OAuth redirect.
    code = models.CharField(
        max_length=255,
        help_text=_(
            "The code given by Reddit, we use this to exchange for tokens",
        ),
    )
    # Set once when the row is created.
    started = models.DateTimeField(
        auto_now_add = True,
    )
    # Refreshed automatically on every save().
    last_updated = models.DateTimeField(
        auto_now = True,
    )
    # Public-facing primary key for the task.
    # NOTE(review): UUIDField ignores max_length (Django sets its own storage
    # width) -- presumably a harmless leftover; confirm before removing.
    identifier = models.UUIDField(
        primary_key=True,
        default=uuid.uuid4,
        max_length=255,
        help_text=_(
            "The identifier for this task. Shown publicly.",
        ),
    )
    # Running counters updated as the deletion progresses.
    comments_deleted = models.PositiveSmallIntegerField(
        default=0,
    )
    submissions_deleted = models.PositiveSmallIntegerField(
        default=0,
    )

    # Normal pipeline states.
    STATE_AUTHORIZE = 10
    STATE_AUTHENTICATED = 20
    STATE_RECEIVED_CODE_AND_STATE = 30
    STATE_DELETING_COMMENTS = 40
    STATE_DELETING_SUBMISSIONS = 41
    STATE_FINISHED = 50
    # Terminal error states.
    STATE_UNKNOWN_ERROR = 100
    STATE_ACCESS_DENIED = 101
    # How far along in the deletion process we are. Note that there are
    # large increments to begin with, this is so that if we later on
    # decide to include additional states in between, then no additional
    # migrations will be required.
    STATE_CHOICES = (
        (STATE_AUTHORIZE, _('Asked user to authorize')),
        (STATE_AUTHENTICATED, _('Authenticated as user on Reddit')),
        (STATE_RECEIVED_CODE_AND_STATE, _('Received code and state')),
        (STATE_DELETING_COMMENTS, _('Deleting comments')),
        (STATE_DELETING_SUBMISSIONS, _('Deleting submissions')),
        (STATE_FINISHED, _('Finished')),
        (STATE_UNKNOWN_ERROR, _('Unknown error')),
        (STATE_ACCESS_DENIED, _('Access denied')),
    )
    # Current pipeline state; defaults to the first choice (STATE_AUTHORIZE).
    state = models.PositiveSmallIntegerField(
        choices=STATE_CHOICES,
        default=STATE_CHOICES[0][0],
        help_text=_(u"How far are we along in the process.")
    )

    class Meta:
        # Oldest jobs first.
        ordering = ['started', ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.