| signature (string, length 8–3.44k) | body (string, length 0–1.41M) | docstring (string, length 1–122k) | id (string, length 5–17) |
|---|---|---|---|
def update_anchor(self):
|
t = self.transform<EOL>self.update_collision_rect()<EOL>if t.anchor_x_r and t.anchor_y_r:<EOL><INDENT>t.anchor_x = self.min_x + (self.max_x - self.min_x) * t.anchor_x_r<EOL>t.anchor_y = self.min_y + (self.max_y - self.min_y) * t.anchor_y_r<EOL><DEDENT>
|
If the anchor is set via set_anchor_rate, its position must be updated continuously
|
f5265:c0:m6
|
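The body above re-derives the anchor from the bounding box each frame. A minimal sketch of that interpolation, using hypothetical plain values in place of the transform object:

```python
# Anchor interpolation: a rate of (0.5, 0.5) pins the anchor to the
# centre of the bounding box; (0, 0) pins it to the bottom-left corner.
min_x, max_x, min_y, max_y = 0, 100, 0, 50   # hypothetical bounding box
anchor_x_r, anchor_y_r = 0.5, 0.5            # rate set via set_anchor_rate
anchor_x = min_x + (max_x - min_x) * anchor_x_r   # 50.0
anchor_y = min_y + (max_y - min_y) * anchor_y_r   # 25.0
```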
def __init__(self, src, x=<NUM_LIT>, y=<NUM_LIT:200>, w=None, h=None, batch=None):
|
self.batch = batch<EOL>self.src = src<EOL>w = w or self._sprite.width<EOL>h = h or self._sprite.height<EOL>super().__init__(x, y, w, h)<EOL>self.collision_scale = <NUM_LIT><EOL>
|
Default position: 300, 200
|
f5266:c0:m0
|
def center_image(self, img):
|
img.anchor_x = img.width // <NUM_LIT:2> <EOL>img.anchor_y = img.height // <NUM_LIT:2><EOL>
|
Sets an image's anchor point to its center
|
f5266:c0:m13
|
def __init__(self, x1=<NUM_LIT:100>, y1=<NUM_LIT:100>, x2=<NUM_LIT:200>, y2=<NUM_LIT:200>, line_width=<NUM_LIT:1>, color="<STR_LIT>"):
|
super().__init__(color, gl=pyglet.gl.GL_LINES, line_width=line_width)<EOL>self.x1, self.y1, self.x2, self.y2 = x1, y1, x2, y2<EOL>
|
Line segment
Line width: line_width, defaults to 1
Color: color, defaults to "orange"
|
f5267:c0:m0
|
@window.event<EOL>def on_mouse_motion(x, y, dx, dy):
|
mouse.x, mouse.y = x, y<EOL>mouse.move()<EOL>window.update_caption(mouse)<EOL>
|
Triggered when the mouse moves with no button pressed
|
f5270:m0
|
@window.event<EOL>def on_mouse_drag(x, y, dx, dy, buttons, modifiers):
|
mouse.x, mouse.y = x, y<EOL>mouse.move()<EOL>
|
Triggered when the mouse moves while a button is held down
|
f5270:m1
|
@window.event<EOL>def on_mouse_press(x, y, button, modifiers):
|
if button == MouseKeyCode.LEFT:<EOL><INDENT>mouse.press()<EOL><DEDENT>elif button == MouseKeyCode.RIGHT:<EOL><INDENT>mouse.right_press()<EOL><DEDENT>shapes = list(all_shapes)<EOL>while shapes:<EOL><INDENT>shape = shapes.pop()<EOL>if(shape._press and shape_clicked(shape)):<EOL><INDENT>shape._press()<EOL><DEDENT><DEDENT>
|
When a mouse button is pressed
|
f5270:m2
|
@window.event<EOL>def on_mouse_release(x, y, button, modifiers):
|
if button == MouseKeyCode.LEFT:<EOL><INDENT>mouse.release()<EOL><DEDENT>elif button == MouseKeyCode.RIGHT:<EOL><INDENT>mouse.right_release()<EOL><DEDENT>
|
When a mouse button is released
|
f5270:m3
|
@window.event<EOL>def on_key_press(symbol, modifiers):
|
try:<EOL><INDENT>key[symbol].press()<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT>
|
Triggered when a keyboard key is pressed
|
f5270:m4
|
@window.event<EOL>def on_key_release(symbol, modifiers):
|
try:<EOL><INDENT>key[symbol].release()<EOL><DEDENT>except:<EOL><INDENT>pass<EOL><DEDENT>
|
Triggered when a keyboard key is released
|
f5270:m5
|
def line_cross(x1, y1, x2, y2, x3, y3, x4, y4):
|
<EOL>if min(x1, x2) > max(x3, x4) or max(x1, x2) < min(x3, x4) or min(y1, y2) > max(y3, y4) or max(y1, y2) < min(y3, y4):<EOL><INDENT>return False<EOL><DEDENT>if ((y1 - y2) * (x3 - x4) == (x1 - x2) * (y3 - y4)):<EOL><INDENT>return False<EOL><DEDENT>if cross_product(x3, y3, x2, y2, x4, y4) * cross_product(x3, y3, x4, y4, x1, y1) < <NUM_LIT:0> or cross_product(x1, y1, x4, y4, x2, y2) * cross_product(x1, y1, x2, y2, x3, y3) < <NUM_LIT:0>:<EOL><INDENT>return False<EOL><DEDENT>b1 = (y2 - y1) * x1 + (x1 - x2) * y1<EOL>b2 = (y4 - y3) * x3 + (x3 - x4) * y3<EOL>D = (x2 - x1) * (y4 - y3) - (x4 - x3) * (y2 - y1)<EOL>D1 = b2 * (x2 - x1) - b1 * (x4 - x3)<EOL>D2 = b2 * (y2 - y1) - b1 * (y4 - y3)<EOL>return P(D1 / D, D2 / D)<EOL>
|
Determine whether two line segments intersect
|
f5271:m4
|
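Read together with the cross_product helper in the next row, the test is a bounding-box broad phase followed by cross-product straddle checks. A de-tokenized sketch that returns only a boolean (the original also computes the intersection point via the determinants D, D1, D2):

```python
def cross_product(x1, y1, x2, y2, x3, y3):
    # z-component of (P2 - P1) x (P3 - P1)
    return (x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1)

def segments_intersect(x1, y1, x2, y2, x3, y3, x4, y4):
    # Broad phase: disjoint bounding boxes cannot intersect.
    if (min(x1, x2) > max(x3, x4) or max(x1, x2) < min(x3, x4)
            or min(y1, y2) > max(y3, y4) or max(y1, y2) < min(y3, y4)):
        return False
    # Straddle test: each segment's endpoints must lie on opposite
    # sides of the line carrying the other segment.
    d1 = cross_product(x3, y3, x4, y4, x1, y1)
    d2 = cross_product(x3, y3, x4, y4, x2, y2)
    d3 = cross_product(x1, y1, x2, y2, x3, y3)
    d4 = cross_product(x1, y1, x2, y2, x4, y4)
    return d1 * d2 <= 0 and d3 * d4 <= 0

print(segments_intersect(0, 0, 2, 2, 0, 2, 2, 0))  # True: diagonals cross at (1, 1)
```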
def cross_product(x1, y1, x2, y2, x3, y3):
|
return (x2 - x1) * (y3 - y1) - (x3 - x1) * (y2 - y1)<EOL>
|
Cross product
vector 1: x1, y1, x2, y2
vector 2: x1, y1, x3, y3
|
f5271:m5
|
def update_collision_rect(self):
|
self.min_x = min(self.points[::<NUM_LIT:2>])<EOL>self.max_x = max(self.points[::<NUM_LIT:2>])<EOL>self.min_y = min(self.points[<NUM_LIT:1>::<NUM_LIT:2>])<EOL>self.max_y = max(self.points[<NUM_LIT:1>::<NUM_LIT:2>])<EOL>
|
Compute the bounding rectangle
|
f5271:c0:m0
|
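The even/odd slicing works because points is a flat [x0, y0, x1, y1, ...] list, so the stride-2 slices separate the x and y coordinates:

```python
points = [0, 5, 10, 2, 4, 8]        # flat [x0, y0, x1, y1, x2, y2] list
min_x, max_x = min(points[::2]), max(points[::2])     # x's at even indices
min_y, max_y = min(points[1::2]), max(points[1::2])   # y's at odd indices
print(min_x, max_x, min_y, max_y)   # 0 10 2 8
```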
def collide(self, s2):
|
s1 = self<EOL>s1.update_points()<EOL>s2.update_points()<EOL>if not (s1.points and s2.points):<EOL><INDENT>return False<EOL><DEDENT>t1 = s1.transform<EOL>t2 = s2.transform<EOL>t1.update_points(s1.points)<EOL>t2.update_points(s2.points)<EOL>t1.update_collision_rect()<EOL>t2.update_collision_rect()<EOL>if not (t1.min_x < t2.max_x and t1.max_x > t2.min_x and t1.min_y < t2.max_y and t1.max_y > t2.min_y):<EOL><INDENT>return False<EOL><DEDENT>return points_in_points(t1, t2) or points_in_points(t2, t1) or lines_cross(t1, t2)<EOL>
|
Determine whether this shape collides with another shape
|
f5271:c0:m1
|
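collide() bails out early when the transformed bounding rectangles do not overlap, before the more expensive point-containment and edge-crossing checks. A sketch of that broad-phase test (Box is a hypothetical stand-in for the transform objects above):

```python
from collections import namedtuple

Box = namedtuple("Box", "min_x max_x min_y max_y")

def rects_overlap(a, b):
    # Overlap is required on both axes; touching edges count as a miss,
    # matching the strict inequalities in collide() above.
    return (a.min_x < b.max_x and a.max_x > b.min_x
            and a.min_y < b.max_y and a.max_y > b.min_y)

print(rects_overlap(Box(0, 2, 0, 2), Box(1, 3, 1, 3)))  # True
print(rects_overlap(Box(0, 1, 0, 1), Box(2, 3, 2, 3)))  # False
```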
def on_press(self, f):
|
self._press = f<EOL>
|
Register the on_press callback; the function f is triggered when the shape is clicked
|
f5271:c0:m2
|
def color_to_tuple(color, opacity=<NUM_LIT:1>):
|
if(type(color) == str and color[<NUM_LIT:0>] == "<STR_LIT:#>"):<EOL><INDENT>color = hex_color_to_tuple(color)<EOL><DEDENT>elif type(color) == str:<EOL><INDENT>if color in color_dict:<EOL><INDENT>color = color_dict[color.lower()]<EOL><DEDENT>else:<EOL><INDENT>print("<STR_LIT>" + color)<EOL>color = (<NUM_LIT:255>, <NUM_LIT>, <NUM_LIT:0>, int(<NUM_LIT:255>*opacity)) <EOL><DEDENT><DEDENT>while len(color) < <NUM_LIT:4>:<EOL><INDENT>color += (int(<NUM_LIT:255>*opacity),)<EOL><DEDENT>return color<EOL>
|
Convert any color to a standard color tuple
"red" -> 'c3B', (255, 125, 0)
"#ffffff" -> 'c3B', (255, 255, 255)
"#ffffffff" -> 'c4B', (255, 255, 255, 255)
|
f5272:m0
|
def hex_color_to_tuple(hex):
|
hex = hex[<NUM_LIT:1>:]<EOL>length = len(hex) // <NUM_LIT:2><EOL>return tuple(int(hex[i*<NUM_LIT:2>:i*<NUM_LIT:2>+<NUM_LIT:2>], <NUM_LIT:16>) for i in range(length))<EOL>
|
Convert a hex color string to a tuple
"#ffffff" -> (255, 255, 255)
"#ffff00ff" -> (255, 255, 0, 255)
|
f5272:m1
|
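With the masked literals filled in from the docstring examples, the parser is short enough to restate as runnable code:

```python
def hex_color_to_tuple(hex_color):
    hex_color = hex_color[1:]              # drop the leading '#'
    length = len(hex_color) // 2           # 3 channels (RGB) or 4 (RGBA)
    return tuple(int(hex_color[i * 2:i * 2 + 2], 16) for i in range(length))

assert hex_color_to_tuple("#ffffff") == (255, 255, 255)
assert hex_color_to_tuple("#ffff00ff") == (255, 255, 0, 255)
```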
def hsla_to_rgba(h, s, l, a):
|
h = h % <NUM_LIT><EOL>s = max(<NUM_LIT:0>, min(<NUM_LIT:1>, s))<EOL>l = max(<NUM_LIT:0>, min(<NUM_LIT:1>, l))<EOL>a = max(<NUM_LIT:0>, min(<NUM_LIT:1>, a))<EOL>c = (<NUM_LIT:1> - abs(<NUM_LIT:2>*l - <NUM_LIT:1>)) * s<EOL>x = c * (<NUM_LIT:1> - abs(h/<NUM_LIT>%<NUM_LIT:2> - <NUM_LIT:1>))<EOL>m = l - c/<NUM_LIT:2><EOL>if h<<NUM_LIT>:<EOL><INDENT>r, g, b = c, x, <NUM_LIT:0><EOL><DEDENT>elif h<<NUM_LIT>:<EOL><INDENT>r, g, b = x, c, <NUM_LIT:0><EOL><DEDENT>elif h<<NUM_LIT>:<EOL><INDENT>r, g, b = <NUM_LIT:0>, c, x<EOL><DEDENT>elif h<<NUM_LIT>:<EOL><INDENT>r, g, b = <NUM_LIT:0>, x, c<EOL><DEDENT>elif h<<NUM_LIT>:<EOL><INDENT>r, g, b = x, <NUM_LIT:0>, c<EOL><DEDENT>else:<EOL><INDENT>r, g, b = c, <NUM_LIT:0>, x<EOL><DEDENT>return (int((r+m)*<NUM_LIT:255>), int((g+m)*<NUM_LIT:255>), int((b+m)*<NUM_LIT:255>), int(a*<NUM_LIT:255>))<EOL>
|
0 <= h < 360, 0 <= s, l, a <= 1
|
f5272:m4
|
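The elided literals are presumably the standard HSL sector bounds (360 and multiples of 60), consistent with the docstring's 0 <= h < 360 range; under that assumption the function reads:

```python
def hsla_to_rgba(h, s, l, a):
    h = h % 360
    s, l, a = (max(0, min(1, v)) for v in (s, l, a))   # clamp to [0, 1]
    c = (1 - abs(2 * l - 1)) * s            # chroma
    x = c * (1 - abs(h / 60 % 2 - 1))       # second-largest channel
    m = l - c / 2                           # lightness offset
    if h < 60:    r, g, b = c, x, 0
    elif h < 120: r, g, b = x, c, 0
    elif h < 180: r, g, b = 0, c, x
    elif h < 240: r, g, b = 0, x, c
    elif h < 300: r, g, b = x, 0, c
    else:         r, g, b = c, 0, x
    return (int((r + m) * 255), int((g + m) * 255),
            int((b + m) * 255), int(a * 255))

print(hsla_to_rgba(0, 1, 0.5, 1))   # pure red: (255, 0, 0, 255)
```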
def __init__(self, x1=<NUM_LIT:100>, y1=<NUM_LIT:100>, x2=<NUM_LIT:200>, y2=<NUM_LIT:200>, line_width=<NUM_LIT:1>, color="<STR_LIT>"):
|
x = (x1 + x2) / <NUM_LIT:2><EOL>y = (y1 + y2) / <NUM_LIT:2><EOL>super().__init__(color, gl=gl.GL_LINES, line_width=line_width)<EOL>self.x1 = x1<EOL>self.y1 = y1<EOL>self.x2 = x2<EOL>self.y2 = y2<EOL>lines.append(self)<EOL>
|
Line segment
Line width: line_width, defaults to 1
Color: color, defaults to "orange"
|
f5301:c0:m0
|
def authenticated(func):
|
@wraps(func)<EOL>def wrapper(*args, **kwargs):<EOL><INDENT>self = args[<NUM_LIT:0>]<EOL>if self.refresh_token is not None and self.token_expiration_time <= dt.datetime.utcnow():<EOL><INDENT>self.re_authenticate()<EOL><DEDENT>return func(*args, **kwargs)<EOL><DEDENT>return wrapper<EOL>
|
Decorator to check if Smappee's access token has expired.
If it has, use the refresh token to request a new access token
|
f5315:m0
|
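De-tokenized, the decorator is a straightforward expiry check around the wrapped call; refresh_token, token_expiration_time and re_authenticate are the attributes defined on the class in the rows below:

```python
import datetime as dt
from functools import wraps

def authenticated(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        self = args[0]   # the decorated function is a bound method
        # Re-authenticate only if we hold a refresh token and the
        # access token has already expired.
        if (self.refresh_token is not None
                and self.token_expiration_time <= dt.datetime.utcnow()):
            self.re_authenticate()
        return func(*args, **kwargs)
    return wrapper
```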
def urljoin(*parts):
|
<EOL>part_list = []<EOL>for part in parts:<EOL><INDENT>p = str(part)<EOL>if p.endswith('<STR_LIT>'):<EOL><INDENT>p = p[<NUM_LIT:0>:-<NUM_LIT:1>]<EOL><DEDENT>else:<EOL><INDENT>p = p.strip('<STR_LIT:/>')<EOL><DEDENT>part_list.append(p)<EOL><DEDENT>url = '<STR_LIT:/>'.join(part_list)<EOL>return url<EOL>
|
Join terms together with forward slashes
Parameters
----------
parts
Returns
-------
str
|
f5315:m1
|
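A runnable sketch of the joiner; the masked literals are assumed to be '//' (so a scheme like 'http://' keeps one slash after the trim) and '/':

```python
def urljoin(*parts):
    part_list = []
    for part in parts:
        p = str(part)
        if p.endswith('//'):    # e.g. 'http://' -> 'http:/'
            p = p[:-1]
        else:
            p = p.strip('/')    # trim stray slashes on both ends
        part_list.append(p)
    return '/'.join(part_list)

print(urljoin('https://example.com/', 'servicelocation', 123, 'info'))
# https://example.com/servicelocation/123/info
```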
def __init__(self, client_id=None, client_secret=None):
|
self.client_id = client_id<EOL>self.client_secret = client_secret<EOL>self.access_token = None<EOL>self.refresh_token = None<EOL>self.token_expiration_time = None<EOL>
|
To receive a client id and secret,
you need to request them via Smappee support
Parameters
----------
client_id : str, optional
client_secret : str, optional
If None, you won't be able to do any authorisation,
so it requires that you already have an access token somewhere.
In that case, the SimpleSmappee class is something for you.
|
f5315:c0:m0
|
def authenticate(self, username, password):
|
url = URLS['<STR_LIT>']<EOL>data = {<EOL>"<STR_LIT>": "<STR_LIT:password>",<EOL>"<STR_LIT>": self.client_id,<EOL>"<STR_LIT>": self.client_secret,<EOL>"<STR_LIT:username>": username,<EOL>"<STR_LIT:password>": password<EOL>}<EOL>r = requests.post(url, data=data)<EOL>r.raise_for_status()<EOL>j = r.json()<EOL>self.access_token = j['<STR_LIT>']<EOL>self.refresh_token = j['<STR_LIT>']<EOL>self._set_token_expiration_time(expires_in=j['<STR_LIT>'])<EOL>return r<EOL>
|
Uses a Smappee username and password to request an access token,
refresh token and expiry date.
Parameters
----------
username : str
password : str
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
|
f5315:c0:m1
|
def _set_token_expiration_time(self, expires_in):
|
self.token_expiration_time = dt.datetime.utcnow() + dt.timedelta(<NUM_LIT:0>, expires_in)<EOL>
|
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
|
f5315:c0:m2
|
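The dt.timedelta(0, expires_in) call uses the positional (days, seconds) form, so it is equivalent to a seconds-only delta:

```python
import datetime as dt

expires_in = 3600   # hypothetical value from the token endpoint
assert dt.timedelta(0, expires_in) == dt.timedelta(seconds=expires_in)
expiration = dt.datetime.utcnow() + dt.timedelta(seconds=expires_in)
```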
def re_authenticate(self):
|
url = URLS['<STR_LIT>']<EOL>data = {<EOL>"<STR_LIT>": "<STR_LIT>",<EOL>"<STR_LIT>": self.refresh_token,<EOL>"<STR_LIT>": self.client_id,<EOL>"<STR_LIT>": self.client_secret<EOL>}<EOL>r = requests.post(url, data=data)<EOL>r.raise_for_status()<EOL>j = r.json()<EOL>self.access_token = j['<STR_LIT>']<EOL>self.refresh_token = j['<STR_LIT>']<EOL>self._set_token_expiration_time(expires_in=j['<STR_LIT>'])<EOL>return r<EOL>
|
Uses the refresh token to request a new access token, refresh token and
expiration date.
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
|
f5315:c0:m3
|
@authenticated<EOL><INDENT>def get_service_locations(self):<DEDENT>
|
url = URLS['<STR_LIT>']<EOL>headers = {"<STR_LIT>": "<STR_LIT>".format(self.access_token)}<EOL>r = requests.get(url, headers=headers)<EOL>r.raise_for_status()<EOL>return r.json()<EOL>
|
Request service locations
Returns
-------
dict
|
f5315:c0:m4
|
@authenticated<EOL><INDENT>def get_service_location_info(self, service_location_id):<DEDENT>
|
url = urljoin(URLS['<STR_LIT>'], service_location_id, "<STR_LIT:info>")<EOL>headers = {"<STR_LIT>": "<STR_LIT>".format(self.access_token)}<EOL>r = requests.get(url, headers=headers)<EOL>r.raise_for_status()<EOL>return r.json()<EOL>
|
Request service location info
Parameters
----------
service_location_id : int
Returns
-------
dict
|
f5315:c0:m5
|
@authenticated<EOL><INDENT>def get_consumption(self, service_location_id, start, end, aggregation, raw=False):<DEDENT>
|
url = urljoin(URLS['<STR_LIT>'], service_location_id,<EOL>"<STR_LIT>")<EOL>d = self._get_consumption(url=url, start=start, end=end,<EOL>aggregation=aggregation)<EOL>if not raw:<EOL><INDENT>for block in d['<STR_LIT>']:<EOL><INDENT>if '<STR_LIT>' not in block.keys():<EOL><INDENT>break<EOL><DEDENT>block.update({'<STR_LIT>': block['<STR_LIT>'] / <NUM_LIT:12>})<EOL><DEDENT><DEDENT>return d<EOL>
|
Request electricity consumption and solar production
for a given service location.
Parameters
----------
service_location_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
dict
|
f5315:c0:m6
|
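The raw=False branch implements the conversion the docstring describes: with 5-minute blocks there are 12 samples per hour, so the server's summed power is 12 times the energy in Wh. A sketch with a hypothetical value:

```python
always_on_sum_of_power = 600.0               # hypothetical raw 'alwaysOn' value (W, summed)
always_on_wh = always_on_sum_of_power / 12   # 50.0 Wh
```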
@authenticated<EOL><INDENT>def get_sensor_consumption(self, service_location_id, sensor_id, start,<EOL>end, aggregation):<DEDENT>
|
url = urljoin(URLS['<STR_LIT>'], service_location_id, "<STR_LIT>",<EOL>sensor_id, "<STR_LIT>")<EOL>return self._get_consumption(url=url, start=start, end=end,<EOL>aggregation=aggregation)<EOL>
|
Request consumption for a given sensor in a given service location
Parameters
----------
service_location_id : int
sensor_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
Returns
-------
dict
|
f5315:c0:m7
|
def _get_consumption(self, url, start, end, aggregation):
|
start = self._to_milliseconds(start)<EOL>end = self._to_milliseconds(end)<EOL>headers = {"<STR_LIT>": "<STR_LIT>".format(self.access_token)}<EOL>params = {<EOL>"<STR_LIT>": aggregation,<EOL>"<STR_LIT>": start,<EOL>"<STR_LIT:to>": end<EOL>}<EOL>r = requests.get(url, headers=headers, params=params)<EOL>r.raise_for_status()<EOL>return r.json()<EOL>
|
Request for both the get_consumption and
get_sensor_consumption methods.
Parameters
----------
url : str
start : dt.datetime
end : dt.datetime
aggregation : int
Returns
-------
dict
|
f5315:c0:m8
|
@authenticated<EOL><INDENT>def get_events(self, service_location_id, appliance_id, start, end,<EOL>max_number=None):<DEDENT>
|
start = self._to_milliseconds(start)<EOL>end = self._to_milliseconds(end)<EOL>url = urljoin(URLS['<STR_LIT>'], service_location_id, "<STR_LIT>")<EOL>headers = {"<STR_LIT>": "<STR_LIT>".format(self.access_token)}<EOL>params = {<EOL>"<STR_LIT>": start,<EOL>"<STR_LIT:to>": end,<EOL>"<STR_LIT>": appliance_id,<EOL>"<STR_LIT>": max_number<EOL>}<EOL>r = requests.get(url, headers=headers, params=params)<EOL>r.raise_for_status()<EOL>return r.json()<EOL>
|
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
max_number : int, optional
The maximum number of events that should be returned by this query
Default returns all events in the selected period
Returns
-------
dict
|
f5315:c0:m9
|
@authenticated<EOL><INDENT>def actuator_on(self, service_location_id, actuator_id, duration=None):<DEDENT>
|
return self._actuator_on_off(<EOL>on_off='<STR_LIT>', service_location_id=service_location_id,<EOL>actuator_id=actuator_id, duration=duration)<EOL>
|
Turn actuator on
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
|
f5315:c0:m10
|
@authenticated<EOL><INDENT>def actuator_off(self, service_location_id, actuator_id, duration=None):<DEDENT>
|
return self._actuator_on_off(<EOL>on_off='<STR_LIT>', service_location_id=service_location_id,<EOL>actuator_id=actuator_id, duration=duration)<EOL>
|
Turn actuator off
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned off. Any other value results in turning off for an
undetermined period of time.
Returns
-------
requests.Response
|
f5315:c0:m11
|
def _actuator_on_off(self, on_off, service_location_id, actuator_id,<EOL>duration=None):
|
url = urljoin(URLS['<STR_LIT>'], service_location_id,<EOL>"<STR_LIT>", actuator_id, on_off)<EOL>headers = {"<STR_LIT>": "<STR_LIT>".format(self.access_token)}<EOL>if duration is not None:<EOL><INDENT>data = {"<STR_LIT>": duration}<EOL><DEDENT>else:<EOL><INDENT>data = {}<EOL><DEDENT>r = requests.post(url, headers=headers, json=data)<EOL>r.raise_for_status()<EOL>return r<EOL>
|
Turn actuator on or off
Parameters
----------
on_off : str
'on' or 'off'
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be switched on or off. Any other value results in switching for an
undetermined period of time.
Returns
-------
requests.Response
|
f5315:c0:m12
|
def get_consumption_dataframe(self, service_location_id, start, end,<EOL>aggregation, sensor_id=None, localize=False,<EOL>raw=False):
|
import pandas as pd<EOL>if sensor_id is None:<EOL><INDENT>data = self.get_consumption(<EOL>service_location_id=service_location_id, start=start,<EOL>end=end, aggregation=aggregation, raw=raw)<EOL>consumptions = data['<STR_LIT>']<EOL><DEDENT>else:<EOL><INDENT>data = self.get_sensor_consumption(<EOL>service_location_id=service_location_id, sensor_id=sensor_id,<EOL>start=start, end=end, aggregation=aggregation)<EOL>consumptions = data['<STR_LIT>']<EOL><DEDENT>df = pd.DataFrame.from_dict(consumptions)<EOL>if not df.empty:<EOL><INDENT>df.set_index('<STR_LIT>', inplace=True)<EOL>df.index = pd.to_datetime(df.index, unit='<STR_LIT>', utc=True)<EOL>if localize:<EOL><INDENT>info = self.get_service_location_info(<EOL>service_location_id=service_location_id)<EOL>timezone = info['<STR_LIT>']<EOL>df = df.tz_convert(timezone)<EOL><DEDENT><DEDENT>return df<EOL>
|
Extends get_consumption() and get_sensor_consumption(),
parsing the results into a Pandas DataFrame
Parameters
----------
service_location_id : int
start : dt.datetime | int
end : dt.datetime | int
timezone-naive datetimes are assumed to be in UTC
epoch timestamps need to be in milliseconds
aggregation : int
sensor_id : int, optional
If a sensor id is passed, the get_sensor_consumption API method is
used; otherwise (by default) the get_consumption method is used,
which returns electricity and solar consumption and production.
localize : bool
default False
default returns timestamps in UTC
if True, timezone is fetched from service location info and
Data Frame is localized
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
pd.DataFrame
|
f5315:c0:m13
|
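The parsing step turns the list of consumption records into a frame indexed by a UTC DatetimeIndex, optionally localized afterwards; a sketch with hypothetical records:

```python
import pandas as pd

consumptions = [
    {"timestamp": 1514764800000, "consumption": 12.0},   # hypothetical records
    {"timestamp": 1514768400000, "consumption": 15.5},
]
df = pd.DataFrame.from_dict(consumptions)
df.set_index("timestamp", inplace=True)
df.index = pd.to_datetime(df.index, unit="ms", utc=True)
df = df.tz_convert("Europe/Brussels")    # the 'localize' branch
print(df)
```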
def _to_milliseconds(self, time):
|
if isinstance(time, dt.datetime):<EOL><INDENT>if time.tzinfo is None:<EOL><INDENT>time = time.replace(tzinfo=pytz.UTC)<EOL><DEDENT>return int(time.timestamp() * <NUM_LIT>)<EOL><DEDENT>elif isinstance(time, numbers.Number):<EOL><INDENT>return time<EOL><DEDENT>else:<EOL><INDENT>raise NotImplementedError("<STR_LIT>")<EOL><DEDENT>
|
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
|
f5315:c0:m14
|
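De-tokenized sketch: naive datetimes are pinned to UTC before conversion, and plain numbers are passed through as already being epoch milliseconds:

```python
import datetime as dt
import numbers
import pytz

def to_milliseconds(time):
    if isinstance(time, dt.datetime):
        if time.tzinfo is None:
            time = time.replace(tzinfo=pytz.UTC)   # assume naive == UTC
        return int(time.timestamp() * 1000)
    elif isinstance(time, numbers.Number):
        return time                                # already epoch ms
    raise NotImplementedError("unsupported time type")

print(to_milliseconds(dt.datetime(1970, 1, 1, 0, 0, 1)))   # 1000
```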
def __init__(self, access_token):
|
super(SimpleSmappee, self).__init__(client_id=None, client_secret=None)<EOL>self.access_token = access_token<EOL>
|
Parameters
----------
access_token : str
|
f5315:c1:m0
|
def __init__(self, ip):
|
self.ip = ip<EOL>self.headers = {'<STR_LIT:Content-Type>': '<STR_LIT>'}<EOL>self.session = requests.Session()<EOL>
|
Parameters
----------
ip : str
local IP-address of your Smappee
|
f5315:c2:m0
|
def _basic_post(self, url, data=None):
|
_url = urljoin(self.base_url, url)<EOL>r = self.session.post(_url, data=data, headers=self.headers, timeout=<NUM_LIT:5>)<EOL>r.raise_for_status()<EOL>return r<EOL>
|
Because basically every post request is the same
Parameters
----------
url : str
data : str, optional
Returns
-------
requests.Response
|
f5315:c2:m2
|
def logon(self, password='<STR_LIT>'):
|
r = self._basic_post(url='<STR_LIT>', data=password)<EOL>return r.json()<EOL>
|
Parameters
----------
password : str
default 'admin'
Returns
-------
dict
|
f5315:c2:m4
|
def report_instantaneous_values(self):
|
r = self._basic_get(url='<STR_LIT>')<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m5
|
def load_instantaneous(self):
|
r = self._basic_post(url='<STR_LIT>', data="<STR_LIT>")<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m6
|
def active_power(self):
|
inst = self.load_instantaneous()<EOL>values = [float(i['<STR_LIT:value>']) for i in inst if i['<STR_LIT:key>'].endswith('<STR_LIT>')]<EOL>return sum(values) / <NUM_LIT:1000><EOL>
|
Takes the sum of all instantaneous active power values
and returns it in kW
Returns
-------
float
|
f5315:c2:m7
|
def active_cosfi(self):
|
inst = self.load_instantaneous()<EOL>values = [float(i['<STR_LIT:value>']) for i in inst if i['<STR_LIT:key>'].endswith('<STR_LIT>')]<EOL>return sum(values) / len(values)<EOL>
|
Takes the average of all instantaneous cosfi values
Returns
-------
float
|
f5315:c2:m8
|
def restart(self):
|
return self._basic_get(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m9
|
def reset_active_power_peaks(self):
|
return self._basic_post(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m10
|
def reset_ip_scan_cache(self):
|
return self._basic_post(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m11
|
def reset_sensor_cache(self):
|
return self._basic_post(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m12
|
def reset_data(self):
|
return self._basic_post(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m13
|
def clear_appliances(self):
|
return self._basic_post(url='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m14
|
def load_advanced_config(self):
|
r = self._basic_post(url='<STR_LIT>', data='<STR_LIT>')<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m15
|
def load_config(self):
|
r = self._basic_post(url='<STR_LIT>', data='<STR_LIT>')<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m16
|
def save_config(self, *args, **kwargs):
|
raise NotImplementedError("<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>
|
Parameters
----------
args
kwargs
Raises
------
NotImplementedError
|
f5315:c2:m17
|
def load_command_control_config(self):
|
r = self._basic_post(url='<STR_LIT>', data='<STR_LIT>')<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m18
|
def send_group(self):
|
return self._basic_post(url='<STR_LIT>', data='<STR_LIT>')<EOL>
|
Returns
-------
requests.Response
|
f5315:c2:m19
|
def on_command_control(self, val_id):
|
data = "<STR_LIT>" + val_id<EOL>return self._basic_post(url='<STR_LIT>', data=data)<EOL>
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
f5315:c2:m20
|
def off_command_control(self, val_id):
|
data = "<STR_LIT>" + val_id<EOL>return self._basic_post(url='<STR_LIT>', data=data)<EOL>
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
f5315:c2:m21
|
def add_command_control(self, *args, **kwargs):
|
raise NotImplementedError("<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>
|
Parameters
----------
args
kwargs
Raises
-------
NotImplementedError
|
f5315:c2:m22
|
def delete_command_control(self, val_id):
|
data = "<STR_LIT>" + val_id<EOL>return self._basic_post(url='<STR_LIT>', data=data)<EOL>
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
f5315:c2:m23
|
def delete_command_control_timers(self, val_id):
|
data = "<STR_LIT>" + val_id<EOL>return self._basic_post(url='<STR_LIT>', data=data)<EOL>
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
f5315:c2:m24
|
def add_command_control_timed(self, *args, **kwargs):
|
raise NotImplementedError("<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>"<EOL>"<STR_LIT>")<EOL>
|
Parameters
----------
args
kwargs
Raises
-------
NotImplementedError
|
f5315:c2:m25
|
def load_logfiles(self):
|
r = self._basic_post(url='<STR_LIT>', data='<STR_LIT>')<EOL>return r.json()<EOL>
|
Returns
-------
dict
|
f5315:c2:m26
|
def select_logfile(self, logfile):
|
data = '<STR_LIT>' + logfile<EOL>r = self._basic_post(url='<STR_LIT>', data=data)<EOL>return r.json()<EOL>
|
Parameters
----------
logfile : str
Returns
-------
dict
|
f5315:c2:m27
|
def content_recommendations(access_token, content_item_id):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>recommendations_url = construct_content_recommendations_url(enrichment_url, content_item_id)<EOL>request = requests.get(recommendations_url, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>recommendations = request.json()<EOL>return recommendations<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: content_recommendations
Parameters: access_token, content_item_id
Return: dictionary
|
f5318:m0
|
def recommendations(access_token, payload):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>request = requests.post(recommendations_url, json=payload, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>metadata = request.json()<EOL>return metadata<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: recommendations
Parameters: access_token, payload
Return: dictionary
|
f5318:m1
|
def get_access_token(client_id, client_secret):
|
headers = {'<STR_LIT:Content-Type>': '<STR_LIT>'}<EOL>payload = {<EOL>'<STR_LIT>': client_id,<EOL>'<STR_LIT>': client_secret<EOL>}<EOL>request = requests.post(token_url, data=payload, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>token = request.json()<EOL>return token<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: get_access_token
Parameters: client_id, client_secret
Return: dictionary
|
f5319:m0
|
def create_content_item(access_token, payload):
|
headers = {<EOL>'<STR_LIT:Content-Type>': '<STR_LIT:application/json>',<EOL>'<STR_LIT>': '<STR_LIT>' + str(access_token)<EOL>}<EOL>request = requests.post(enrichment_url, json=payload, headers=headers)<EOL>if request.status_code == <NUM_LIT>:<EOL><INDENT>content_item = request.json()<EOL>return content_item<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: create_content_item
Parameters: access_token, payload (dict)
Return: dictionary
|
f5322:m0
|
def get_content_items(access_token, limit, page):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>content_items_url = construct_content_items_url(enrichment_url, limit, page)<EOL>request = requests.get(content_items_url, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>content_item = request.json()<EOL>return content_item<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: get_content_items
Parameters: access_token, limit (optional), page (optional)
Return: dictionary
|
f5322:m1
|
def get_content_item(access_token, content_item_id):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>content_item_url = construct_content_item_url(enrichment_url, content_item_id)<EOL>request = requests.get(content_item_url, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>content_item = request.json()<EOL>return content_item<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: get_content_item
Parameters: access_token, content_item_id
Return: dictionary
|
f5322:m2
|
def update_content_item(access_token, content_item_id, payload):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>content_item_url = construct_content_item_url(enrichment_url, content_item_id)<EOL>payload = create_random_payload(payload)<EOL>request = requests.put(content_item_url, json=payload, headers=headers)<EOL>if request.status_code == <NUM_LIT:200>:<EOL><INDENT>content_item = request.json()<EOL>return content_item<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: update_content_item
Parameters: access_token, content_item_id, payload (dict)
Return: dictionary
|
f5322:m3
|
def delete_content_item(access_token, content_item_id):
|
headers = {'<STR_LIT>': '<STR_LIT>' + str(access_token)}<EOL>content_item_url = construct_content_item_url(enrichment_url, content_item_id)<EOL>request = requests.delete(content_item_url, headers=headers)<EOL>return {'<STR_LIT:status>': request.status_code, '<STR_LIT:message>': request.text}<EOL>
|
Name: delete_content_item
Parameters: access_token, content_item_id
Return: dictionary
|
f5322:m4
|
def metadata(access_token, text):
|
headers = {<EOL>'<STR_LIT:Content-Type>': '<STR_LIT:application/json>',<EOL>'<STR_LIT>': '<STR_LIT>' + str(access_token)<EOL>}<EOL>payload = {'<STR_LIT:text>': text}<EOL>request = requests.post(metadata_url, json=payload, headers=headers)<EOL>if request.status_code == <NUM_LIT>:<EOL><INDENT>metadata = request.json()<EOL>return metadata<EOL><DEDENT>return {'<STR_LIT:status>': request.status_code, "<STR_LIT:message>": request.text}<EOL>
|
Name: metadata
Parameters: access_token, text (string)
Return: dictionary
|
f5322:m5
|
def save_model(self, request, obj, form, change):
|
if obj.bulk_pubmed_query:<EOL><INDENT>obj.publications = '<STR_LIT>'<EOL>obj.perform_bulk_pubmed_query()<EOL>form.cleaned_data['<STR_LIT>'] = form.cleaned_data['<STR_LIT>'] | obj.publications.all()<EOL><DEDENT>super().save_model(request, obj, form, change)<EOL>
|
Allow Bulk PubMed Query to update publications field on admin form.
|
f5326:c4:m0
|
def save(self, *args, **kwargs):
|
if self.no_query:<EOL><INDENT>if not self.pk or self.pmid > <NUM_LIT:0>:<EOL><INDENT>try:<EOL><INDENT>pmid_min = Publication.objects.all().aggregate(<EOL>models.Min('<STR_LIT>'))['<STR_LIT>'] - <NUM_LIT:1><EOL><DEDENT>except:<EOL><INDENT>self.pmid = <NUM_LIT:0><EOL><DEDENT>else:<EOL><INDENT>self.pmid = min(<NUM_LIT:0>, pmid_min)<EOL><DEDENT><DEDENT>self.pubmed_url = '<STR_LIT>'<EOL>self.mini_citation = '<STR_LIT>'.format(<EOL>self.first_author, self.year, self.journal)<EOL><DEDENT>elif self.redo_query or not self.pk:<EOL><INDENT>if self.pmid:<EOL><INDENT>query = self.pmid<EOL><DEDENT>else:<EOL><INDENT>query = self.pubmed_url<EOL><DEDENT>email = "<STR_LIT>" <EOL>lookup = pubmed_lookup.PubMedLookup(query, email)<EOL>publication = pubmed_lookup.Publication(lookup)<EOL>self.pmid = publication.pmid<EOL>self.pubmed_url = publication.pubmed_url<EOL>self.title = strip_tags(publication.title)<EOL>self.authors = publication.authors<EOL>self.first_author = publication.first_author<EOL>self.last_author = publication.last_author<EOL>self.journal = publication.journal<EOL>self.year = publication.year<EOL>self.month = publication.month<EOL>self.day = publication.day<EOL>self.url = publication.url<EOL>self.citation = publication.cite()<EOL>self.mini_citation = publication.cite_mini()<EOL>self.abstract = strip_tags(publication.abstract)<EOL><DEDENT>self.redo_query = False<EOL>super().save(*args, **kwargs)<EOL>
|
Before saving, get publication's PubMed metadata if publication
is not already in database or if 'redo_query' is True.
|
f5334:c0:m1
|
def perform_bulk_pubmed_query(self):
|
if self.bulk_pubmed_query:<EOL><INDENT>failed_queries = []<EOL>pmid_list = re.findall(r'<STR_LIT>', self.bulk_pubmed_query)<EOL>for pmid in pmid_list:<EOL><INDENT>try:<EOL><INDENT>p, created = Publication.objects.get_or_create(pmid=pmid)<EOL><DEDENT>except:<EOL><INDENT>failed_queries.append(pmid)<EOL><DEDENT>else:<EOL><INDENT>self.publications.add(p.id)<EOL><DEDENT><DEDENT>if failed_queries:<EOL><INDENT>failed_queries.sort(key=int)<EOL>self.bulk_pubmed_query = '<STR_LIT>'.format('<STR_LIT:U+002CU+0020>'.join(failed_queries))<EOL><DEDENT>else:<EOL><INDENT>self.bulk_pubmed_query = '<STR_LIT>'<EOL><DEDENT><DEDENT>
|
If 'bulk_pubmed_query' contains any content, perform a bulk PubMed query,
add the publications to the publication set, and save.
|
f5334:c1:m0
|
def clean(self):
|
if self.bulk_pubmed_query and self.pk is None:<EOL><INDENT>raise ValidationError(<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>)<EOL><DEDENT>
|
Require creation of Publication Set before performing a Bulk PubMed Query.
|
f5334:c1:m1
|
def save(self, *args, **kwargs):
|
self.perform_bulk_pubmed_query()<EOL>super().save(*args, **kwargs)<EOL>
|
Before saving, execute 'perform_bulk_pubmed_query()'.
|
f5334:c1:m2
|
def read(*paths):
|
with open(os.path.join(*paths), '<STR_LIT:r>') as f:<EOL><INDENT>return f.read()<EOL><DEDENT>
|
Build a file path from *paths* and return the contents.
|
f5336:m0
|
def filter_slaves(selfie, slaves):
|
return [(s['<STR_LIT>'], s['<STR_LIT:port>']) for s in slaves<EOL>if not s['<STR_LIT>'] and<EOL>not s['<STR_LIT>'] and<EOL>s['<STR_LIT>'] == '<STR_LIT>']<EOL>
|
Remove slaves that are in an ODOWN or SDOWN state;
also remove slaves whose master-link-status is not 'ok'
|
f5337:c0:m3
|
def get_message(zelf):
|
try:<EOL><INDENT>message = zelf.r.master.rpoplpush(zelf.lijst, zelf._processing)<EOL>if message:<EOL><INDENT>LOG.debug('<STR_LIT>' % message)<EOL>return zelf._call_handler(message)<EOL><DEDENT><DEDENT>except zelf.r.generic_error:<EOL><INDENT>LOG.exception('<STR_LIT>')<EOL><DEDENT>
|
Get one message if available, else return None.
If a message is available, return the result of handler(message).
Does not block!
If you would like to call your handler manually, this is the way to
go: don't pass a handler to Listener() and the default handler will
log and return the message for your own manual processing.
|
f5337:c2:m3
|
def listen(zelf):
|
while zelf.active:<EOL><INDENT>try:<EOL><INDENT>msg = zelf.r.master.brpoplpush(zelf.lijst, zelf._processing,<EOL>zelf.read_time)<EOL>if msg:<EOL><INDENT>LOG.debug('<STR_LIT>' % msg)<EOL>zelf._call_handler(msg)<EOL><DEDENT><DEDENT>except zelf.r.generic_error:<EOL><INDENT>LOG.exception('<STR_LIT>')<EOL><DEDENT>finally:<EOL><INDENT>time.sleep(<NUM_LIT:0>)<EOL><DEDENT><DEDENT>
|
Listen indefinitely, handling messages as they come.
All Redis-specific exceptions are handled; anything your handler raises
will not be. Setting active to False on the Listener object
will gracefully stop the listen() function.
|
f5337:c2:m4
|
def stop(self):
|
if hasattr(self, '<STR_LIT>'):<EOL><INDENT>if self.process is not None:<EOL><INDENT>try:<EOL><INDENT>is_running = self.process.poll() is None<EOL><DEDENT>except AttributeError:<EOL><INDENT>is_running = False<EOL><DEDENT>if is_running:<EOL><INDENT>self.bundle_engine.logline("<STR_LIT>".format(self.service.name))<EOL>self.term_signal_sent = True<EOL>try:<EOL><INDENT>for childproc in psutil.Process(self.process.pid).children(recursive=True):<EOL><INDENT>childproc.send_signal(signal.SIGINT)<EOL><DEDENT><DEDENT>except psutil.NoSuchProcess:<EOL><INDENT>pass<EOL><DEDENT>except AttributeError:<EOL><INDENT>pass<EOL><DEDENT>try:<EOL><INDENT>self.process.send_signal(self.service.stop_signal)<EOL><DEDENT>except OSError as e:<EOL><INDENT>if e.errno == <NUM_LIT:3>: <EOL><INDENT>pass<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>self.bundle_engine.warnline("<STR_LIT>".format(self.service.name))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.bundle_engine.warnline("<STR_LIT>".format(self.service.name))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>self.bundle_engine.warnline("<STR_LIT>".format(self.service.name))<EOL><DEDENT>
|
Ask politely, first, with SIGINT and SIGQUIT.
|
f5340:c0:m4
|
def kill(self):
|
if not self.is_dead():<EOL><INDENT>self.bundle_engine.warnline("<STR_LIT>".format(self.service.name))<EOL>try:<EOL><INDENT>if hasattr(self.process, '<STR_LIT>'):<EOL><INDENT>for child in psutil.Process(self.process.pid).children(recursive=True):<EOL><INDENT>os.kill(child.pid, signal.SIGKILL)<EOL><DEDENT>self.process.kill()<EOL><DEDENT><DEDENT>except psutil.NoSuchProcess:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>
|
Murder the children of this service in front of it, and then murder the service itself.
|
f5340:c0:m6
|
def _signal_callback(self, handle, signum):
|
if signum == signal.SIGINT:<EOL><INDENT>self._close_handles()<EOL><DEDENT>
|
Shutdown tail if ctrl-C is received.
|
f5341:c0:m2
|
def cat(self, numlines=None):
|
if len(self.titles) == <NUM_LIT:1>:<EOL><INDENT>lines = self.lines()<EOL>if numlines is not None:<EOL><INDENT>lines = lines[len(lines)-numlines:]<EOL><DEDENT>log("<STR_LIT:\n>".join(lines))<EOL><DEDENT>else:<EOL><INDENT>lines = [self._printtuple(line[<NUM_LIT:0>], line[<NUM_LIT:1>]) for line in self.lines()]<EOL>if numlines is not None:<EOL><INDENT>lines = lines[len(lines)-numlines:]<EOL><DEDENT>log("<STR_LIT>".join(lines))<EOL><DEDENT>
|
Return a list of lines output by this service.
|
f5341:c0:m12
|
def _match_service(self, line_with_color):
|
line = re.compile("<STR_LIT>").sub("<STR_LIT>", line_with_color) <EOL>regexp = re.compile(r"<STR_LIT>")<EOL>if regexp.match(line):<EOL><INDENT>title = regexp.match(line).group(<NUM_LIT:1>).strip()<EOL>if title in self.titles:<EOL><INDENT>return (title, regexp.match(line).group(<NUM_LIT:2>))<EOL><DEDENT><DEDENT>return None<EOL>
|
Return line if line matches this service's name, return None otherwise.
|
f5341:c1:m1
|
@property<EOL><INDENT>def tail(self):<DEDENT>
|
return Tail(self)<EOL>
|
Get a Tail object for these logs.
|
f5341:c1:m2
|
def lines(self):
|
lines = []<EOL>with open(self._logfilename, "<STR_LIT:r>") as log_handle:<EOL><INDENT>for line in log_handle:<EOL><INDENT>matching_line = self._match_service(line)<EOL>if matching_line is not None:<EOL><INDENT>lines.append(matching_line)<EOL><DEDENT><DEDENT><DEDENT>return lines<EOL>
|
Return a list of lines output by this service.
|
f5341:c1:m3
|
def __repr__(self):
|
return "<STR_LIT:\n>".join(["<STR_LIT>".format(title.rjust(self.max_length_of_titles), line) for title, line in self.lines()])<EOL>
|
Generate a string representation.
|
f5341:c1:m4
|
def json(self):
|
lines = []<EOL>for line in self.lines():<EOL><INDENT>try:<EOL><INDENT>if len(line) == <NUM_LIT:1>:<EOL><INDENT>lines.append(json.loads(line, strict=False))<EOL><DEDENT>else:<EOL><INDENT>lines.append(json.loads(line[<NUM_LIT:1>], strict=False))<EOL><DEDENT><DEDENT>except ValueError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>return lines<EOL>
|
Return a list of JSON objects output by this service.
|
f5341:c1:m6
|
def log(self, message):
|
log(message)<EOL>
|
Print a normal priority message.
|
f5342:c0:m9
|
def warn(self, message):
|
warn(message)<EOL>
|
Print a higher priority message.
|
f5342:c0:m10
|
def redirect_stdout(self):
|
self.hijacked_stdout = sys.stdout<EOL>self.hijacked_stderr = sys.stderr<EOL>sys.stdout = open(self.hitch_dir.driverout(), "<STR_LIT>", <NUM_LIT:0>)<EOL>sys.stderr = open(self.hitch_dir.drivererr(), "<STR_LIT>", <NUM_LIT:0>)<EOL>
|
Redirect stdout to file so that it can be tailed and aggregated with the other logs.
|
f5342:c0:m14
|
def unredirect_stdout(self):
|
if hasattr(self, '<STR_LIT>') and hasattr(self, '<STR_LIT>'):<EOL><INDENT>sys.stdout = self.hijacked_stdout<EOL>sys.stderr = self.hijacked_stderr<EOL><DEDENT>
|
Redirect stdout and stderr back to screen.
|
f5342:c0:m15
|
def time_travel(self, datetime=None, timedelta=None, seconds=<NUM_LIT:0>, minutes=<NUM_LIT:0>, hours=<NUM_LIT:0>, days=<NUM_LIT:0>):
|
if datetime is not None:<EOL><INDENT>self.timedelta = datetime - python_datetime.now()<EOL><DEDENT>if timedelta is not None:<EOL><INDENT>self.timedelta = self.timedelta + timedelta<EOL><DEDENT>self.timedelta = self.timedelta + python_timedelta(seconds=seconds)<EOL>self.timedelta = self.timedelta + python_timedelta(minutes=minutes)<EOL>self.timedelta = self.timedelta + python_timedelta(hours=hours)<EOL>self.timedelta = self.timedelta + python_timedelta(days=days)<EOL>log("<STR_LIT>".format(humanize.naturaltime(self.now())))<EOL>faketime.change_time(self.hitch_dir.faketime(), self.now())<EOL>
|
Mock moving forward or backward in time by shifting the system clock fed to the services tested.
Note that all of these arguments can be used together, individually or not at all. The time
traveled to will be the sum of all specified time deltas from datetime. If no datetime is specified,
the deltas will be added to the current time.
Args:
datetime (Optional[datetime]): Time travel to specific datetime.
timedelta (Optional[timedelta]): Time travel to 'timedelta' from now.
seconds (Optional[number]): Time travel 'seconds' seconds from now.
minutes (Optional[number]): Time travel 'minutes' minutes from now.
hours (Optional[number]): Time travel 'hours' hours from now.
days (Optional[number]): Time travel 'days' days from now.
|
f5342:c0:m18
|
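All the deltas fold into a single stored timedelta, and now() is real time plus that offset, so repeated calls accumulate:

```python
from datetime import timedelta

offset = timedelta()                       # starts at zero
offset += timedelta(seconds=30)            # time_travel(seconds=30)
offset += timedelta(minutes=5, hours=1)    # time_travel(minutes=5, hours=1)
print(offset)                              # 1:05:30 ahead of the real clock
```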
def now(self):
|
return python_datetime.now() + self.timedelta<EOL>
|
Get a current (mocked) datetime. This will be the current datetime unless you have time traveled.
|
f5342:c0:m19
|
def wait_for_ipykernel(self, service_name, timeout=<NUM_LIT:10>):
|
kernel_line = self._services[service_name].logs.tail.until(<EOL>lambda line: "<STR_LIT>" in line[<NUM_LIT:1>], timeout=timeout, lines_back=<NUM_LIT:5><EOL>)<EOL>return kernel_line.replace("<STR_LIT>", "<STR_LIT>").strip()<EOL>
|
Wait for an IPython kernel-nnnn.json filename message to appear in log.
|
f5342:c0:m21
|
def connect_to_ipykernel(self, service_name, timeout=<NUM_LIT:10>):
|
kernel_json_file = self.wait_for_ipykernel(service_name, timeout=timeout)<EOL>self.start_interactive_mode()<EOL>subprocess.check_call([<EOL>sys.executable, "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", kernel_json_file<EOL>])<EOL>self.stop_interactive_mode()<EOL>
|
Connect to an IPython kernel as soon as its message is logged.
|
f5342:c0:m22
|
def longest_service_name(self):
|
return max([len(service_handle.service.name) for service_handle in self.service_handles] + [<NUM_LIT:0>])<EOL>
|
Length of the longest service name.
|
f5344:c0:m8
|
def signal_cb(self, handle, signum):
|
SIGNALS_TO_NAMES_DICT = dict((getattr(signal, n), n) for n in dir(signal) if n.startswith('<STR_LIT>') and '<STR_LIT:_>' not in n)<EOL>self.logline("<STR_LIT>".format(SIGNALS_TO_NAMES_DICT.get(signum)))<EOL>if signum in (signal.SIGTERM, signal.SIGHUP, signal.SIGQUIT, signal.SIGINT):<EOL><INDENT>self.stop()<EOL><DEDENT>
|
Handle ctrl-C if not in ipython shell. Always shutdown on SIGTERM.
|
f5346:c0:m1
|