| _id (string, 2–7 chars) | title (string, 1–88 chars) | partition (string, 3 classes) | text (string, 75–19.8k chars) | language (string, 1 class) | meta_information (dict) |
|---|---|---|---|---|---|
q17500
|
ModelBase.param_alter
|
train
|
def param_alter(self,
param,
default=None,
unit=None,
descr=None,
tomatrix=None,
nonzero=None,
mandatory=None,
power=None,
voltage=None,
current=None,
z=None,
y=None,
r=None,
g=None,
dccurrent=None,
dcvoltage=None,
time=None,
**kwargs):
"""
Set an attribute of an existing parameter.
Used to alter attributes inherited from parent models.
See ``self.param_define`` for argument descriptions.
"""
assert param in self._data, \
'parameter <{}> does not exist in {}'.format(param, self._name)
def alter_attr(p, attr, value):
"""Set self.__dict__[attr] for param based on value
"""
if value is None:
return
elif (value is True) and (p not in self.__dict__[attr]):
self.__dict__[attr].append(p)
elif (value is False) and (p in self.__dict__[attr]):
self.__dict__[attr].remove(p)
else:
self.log('No need to alter {} for {}'.format(attr, p))
if default is not None:
self._data.update({param: default})
if unit is not None:
self._units.update({param: unit})
if descr is not None:
self._descr.update({param: descr})
alter_attr(param, '_params', tomatrix)
alter_attr(param, '_zeros', nonzero)
alter_attr(param, '_mandatory', mandatory)
alter_attr(param, '_powers', power)
alter_attr(param, '_voltages', voltage)
alter_attr(param, '_currents', current)
alter_attr(param, '_z', z)
alter_attr(param, '_y', y)
alter_attr(param, '_r', r)
alter_attr(param, '_g', g)
alter_attr(param, '_dcvoltages', dcvoltage)
alter_attr(param, '_dccurrents', dccurrent)
alter_attr(param, '_times', time)
|
python
|
{
"resource": ""
}
|
q17501
|
ModelBase.eq_add
|
train
|
def eq_add(self, expr, var, intf=False):
"""
Add an equation to this model.
An equation is associated with the addresses of a variable. The
number of equations must equal that of variables.
Stored to ``self._equations`` is a tuple of ``(expr, var, intf,
ty)`` where ``ty`` is in ('f', 'g')
:param str expr: equation expression
:param str var: variable name to be associated with
:param bool intf: True if this is an interface equation,
namely, an equation for a variable outside this model
:return: None
"""
# determine the type of the equation, ('f', 'g') based on var
# We assume that all differential equations are only modifiable by
# the model itself
# Only interface to algebraic variables of external models
ty = ''
if var in self._algebs:
ty = 'g'
elif var in self._states:
ty = 'f'
else:
for _, item in self._ac.items():
if var in item:
ty = 'g'
if intf is False:
intf = True
for _, item in self._dc.items():
if var == item:
ty = 'g'
if intf is False:
intf = True
if ty == '':
self.log(
'Equation associated with interface variable {var} '
'assumed as algeb'.
format(var=var),
DEBUG)
ty = 'g'
self._equations.append((expr, var, intf, ty))
|
python
|
{
"resource": ""
}
|
q17502
|
ModelBase.read_data_ext
|
train
|
def read_data_ext(self, model: str, field: str, idx=None, astype=None):
"""
Return a field of a model or group at the given indices
:param str model: name of the group or model to retrieve
:param str field: name of the field
:param list,int,float,str idx: idx of elements to access
:param type astype: type cast
:return:
"""
ret = list()
if model in self.system.devman.devices:
ret = self.system.__dict__[model].get_field(field, idx)
elif model in self.system.devman.group.keys():
# ===============================================================
# Since ``self.system.devman.group`` is an unordered dictionary,
# ``idx`` must be given to retrieve ``field`` across models.
#
# Returns a matrix by default
# ===============================================================
assert idx is not None, \
'idx must be specified when accessing group fields'
astype = matrix
for item in idx:
dev_name = self.system.devman.group[model].get(item, None)
ret.append(self.read_data_ext(dev_name, field, idx=item))
else:
raise NameError(
'Model or Group <{0}> does not exist.'.format(model))
if (ret is None) or isinstance(ret, (int, float, str)):
return ret
elif astype is None:
return ret
else:
return astype(ret)
|
python
|
{
"resource": ""
}
|
q17503
|
ModelBase.copy_data_ext
|
train
|
def copy_data_ext(self, model, field, dest=None, idx=None, astype=None):
"""
Retrieve the field of another model and store it as a field.
:param model: name of the source, either a model name or a group name
:param field: name of the field to retrieve
:param dest: name of the destination field in ``self``
:param idx: idx of elements to access
:param astype: type cast
:type model: str
:type field: str
:type dest: str
:type idx: list, matrix
:type astype: None, list, matrix
:return: None
"""
# use default destination
if not dest:
dest = field
assert dest not in self._states + self._algebs
self.__dict__[dest] = self.read_data_ext(
model, field, idx, astype=astype)
if idx is not None:
if len(idx) == self.n:
self.link_to(model, idx, self.idx)
|
python
|
{
"resource": ""
}
|
q17504
|
ModelBase.elem_add
|
train
|
def elem_add(self, idx=None, name=None, **kwargs):
"""
Add an element to this model
:param idx: element idx
:param name: element name
:param kwargs: keyword arguments of the parameters
:return: allocated idx
"""
idx = self.system.devman.register_element(dev_name=self._name, idx=idx)
self.system.__dict__[self._group].register_element(self._name, idx)
self.uid[idx] = self.n
self.idx.append(idx)
self.mdl_to.append(list())
self.mdl_from.append(list())
# self.n += 1
if name is None:
self.name.append(self._name + ' ' + str(self.n))
else:
self.name.append(name)
# check mandatory parameters
for key in self._mandatory:
if key not in kwargs.keys():
self.log(
'Mandatory parameter <{:s}.{:s}> missing'.format(
self.name[-1], key), ERROR)
sys.exit(1)
# set default values
for key, value in self._data.items():
self.__dict__[key].append(value)
# overwrite custom values
for key, value in kwargs.items():
if key not in self._data:
self.log(
'Parameter <{:s}.{:s}> is not used.'.format(
self.name[-1], key), WARNING)
continue
self.__dict__[key][-1] = value
# check data consistency
if not value and key in self._zeros:
if key == 'Sn':
default = self.system.mva
elif key == 'fn':
default = self.system.config.freq
else:
default = self._data[key]
self.__dict__[key][-1] = default
self.log(
'Using default value for <{:s}.{:s}>'.format(
self.name[-1], key), WARNING)
return idx
|
python
|
{
"resource": ""
}
|
q17505
|
ModelBase.elem_remove
|
train
|
def elem_remove(self, idx=None):
"""
Remove elements labeled by idx from this model instance.
:param list,matrix idx: indices of elements to be removed
:return: None
"""
if idx is not None:
if idx in self.uid:
key = idx
item = self.uid[idx]
else:
self.log('The item <{:s}> does not exist.'.format(idx), ERROR)
return None
else:
return None
convert = False
if isinstance(self.__dict__[self._params[0]], matrix):
self._param_to_list()
convert = True
# self.n -= 1
self.uid.pop(key, '')
self.idx.pop(item)
self.mdl_from.pop(item)
self.mdl_to.pop(item)
for x, y in self.uid.items():
if y > item:
self.uid[x] = y - 1
for param in self._data:
self.__dict__[param].pop(item)
for param in self._service:
if len(self.__dict__[param]) == (self.n + 1):
if isinstance(self.__dict__[param], list):
self.__dict__[param].pop(item)
elif isinstance(self.__dict__[param], matrix):
service = list(self.__dict__[param])
service.pop(item)
self.__dict__[param] = matrix(service)
for x in self._states:
if len(self.__dict__[x]):
self.__dict__[x].pop(item)
for y in self._algebs:
if self.__dict__[y]:
self.__dict__[y].pop(item)
for key, param in self._ac.items():
if isinstance(param, list):
for subparam in param:
if len(self.__dict__[subparam]):
self.__dict__[subparam].pop(item)
else:
self.__dict__[param].pop(item)
for key, param in self._dc.items():
self.__dict__[param].pop(item)
self.name.pop(item)
if convert and self.n:
self._param_to_matrix()
|
python
|
{
"resource": ""
}
|
q17506
|
ModelBase.data_to_elem_base
|
train
|
def data_to_elem_base(self):
"""
Convert parameter data to element base
Returns
-------
None
"""
if self._flags['sysbase'] is False:
return
for key, val in self._store.items():
self.__dict__[key] = val
self._flags['sysbase'] = False
|
python
|
{
"resource": ""
}
|
q17507
|
ModelBase._intf_network
|
train
|
def _intf_network(self):
"""
Retrieve the ac and dc network interface variable indices.
:Example:
``self._ac = {'bus1': (a1, v1)}`` gives
- indices: self.bus1
- system.Bus.a -> self.a1
- system.Bus.v -> self.v1
``self._dc = {'node1': v1}`` gives
- indices: self.node1
- system.Node.v -> self.v1
:return: None
"""
for key, val in self._ac.items():
self.copy_data_ext(
model='Bus', field='a', dest=val[0], idx=self.__dict__[key])
self.copy_data_ext(
model='Bus', field='v', dest=val[1], idx=self.__dict__[key])
for key, val in self._dc.items():
self.copy_data_ext(
model='Node', field='v', dest=val, idx=self.__dict__[key])
# check for interface voltage differences
self._check_Vn()
|
python
|
{
"resource": ""
}
|
q17508
|
ModelBase._intf_ctrl
|
train
|
def _intf_ctrl(self):
"""
Retrieve variable indices of controlled models.
Control interfaces are specified in ``self._ctrl``.
Each ``key:value`` pair has ``key`` being the variable names
for the reference idx and ``value`` being a tuple of
``(model name, field to read, destination field, return type)``.
:Example:
``self._ctrl = {'syn': ('Synchronous', 'omega', 'w', list)}``
- indices: self.syn
- self.w = list(system.Synchronous.omega)
:return: None
"""
for key, val in self._ctrl.items():
model, field, dest, astype = val
self.copy_data_ext(
model, field, dest=dest, idx=self.__dict__[key], astype=astype)
|
python
|
{
"resource": ""
}
|
q17509
|
ModelBase._addr
|
train
|
def _addr(self):
"""
Assign dae addresses for algebraic and state variables.
Addresses are stored in ``self.__dict__[var]``.
``dae.m`` and ``dae.n`` are updated accordingly.
Returns
-------
None
"""
group_by = self._config['address_group_by']
assert not self._flags['address'], "{} address already assigned".format(self._name)
assert group_by in ('element', 'variable')
m0 = self.system.dae.m
n0 = self.system.dae.n
mend = m0 + len(self._algebs) * self.n
nend = n0 + len(self._states) * self.n
if group_by == 'variable':
for idx, item in enumerate(self._algebs):
self.__dict__[item] = list(
range(m0 + idx * self.n, m0 + (idx + 1) * self.n))
for idx, item in enumerate(self._states):
self.__dict__[item] = list(
range(n0 + idx * self.n, n0 + (idx + 1) * self.n))
elif group_by == 'element':
for idx, item in enumerate(self._algebs):
self.__dict__[item] = list(
range(m0 + idx, mend, len(self._algebs)))
for idx, item in enumerate(self._states):
self.__dict__[item] = list(
range(n0 + idx, nend, len(self._states)))
self.system.dae.m = mend
self.system.dae.n = nend
self._flags['address'] = True
|
python
|
{
"resource": ""
}
|
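A worked example of the two address layouts that `ModelBase._addr` produces above (a standalone sketch with made-up sizes: two algebraic variables, `n = 3` elements, starting offset `m0 = 10`):

```python
# Reproduce the index arithmetic from _addr for both grouping modes.
algebs = ['a', 'v']   # hypothetical algebraic variable names
n = 3                 # number of elements
m0 = 10               # current dae.m offset
mend = m0 + len(algebs) * n

by_variable = {item: list(range(m0 + i * n, m0 + (i + 1) * n))
               for i, item in enumerate(algebs)}
by_element = {item: list(range(m0 + i, mend, len(algebs)))
              for i, item in enumerate(algebs)}

print(by_variable)  # {'a': [10, 11, 12], 'v': [13, 14, 15]}
print(by_element)   # {'a': [10, 12, 14], 'v': [11, 13, 15]}
```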
q17510
|
ModelBase._varname
|
train
|
def _varname(self):
"""
Set up variable names in ``self.system.varname``.
Variable names follow the convention ``VariableName ElementName``.
A maximum of 33 characters is kept for each variable name.
:return: None
"""
if not self._flags['address']:
self.log('Unable to assign Varname before allocating address',
ERROR)
return
varname = self.system.varname
for i in range(self.n):
iname = str(self.name[i])
for e, var in enumerate(self._states):
unamex = self._unamex[e]
fnamex = self._fnamex[e]
idx = self.__dict__[var][i]
varname.unamex[idx] = '{} {}'.format(unamex, iname)[:33]
varname.fnamex[idx] = '$' + '{}\\ {}'.format(
fnamex, iname.replace(' ', '\\ '))[:33] + '$'
for e, var in enumerate(self._algebs):
unamey = self._unamey[e]
fnamey = self._fnamey[e]
idx = self.__dict__[var][i]
varname.unamey[idx] = '{} {}'.format(unamey, iname)[:33]
varname.fnamey[idx] = '$' + '{}\\ {}'.format(
fnamey, iname.replace(' ', '\\ '))[:33] + '$'
|
python
|
{
"resource": ""
}
|
q17511
|
ModelBase._param_to_matrix
|
train
|
def _param_to_matrix(self):
"""
Convert parameters defined in `self._params` to `cvxopt.matrix`
:return None
"""
for item in self._params:
self.__dict__[item] = matrix(self.__dict__[item], tc='d')
|
python
|
{
"resource": ""
}
|
q17512
|
ModelBase._param_to_list
|
train
|
def _param_to_list(self):
"""
Convert parameters defined in `self._params` to list
:return None
"""
for item in self._params:
self.__dict__[item] = list(self.__dict__[item])
|
python
|
{
"resource": ""
}
|
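A small sketch of the list/matrix round trip performed by `_param_to_matrix` and `_param_to_list` (assumes `cvxopt` is installed):

```python
from cvxopt import matrix

params = [1, 2, 3]
as_matrix = matrix(params, tc='d')   # dense double-precision column vector, as in _param_to_matrix
as_list = list(as_matrix)            # back to a plain Python list, as in _param_to_list
print(as_list)                       # [1.0, 2.0, 3.0]
```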
q17513
|
ModelBase.log
|
train
|
def log(self, msg, level=INFO):
"""Record a line of log in logger
:param str msg: content of the message
:param level: logging level
:return: None
"""
logger.log(level, '<{}> - '.format(self._name) + msg)
|
python
|
{
"resource": ""
}
|
q17514
|
ModelBase.init_limit
|
train
|
def init_limit(self, key, lower=None, upper=None, limit=False):
""" check if data is within limits. reset if violates"""
above = agtb(self.__dict__[key], upper)
for idx, item in enumerate(above):
if item == 0.:
continue
maxval = upper[idx]
self.log(
'{0} <{1}.{2}> above its maximum of {3}.'.format(
self.name[idx], self._name, key, maxval), ERROR)
if limit:
self.__dict__[key][idx] = maxval
below = altb(self.__dict__[key], lower)
for idx, item in enumerate(below):
if item == 0.:
continue
minval = lower[idx]
self.log(
'{0} <{1}.{2}> below its minimum of {3}.'.format(
self.name[idx], self._name, key, minval), ERROR)
if limit:
self.__dict__[key][idx] = minval
|
python
|
{
"resource": ""
}
|
q17515
|
ModelBase.doc
|
train
|
def doc(self, export='plain'):
"""
Build help document into a Texttable table
:param ('plain', 'latex') export: export format
:return: the drawn table as a string
"""
title = '<{}.{}>'.format(self._group, self._name)
table = Tab(export=export, title=title, descr=self.__doc__)
rows = []
keys = sorted(self._data.keys())
for key in keys:
val = self._data[key]
suf = ''
if key in self._mandatory:
suf = ' *'
elif key in self._powers + \
self._voltages + \
self._currents + \
self._z + \
self._y + \
self._dccurrents + \
self._dcvoltages + \
self._r + \
self._g + \
self._times:
suf = ' #'
c1 = key + suf
c2 = self._descr.get(key, '')
c3 = val
c4 = self._units.get(key, '-')
rows.append([c1, c2, c3, c4])
table.add_rows(rows, header=False)
table.header(['Parameter', 'Description', 'Default', 'Unit'])
if export == 'plain':
pass
elif export == 'latex':
raise NotImplementedError('LaTex output not implemented')
return table.draw()
|
python
|
{
"resource": ""
}
|
q17516
|
ModelBase.check_limit
|
train
|
def check_limit(self, varname, vmin=None, vmax=None):
"""
Check if the variable values are within the limits.
Return False if fails.
"""
retval = True
assert varname in self.__dict__
if varname in self._algebs:
val = self.system.dae.y[self.__dict__[varname]]
elif varname in self._states:
val = self.system.dae.x[self.__dict__[varname]]
else: # service or temporary variable
val = matrix(self.__dict__[varname])
vmin = matrix(self.__dict__[vmin])
comp = altb(val, vmin)
comp = mul(self.u, comp)
for c, n, idx in zip(comp, self.name, range(self.n)):
if c == 1:
v = val[idx]
vm = vmin[idx]
self.log(
'Init of <{}.{}>={:.4g} is lower than min={:6.4g}'.format(
n, varname, v, vm), ERROR)
retval = False
vmax = matrix(self.__dict__[vmax])
comp = agtb(val, vmax)
comp = mul(self.u, comp)
for c, n, idx in zip(comp, self.name, range(self.n)):
if c == 1:
v = val[idx]
vm = vmax[idx]
self.log(
'Init of <{}.{}>={:.4g} is higher than max={:.4g}'.format(
n, varname, v, vm), ERROR)
retval = False
return retval
|
python
|
{
"resource": ""
}
|
q17517
|
ModelBase.on_bus
|
train
|
def on_bus(self, bus_idx):
"""
Return the indices of elements on the given buses for shunt-connected
elements
:param bus_idx: idx of the buses to which the elements are connected
:return: idx of elements connected to bus_idx
"""
assert hasattr(self, 'bus')
ret = []
if isinstance(bus_idx, (int, float, str)):
bus_idx = [bus_idx]
for item in bus_idx:
idx = []
for e, b in enumerate(self.bus):
if b == item:
idx.append(self.idx[e])
if len(idx) == 1:
idx = idx[0]
elif len(idx) == 0:
idx = None
ret.append(idx)
if len(ret) == 1:
ret = ret[0]
return ret
|
python
|
{
"resource": ""
}
|
q17518
|
ModelBase.link_bus
|
train
|
def link_bus(self, bus_idx):
"""
Return the indices of elements linking the given buses
:param bus_idx: idx of the buses to check
:return: a list of ``(element idx, foreign key)`` pairs, one per bus
"""
ret = []
if not self._config['is_series']:
self.log(
'link_bus function is not valid for non-series model <{}>'.
format(self._name))
return []
if isinstance(bus_idx, (int, float, str)):
bus_idx = [bus_idx]
fkey = list(self._ac.keys())
if 'bus' in fkey:
fkey.remove('bus')
nfkey = len(fkey)
fkey_val = [self.__dict__[i] for i in fkey]
for item in bus_idx:
idx = []
key = []
for i in range(self.n):
for j in range(nfkey):
if fkey_val[j][i] == item:
idx.append(self.idx[i])
key.append(fkey[j])
# <= 1 terminal should connect to the same bus
break
if len(idx) == 0:
idx = None
if len(key) == 0:
key = None
ret.append((idx, key))
return ret
|
python
|
{
"resource": ""
}
|
q17519
|
ModelBase.elem_find
|
train
|
def elem_find(self, field, value):
"""
Return the indices of the first elements whose ``field`` matches the given values.
``value`` should be unique in self.field.
This function does not check the uniqueness.
:param field: name of the supplied field
:param value: value of the field of the element to find
:return: idx of the elements
:rtype: list, int, float, str
"""
if isinstance(value, (int, float, str)):
value = [value]
f = list(self.__dict__[field])
uid = np.vectorize(f.index)(value)
return self.get_idx(uid)
|
python
|
{
"resource": ""
}
|
q17520
|
ModelBase._check_Vn
|
train
|
def _check_Vn(self):
"""Check data consistency of Vn and Vdcn if connected to Bus or Node
:return None
"""
if hasattr(self, 'bus') and hasattr(self, 'Vn'):
bus_Vn = self.read_data_ext('Bus', field='Vn', idx=self.bus)
for name, bus, Vn, Vn0 in zip(self.name, self.bus, self.Vn,
bus_Vn):
if Vn != Vn0:
self.log(
'<{}> has Vn={} different from bus <{}> Vn={}.'.format(
name, Vn, bus, Vn0), WARNING)
if hasattr(self, 'node') and hasattr(self, 'Vdcn'):
node_Vdcn = self.read_data_ext('Node', field='Vdcn', idx=self.node)
for name, node, Vdcn, Vdcn0 in zip(self.name, self.node, self.Vdcn,
node_Vdcn):
if Vdcn != Vdcn0:
self.log(
'<{}> has Vdcn={} different from node <{}> Vdcn={}.'
.format(name, Vdcn, node, Vdcn0), WARNING)
|
python
|
{
"resource": ""
}
|
q17521
|
PFLOW.reset
|
train
|
def reset(self):
"""
Reset all internal storage to initial status
Returns
-------
None
"""
self.solved = False
self.niter = 0
self.iter_mis = []
self.F = None
self.system.dae.factorize = True
|
python
|
{
"resource": ""
}
|
q17522
|
PFLOW.pre
|
train
|
def pre(self):
"""
Initialize system for power flow study
Returns
-------
None
"""
logger.info('-> Power flow study: {} method, {} start'.format(
self.config.method.upper(), 'flat' if self.config.flatstart else 'non-flat')
)
t, s = elapsed()
system = self.system
dae = self.system.dae
system.dae.init_xy()
for device, pflow, init0 in zip(system.devman.devices,
system.call.pflow, system.call.init0):
if pflow and init0:
system.__dict__[device].init0(dae)
# check for islands
system.check_islands(show_info=True)
t, s = elapsed(t)
logger.debug('Power flow initialized in {:s}.'.format(s))
|
python
|
{
"resource": ""
}
|
q17523
|
PFLOW.run
|
train
|
def run(self, **kwargs):
"""
call the power flow solution routine
Returns
-------
bool
True for success, False for fail
"""
ret = None
# initialize the Y matrix and the initial guess
self.pre()
t, _ = elapsed()
# call solution methods
if self.config.method == 'NR':
ret = self.newton()
elif self.config.method == 'DCPF':
ret = self.dcpf()
elif self.config.method in ('FDPF', 'FDBX', 'FDXB'):
ret = self.fdpf()
self.post()
_, s = elapsed(t)
if self.solved:
logger.info(' Solution converged in {} in {} iterations'.format(s, self.niter))
else:
logger.warning(' Solution failed in {} in {} iterations'.format(s,
self.niter))
return ret
|
python
|
{
"resource": ""
}
|
q17524
|
PFLOW.newton
|
train
|
def newton(self):
"""
Newton power flow routine
Returns
-------
(bool, int)
success flag, number of iterations
"""
dae = self.system.dae
while True:
inc = self.calc_inc()
dae.x += inc[:dae.n]
dae.y += inc[dae.n:dae.n + dae.m]
self.niter += 1
max_mis = max(abs(inc))
self.iter_mis.append(max_mis)
self._iter_info(self.niter)
if max_mis < self.config.tol:
self.solved = True
break
elif self.niter > 5 and max_mis > 1000 * self.iter_mis[0]:
logger.warning('Blown up in {0} iterations.'.format(self.niter))
break
if self.niter > self.config.maxit:
logger.warning('Reached maximum number of iterations.')
break
return self.solved, self.niter
|
python
|
{
"resource": ""
}
|
q17525
|
PFLOW.dcpf
|
train
|
def dcpf(self):
"""
Calculate linearized power flow
Returns
-------
(bool, int)
success flag, number of iterations
"""
dae = self.system.dae
self.system.Bus.init0(dae)
self.system.dae.init_g()
Va0 = self.system.Bus.angle
for model, pflow, gcall in zip(self.system.devman.devices, self.system.call.pflow, self.system.call.gcall):
if pflow and gcall:
self.system.__dict__[model].gcall(dae)
sw = self.system.SW.a
sw.sort(reverse=True)
no_sw = self.system.Bus.a[:]
no_swv = self.system.Bus.v[:]
for item in sw:
no_sw.pop(item)
no_swv.pop(item)
Bp = self.system.Line.Bp[no_sw, no_sw]
p = matrix(self.system.dae.g[no_sw], (no_sw.__len__(), 1))
p = p-self.system.Line.Bp[no_sw, sw]*Va0[sw]
Sp = self.solver.symbolic(Bp)
N = self.solver.numeric(Bp, Sp)
self.solver.solve(Bp, Sp, N, p)
self.system.dae.y[no_sw] = p
self.solved = True
self.niter = 1
return self.solved, self.niter
|
python
|
{
"resource": ""
}
|
q17526
|
PFLOW._iter_info
|
train
|
def _iter_info(self, niter, level=logging.INFO):
"""
Log iteration number and mismatch
Parameters
----------
niter
iteration number
level
logging level
Returns
-------
None
"""
max_mis = self.iter_mis[niter - 1]
msg = ' Iter {:<d}. max mismatch = {:8.7f}'.format(niter, max_mis)
logger.log(level, msg)
|
python
|
{
"resource": ""
}
|
q17527
|
PFLOW.calc_inc
|
train
|
def calc_inc(self):
"""
Calculate the Newton incrementals for each step
Returns
-------
matrix
The solution to ``x = -A\\b``
"""
system = self.system
self.newton_call()
A = sparse([[system.dae.Fx, system.dae.Gx],
[system.dae.Fy, system.dae.Gy]])
inc = matrix([system.dae.f, system.dae.g])
if system.dae.factorize:
self.F = self.solver.symbolic(A)
system.dae.factorize = False
try:
N = self.solver.numeric(A, self.F)
self.solver.solve(A, self.F, N, inc)
except ValueError:
logger.warning('Unexpected symbolic factorization.')
system.dae.factorize = True
except ArithmeticError:
logger.warning('Jacobian matrix is singular.')
system.dae.check_diag(system.dae.Gy, 'unamey')
return -inc
|
python
|
{
"resource": ""
}
|
q17528
|
PFLOW.newton_call
|
train
|
def newton_call(self):
"""
Function calls for Newton power flow
Returns
-------
None
"""
# system = self.system
# exec(system.call.newton)
system = self.system
dae = self.system.dae
system.dae.init_fg()
system.dae.reset_small_g()
# evaluate algebraic equation mismatches
for model, pflow, gcall in zip(system.devman.devices,
system.call.pflow, system.call.gcall):
if pflow and gcall:
system.__dict__[model].gcall(dae)
# eval differential equations
for model, pflow, fcall in zip(system.devman.devices,
system.call.pflow, system.call.fcall):
if pflow and fcall:
system.__dict__[model].fcall(dae)
# reset islanded buses mismatches
system.Bus.gisland(dae)
if system.dae.factorize:
system.dae.init_jac0()
# evaluate constant Jacobian elements
for model, pflow, jac0 in zip(system.devman.devices,
system.call.pflow, system.call.jac0):
if pflow and jac0:
system.__dict__[model].jac0(dae)
dae.temp_to_spmatrix('jac0')
dae.setup_FxGy()
# evaluate Gy
for model, pflow, gycall in zip(system.devman.devices,
system.call.pflow, system.call.gycall):
if pflow and gycall:
system.__dict__[model].gycall(dae)
# evaluate Fx
for model, pflow, fxcall in zip(system.devman.devices,
system.call.pflow, system.call.fxcall):
if pflow and fxcall:
system.__dict__[model].fxcall(dae)
# reset islanded buses Jacobians
system.Bus.gyisland(dae)
dae.temp_to_spmatrix('jac')
|
python
|
{
"resource": ""
}
|
q17529
|
PFLOW.post
|
train
|
def post(self):
"""
Post processing for solved systems.
Store load, generation data on buses.
Store reactive power generation on PVs and slack generators.
Calculate series flows and area flows.
Returns
-------
None
"""
if not self.solved:
return
system = self.system
exec(system.call.pfload)
system.Bus.Pl = system.dae.g[system.Bus.a]
system.Bus.Ql = system.dae.g[system.Bus.v]
exec(system.call.pfgen)
system.Bus.Pg = system.dae.g[system.Bus.a]
system.Bus.Qg = system.dae.g[system.Bus.v]
if system.PV.n:
system.PV.qg = system.dae.y[system.PV.q]
if system.SW.n:
system.SW.pg = system.dae.y[system.SW.p]
system.SW.qg = system.dae.y[system.SW.q]
exec(system.call.seriesflow)
system.Area.seriesflow(system.dae)
|
python
|
{
"resource": ""
}
|
q17530
|
PV.init0
|
train
|
def init0(self, dae):
"""
Set initial voltage and reactive power for PV.
Overwrites Bus.voltage values
"""
dae.y[self.v] = self.v0
dae.y[self.q] = mul(self.u, self.qg)
|
python
|
{
"resource": ""
}
|
q17531
|
PV.disable_gen
|
train
|
def disable_gen(self, idx):
"""
Disable a PV element for TDS
Parameters
----------
idx
idx of the PV element to disable
Returns
-------
None
"""
self.u[self.uid[idx]] = 0
self.system.dae.factorize = True
|
python
|
{
"resource": ""
}
|
q17532
|
PQ.init0
|
train
|
def init0(self, dae):
"""Set initial p and q for power flow"""
self.p0 = matrix(self.p, (self.n, 1), 'd')
self.q0 = matrix(self.q, (self.n, 1), 'd')
|
python
|
{
"resource": ""
}
|
q17533
|
PQ.init1
|
train
|
def init1(self, dae):
"""Set initial voltage for time domain simulation"""
self.v0 = matrix(dae.y[self.v])
|
python
|
{
"resource": ""
}
|
q17534
|
Monitor.update_status
|
train
|
def update_status(self, header, message):
"""Process incoming status message. Acquire lock for status dictionary before updating."""
with self._lock:
if self.message_box:
self.message_box.erase()
self.message_box.move(0, 0)
for n, field in enumerate(header):
if n == 0:
self.message_box.addstr(field + ":", curses.color_pair(1))
else:
self.message_box.addstr(
", " + field + ":", curses.color_pair(1)
)
self.message_box.addstr(header[field])
self.message_box.addstr(": ", curses.color_pair(1))
self.message_box.addstr(
str(message), curses.color_pair(2) + curses.A_BOLD
)
self.message_box.refresh()
if (
message["host"] not in self._node_status
or int(header["timestamp"])
>= self._node_status[message["host"]]["last_seen"]
):
self._node_status[message["host"]] = message
self._node_status[message["host"]]["last_seen"] = int(
header["timestamp"]
)
|
python
|
{
"resource": ""
}
|
q17535
|
Monitor._redraw_screen
|
train
|
def _redraw_screen(self, stdscr):
"""Redraw screen. This could be to initialize, or to redraw after resizing."""
with self._lock:
stdscr.clear()
stdscr.addstr(
0, 0, "workflows service monitor -- quit with Ctrl+C", curses.A_BOLD
)
stdscr.refresh()
self.message_box = self._boxwin(
5, curses.COLS, 2, 0, title="last seen message", color_pair=1
)
self.message_box.scrollok(True)
self.cards = []
|
python
|
{
"resource": ""
}
|
q17536
|
Monitor._erase_card
|
train
|
def _erase_card(self, number):
"""Destroy cards with this or higher number."""
with self._lock:
if number < (len(self.cards) - 1):
self._erase_card(number + 1)
if number > (len(self.cards) - 1):
return
max_cards_horiz = int(curses.COLS / 35)
obliterate = curses.newwin(
6,
35,
7 + 6 * (number // max_cards_horiz),
35 * (number % max_cards_horiz),
)
obliterate.erase()
obliterate.noutrefresh()
del self.cards[number]
|
python
|
{
"resource": ""
}
|
q17537
|
Monitor._run
|
train
|
def _run(self, stdscr):
"""Start the actual service monitor"""
with self._lock:
curses.use_default_colors()
curses.curs_set(False)
curses.init_pair(1, curses.COLOR_RED, -1)
curses.init_pair(2, curses.COLOR_BLACK, -1)
curses.init_pair(3, curses.COLOR_GREEN, -1)
self._redraw_screen(stdscr)
try:
while not self.shutdown:
now = int(time.time())
with self._lock:
overview = self._node_status.copy()
cardnumber = 0
for host, status in overview.items():
age = now - int(status["last_seen"] / 1000)
with self._lock:
if age > 90:
del self._node_status[host]
else:
card = self._get_card(cardnumber)
card.erase()
card.move(0, 0)
card.addstr("Host: ", curses.color_pair(3))
card.addstr(host)
card.move(1, 0)
card.addstr("Service: ", curses.color_pair(3))
if "service" in status and status["service"]:
card.addstr(status["service"])
else:
card.addstr("---", curses.color_pair(2))
card.move(2, 0)
card.addstr("State: ", curses.color_pair(3))
if "status" in status:
status_code = status["status"]
state_string = CommonService.human_readable_state.get(
status_code, str(status_code)
)
state_color = None
if status_code in (
CommonService.SERVICE_STATUS_PROCESSING,
CommonService.SERVICE_STATUS_TIMER,
):
state_color = curses.color_pair(3) + curses.A_BOLD
if status_code == CommonService.SERVICE_STATUS_IDLE:
state_color = curses.color_pair(2) + curses.A_BOLD
if status_code == CommonService.SERVICE_STATUS_ERROR:
state_color = curses.color_pair(1)
if state_color:
card.addstr(state_string, state_color)
else:
card.addstr(state_string)
card.move(3, 0)
if age >= 10:
card.addstr(
"last seen %d seconds ago" % age,
curses.color_pair(1)
+ (0 if age < 60 else curses.A_BOLD),
)
card.noutrefresh()
cardnumber = cardnumber + 1
if cardnumber < len(self.cards):
with self._lock:
self._erase_card(cardnumber)
with self._lock:
curses.doupdate()
time.sleep(0.2)
except KeyboardInterrupt:
pass # User pressed CTRL+C
self._transport.disconnect()
|
python
|
{
"resource": ""
}
|
q17538
|
VSC.disable
|
train
|
def disable(self, idx):
"""Disable an element and reset the outputs"""
if idx not in self.uid.keys():
self.log('Element index {0} does not exist.'.format(idx))
return
self.u[self.uid[idx]] = 0
|
python
|
{
"resource": ""
}
|
q17539
|
ConfigBase.get_alt
|
train
|
def get_alt(self, option):
"""
Return the alternative values of an option
Parameters
----------
option: str
option name
Returns
-------
str
a string of alternative options
"""
assert hasattr(self, option)
alt = option + '_alt'
if not hasattr(self, alt):
return ''
return ', '.join(self.__dict__[alt])
|
python
|
{
"resource": ""
}
|
q17540
|
ConfigBase.doc
|
train
|
def doc(self, export='plain'):
"""
Dump help document for setting classes
"""
rows = []
title = '<{:s}> config options'.format(self.__class__.__name__)
table = Tab(export=export, title=title)
for opt in sorted(self.config_descr):
if hasattr(self, opt):
c1 = opt
c2 = self.config_descr[opt]
c3 = self.__dict__.get(opt, '')
c4 = self.get_alt(opt)
rows.append([c1, c2, c3, c4])
else:
print('Setting {:s} has no {:s} option. Correct in config_descr.'.
format(self.__class__.__name__, opt))
table.add_rows(rows, header=False)
table.header(['Option', 'Description', 'Value', 'Alt.'])
return table.draw()
|
python
|
{
"resource": ""
}
|
q17541
|
ConfigBase.dump_conf
|
train
|
def dump_conf(self, conf=None):
"""
Dump settings to an rc config file
Parameters
----------
conf
configparser.ConfigParser() object
Returns
-------
configparser.ConfigParser
the config object with this section added
"""
if conf is None:
conf = configparser.ConfigParser()
tab = self.__class__.__name__
conf[tab] = {}
for key, val in self.__dict__.items():
if key.endswith('_alt'):
continue
conf[tab][key] = str(val)
return conf
|
python
|
{
"resource": ""
}
|
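The rc serialization that `dump_conf` feeds into, shown standalone: the section name and keys below are illustrative placeholders, but the `configparser` mechanics are exactly what the method relies on:

```python
import configparser

conf = configparser.ConfigParser()
conf['Pflow'] = {'method': 'NR', 'tol': '1e-06'}   # one section as dump_conf would fill it (values as str)
with open('andes.rc', 'w') as f:
    conf.write(f)                                  # standard configparser serialization to an rc file
```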
q17542
|
ConfigBase.load_config
|
train
|
def load_config(self, conf):
"""
Load configurations from an rc file
Parameters
----------
conf: configparser.ConfigParser
the ConfigParser object holding the rc file contents
Returns
-------
None
"""
section = self.__class__.__name__
if section not in conf.sections():
logger.debug('Config section {} not in rc file'.format(
self.__class__.__name__))
return
for key in conf[section].keys():
if not hasattr(self, key):
logger.debug('Config key {}.{} skipped'.format(section, key))
continue
val = conf[section].get(key)
try:
val = conf[section].getfloat(key)
except ValueError:
try:
val = conf[section].getboolean(key)
except ValueError:
pass
self.__dict__.update({key: val})
self.check()
|
python
|
{
"resource": ""
}
|
q17543
|
UtilizationStatistics.update_status
|
train
|
def update_status(self, new_status):
"""Record a status change with a current timestamp."""
timestamp = time.time()
self.status_history[-1]["end"] = timestamp
self.status_history.append(
{"start": timestamp, "end": None, "status": new_status}
)
|
python
|
{
"resource": ""
}
|
q17544
|
UtilizationStatistics.report
|
train
|
def report(self):
"""Return a dictionary of different status codes and the percentage of time
spent in each throughout the last summation_period seconds.
Truncate the aggregated history appropriately."""
timestamp = time.time()
cutoff = timestamp - self.period
truncate = 0
summary = {}
for event in self.status_history[:-1]:
if event["end"] < cutoff:
truncate = truncate + 1
continue
summary[event["status"]] = (
summary.get(event["status"], 0)
+ event["end"]
- max(cutoff, event["start"])
)
summary[self.status_history[-1]["status"]] = (
summary.get(self.status_history[-1]["status"], 0)
+ timestamp
- max(cutoff, self.status_history[-1]["start"])
)
if truncate:
self.status_history = self.status_history[truncate:]
total_duration = sum(summary.values())
summary = {s: round(d / total_duration, 4) for s, d in summary.items()}
return summary
|
python
|
{
"resource": ""
}
|
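A standalone toy run of the same aggregation as `UtilizationStatistics.report` (only the data shape is assumed: `status_history` entries are `{"start", "end", "status"}` dicts with the last one still open, and `period` is the summation window in seconds):

```python
import time

now = time.time()
period = 60
history = [
    {"start": now - 50, "end": now - 20, "status": "idle"},    # 30 s of 'idle'
    {"start": now - 20, "end": None, "status": "processing"},  # 20 s of 'processing', still open
]
cutoff = now - period
summary = {}
for event in history[:-1]:
    if event["end"] < cutoff:
        continue  # fully outside the window; would be truncated away
    summary[event["status"]] = (summary.get(event["status"], 0)
                                + event["end"] - max(cutoff, event["start"]))
last = history[-1]
summary[last["status"]] = (summary.get(last["status"], 0)
                           + now - max(cutoff, last["start"]))
total = sum(summary.values())
print({s: round(d / total, 4) for s, d in summary.items()})  # {'idle': 0.6, 'processing': 0.4}
```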
q17545
|
generate_unique_host_id
|
train
|
def generate_unique_host_id():
"""Generate a unique ID, that is somewhat guaranteed to be unique among all
instances running at the same time."""
host = ".".join(reversed(socket.gethostname().split(".")))
pid = os.getpid()
return "%s.%d" % (host, pid)
|
python
|
{
"resource": ""
}
|
q17546
|
GovernorBase.data_to_sys_base
|
train
|
def data_to_sys_base(self):
"""Custom system base conversion function"""
if not self.n or self._flags['sysbase'] is True:
return
self.copy_data_ext(model='Synchronous', field='Sn', dest='Sn', idx=self.gen)
super(GovernorBase, self).data_to_sys_base()
self._store['R'] = self.R
self.R = self.system.mva * div(self.R, self.Sn)
|
python
|
{
"resource": ""
}
|
q17547
|
GovernorBase.data_to_elem_base
|
train
|
def data_to_elem_base(self):
"""Custom system base unconversion function"""
if not self.n or self._flags['sysbase'] is False:
return
self.R = mul(self.R, self.Sn) / self.system.mva
super(GovernorBase, self).data_to_elem_base()
|
python
|
{
"resource": ""
}
|
q17548
|
add_suffix
|
train
|
def add_suffix(fullname, suffix):
""" Add suffix to a full file name"""
name, ext = os.path.splitext(fullname)
return name + '_' + suffix + ext
|
python
|
{
"resource": ""
}
|
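For example (assuming `add_suffix` is in scope):

```python
print(add_suffix('ieee14.dm', 'out'))      # ieee14_out.dm
print(add_suffix('/tmp/case.raw', 'v2'))   # /tmp/case_v2.raw
```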
q17549
|
FileMan.get_fullpath
|
train
|
def get_fullpath(self, fullname=None, relative_to=None):
"""
Return the original full path if full path is specified, otherwise
search in the case file path
"""
# if is an empty path
if not fullname:
return fullname
isabs = os.path.isabs(fullname)
path, name = os.path.split(fullname)
if not name: # path to a folder
return None
else: # path to a file
if isabs:
return fullname
else:
return os.path.join(self.case_path, path, name)
|
python
|
{
"resource": ""
}
|
q17550
|
eAGC.switch
|
train
|
def switch(self):
"""Switch if time for eAgc has come"""
t = self.system.dae.t
for idx in range(0, self.n):
if t >= self.tl[idx]:
if self.en[idx] == 0:
self.en[idx] = 1
logger.info(
'Extended ACE <{}> activated at t = {}.'.format(
self.idx[idx], t))
|
python
|
{
"resource": ""
}
|
q17551
|
get_command
|
train
|
def get_command(all_pkg, hook):
"""
Collect the command-line interface names by querying ``hook`` in ``all_pkg``
Parameters
----------
all_pkg: list
list of package files
hook: str
A variable where the command is stored. ``__cli__`` by default.
Returns
-------
list
"""
ret = []
for r in all_pkg:
module = importlib.import_module(__name__ + '.' + r.lower())
ret.append(getattr(module, hook))
return ret
|
python
|
{
"resource": ""
}
|
q17552
|
EventBase.get_times
|
train
|
def get_times(self):
"""
Return a list of occurrence times of the events
:return: list of times
"""
if not self.n:
return list()
ret = list()
for item in self._event_times:
ret += list(self.__dict__[item])
return ret + list(matrix(ret) - 1e-6)
|
python
|
{
"resource": ""
}
|
q17553
|
Line.build_y
|
train
|
def build_y(self):
"""Build transmission line admittance matrix into self.Y"""
if not self.n:
return
self.y1 = mul(self.u, self.g1 + self.b1 * 1j)
self.y2 = mul(self.u, self.g2 + self.b2 * 1j)
self.y12 = div(self.u, self.r + self.x * 1j)
self.m = polar(self.tap, self.phi * deg2rad)
self.m2 = abs(self.m)**2
self.mconj = conj(self.m)
# build self and mutual admittances into Y
self.Y = spmatrix(
div(self.y12 + self.y1, self.m2), self.a1, self.a1,
(self.nb, self.nb), 'z')
self.Y -= spmatrix(
div(self.y12, self.mconj), self.a1, self.a2, (self.nb, self.nb),
'z')
self.Y -= spmatrix(
div(self.y12, self.m), self.a2, self.a1, (self.nb, self.nb), 'z')
self.Y += spmatrix(self.y12 + self.y2, self.a2, self.a2,
(self.nb, self.nb), 'z')
|
python
|
{
"resource": ""
}
|
q17554
|
Line.incidence
|
train
|
def incidence(self):
"""Build incidence matrix into self.C"""
self.C = \
spmatrix(self.u, range(self.n), self.a1, (self.n, self.nb), 'd') -\
spmatrix(self.u, range(self.n), self.a2, (self.n, self.nb), 'd')
|
python
|
{
"resource": ""
}
|
q17555
|
Line.connectivity
|
train
|
def connectivity(self, bus):
"""check connectivity of network using Goderya's algorithm"""
if not self.n:
return
n = self.nb
fr = self.a1
to = self.a2
os = [0] * self.n
# find islanded buses
diag = list(
matrix(
spmatrix(self.u, to, os, (n, 1), 'd') +
spmatrix(self.u, fr, os, (n, 1), 'd')))
nib = bus.n_islanded_buses = diag.count(0)
bus.islanded_buses = []
for idx in range(n):
if diag[idx] == 0:
bus.islanded_buses.append(idx)
# find islanded areas
temp = spmatrix(
list(self.u) * 4, fr + to + fr + to, to + fr + fr + to, (n, n),
'd')
cons = temp[0, :]
nelm = len(cons.J)
conn = spmatrix([], [], [], (1, n), 'd')
bus.island_sets = []
idx = islands = 0
enum = 0
while 1:
while 1:
cons = cons * temp
cons = sparse(cons) # remove zero values
new_nelm = len(cons.J)
if new_nelm == nelm:
break
nelm = new_nelm
if len(cons.J) == n: # all buses are interconnected
return
bus.island_sets.append(list(cons.J))
conn += cons
islands += 1
nconn = len(conn.J)
if nconn >= (n - nib):
bus.island_sets = [i for i in bus.island_sets if i != []]
break
for element in conn.J[idx:]:
if not diag[idx]:
enum += 1 # skip islanded buses
if element <= enum:
idx += 1
enum += 1
else:
break
cons = temp[enum, :]
|
python
|
{
"resource": ""
}
|
q17556
|
Line.build_gy
|
train
|
def build_gy(self, dae):
"""Build line Jacobian matrix"""
if not self.n:
idx = range(dae.m)
dae.set_jac(Gy, 1e-6, idx, idx)
return
Vn = polar(1.0, dae.y[self.a])
Vc = mul(dae.y[self.v], Vn)
Ic = self.Y * Vc
diagVn = spdiag(Vn)
diagVc = spdiag(Vc)
diagIc = spdiag(Ic)
dS = self.Y * diagVn
dS = diagVc * conj(dS)
dS += conj(diagIc) * diagVn
dR = diagIc
dR -= self.Y * diagVc
dR = diagVc.H.T * dR
self.gy_store = sparse([[dR.imag(), dR.real()], [dS.real(),
dS.imag()]])
return self.gy_store
|
python
|
{
"resource": ""
}
|
q17557
|
Line.seriesflow
|
train
|
def seriesflow(self, dae):
"""
Compute the flow through the line after solving PF.
Compute terminal injections, line losses
"""
# Vm = dae.y[self.v]
# Va = dae.y[self.a]
# V1 = polar(Vm[self.a1], Va[self.a1])
# V2 = polar(Vm[self.a2], Va[self.a2])
I1 = mul(self.v1, div(self.y12 + self.y1, self.m2)) - \
mul(self.v2, div(self.y12, self.mconj))
I2 = mul(self.v2, self.y12 + self.y2) - \
mul(self.v2, div(self.y12, self.m))
self.I1_real = I1.real()
self.I1_imag = I1.imag()
self.I2_real = I2.real()
self.I2_imag = I2.imag()
self.S1 = mul(self.v1, conj(I1))
self.S2 = mul(self.v2, conj(I2))
self.P1 = self.S1.real()
self.P2 = self.S2.real()
self.Q1 = self.S1.imag()
self.Q2 = self.S2.imag()
self.chg1 = mul(self.g1 + 1j * self.b1, div(self.v1**2, self.m2))
self.chg2 = mul(self.g2 + 1j * self.b2, self.v2**2)
self.Pchg1 = self.chg1.real()
self.Pchg2 = self.chg2.real()
self.Qchg1 = self.chg1.imag()
self.Qchg2 = self.chg2.imag()
self._line_flows = matrix([self.P1, self.P2, self.Q1, self.Q2,
self.I1_real, self.I1_imag,
self.I2_real, self.I2_imag])
|
python
|
{
"resource": ""
}
|
q17558
|
Line.switch
|
train
|
def switch(self, idx, u):
"""switch the status of Line idx"""
self.u[self.uid[idx]] = u
self.rebuild = True
self.system.dae.factorize = True
logger.debug('<Line> Status switch to {} on idx {}.'.format(u, idx))
|
python
|
{
"resource": ""
}
|
q17559
|
Line.get_flow_by_idx
|
train
|
def get_flow_by_idx(self, idx, bus):
"""Return seriesflow based on the external idx on the `bus` side"""
P, Q = [], []
if type(idx) is not list:
idx = [idx]
if type(bus) is not list:
bus = [bus]
for line_idx, bus_idx in zip(idx, bus):
line_int = self.uid[line_idx]
if bus_idx == self.bus1[line_int]:
P.append(self.P1[line_int])
Q.append(self.Q1[line_int])
elif bus_idx == self.bus2[line_int]:
P.append(self.P2[line_int])
Q.append(self.Q2[line_int])
return matrix(P), matrix(Q)
|
python
|
{
"resource": ""
}
|
q17560
|
Line.leaf_bus
|
train
|
def leaf_bus(self, df=False):
"""
Return leaf bus idx, line idx, and the line foreign key
Returns
-------
(list, list, list) or DataFrame
"""
# leafs - leaf bus idx
# lines - line idx
# fkey - the foreign key of Line, in 'bus1' or 'bus2', linking the bus
leafs, lines, fkeys = list(), list(), list()
# convert to unique, ordered list
buses = sorted(list(set(self.bus1 + self.bus2)))
links = self.link_bus(buses)
for bus, link in zip(buses, links):
line = link[0]
fkey = link[1]
if line is None:
continue
if len(line) == 1:
leafs.append(bus)
lines.extend(line)
fkeys.extend(fkey)
# output formatting
if df is False:
return leafs, lines, fkeys
else:
_data = {'Bus idx': leafs, 'Line idx': lines, 'fkey': fkeys}
if globals()['pd'] is None:
globals()['pd'] = importlib.import_module('pandas')
return pd.DataFrame(data=_data)
|
python
|
{
"resource": ""
}
|
q17561
|
Utilities.truncate
|
train
|
def truncate(text, length=255):
"""
Splits the text into a list of strings of length `length`
Args:
text (str): The text to be divided
length (int, optional): The length of the chunks of text. \
Defaults to 255.
Returns:
list: Text divided into chunks of length `length`
"""
lines = []
i = 0
while i < len(text):
# str slicing past the end simply returns the remainder,
# so no exception handling is needed here
lines.append(text[i:i+length])
i += length
return lines
|
python
|
{
"resource": ""
}
|
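For example (assuming `truncate` is in scope):

```python
print(truncate('abcdefghij', length=4))   # ['abcd', 'efgh', 'ij']
```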
q17562
|
VarName.resize_for_flows
|
train
|
def resize_for_flows(self):
"""Extend `unamey` and `fnamey` for bus injections and line flows"""
if self.system.config.dime_enable:
self.system.tds.config.compute_flows = True
if self.system.tds.config.compute_flows:
nflows = 2 * self.system.Bus.n + \
8 * self.system.Line.n + \
2 * self.system.Area.n_combination
self.unamey.extend([''] * nflows)
self.fnamey.extend([''] * nflows)
|
python
|
{
"resource": ""
}
|
q17563
|
VarName.append
|
train
|
def append(self, listname, xy_idx, var_name, element_name):
"""Append variable names to the name lists"""
self.resize()
string = '{0} {1}'
if listname not in ['unamex', 'unamey', 'fnamex', 'fnamey']:
logger.error('Wrong list name for varname.')
return
elif listname in ['fnamex', 'fnamey']:
string = '${0}\\ {1}$'
if isinstance(element_name, list):
for i, j in zip(xy_idx, element_name):
# manually add LaTeX spacing for auto-generated element names
if listname == 'fnamex' or listname == 'fnamey':
j = j.replace(' ', '\\ ')
self.__dict__[listname][i] = string.format(var_name, j)
elif isinstance(element_name, int):
self.__dict__[listname][xy_idx] = string.format(
var_name, element_name)
else:
logger.warning(
'Unknown element_name type while building varname')
|
python
|
{
"resource": ""
}
|
q17564
|
VarName.bus_line_names
|
train
|
def bus_line_names(self):
"""Append bus injection and line flow names to `varname`"""
if self.system.tds.config.compute_flows:
self.system.Bus._varname_inj()
self.system.Line._varname_flow()
self.system.Area._varname_inter()
|
python
|
{
"resource": ""
}
|
q17565
|
VarName.get_xy_name
|
train
|
def get_xy_name(self, yidx, xidx=0):
"""
Return variable names for the given indices
:param yidx: y-axis variable indices
:param xidx: x-axis variable index
:return: ``(xname, yname)``, each a pair of [unformatted, formatted] names
"""
assert isinstance(xidx, int)
if isinstance(yidx, int):
yidx = [yidx]
uname = ['Time [s]'] + self.uname
fname = ['$Time\\ [s]$'] + self.fname
xname = [list(), list()]
yname = [list(), list()]
xname[0] = uname[xidx]
xname[1] = fname[xidx]
yname[0] = [uname[i] for i in yidx]
yname[1] = [fname[i] for i in yidx]
return xname, yname
|
python
|
{
"resource": ""
}
|
q17566
|
cli_parse
|
train
|
def cli_parse():
"""command line input parser"""
parser = ArgumentParser(prog='andesplot')
parser.add_argument('datfile', nargs=1, default=[], help='dat file name.')
parser.add_argument('x', nargs=1, type=int, help='x axis variable index')
parser.add_argument('y', nargs='*', help='y axis variable index')
parser.add_argument('--xmax', type=float, help='x axis maximum value')
parser.add_argument('--ymax', type=float, help='y axis maximum value')
parser.add_argument('--ymin', type=float, help='y axis minimum value')
parser.add_argument('--xmin', type=float, help='x axis minimum value')
parser.add_argument(
'--checkinit', action='store_true', help='check initialization value')
parser.add_argument(
'-x', '--xlabel', type=str, help='manually set the x-axis text label')
parser.add_argument('-y', '--ylabel', type=str, help='y-axis text label')
parser.add_argument(
'-s', '--save', action='store_true', help='save to file')
parser.add_argument('-g', '--grid', action='store_true', help='grid on')
parser.add_argument(
'-d',
'--no_latex',
action='store_true',
help='disable LaTex formatting')
parser.add_argument(
'-u',
'--unattended',
action='store_true',
help='do not show the plot window')
parser.add_argument('--ytimes', type=str, help='y times')
parser.add_argument(
'--dpi', type=int, help='image resolution in dot per inch (DPI)')
args = parser.parse_args()
return vars(args)
|
python
|
{
"resource": ""
}
|
q17567
|
add_plot
|
train
|
def add_plot(x, y, xl, yl, fig, ax, LATEX=False, linestyle=None, **kwargs):
"""Add plots to an existing plot"""
if LATEX:
xl_data = xl[1] # NOQA
yl_data = yl[1]
else:
xl_data = xl[0] # NOQA
yl_data = yl[0]
for idx in range(len(y)):
ax.plot(x, y[idx], label=yl_data[idx], linestyle=linestyle)
ax.legend(loc='upper right')
ax.set_ylim(auto=True)
|
python
|
{
"resource": ""
}
|
q17568
|
check_init
|
train
|
def check_init(yval, yl):
""""Check initialization by comparing t=0 and t=end values"""
suspect = []
for var, label in zip(yval, yl):
if abs(var[0] - var[-1]) >= 1e-6:
suspect.append(label)
if suspect:
print('Initialization failure:')
print(', '.join(suspect))
else:
print('Initialization is correct.')
|
python
|
{
"resource": ""
}
|
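A quick standalone call (assuming `check_init` is in scope; each entry of `yval` is a trajectory sampled over time, and the labels normally come from the lst file):

```python
yval = [
    [1.00, 1.00, 1.00],   # stays at its t=0 value -> considered initialized correctly
    [0.50, 0.62, 0.90],   # drifts away from its t=0 value -> flagged
]
yl = ['omega Syn_1', 'delta Syn_1']
check_init(yval, yl)
# Initialization failure:
# delta Syn_1
```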
q17569
|
TDSData.load_lst
|
train
|
def load_lst(self):
"""
Load the lst file into internal data structures
"""
with open(self._lst_file, 'r') as fd:
lines = fd.readlines()
idx, uname, fname = list(), list(), list()
for line in lines:
values = line.split(',')
values = [x.strip() for x in values]
# preserve the idx ordering here in case variables are not
# ordered by idx
idx.append(int(values[0])) # convert to integer
uname.append(values[1])
fname.append(values[2])
self._idx = idx
self._fname = fname
self._uname = uname
|
python
|
{
"resource": ""
}
|
q17570
|
TDSData.find_var
|
train
|
def find_var(self, query, formatted=False):
"""
Return variable names and indices matching ``query``
"""
# load the variable list to search in
names = self._uname if formatted is False else self._fname
found_idx, found_names = list(), list()
for idx, name in zip(self._idx, names):
if re.search(query, name):
found_idx.append(idx)
found_names.append(name)
return found_idx, found_names
|
python
|
{
"resource": ""
}
|
q17571
|
TDSData.load_dat
|
train
|
def load_dat(self, delimiter=','):
"""
Load the dat file into internal data structures, ``self._data``
"""
try:
data = np.loadtxt(self._dat_file, delimiter=delimiter)
except ValueError:
data = np.loadtxt(self._dat_file)
self._data = data
|
python
|
{
"resource": ""
}
|
q17572
|
TDSData.get_values
|
train
|
def get_values(self, idx):
"""
Return the variable values at the given indices
"""
if isinstance(idx, list):
idx = np.array(idx, dtype=int)
return self._data[:, idx]
|
python
|
{
"resource": ""
}
|
q17573
|
TDSData.get_header
|
train
|
def get_header(self, idx, formatted=False):
"""
Return a list of the variable names at the given indices
"""
header = self._uname if not formatted else self._fname
return [header[x] for x in idx]
|
python
|
{
"resource": ""
}
|
q17574
|
TDSData.export_csv
|
train
|
def export_csv(self, path, idx=None, header=None, formatted=False,
sort_idx=True, fmt='%.18e'):
"""
Export to a csv file
Parameters
----------
path : str
path of the csv file to save
idx : None or array-like, optional
the indices of the variables to export. Export all by default
header : None or array-like, optional
customized header if not `None`. Use the names from the lst file
by default
formatted : bool, optional
Use LaTeX-formatted header. Does not apply when using customized
header
sort_idx : bool, optional
Sort by idx or not, # TODO: implement sort
fmt : str
cell formatter
"""
if not idx:
idx = self._idx
if not header:
header = self.get_header(idx, formatted=formatted)
assert len(idx) == len(header), \
"Idx length does not match header length"
body = self.get_values(idx)
with open(path, 'w') as fd:
fd.write(','.join(header) + '\n')
np.savetxt(fd, body, fmt=fmt, delimiter=',')
|
python
|
{
"resource": ""
}
|
q17575
|
Tab.auto_style
|
train
|
def auto_style(self):
"""
automatic styling according to _row_size
76 characters in a row
"""
if self._row_size is None:
return
elif self._row_size == 3:
self.set_cols_align(['l', 'l', 'l'])
self.set_cols_valign(['t', 't', 't'])
self.set_cols_width([12, 54, 12])
elif self._row_size == 4:
self.set_cols_align(['l', 'l', 'l', 'l'])
self.set_cols_valign(['t', 't', 't', 't'])
self.set_cols_width([10, 40, 10, 10])
|
python
|
{
"resource": ""
}
|
q17576
|
Tab.draw
|
train
|
def draw(self):
"""generate texttable formatted string"""
self.guess_header()
self.add_left_space()
# for Texttable, add a column of whitespace on the left for
# better visual effect
if self._title and self._descr:
pre = self._title + '\n' + self._descr + '\n\n'
elif self._title:
pre = self._title + '\n\n'
elif self._descr:
pre = 'Empty Title' + '\n' + self._descr + '\n'
else:
pre = ''
empty_line = '\n\n'
return pre + str(Texttable.draw(self)) + empty_line
|
python
|
{
"resource": ""
}
|
q17577
|
simpletab.guess_width
|
train
|
def guess_width(self):
"""auto fit column width"""
if len(self.header) <= 4:
nspace = 6
elif len(self.header) <= 6:
nspace = 5
else:
nspace = 4
ncol = len(self.header)
self._width = [nspace] * ncol
width = [0] * ncol
# set initial width from header
for idx, item in enumerate(self.header):
width[idx] = len(str(item))
# guess width of each column from first 10 lines of data
samples = min(len(self.data), 10)
for col in range(ncol):
for idx in range(samples):
data = self.data[idx][col]
if not isinstance(data, (float, int)):
temp = len(data)
else:
temp = 10
if temp > width[col]:
width[col] = temp
for col in range(ncol):
self._width[col] += width[col]
|
python
|
{
"resource": ""
}
|
q17578
|
JIT.jit_load
|
train
|
def jit_load(self):
"""
Import and instantiate this JIT object
Returns
-------
"""
try:
model = importlib.import_module('.' + self.model, 'andes.models')
device = getattr(model, self.device)
self.system.__dict__[self.name] = device(self.system, self.name)
g = self.system.__dict__[self.name]._group
self.system.group_add(g)
self.system.__dict__[g].register_model(self.name)
# register device after loading
self.system.devman.register_device(self.name)
self.loaded = 1
logger.debug('Imported model <{:s}.{:s}>.'.format(
self.model, self.device))
except ImportError:
logger.error(
'non-JIT model <{:s}.{:s}> import error'
.format(self.model, self.device))
except AttributeError:
logger.error(
'model <{:s}.{:s}> does not exist. Check models/__init__.py'
.format(self.model, self.device))
|
python
|
{
"resource": ""
}
|
q17579
|
JIT.elem_add
|
train
|
def elem_add(self, idx=None, name=None, **kwargs):
"""overloading elem_add function of a JIT class"""
self.jit_load()
if self.loaded:
return self.system.__dict__[self.name].elem_add(
idx, name, **kwargs)
|
python
|
{
"resource": ""
}
|
q17580
|
SampleTxn.initializing
|
train
|
def initializing(self):
"""Subscribe to a channel. Received messages must be acknowledged."""
self.subid = self._transport.subscribe(
"transient.transaction", self.receive_message, acknowledgement=True
)
|
python
|
{
"resource": ""
}
|
q17581
|
SampleTxn.receive_message
|
train
|
def receive_message(self, header, message):
"""Receive a message"""
print("=== Receive ===")
print(header)
print(message)
print("MsgID: {0}".format(header["message-id"]))
assert header["message-id"]
txn = self._transport.transaction_begin()
print(" 1. Txn: {0}".format(str(txn)))
if self.crashpoint():
self._transport.transaction_abort(txn)
print("--- Abort ---")
return
self._transport.ack(header["message-id"], self.subid, transaction=txn)
print(" 2. Ack")
if self.crashpoint():
self._transport.transaction_abort(txn)
print("--- Abort ---")
return
self._transport.send("transient.destination", message, transaction=txn)
print(" 3. Send")
if self.crashpoint():
self._transport.transaction_abort(txn)
print("--- Abort ---")
return
self._transport.transaction_commit(txn)
print(" 4. Commit")
print("=== Done ===")
|
python
|
{
"resource": ""
}
|
q17582
|
SampleTxnProducer.create_message
|
train
|
def create_message(self):
"""Create and send a unique message for this service."""
self.counter += 1
self._transport.send(
"transient.transaction",
"TXMessage #%d\n++++++++Produced@ %f"
% (self.counter, (time.time() % 1000) * 1000),
)
self.log.info("Created message %d", self.counter)
|
python
|
{
"resource": ""
}
|
q17583
|
elapsed
|
train
|
def elapsed(t0=0.0):
"""get elapsed time from the give time
Returns:
now: the absolute time now
dt_str: elapsed time in string
"""
now = time()
dt = now - t0
dt_sec = Decimal(str(dt)).quantize(Decimal('.0001'), rounding=ROUND_DOWN)
if dt_sec <= 1:
dt_str = str(dt_sec) + ' second'
else:
dt_str = str(dt_sec) + ' seconds'
return now, dt_str
|
python
|
{
"resource": ""
}
|
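Typical use, mirroring how `PFLOW.pre` and `PFLOW.run` call it (assuming `elapsed` is in scope):

```python
t0, _ = elapsed()         # mark the start time
# ... run the solver ...
_, dt_str = elapsed(t0)   # e.g. '0.0421 seconds'
print('Finished in ' + dt_str)
```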
q17584
|
PSS1.set_flag
|
train
|
def set_flag(self, value, flag, reset_val=False):
"""Set a flag to 0 if the corresponding value is 0"""
if not self.__dict__[flag]:
self.__dict__[flag] = matrix(1.0, (len(self.__dict__[value]), 1),
'd')
for idx, item in enumerate(self.__dict__[value]):
if item == 0:
self.__dict__[flag][idx] = 0
if reset_val:
self.__dict__[value][idx] = 1
|
python
|
{
"resource": ""
}
|
q17585
|
Report._update_summary
|
train
|
def _update_summary(self, system):
"""
Update the summary data
Parameters
----------
system
Returns
-------
None
"""
self.basic.update({
'nbus': system.Bus.n,
'ngen': system.PV.n + system.SW.n,
'ngen_on': sum(system.PV.u) + sum(system.SW.u),
'nload': system.PQ.n,
'nshunt': system.Shunt.n,
'nline': system.Line.n,
'ntransf': system.Line.trasf.count(True),
'narea': system.Area.n,
})
|
python
|
{
"resource": ""
}
|
q17586
|
Report._update_extended
|
train
|
def _update_extended(self, system):
"""Update the extended data"""
if self.system.pflow.solved is False:
logger.warning(
'Cannot update extended summary. Power flow not solved.')
return
Sloss = sum(system.Line.S1 + system.Line.S2)
self.extended.update({
'Ptot':
sum(system.PV.pmax) + sum(system.SW.pmax), # + sum(system.SW.pmax)
'Pon':
sum(mul(system.PV.u, system.PV.pmax)),
'Pg':
sum(system.Bus.Pg),
'Qtot_min':
sum(system.PV.qmin) + sum(system.SW.qmin),
'Qtot_max':
sum(system.PV.qmax) + sum(system.SW.qmax),
'Qon_min':
sum(mul(system.PV.u, system.PV.qmin)),
'Qon_max':
sum(mul(system.PV.u, system.PV.qmax)),
'Qg':
round(sum(system.Bus.Qg), 5),
'Pl':
round(sum(system.PQ.p), 5),
'Ql':
round(sum(system.PQ.q), 5),
'Psh':
0.0,
'Qsh':
round(sum(system.PQ.q) - sum(system.Bus.Ql), 5),
'Ploss':
round(Sloss.real, 5),
'Qloss':
round(Sloss.imag, 5),
'Pch':
round(sum(system.Line.Pchg1 + system.Line.Pchg2), 5),
'Qch':
round(sum(system.Line.Qchg1 + system.Line.Qchg2), 5),
})
|
python
|
{
"resource": ""
}
|
q17587
|
Report.update
|
train
|
def update(self, content=None):
"""
Update values based on the requested content
Parameters
----------
content
one of 'summary', 'extended', 'powerflow'
Returns
-------
"""
if not content:
return
if content in ('summary', 'extended', 'powerflow'):
self._update_summary(self.system)
if content in ('extended', 'powerflow'):
self._update_extended(self.system)
|
python
|
{
"resource": ""
}
|
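The membership test in `Report.update` avoids a common pitfall: a chained comparison such as `content == 'extended' or 'powerflow'` is always truthy because a non-empty string is truthy. A two-line demonstration:

```python
content = 'summary'
print(bool(content == 'extended' or 'powerflow'))   # True  -- parsed as (content == 'extended') or 'powerflow'
print(content in ('extended', 'powerflow'))         # False -- the intended membership test
```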
q17588
|
RecipeWrapper.send
|
train
|
def send(self, *args, **kwargs):
"""Send messages to another service that is connected to the currently
running service via the recipe. The 'send' method will either use a
default channel name, set via the set_default_channel method, or an
unnamed output definition.
"""
if not self.transport:
raise ValueError(
"This RecipeWrapper object does not contain "
"a reference to a transport object."
)
if not self.recipe_step:
raise ValueError(
"This RecipeWrapper object does not contain "
"a recipe with a selected step."
)
if "output" not in self.recipe_step:
# The current recipe step does not have output channels.
return
if isinstance(self.recipe_step["output"], dict):
# The current recipe step does have named output channels.
if self.default_channel:
# Use named output channel
self.send_to(self.default_channel, *args, **kwargs)
else:
# The current recipe step does have unnamed output channels.
self._send_to_destinations(self.recipe_step["output"], *args, **kwargs)
|
python
|
{
"resource": ""
}
|
q17589
|
RecipeWrapper._generate_full_recipe_message
|
train
|
def _generate_full_recipe_message(self, destination, message, add_path_step):
"""Factory function to generate independent message objects for
downstream recipients with different destinations."""
if add_path_step and self.recipe_pointer:
recipe_path = self.recipe_path + [self.recipe_pointer]
else:
recipe_path = self.recipe_path
return {
"environment": self.environment,
"payload": message,
"recipe": self.recipe.recipe,
"recipe-path": recipe_path,
"recipe-pointer": destination,
}
|
python
|
{
"resource": ""
}
|
q17590
|
RecipeWrapper._send_to_destinations
|
train
|
def _send_to_destinations(self, destinations, message, header=None, **kwargs):
"""Send messages to a list of numbered destinations. This is an internal
helper method used by the public 'send' methods.
"""
if not isinstance(destinations, list):
destinations = (destinations,)
for destination in destinations:
self._send_to_destination(destination, header, message, kwargs)
|
python
|
{
"resource": ""
}
|
q17591
|
RecipeWrapper._send_to_destination
|
train
|
def _send_to_destination(
self, destination, header, payload, transport_kwargs, add_path_step=True
):
"""Helper function to send a message to a specific recipe destination."""
if header:
header = header.copy()
header["workflows-recipe"] = True
else:
header = {"workflows-recipe": True}
dest_kwargs = transport_kwargs.copy()
if (
"transport-delay" in self.recipe[destination]
and "delay" not in transport_kwargs
):
dest_kwargs["delay"] = self.recipe[destination]["transport-delay"]
if self.recipe[destination].get("queue"):
self.transport.send(
self.recipe[destination]["queue"],
self._generate_full_recipe_message(destination, payload, add_path_step),
headers=header,
**dest_kwargs
)
if self.recipe[destination].get("topic"):
self.transport.broadcast(
self.recipe[destination]["topic"],
self._generate_full_recipe_message(destination, payload, add_path_step),
headers=header,
**dest_kwargs
)
|
python
|
{
"resource": ""
}
|
q17592
|
RtmBot.on_message
|
train
|
def on_message(self, event):
'''Runs when a message event is received
Args:
event: RTM API event.
Returns:
Legobot.Message
'''
metadata = self._parse_metadata(event)
message = Message(text=metadata['text'],
metadata=metadata).__dict__
if message.get('text'):
message['text'] = self.find_and_replace_userids(message['text'])
message['text'] = self.find_and_replace_channel_refs(
message['text']
)
return message
|
python
|
{
"resource": ""
}
|
q17593
|
RtmBot.find_and_replace_userids
|
train
|
def find_and_replace_userids(self, text):
'''Finds occurrences of Slack userids and attempts to replace them with
display names.
Args:
text (string): The message text
Returns:
string: The message text with userids replaced.
'''
match = True
pattern = re.compile('<@([A-Z0-9]{9})>')
while match:
match = pattern.search(text)
if match:
name = self.get_user_display_name(match.group(1))
text = re.sub(re.compile(match.group(0)), '@' + name, text)
return text
|
python
|
{
"resource": ""
}
|
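A self-contained sketch of the same find-and-replace loop as `find_and_replace_userids`, with a stubbed name lookup (the real method resolves display names through the Slack API):

```python
import re

names = {'U12345678': 'alice'}                        # stubbed userid -> display name map
text = 'hello <@U12345678>, ping <@U99999999>'
pattern = re.compile('<@([A-Z0-9]{9})>')
match = pattern.search(text)
while match:
    name = names.get(match.group(1), match.group(1))  # fall back to the raw id
    text = text.replace(match.group(0), '@' + name)
    match = pattern.search(text)
print(text)  # hello @alice, ping @U99999999
```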
q17594
|
RtmBot.find_and_replace_channel_refs
|
train
|
def find_and_replace_channel_refs(self, text):
'''Finds occurrences of Slack channel references and attempts to
replace them with just channel names.
Args:
text (string): The message text
Returns:
string: The message text with channel references replaced.
'''
match = True
pattern = re.compile('<#([A-Z0-9]{9})\|([A-Za-z0-9-]+)>')
while match:
match = pattern.search(text)
if match:
text = text.replace(match.group(0), '#' + match.group(2))
return text
|
python
|
{
"resource": ""
}
|
q17595
|
RtmBot.get_channels
|
train
|
def get_channels(self, condensed=False):
'''Grabs all channels in the slack team
Args:
condensed (bool): if true triggers list condensing functionality
Returns:
dic: Dict of channels in Slack team.
See also: https://api.slack.com/methods/channels.list
'''
channel_list = self.slack_client.api_call('channels.list')
if not channel_list.get('ok'):
return None
if condensed:
channels = [{'id': item.get('id'), 'name': item.get('name')}
for item in channel_list.get('channels')]
return channels
else:
return channel_list
|
python
|
{
"resource": ""
}
|
q17596
|
RtmBot.get_users
|
train
|
def get_users(self, condensed=False):
'''Grabs all users in the slack team
This should only be used for getting a list of all users. Do not
use it for searching users. Use get_user_info instead.
Args:
condensed (bool): if true triggers list condensing functionality
Returns:
dict: Dict of users in Slack team.
See also: https://api.slack.com/methods/users.list
'''
user_list = self.slack_client.api_call('users.list')
if not user_list.get('ok'):
return None
if condensed:
users = [{'id': item.get('id'), 'name': item.get('name'),
'display_name': item.get('profile').get('display_name')}
for item in user_list.get('members')]
return users
else:
return user_list
|
python
|
{
"resource": ""
}
|
q17597
|
RtmBot.get_user_display_name
|
train
|
def get_user_display_name(self, userid):
'''Given a Slack userid, grabs user display_name from api.
Args:
userid (string): the user id of the user being queried
Returns:
dict: a dictionary of the api response
'''
user_info = self.slack_client.api_call('users.info', user=userid)
if user_info.get('ok'):
user = user_info.get('user')
if user.get('profile'):
return user.get('profile').get('display_name')
else:
return user.get('name')
else:
return userid
|
python
|
{
"resource": ""
}
|
q17598
|
RtmBot.get_dm_channel
|
train
|
def get_dm_channel(self, userid):
'''Perform a lookup of users to resolve a userid to a DM channel
Args:
userid (string): Slack userid to lookup.
Returns:
string: DM channel ID of user
'''
dm_open = self.slack_client.api_call('im.open', user=userid)
return dm_open['channel']['id']
|
python
|
{
"resource": ""
}
|
q17599
|
RtmBot.get_username
|
train
|
def get_username(self, userid):
'''Perform a lookup of users to resolve a userid to a username
Args:
userid (string): Slack userid to lookup.
Returns:
string: Human-friendly name of the user
'''
username = self.user_map.get(userid)
if not username:
users = self.get_users()
if users:
members = {
m['id']: m['name']
for m in users.get('members', [{}])
if m.get('id')
and m.get('name')
}
if members:
self.user_map.update(members)
username = self.user_map.get(userid, userid)
return username
|
python
|
{
"resource": ""
}
|