Dataset columns:
  _id              : string (length 2 to 7)
  title            : string (length 1 to 88)
  partition        : string (3 classes)
  text             : string (length 75 to 19.8k)
  language         : string (1 class)
  meta_information : dict
q17400
CommonService.initialize_logging
train
def initialize_logging(self):
    """Reset the logging for the service process.
    All logged messages are forwarded to the frontend. If any filtering
    is desired, then this must take place on the service side."""
    # Reset logging to pass logrecords into the queue to the frontend only.
    # Existing handlers may be broken as they were copied into a new process,
    # so should be discarded.
    for loggername in [None] + list(logging.Logger.manager.loggerDict.keys()):
        logger = logging.getLogger(loggername)
        while logger.handlers:
            logger.removeHandler(logger.handlers[0])

    # Re-enable logging to console
    root_logger = logging.getLogger()

    # By default pass all warning (and higher) level messages to the frontend
    root_logger.setLevel(logging.WARN)
    root_logger.addHandler(workflows.logging.CallbackHandler(self._log_send))

    # Set up the service logger and pass all info (and higher) level messages
    # (or other level if set differently)
    self.log = logging.getLogger(self._logger_name)
    if self.start_kwargs.get("verbose_log"):
        self.log_verbosity = logging.DEBUG
    self.log.setLevel(self.log_verbosity)

    # Additionally, write all critical messages directly to console
    console = logging.StreamHandler()
    console.setLevel(logging.CRITICAL)
    root_logger.addHandler(console)
python
{ "resource": "" }
q17401
CommonService.start
train
def start(self, **kwargs):
    """Start listening to command queue, process commands in main loop,
    set status, etc... This function is most likely called by the frontend
    in a separate process."""
    # Keep a copy of keyword arguments for use in subclasses
    self.start_kwargs.update(kwargs)
    try:
        self.initialize_logging()
        self.__update_service_status(self.SERVICE_STATUS_STARTING)
        self.start_transport()
        self.initializing()
        self._register("command", self.__process_command)
        if self.__pipe_commands is None:
            # can only listen to commands if command queue is defined
            self.__shutdown = True
        else:
            # start listening to command queue in separate thread
            self.__start_command_queue_listener()
        while not self.__shutdown:  # main loop
            self.__update_service_status(self.SERVICE_STATUS_IDLE)
            if self._idle_time is None:
                task = self.__queue.get()
            else:
                try:
                    task = self.__queue.get(True, self._idle_time)
                except queue.Empty:
                    self.__update_service_status(self.SERVICE_STATUS_TIMER)
                    if self._idle_callback:
                        self._idle_callback()
                    continue
            self.__update_service_status(self.SERVICE_STATUS_PROCESSING)
            if task[0] == Priority.COMMAND:
                message = task[2]
                if message and "band" in message:
                    processor = self.__callback_register.get(message["band"])
                    if processor is None:
                        self.log.warning(
                            "received message on unregistered band\n%s", message
                        )
                    else:
                        processor(message.get("payload"))
                else:
                    self.log.warning(
                        "received message without band information\n%s", message
                    )
            elif task[0] == Priority.TRANSPORT:
                callback, header, message = task[2]
                callback(header, message)
            else:
                self.log.warning("Unknown item on main service queue\n%r", task)
    except KeyboardInterrupt:
        self.log.warning("Ctrl+C detected. Shutting down.")
    except Exception as e:
        self.process_uncaught_exception(e)
        self.__update_service_status(self.SERVICE_STATUS_ERROR)
        self.in_shutdown()
        return
    try:
        self.__update_service_status(self.SERVICE_STATUS_SHUTDOWN)
        self.in_shutdown()
        self.__update_service_status(self.SERVICE_STATUS_END)
    except Exception as e:
        self.process_uncaught_exception(e)
        self.__update_service_status(self.SERVICE_STATUS_ERROR)
python
{ "resource": "" }
q17402
CommonService.process_uncaught_exception
train
def process_uncaught_exception(self, e):
    """This is called to handle otherwise uncaught exceptions from the
    service. The service will terminate either way, but here we can do
    things such as gathering useful environment information and logging
    for posterity."""
    # Add information about the actual exception to the log message.
    # This includes the file, line and piece of code causing the exception.
    # exc_info=True adds the full stack trace to the log message.
    exc_file_fullpath, exc_file, exc_lineno, exc_func, exc_line = (
        workflows.logging.get_exception_source()
    )
    added_information = {
        "workflows_exc_lineno": exc_lineno,
        "workflows_exc_funcName": exc_func,
        "workflows_exc_line": exc_line,
        "workflows_exc_pathname": exc_file_fullpath,
        "workflows_exc_filename": exc_file,
    }
    for field in filter(lambda x: x.startswith("workflows_log_"), dir(e)):
        added_information[field[14:]] = getattr(e, field, None)
    self.log.critical(
        "Unhandled service exception: %s", e, exc_info=True, extra=added_information
    )
python
{ "resource": "" }
q17403
IRCBot.connect
train
def connect(self, *args, **kwargs):
    """
    Connect to a server.

    This overrides the function in SimpleIRCClient to provide SSL
    functionality.

    :param args:
    :param kwargs:
    :return:
    """
    if self.use_ssl:
        # Note: ssl.wrap_socket is deprecated since Python 3.7 and removed
        # in 3.12; modern code would pass ssl.SSLContext().wrap_socket here.
        factory = irc.connection.Factory(wrapper=ssl.wrap_socket)
    else:
        factory = irc.connection.Factory()
    self.connection.connect(server=self.server,
                            port=self.port,
                            nickname=self.nickname,
                            connect_factory=factory,
                            password=self.password,
                            username=self.username,
                            ircname=self.ircname)
python
{ "resource": "" }
q17404
IRCBot.set_metadata
train
def set_metadata(self, e):
    """
    This function sets the metadata that is common between pub and priv
    """
    metadata = Metadata(source=self.actor_urn).__dict__
    metadata['source_connector'] = 'irc'
    metadata['source_channel'] = e.target
    metadata['source_user'] = e.source
    metadata['source_username'] = e.source.split('!')[0]
    metadata['user_id'] = metadata['source_user']
    metadata['display_name'] = metadata['source_username']
    return metadata
python
{ "resource": "" }
q17405
IRCBot.on_pubmsg
train
def on_pubmsg(self, c, e):
    """
    This function runs when the bot receives a public message.
    """
    text = e.arguments[0]
    metadata = self.set_metadata(e)
    metadata['is_private_message'] = False
    message = Message(text=text, metadata=metadata).__dict__
    self.baseplate.tell(message)
python
{ "resource": "" }
q17406
IRCBot.on_welcome
train
def on_welcome(self, c, e):
    """
    This function runs when the bot successfully connects to the IRC server
    """
    self.backoff = 1  # Assume we had a good connection. Reset backoff.
    if self.nickserv:
        if Utilities.isNotEmpty(self.nickserv_pass):
            self.identify(c, e, self.nickserv_pass)
            time.sleep(3)  # Make sure Nickserv really sees us
        else:
            logger.error('If nickserv is enabled, you must supply'
                         ' a password')
    if self.nickserv is False and self.nickserv_pass is not None:
        logger.warning('It appears you provided a nickserv password but '
                       'did not enable nickserv authentication')
    for channel in self.my_channels:
        logger.debug('Attempting to join {0!s}'.format(channel))
        c.join(channel)
python
{ "resource": "" }
q17407
IRCBot.run
train
def run(self):
    """
    Run the bot in a thread.

    Implementing the IRC listener as a thread allows it to listen without
    blocking IRCLego's ability to listen as a pykka actor.

    :return: None
    """
    self._connect()
    super(irc.bot.SingleServerIRCBot, self).start()
python
{ "resource": "" }
q17408
System.check
train
def check(self):
    """
    Check config data consistency

    Returns
    -------

    """
    if self.sparselib not in self.sparselib_alt:
        logger.warning("Invalid sparse library <{}>".format(self.sparselib))
        self.sparselib = 'umfpack'

    if self.sparselib == 'klu' and not KLU:
        logger.info("Optional package \"cvxoptklu\" for KLU speed-up is "
                    "not installed; falling back to umfpack")
        self.sparselib = 'umfpack'

    return True
python
{ "resource": "" }
q17409
Fault.apply
train
def apply(self, actual_time):
    """Check time and apply faults"""
    if self.time != actual_time:
        self.time = actual_time
    else:
        return
    for i in range(self.n):
        if self.tf[i] == self.time:
            logger.info(' <Fault> Applying fault on Bus <{}> at t={}.'
                        .format(self.bus[i], self.tf[i]))
            self.u[i] = 1
            self.active += 1
            self.angle0 = self.system.dae.y[self.system.Bus.a]
            self.volt0 = self.system.dae.y[self.system.Bus.n:]
            self.system.dae.factorize = True
        elif self.tc[i] == self.time:
            logger.info(' <Fault> Clearing fault on Bus <{}> at t={}.'
                        .format(self.bus[i], self.tc[i]))
            self.u[i] = 0
            self.active -= 1
            self.system.dae.y[self.system.Bus.n:] = self.volt0
            # self.system.dae.y[self.a] = self.anglepre
            self.system.dae.factorize = True
python
{ "resource": "" }
q17410
SampleConsumer.consume_message
train
def consume_message(self, header, message):
    """Consume a message"""
    logmessage = {
        "time": (time.time() % 1000) * 1000,
        "header": "",
        "message": message,
    }
    if header:
        logmessage["header"] = (
            json.dumps(header, indent=2) + "\n" + "----------------" + "\n"
        )
    if isinstance(message, dict):
        logmessage["message"] = (
            json.dumps(message, indent=2) + "\n" + "----------------" + "\n"
        )
    print("=== Consume ====\n{header}{message}".format(**logmessage))
    self.log.info("Received message @{time}".format(**logmessage))
    self.log.debug(
        "Received message @{time}\n{header}{message}".format(**logmessage)
    )
    time.sleep(0.1)
python
{ "resource": "" }
q17411
Lego.on_receive
train
def on_receive(self, message):
    """
    Handle being informed of a message.

    This function is called whenever a Lego receives a message, as
    specified in the pykka documentation.

    Legos should not override this function.

    :param message:
    :return:
    """
    if self.log_file is not None and message['should_log']:
        message_copy = Message(message['text'],
                               Metadata(None).__dict__,
                               message['should_log']).__dict__
        with open(self.log_file, mode='w') as f:
            f.write(json.dumps(message_copy))
    logger.info(message['metadata']['source'])
    if self.listening_for(message):
        self_thread = self.HandlerThread(self.handle, message)
        self_thread.start()
    self.cleanup()
    for child in self.children:
        child.tell(message)
python
{ "resource": "" }
q17412
Lego.cleanup
train
def cleanup(self):
    """
    Clean up finished children.

    :return: None
    """
    self.lock.acquire()
    logger.debug('Acquired lock in cleanup for ' + str(self))
    self.children = [child for child in self.children if child.is_alive()]
    self.lock.release()
python
{ "resource": "" }
q17413
Lego.add_child
train
def add_child(self, child_type, *args, **kwargs):
    """
    Initialize and keep track of a child.

    :param child_type: a class inheriting from Lego to initialize \
                       an instance of
    :param args: arguments for initializing the child
    :param kwargs: keyword arguments for initializing the child
    :return:
    """
    # catch KeyError specifically; a bare except would also swallow
    # unrelated errors such as KeyboardInterrupt
    try:
        baseplate = kwargs['baseplate']
    except KeyError:
        if self.baseplate is None:
            baseplate = self.actor_ref
        else:
            baseplate = self.baseplate
    try:
        lock = kwargs['lock']
    except KeyError:
        lock = self.lock
    child = child_type.start(baseplate, lock, *args, **kwargs)
    self.children.append(child)
python
{ "resource": "" }
q17414
Lego.reply
train
def reply(self, message, text, opts=None):
    """
    Reply to the sender of the provided message with a message \
    containing the provided text.

    :param message: the message to reply to
    :param text: the text to reply with
    :param opts: A dictionary of additional values to add to metadata
    :return: None
    """
    metadata = Metadata(source=self.actor_urn,
                        dest=message['metadata']['source']).__dict__
    metadata['opts'] = opts
    message = Message(text=text, metadata=metadata,
                      should_log=message['should_log']).__dict__
    dest_actor = ActorRegistry.get_by_urn(message['metadata']['dest'])
    if dest_actor is not None:
        dest_actor.tell(message)
    else:
        # raising a plain string is a TypeError in Python 3;
        # raise a real exception instead
        raise RuntimeError("Tried to send message to nonexistent actor")
python
{ "resource": "" }
q17415
Lego.build_reply_opts
train
def build_reply_opts(self, message):
    """
    Convenience method for constructing default options for a reply
    message.

    :param message: the message to reply to
    :return: opts
    """
    try:
        source = message['metadata']['source_channel']
        thread = message['metadata'].get('thread_ts')
        opts = {'target': source, 'thread': thread}
    except LookupError:
        source = None
        opts = None
        logger.error("Could not identify source from message:{}\n"
                     .format(str(message)))
    return opts
python
{ "resource": "" }
q17416
alter
train
def alter(data, system):
    """Alter data in dm format devices"""
    device = data[0]
    action = data[1]
    if data[2] == '*':
        data[2] = '.*'
    regex = re.compile(data[2])
    prop = data[3]
    value = float(data[4])

    if action == 'MUL':
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] *= value
    elif action == 'REP':
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] = value
    elif action == 'DIV':
        if not value:
            return
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] /= value
    elif action == 'SUM':
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] += value
    elif action == 'SUB':
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] -= value
    elif action == 'POW':
        for item in range(system.__dict__[device].n):
            if regex.search(system.__dict__[device].name[item]):
                system.__dict__[device].__dict__[prop][item] **= value
    else:
        # the original mixed logging-style '%s' placeholders into print()
        print('ALTER action <{}> is not defined'.format(action))
python
{ "resource": "" }
q17417
write
train
def write(file, system):
    """
    Write data in system to a dm file
    """
    # TODO: Check for bugs!!!
    out = list()
    out.append('# DOME format version 1.0')
    ppl = 7  # parameters per line
    retval = True
    dev_list = sorted(system.devman.devices)
    for dev in dev_list:
        model = system.__dict__[dev]
        if not model.n:
            continue
        out.append('')
        header = dev + ', '
        space = ' ' * (len(dev) + 2)
        keys = list(model._data.keys())
        keys.extend(['name', 'idx'])
        keys = sorted(keys)

        # remove non-existent keys; the original called ``keys.pop(key)``,
        # but list.pop takes an index, not a value
        keys = [key for key in keys if key in model.__dict__]

        nline = int(ceil(len(keys) / ppl))
        nelement = model.n
        vals = [''] * len(keys)

        # for each element, read values
        for elem in range(nelement):
            for idx, key in enumerate(keys):
                if model._flags['sysbase'] and key in model._store.keys():
                    val = model._store[key][elem]
                else:
                    val = model.__dict__[key][elem]
                if isinstance(val, float):
                    val = round(val, 5)
                elif isinstance(val, str):
                    val = '"{}"'.format(val)
                elif isinstance(val, list):
                    val = '; '.join(str(i) for i in val)
                    val = '[{}]'.format(val)
                elif val is None:
                    val = 0
                vals[idx] = val

            pair = []
            for key, val in zip(keys, vals):
                pair.append('{} = {}'.format(key, val))

            for line in range(nline):
                string = ', '.join(pair[ppl * line:ppl * (line + 1)])
                if line == 0:  # append header or space
                    string = header + string
                else:
                    string = space + string
                if not line == nline - 1:  # add comma except for last line
                    string += ','
                out.append(string)

    with open(file, 'w') as fid:
        for line in out:
            fid.write(line + '\n')

    return retval
python
{ "resource": "" }
q17418
StompTransport.add_command_line_options
train
def add_command_line_options(cls, parser):
    """function to inject command line parameters"""
    if "add_argument" in dir(parser):
        return cls.add_command_line_options_argparse(parser)
    else:
        return cls.add_command_line_options_optparse(parser)
python
{ "resource": "" }
q17419
StompTransport.add_command_line_options_argparse
train
def add_command_line_options_argparse(cls, argparser):
    """function to inject command line parameters into
    a Python ArgumentParser."""
    import argparse

    class SetParameter(argparse.Action):
        """callback object for ArgumentParser"""

        def __call__(self, parser, namespace, value, option_string=None):
            cls.config[option_string] = value
            if option_string == "--stomp-conf":
                cls.load_configuration_file(value)

    argparser.add_argument(
        "--stomp-host",
        metavar="HOST",
        default=cls.defaults.get("--stomp-host"),
        help="Stomp broker address, default '%(default)s'",
        type=str,
        action=SetParameter,
    )
    argparser.add_argument(
        "--stomp-port",
        metavar="PORT",
        default=cls.defaults.get("--stomp-port"),
        help="Stomp broker port, default '%(default)s'",
        type=int,
        action=SetParameter,
    )
    argparser.add_argument(
        "--stomp-user",
        metavar="USER",
        default=cls.defaults.get("--stomp-user"),
        help="Stomp user, default '%(default)s'",
        type=str,
        action=SetParameter,
    )
    argparser.add_argument(
        "--stomp-pass",
        metavar="PASS",
        default=cls.defaults.get("--stomp-pass"),
        help="Stomp password",
        type=str,
        action=SetParameter,
    )
    argparser.add_argument(
        "--stomp-prfx",
        metavar="PRE",
        default=cls.defaults.get("--stomp-prfx"),
        help="Stomp namespace prefix, default '%(default)s'",
        type=str,
        action=SetParameter,
    )
    argparser.add_argument(
        "--stomp-conf",
        metavar="CNF",
        default=cls.defaults.get("--stomp-conf"),
        help="Stomp configuration file containing connection information, "
             "disables default values",
        type=str,
        action=SetParameter,
    )
python
{ "resource": "" }
q17420
StompTransport.add_command_line_options_optparse
train
def add_command_line_options_optparse(cls, optparser):
    """function to inject command line parameters into
    a Python OptionParser."""

    def set_parameter(option, opt, value, parser):
        """callback function for OptionParser"""
        cls.config[opt] = value
        if opt == "--stomp-conf":
            cls.load_configuration_file(value)

    optparser.add_option(
        "--stomp-host",
        metavar="HOST",
        default=cls.defaults.get("--stomp-host"),
        help="Stomp broker address, default '%default'",
        type="string",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
    optparser.add_option(
        "--stomp-port",
        metavar="PORT",
        default=cls.defaults.get("--stomp-port"),
        help="Stomp broker port, default '%default'",
        type="int",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
    optparser.add_option(
        "--stomp-user",
        metavar="USER",
        default=cls.defaults.get("--stomp-user"),
        help="Stomp user, default '%default'",
        type="string",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
    optparser.add_option(
        "--stomp-pass",
        metavar="PASS",
        default=cls.defaults.get("--stomp-pass"),
        help="Stomp password",
        type="string",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
    optparser.add_option(
        "--stomp-prfx",
        metavar="PRE",
        default=cls.defaults.get("--stomp-prfx"),
        help="Stomp namespace prefix, default '%default'",
        type="string",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
    optparser.add_option(
        "--stomp-conf",
        metavar="CNF",
        default=cls.defaults.get("--stomp-conf"),
        help="Stomp configuration file containing connection information, "
             "disables default values",
        type="string",
        nargs=1,
        action="callback",
        callback=set_parameter,
    )
python
{ "resource": "" }
q17421
StompTransport.is_connected
train
def is_connected(self):
    """Return connection status"""
    self._connected = self._connected and self._conn.is_connected()
    return self._connected
python
{ "resource": "" }
q17422
StompTransport.disconnect
train
def disconnect(self):
    """Gracefully close connection to stomp server."""
    if self._connected:
        self._connected = False
        self._conn.disconnect()
python
{ "resource": "" }
q17423
StompTransport.broadcast_status
train
def broadcast_status(self, status):
    """Broadcast transient status information to all listeners"""
    self._broadcast(
        "transient.status",
        json.dumps(status),
        headers={"expires": str(int((15 + time.time()) * 1000))},
    )
python
{ "resource": "" }
q17424
DevMan.register_device
train
def register_device(self, dev_name):
    """register a device to the device list"""
    if dev_name not in self.devices:
        self.devices.append(dev_name)
    group_name = self.system.__dict__[dev_name]._group
    if group_name not in self.group.keys():
        self.group[group_name] = {}
python
{ "resource": "" }
q17425
DevMan.register_element
train
def register_element(self, dev_name, idx=None):
    """
    Register a device element to the group list

    Parameters
    ----------
    dev_name : str
        model name
    idx : str
        element idx

    Returns
    -------
    str
        assigned idx
    """
    if dev_name not in self.devices:
        logger.error('Device {} missing. call add_device before adding '
                     'elements'.format(dev_name))
        return
    group_name = self.system.__dict__[dev_name]._group
    if idx is None:  # "if not idx" will fail for idx == 0.0
        idx = dev_name + '_' + str(len(self.group[group_name].keys()))
    self.group[group_name][idx] = dev_name
    return idx
python
{ "resource": "" }
q17426
DevMan.sort_device
train
def sort_device(self):
    """
    Sort device to follow the order of initialization

    :return: None
    """
    self.devices.sort()

    # idx: the indices of order-sensitive models
    # names: an ordered list of order-sensitive models
    idx = []
    names = []
    for dev in order:
        # if ``dev`` in ``order`` is a model file name:
        #   initialize the models in alphabet order
        if dev in all_models:
            all_dev = list(sorted(all_models[dev].keys()))
            for item in all_dev:
                if item in self.devices:
                    idx.append(self.devices.index(item))
                    names.append(item)
        # if ``dev`` presents as a model name
        elif dev in self.devices:
            idx.append(self.devices.index(dev))
            names.append(dev)

    idx = sorted(idx)
    for id, name in zip(idx, names):
        self.devices[id] = name
python
{ "resource": "" }
q17427
TDS._calc_time_step_first
train
def _calc_time_step_first(self):
    """
    Compute the first time step and save to ``self.h``

    Returns
    -------
    None
    """
    system = self.system
    config = self.config

    if not system.dae.n:
        freq = 1.0
    elif system.dae.n == 1:
        B = matrix(system.dae.Gx)
        self.solver.linsolve(system.dae.Gy, B)
        As = system.dae.Fx - system.dae.Fy * B
        freq = abs(As[0, 0])
    else:
        freq = 20.0

    if freq > system.freq:
        freq = float(system.freq)

    tspan = abs(config.tf - config.t0)
    tcycle = 1 / freq
    config.deltatmax = min(5 * tcycle, tspan / 100.0)
    config.deltat = min(tcycle, tspan / 100.0)
    config.deltatmin = min(tcycle / 64, config.deltatmax / 20)

    if config.fixt:
        if config.tstep <= 0:
            logger.warning('Fixed time step is negative or zero')
            logger.warning('Switching to automatic time step')
            config.fixt = False
        else:
            config.deltat = config.tstep
            if config.tstep < config.deltatmin:
                logger.warning('Fixed time step is below the estimated minimum')

    self.h = config.deltat
python
{ "resource": "" }
q17428
TDS.calc_time_step
train
def calc_time_step(self):
    """
    Set the time step during time domain simulations

    Parameters
    ----------
    convergence : bool
        truth value of the convergence of the last step
    niter : int
        current iteration count
    t : float
        current simulation time

    Returns
    -------
    float
        computed time step size
    """
    system = self.system
    config = self.config
    convergence = self.convergence
    niter = self.niter
    t = self.t

    if t == 0:
        self._calc_time_step_first()
        return

    if convergence:
        if niter >= 15:
            config.deltat = max(config.deltat * 0.5, config.deltatmin)
        elif niter <= 6:
            config.deltat = min(config.deltat * 1.1, config.deltatmax)
        else:
            config.deltat = max(config.deltat * 0.95, config.deltatmin)
        # adjust fixed time step if niter is high
        if config.fixt:
            config.deltat = min(config.tstep, config.deltat)
    else:
        config.deltat *= 0.9
        if config.deltat < config.deltatmin:
            config.deltat = 0

    if system.Fault.is_time(t) or system.Breaker.is_time(t):
        config.deltat = min(config.deltat, 0.002778)
    elif system.check_event(t):
        config.deltat = min(config.deltat, 0.002778)

    if config.method == 'fwdeuler':
        config.deltat = min(config.deltat, config.tstep)

    # last step size
    if self.t + config.deltat > config.tf:
        config.deltat = config.tf - self.t

    # reduce time step for fixed_times events
    for fixed_t in self.fixed_times:
        if (fixed_t > self.t) and (fixed_t <= self.t + config.deltat):
            config.deltat = fixed_t - self.t
            self.switch = True
            break

    self.h = config.deltat
python
{ "resource": "" }
q17429
TDS.init
train
def init(self):
    """
    Initialize time domain simulation

    Returns
    -------
    None
    """
    system = self.system
    config = self.config
    dae = self.system.dae

    if system.pflow.solved is False:
        return

    t, s = elapsed()

    # Assign indices for post-powerflow device variables
    system.xy_addr1()

    # Assign variable names for bus injections and line flows if enabled
    system.varname.resize_for_flows()
    system.varname.bus_line_names()

    # Reshape dae to retain power flow solutions
    system.dae.init1()

    # Initialize post-powerflow device variables
    for device, init1 in zip(system.devman.devices, system.call.init1):
        if init1:
            system.__dict__[device].init1(system.dae)

    # compute line and area flow
    if config.compute_flows:
        dae.init_fg()
        self.compute_flows()  # TODO: move to PowerSystem

    t, s = elapsed(t)

    if system.dae.n:
        logger.debug('Dynamic models initialized in {:s}.'.format(s))
    else:
        logger.debug('No dynamic model loaded.')

    # system.dae flags initialize
    system.dae.factorize = True
    system.dae.mu = 1.0
    system.dae.kg = 0.0
python
{ "resource": "" }
q17430
TDS.restore_values
train
def restore_values(self):
    """
    Restore x, y, and f values if not converged

    Returns
    -------
    None
    """
    if self.convergence is True:
        return
    dae = self.system.dae
    system = self.system

    inc_g = self.inc[dae.n:dae.m + dae.n]
    max_g_err_sign = 1 if abs(max(inc_g)) > abs(min(inc_g)) else -1
    if max_g_err_sign == 1:
        max_g_err_idx = list(inc_g).index(max(inc_g))
    else:
        max_g_err_idx = list(inc_g).index(min(inc_g))
    logger.debug('Maximum mismatch = {:.4g} at equation <{}>'
                 .format(max(abs(inc_g)),
                         system.varname.unamey[max_g_err_idx]))
    logger.debug('Reducing time step h={:.4g}s for t={:.4g}'
                 .format(self.h, self.t))

    # restore initial variable data
    dae.x = matrix(self.x0)
    dae.y = matrix(self.y0)
    dae.f = matrix(self.f0)
python
{ "resource": "" }
q17431
TDS.implicit_step
train
def implicit_step(self):
    """
    Integrate one step using trapezoidal method.
    Sets convergence and niter flags.

    Returns
    -------
    None
    """
    config = self.config
    system = self.system
    dae = self.system.dae

    # constant short names
    In = spdiag([1] * dae.n)
    h = self.h

    while self.err > config.tol and self.niter < config.maxit:
        if self.t - self.t_jac >= 5:
            dae.rebuild = True
            self.t_jac = self.t
        elif self.niter > 4:
            dae.rebuild = True
        elif dae.factorize:
            dae.rebuild = True

        # rebuild Jacobian
        if dae.rebuild:
            exec(system.call.int)
            dae.rebuild = False
        else:
            exec(system.call.int_fg)

        # complete Jacobian matrix dae.Ac
        if config.method == 'euler':
            dae.Ac = sparse([[In - h * dae.Fx, dae.Gx],
                             [-h * dae.Fy, dae.Gy]], 'd')
            dae.q = dae.x - self.x0 - h * dae.f
        elif config.method == 'trapezoidal':
            dae.Ac = sparse([[In - h * 0.5 * dae.Fx, dae.Gx],
                             [-h * 0.5 * dae.Fy, dae.Gy]], 'd')
            dae.q = dae.x - self.x0 - h * 0.5 * (dae.f + self.f0)

        # windup limiters
        dae.reset_Ac()

        if dae.factorize:
            self.F = self.solver.symbolic(dae.Ac)
            dae.factorize = False
        self.inc = -matrix([dae.q, dae.g])

        try:
            N = self.solver.numeric(dae.Ac, self.F)
            self.solver.solve(dae.Ac, self.F, N, self.inc)
        except ArithmeticError:
            logger.error('Singular matrix')
            dae.check_diag(dae.Gy, 'unamey')
            dae.check_diag(dae.Fx, 'unamex')
            self.niter = config.maxit + 1  # force quit
            break
        except ValueError:
            logger.warning('Unexpected symbolic factorization')
            dae.factorize = True
            continue
        else:
            inc_x = self.inc[:dae.n]
            inc_y = self.inc[dae.n:dae.m + dae.n]
            dae.x += inc_x
            dae.y += inc_y

        self.err = max(abs(self.inc))
        if np.isnan(self.inc).any():
            logger.error('Iteration error: NaN detected.')
            self.niter = config.maxit + 1
            break

        self.niter += 1

    if self.niter <= config.maxit:
        self.convergence = True
python
{ "resource": "" }
q17432
TDS.event_actions
train
def event_actions(self):
    """
    Take actions for timed events

    Returns
    -------
    None
    """
    system = self.system
    dae = system.dae
    if self.switch:
        system.Breaker.apply(self.t)
        for item in system.check_event(self.t):
            system.__dict__[item].apply(self.t)
        dae.rebuild = True
        self.switch = False
python
{ "resource": "" }
q17433
TDS.load_pert
train
def load_pert(self):
    """
    Load perturbation files to ``self.callpert``

    Returns
    -------
    None
    """
    system = self.system
    if system.files.pert:
        try:
            sys.path.append(system.files.path)
            module = importlib.import_module(system.files.pert[:-3])
            self.callpert = getattr(module, 'pert')
        except ImportError:
            logger.warning('Pert file is discarded due to import errors.')
            self.callpert = None
python
{ "resource": "" }
q17434
TDS.run_step0
train
def run_step0(self):
    """
    For the 0th step, store the data and stream data

    Returns
    -------
    None
    """
    dae = self.system.dae
    system = self.system

    self.inc = zeros(dae.m + dae.n, 1)
    system.varout.store(self.t, self.step)
    self.streaming_step()
python
{ "resource": "" }
q17435
TDS.streaming_step
train
def streaming_step(self):
    """
    Sync, handle and stream data for each integration step

    Returns
    -------
    None
    """
    system = self.system
    if system.config.dime_enable:
        system.streaming.sync_and_handle()
        system.streaming.vars_to_modules()
        system.streaming.vars_to_pmu()
python
{ "resource": "" }
q17436
TDS.streaming_init
train
def streaming_init(self):
    """
    Send out initialization variables and process init from modules

    Returns
    -------
    None
    """
    system = self.system
    config = self.config
    if system.config.dime_enable:
        config.compute_flows = True
        system.streaming.send_init(recepient='all')
        logger.info('Waiting for modules to send init info...')
        sleep(0.5)
        system.streaming.sync_and_handle()
python
{ "resource": "" }
q17437
TDS.compute_flows
train
def compute_flows(self):
    """
    If enabled, compute the line flows after each step

    Returns
    -------
    None
    """
    system = self.system
    config = self.config
    dae = system.dae

    if config.compute_flows:
        # compute and append series injections on buses
        exec(system.call.bus_injection)
        bus_inj = dae.g[:2 * system.Bus.n]
        exec(system.call.seriesflow)
        system.Area.seriesflow(system.dae)
        system.Area.interchange_varout()
        dae.y = matrix([dae.y,
                        bus_inj,
                        system.Line._line_flows,
                        system.Area.inter_varout])
python
{ "resource": "" }
q17438
TDS.dump_results
train
def dump_results(self, success):
    """
    Dump simulation results to ``dat`` and ``lst`` files

    Returns
    -------
    None
    """
    system = self.system
    t, _ = elapsed()
    if success and (not system.files.no_output):
        # system.varout.dump()
        system.varout.dump_np_vars()
        _, s = elapsed(t)
        logger.info('Simulation data dumped in {:s}.'.format(s))
python
{ "resource": "" }
q17439
de_blank
train
def de_blank(val):
    """Remove blank elements in `val` and return the cleaned result"""
    ret = list(val)
    if type(val) == list:
        # The original removed items from `ret` while indexing by position
        # in `val`; after the first removal the indices shift and the
        # in-place `ret[idx] = ...` writes can raise IndexError.
        # Rebuilding the list avoids that.
        ret = [item.strip() for item in val if item.strip() != '']
    return ret
python
{ "resource": "" }
q17440
stringfy
train
def stringfy(expr, sym_const=None, sym_states=None, sym_algebs=None):
    """Convert the right-hand-side of an equation
    into CVXOPT matrix operations"""
    if not sym_const:
        sym_const = []
    if not sym_states:
        sym_states = []
    if not sym_algebs:
        sym_algebs = []
    expr_str = []
    if type(expr) in (int, float):
        return expr
    if expr.is_Atom:
        if expr in sym_const:
            expr_str = 'self.{}'.format(expr)
        elif expr in sym_states:
            expr_str = 'dae.x[self.{}]'.format(expr)
        elif expr in sym_algebs:
            expr_str = 'dae.y[self.{}]'.format(expr)
        elif expr.is_Number:
            if expr.is_Integer:
                expr_str = str(int(expr))
            else:
                expr_str = str(float(expr))
            # if expr.is_negative:
            #     expr_str = '{}'.format(expr)
            # else:
            #     expr_str = str(expr)
        else:
            raise AttributeError('Unknown free symbol <{}>'.format(expr))
    else:
        nargs = len(expr.args)
        arg_str = []
        for arg in expr.args:
            arg_str.append(stringfy(arg, sym_const, sym_states, sym_algebs))
        if expr.is_Add:
            expr_str = ''
            for idx, item in enumerate(arg_str):
                if idx == 0:
                    if len(item) > 1 and item[1] == ' ':
                        item = item[0] + item[2:]
                if idx > 0:
                    if item[0] == '-':
                        item = ' ' + item
                    else:
                        item = ' + ' + item
                expr_str += item
        elif expr.is_Mul:
            if nargs == 2 and expr.args[0].is_Integer:
                # number * matrix
                if expr.args[0].is_positive:
                    expr_str = '{}*{}'.format(*arg_str)
                elif expr.args[0] == Integer('-1'):
                    expr_str = '- {}'.format(arg_str[1])
                else:  # negative but not -1
                    expr_str = '{}*{}'.format(*arg_str)
            else:  # matrix dot multiplication
                if expr.args[0] == Integer('-1'):
                    # bring '-' out of mul()
                    expr_str = ', '.join(arg_str[1:])
                    expr_str = '- mul(' + expr_str + ')'
                else:
                    expr_str = ', '.join(arg_str)
                    expr_str = 'mul(' + expr_str + ')'
        elif expr.is_Function:
            expr_str = ', '.join(arg_str)
            expr_str = str(expr.func) + '(' + expr_str + ')'
        elif expr.is_Pow:
            if arg_str[1] == '-1':
                expr_str = 'div(1, {})'.format(arg_str[0])
            else:
                expr_str = '({})**{}'.format(*arg_str)
        elif expr.is_Div:
            expr_str = ', '.join(arg_str)
            expr_str = 'div(' + expr_str + ')'
        else:
            raise NotImplementedError
    return expr_str
python
{ "resource": "" }
q17441
readadd
train
def readadd(file, system):
    """read DYR file"""
    dyr = {}
    data = []
    end = 0
    retval = True
    sep = ','

    fid = open(file, 'r')
    for line in fid.readlines():
        if line.find('/') >= 0:
            line = line.split('/')[0]
            end = 1
        if line.find(',') >= 0:  # mixed comma and space splitter not allowed
            line = [to_number(item.strip()) for item in line.split(sep)]
        else:
            line = [to_number(item.strip()) for item in line.split()]
        if not line:
            end = 0
            continue
        data.extend(line)
        if end == 1:
            field = data[1]
            if field not in dyr.keys():
                dyr[field] = []
            dyr[field].append(data)
            end = 0
            data = []
    fid.close()

    # add device elements to system
    supported = [
        'GENROU', 'GENCLS', 'ESST3A', 'ESDC2A', 'SEXS', 'EXST1',
        'ST2CUT', 'IEEEST', 'TGOV1',
    ]
    used = list(supported)
    for model in supported:
        if model not in dyr.keys():
            used.remove(model)
            continue
        for data in dyr[model]:
            add_dyn(system, model, data)

    needed = list(dyr.keys())
    for i in supported:
        if i in needed:
            needed.remove(i)
    logger.warning('Models currently unsupported: {}'.format(', '.join(needed)))

    return retval
python
{ "resource": "" }
q17442
Recipe._sanitize
train
def _sanitize(recipe):
    """Clean up a recipe that may have been stored as serialized json
    string. Convert any numerical pointers that are stored as strings
    to integers."""
    recipe = recipe.copy()
    for k in list(recipe):
        if k not in ("start", "error") and int(k) and k != int(k):
            recipe[int(k)] = recipe[k]
            del recipe[k]
    for k in list(recipe):
        if "output" in recipe[k] and not isinstance(
            recipe[k]["output"], (list, dict)
        ):
            recipe[k]["output"] = [recipe[k]["output"]]
    # dicts should be normalized, too
    if "start" in recipe:
        recipe["start"] = [tuple(x) for x in recipe["start"]]
    return recipe
python
{ "resource": "" }
q17443
parse_string
train
def parse_string(data, unquote=default_unquote):
    """Decode URL-encoded strings to UTF-8 containing the escaped chars.
    """
    if data is None:
        return None

    # We'll soon need to unquote to recover our UTF-8 data.
    # In Python 2, unquote crashes on chars beyond ASCII. So encode functions
    # had better not include anything beyond ASCII in data.
    # In Python 3, unquote crashes on bytes objects, requiring conversion to
    # str objects (unicode) using decode().
    # But in Python 2, the same decode causes unquote to butcher the data.
    # So in that case, just leave the bytes.
    if isinstance(data, bytes):
        if sys.version_info > (3, 0, 0):  # pragma: no cover
            data = data.decode('ascii')

    # Recover URL encoded data
    unquoted = unquote(data)

    # Without this step, Python 2 may have good URL decoded *bytes*,
    # which will therefore not normalize as unicode and not compare to
    # the original.
    if isinstance(unquoted, bytes):
        unquoted = unquoted.decode('utf-8')

    return unquoted
python
{ "resource": "" }
q17444
parse_value
train
def parse_value(value, allow_spaces=True, unquote=default_unquote):
    "Process a cookie value"
    if value is None:
        return None
    value = strip_spaces_and_quotes(value)
    value = parse_string(value, unquote=unquote)
    if not allow_spaces:
        assert ' ' not in value
    return value
python
{ "resource": "" }
q17445
valid_name
train
def valid_name(name):
    "Validate a cookie name string"
    if isinstance(name, bytes):
        name = name.decode('ascii')
    if not Definitions.COOKIE_NAME_RE.match(name):
        return False
    # This module doesn't support $identifiers, which are part of an obsolete
    # and highly complex standard which is never used.
    if name[0] == "$":
        return False
    return True
python
{ "resource": "" }
q17446
valid_value
train
def valid_value(value, quote=default_cookie_quote, unquote=default_unquote):
    """Validate a cookie value string.

    This is generic across quote/unquote functions because it directly
    verifies the encoding round-trip using the specified quote/unquote
    functions. So if you use different quote/unquote functions, use
    something like this as a replacement for valid_value::

        my_valid_value = lambda value: valid_value(value, quote=my_quote,
                                                   unquote=my_unquote)
    """
    if value is None:
        return False

    # Put the value through a round trip with the given quote and unquote
    # functions, so we will know whether data will get lost or not in the
    # event that we don't complain.
    encoded = encode_cookie_value(value, quote=quote)
    decoded = parse_string(encoded, unquote=unquote)

    # If the original string made the round trip, this is a valid value for
    # the given quote and unquote functions. Since the round trip can
    # generate different unicode forms, normalize before comparing, so we
    # can ignore trivial inequalities.
    decoded_normalized = (normalize("NFKD", decoded)
                          if not isinstance(decoded, bytes) else decoded)
    value_normalized = (normalize("NFKD", value)
                        if not isinstance(value, bytes) else value)
    if decoded_normalized == value_normalized:
        return True
    return False
python
{ "resource": "" }
q17447
valid_date
train
def valid_date(date):
    "Validate an expires datetime object"
    # We want something that acts like a datetime. In particular,
    # strings indicate a failure to parse down to an object and ints are
    # nonstandard and ambiguous at best.
    if not hasattr(date, 'tzinfo'):
        return False
    # Relevant RFCs define UTC as 'close enough' to GMT, and the maximum
    # difference between UTC and GMT is often stated to be less than a second.
    if date.tzinfo is None or _total_seconds(date.utcoffset()) < 1.1:
        return True
    return False
python
{ "resource": "" }
q17448
valid_domain
train
def valid_domain(domain):
    "Validate a cookie domain ASCII string"
    # Using encoding on domain would confuse browsers into not sending
    # cookies. Generate UnicodeDecodeError up front if it can't store
    # as ASCII.
    domain.encode('ascii')

    # Domains starting with periods are not RFC-valid, but this is very
    # common in existing cookies, so they should still parse with DOMAIN_AV.
    if Definitions.DOMAIN_RE.match(domain):
        return True
    return False
python
{ "resource": "" }
q17449
valid_path
train
def valid_path(value):
    "Validate a cookie path ASCII string"
    # Generate UnicodeDecodeError if path can't store as ASCII.
    value.encode("ascii")
    # Cookies without leading slash will likely be ignored, raise ASAP.
    if not (value and value[0] == "/"):
        return False
    if not Definitions.PATH_RE.match(value):
        return False
    return True
python
{ "resource": "" }
q17450
valid_max_age
train
def valid_max_age(number):
    "Validate a cookie Max-Age"
    if isinstance(number, basestring):
        try:
            number = long(number)
        except (ValueError, TypeError):
            return False
    if number >= 0 and number % 1 == 0:
        return True
    return False
python
{ "resource": "" }
q17451
encode_cookie_value
train
def encode_cookie_value(data, quote=default_cookie_quote):
    """URL-encode strings to make them safe for a cookie value.

    By default this uses urllib quoting, as used in many other cookie
    implementations and in other Python code, instead of an ad hoc escaping
    mechanism which includes backslashes (these also being illegal chars in
    RFC 6265).
    """
    if data is None:
        return None

    # encode() to ASCII bytes so quote won't crash on non-ASCII.
    # but doing that to bytes objects is nonsense.
    # On Python 2 encode crashes if s is bytes containing non-ASCII.
    # On Python 3 encode crashes on all byte objects.
    if not isinstance(data, bytes):
        data = data.encode("utf-8")

    # URL encode data so it is safe for cookie value
    quoted = quote(data)

    # Don't force to bytes, so that downstream can use proper string API
    # rather than crippled bytes, and to encourage encoding to be done
    # just once.
    return quoted
python
{ "resource": "" }
q17452
Cookie.from_dict
train
def from_dict(cls, cookie_dict, ignore_bad_attributes=True):
    """Construct an instance from a dict of strings to parse.

    The main difference between this and Cookie(name, value, **kwargs) is
    that the values in the argument to this method are parsed.

    If ignore_bad_attributes=True (default), values which did not parse
    are set to '' in order to avoid passing bad data.
    """
    name = cookie_dict.get('name', None)
    if not name:
        raise InvalidCookieError("Cookie must have name")
    raw_value = cookie_dict.get('value', '')
    # Absence or failure of parser here is fatal; errors in present name
    # and value should be found by Cookie.__init__.
    value = cls.attribute_parsers['value'](raw_value)
    cookie = cls(name, value)

    # Parse values from serialized formats into objects
    parsed = {}
    for key, value in cookie_dict.items():
        # Don't want to pass name/value to _set_attributes
        if key in ('name', 'value'):
            continue
        parser = cls.attribute_parsers.get(key)
        if not parser:
            # Don't let totally unknown attributes pass silently
            if not ignore_bad_attributes:
                raise InvalidCookieAttributeError(
                    key, value, "unknown cookie attribute '%s'" % key)
            _report_unknown_attribute(key)
            continue
        try:
            parsed_value = parser(value)
        except Exception as e:
            reason = "did not parse with %r: %r" % (parser, e)
            if not ignore_bad_attributes:
                raise InvalidCookieAttributeError(key, value, reason)
            _report_invalid_attribute(key, value, reason)
            parsed_value = ''
        parsed[key] = parsed_value

    # Set the parsed objects (does object validation automatically)
    cookie._set_attributes(parsed, ignore_bad_attributes)
    return cookie
python
{ "resource": "" }
q17453
Cookie.from_string
train
def from_string(cls, line, ignore_bad_cookies=False,
                ignore_bad_attributes=True):
    "Construct a Cookie object from a line of Set-Cookie header data."
    cookie_dict = parse_one_response(
        line, ignore_bad_cookies=ignore_bad_cookies,
        ignore_bad_attributes=ignore_bad_attributes)
    if not cookie_dict:
        return None
    return cls.from_dict(
        cookie_dict, ignore_bad_attributes=ignore_bad_attributes)
python
{ "resource": "" }
q17454
Cookie.validate
train
def validate(self, name, value):
    """Validate a cookie attribute with an appropriate validator.

    The value comes in already parsed (for example, an expires value
    should be a datetime). Called automatically when an attribute
    value is set.
    """
    validator = self.attribute_validators.get(name, None)
    if validator:
        return True if validator(value) else False
    return True
python
{ "resource": "" }
q17455
Cookie.attributes
train
def attributes(self):
    """Export this cookie's attributes as a dict of encoded values.

    This is an important part of the code for rendering attributes,
    e.g. render_response().
    """
    dictionary = {}
    # Only look for attributes registered in attribute_names.
    for python_attr_name, cookie_attr_name in self.attribute_names.items():
        value = getattr(self, python_attr_name)
        renderer = self.attribute_renderers.get(python_attr_name, None)
        if renderer:
            value = renderer(value)
        # If renderer returns None, or it's just natively none, then the
        # value is suppressed entirely - does not appear in any rendering.
        if not value:
            continue
        dictionary[cookie_attr_name] = value
    return dictionary
python
{ "resource": "" }
q17456
Cookies.add
train
def add(self, *args, **kwargs):
    """Add Cookie objects by their names, or create new ones under
    specified names.

    Any unnamed arguments are interpreted as existing cookies, and
    are added under the value in their .name attribute. With keyword
    arguments, the key is interpreted as the cookie name and the
    value as the UNENCODED value stored in the cookie.
    """
    # Only the first one is accessible through the main interface,
    # others accessible through get_all (all_cookies).
    for cookie in args:
        self.all_cookies.append(cookie)
        if cookie.name in self:
            continue
        self[cookie.name] = cookie
    for key, value in kwargs.items():
        cookie = self.cookie_class(key, value)
        self.all_cookies.append(cookie)
        if key in self:
            continue
        self[key] = cookie
python
{ "resource": "" }
q17457
Cookies.parse_request
train
def parse_request(self, header_data, ignore_bad_cookies=False):
    """Parse 'Cookie' header data into Cookie objects, and add them to
    this Cookies object.

    :arg header_data: string containing only 'Cookie:' request headers or
    header values (as in CGI/WSGI HTTP_COOKIE); if more than one, they
    must be separated by CRLF (\\r\\n).

    :arg ignore_bad_cookies: if set, will log each syntactically invalid
    cookie (at the granularity of semicolon-delimited blocks) rather than
    raising an exception at the first bad cookie.

    :returns: a Cookies instance containing Cookie objects parsed from
    header_data.

    .. note::
        If you want to parse 'Set-Cookie:' response headers, please use
        parse_response instead. parse_request will happily turn
        'expires=frob' into a separate cookie without complaining,
        according to the grammar.
    """
    cookies_dict = _parse_request(
        header_data, ignore_bad_cookies=ignore_bad_cookies)
    cookie_objects = []
    for name, values in cookies_dict.items():
        for value in values:
            # Use from_dict to check name and parse value
            cookie_dict = {'name': name, 'value': value}
            try:
                cookie = self.cookie_class.from_dict(cookie_dict)
            except InvalidCookieError:
                if not ignore_bad_cookies:
                    raise
            else:
                cookie_objects.append(cookie)
    try:
        self.add(*cookie_objects)
    except InvalidCookieError:
        if not ignore_bad_cookies:
            raise
        _report_invalid_cookie(header_data)
    return self
python
{ "resource": "" }
q17458
Cookies.parse_response
train
def parse_response(self, header_data, ignore_bad_cookies=False,
                   ignore_bad_attributes=True):
    """Parse 'Set-Cookie' header data into Cookie objects, and add them
    to this Cookies object.

    :arg header_data: string containing only 'Set-Cookie:' request
    headers or their corresponding header values; if more than one, they
    must be separated by CRLF (\\r\\n).

    :arg ignore_bad_cookies: if set, will log each syntactically invalid
    cookie rather than raising an exception at the first bad cookie.
    (This includes cookies which have noncompliant characters in the
    attribute section).

    :arg ignore_bad_attributes: defaults to True, which means to log but
    not raise an error when a particular attribute is unrecognized. (This
    does not necessarily mean that the attribute is invalid, although
    that would often be the case.) if unset, then an error will be raised
    at the first semicolon-delimited block which has an unknown
    attribute.

    :returns: a Cookies instance containing Cookie objects parsed from
    header_data, each with recognized attributes populated.

    .. note::
        If you want to parse 'Cookie:' headers (i.e., data like what's
        sent with an HTTP request, which has only name=value pairs and no
        attributes), then please use parse_request instead. Such lines
        often contain multiple name=value pairs, and parse_response will
        throw away the pairs after the first one, which will probably
        generate errors or confusing behavior. (Since there's no perfect
        way to automatically determine which kind of parsing to do, you
        have to tell it manually by choosing correctly between
        parse_request and parse_response.)
    """
    cookie_dicts = _parse_response(
        header_data,
        ignore_bad_cookies=ignore_bad_cookies,
        ignore_bad_attributes=ignore_bad_attributes)
    cookie_objects = []
    for cookie_dict in cookie_dicts:
        cookie = self.cookie_class.from_dict(cookie_dict)
        cookie_objects.append(cookie)
    self.add(*cookie_objects)
    return self
python
{ "resource": "" }
q17459
Cookies.from_request
train
def from_request(cls, header_data, ignore_bad_cookies=False):
    "Construct a Cookies object from request header data."
    cookies = cls()
    cookies.parse_request(
        header_data, ignore_bad_cookies=ignore_bad_cookies)
    return cookies
python
{ "resource": "" }
q17460
Cookies.from_response
train
def from_response(cls, header_data, ignore_bad_cookies=False,
                  ignore_bad_attributes=True):
    "Construct a Cookies object from response header data."
    cookies = cls()
    cookies.parse_response(
        header_data,
        ignore_bad_cookies=ignore_bad_cookies,
        ignore_bad_attributes=ignore_bad_attributes)
    return cookies
python
{ "resource": "" }
q17461
not0
train
def not0(a):
    """Return a copy of ``a`` with every zero element replaced by 1;
    nonzero elements are kept as-is."""
    return matrix(list(map(lambda x: 1 if x == 0 else x, a)), a.size)
python
{ "resource": "" }
q17462
index
train
def index(m, val):
    """
    Return the indices of all occurrences of ``val`` in ``m``
    """
    mm = np.array(m)
    idx_tuple = np.where(mm == val)
    idx = idx_tuple[0].tolist()
    return idx
python
{ "resource": "" }
q17463
to_number
train
def to_number(s):
    """
    Convert a string to a number. If not successful, return the string
    without blanks
    """
    ret = s
    # try converting to float
    try:
        ret = float(s)
    except ValueError:
        ret = ret.strip('\'').strip()

    # try converting to integer
    try:
        ret = int(s)
    except ValueError:
        pass

    # try converting to boolean or None
    if ret == 'True':
        ret = True
    elif ret == 'False':
        ret = False
    elif ret == 'None':
        ret = None
    return ret
python
{ "resource": "" }
q17464
get_config_load_path
train
def get_config_load_path(conf_path=None):
    """
    Return config file load path

    Priority:
    1. conf_path
    2. current directory
    3. home directory

    Parameters
    ----------
    conf_path

    Returns
    -------

    """
    if conf_path is None:
        # test ./andes.conf
        if os.path.isfile('andes.conf'):
            conf_path = 'andes.conf'
        # test ~/.andes/andes.conf only if the current directory has none,
        # so the documented priority order actually holds (the original
        # let the home directory override the current directory)
        else:
            home_dir = os.path.expanduser('~')
            if os.path.isfile(os.path.join(home_dir, '.andes',
                                           'andes.conf')):
                conf_path = os.path.join(home_dir, '.andes', 'andes.conf')

    if conf_path is not None:
        logger.debug('Found config file at {}.'.format(conf_path))

    return conf_path
python
{ "resource": "" }
q17465
get_log_dir
train
def get_log_dir():
    """
    Get a directory for logging

    On Linux or macOS, a fresh temporary directory with the prefix
    ``andes-`` is created under the system temp path.
    On Windows, '%APPDATA%/andes' is the default.

    Returns
    -------
    str
        Path to the logging directory
    """
    PATH = ''
    if platform.system() in ('Linux', 'Darwin'):
        PATH = tempfile.mkdtemp(prefix='andes-')
    elif platform.system() == 'Windows':
        APPDATA = os.getenv('APPDATA')
        PATH = os.path.join(APPDATA, 'andes')

    if not os.path.exists(PATH):
        os.makedirs(PATH)
    return PATH
python
{ "resource": "" }
q17466
VarOut.show
train
def show(self):
    """
    The representation of a Varout object

    :return: the full result matrix (for use with the PyCharm viewer)
    :rtype: np.array
    """
    out = []
    for item in self.vars:
        out.append(list(item))
    return np.array(out)
python
{ "resource": "" }
q17467
VarOut.concat_t_vars
train
def concat_t_vars(self):
    """
    Concatenate ``self.t`` with ``self.vars`` and output a single matrix
    for data dump

    :return matrix: concatenated matrix with ``self.t`` as the 0-th column
    """
    logger.warning('This function is deprecated and replaced by '
                   '`concat_t_vars_np`.')
    out = np.array([])
    if len(self.t) == 0:
        return out

    out = np.ndarray(shape=(0, self.vars[0].size[0] + 1))
    for t, var in zip(self.t, self.vars):
        line = [[t]]
        line[0].extend(list(var))
        out = np.append(out, line, axis=0)
    return out
python
{ "resource": "" }
q17468
VarOut.concat_t_vars_np
train
def concat_t_vars_np(self, vars_idx=None):
    """
    Concatenate `self.np_t` with `self.np_vars` and return a single
    matrix. The first column corresponds to time, and the rest of the
    matrix is the variables.

    Returns
    -------
    np.array : concatenated matrix
    """
    selected_np_vars = self.np_vars
    if vars_idx is not None:
        selected_np_vars = self.np_vars[:, vars_idx]

    return np.concatenate([self.np_t[:self.np_nrows].reshape((-1, 1)),
                           selected_np_vars[:self.np_nrows, :]], axis=1)
python
{ "resource": "" }
q17469
VarOut.get_xy
train
def get_xy(self, yidx, xidx=0):
    """
    Return stored data for the given indices for plot

    :param yidx: the indices of the y-axis variables (1-indexed)
    :param xidx: the index of the x-axis variable
    :return: the x data as a list, and the y data as a list of lists
    """
    assert isinstance(xidx, int)
    if isinstance(yidx, int):
        yidx = [yidx]

    t_vars = self.concat_t_vars()
    xdata = t_vars[:, xidx]
    ydata = t_vars[:, yidx]

    return xdata.tolist(), ydata.transpose().tolist()
python
{ "resource": "" }
q17470
VarOut.dump_np_vars
train
def dump_np_vars(self, store_format='csv', delimiter=','):
    """
    Dump the TDS simulation data to files by calling subroutines
    `write_lst` and `write_np_dat`.

    Parameters
    ----------
    store_format : str
        dump format in `('csv', 'txt', 'hdf5')`
    delimiter : str
        delimiter for the `csv` and `txt` format

    Returns
    -------
    bool : success flag
    """
    ret = False

    if self.system.files.no_output is True:
        logger.debug('no_output is True, thus no TDS dump saved ')
        return True

    if self.write_lst() and self.write_np_dat(store_format=store_format,
                                              delimiter=delimiter):
        ret = True

    return ret
python
{ "resource": "" }
q17471
VarOut.dump
train
def dump(self):
    """
    Dump the TDS results to the output `dat` file

    :return: succeed flag
    """
    logger.warning('This function is deprecated and replaced by '
                   '`dump_np_vars`.')
    ret = False

    if self.system.files.no_output:
        # return ``True`` because it did not fail
        return True

    if self.write_lst() and self.write_dat():
        ret = True

    return ret
python
{ "resource": "" }
q17472
VarOut.write_np_dat
train
def write_np_dat(self, store_format='csv', delimiter=',', fmt='%.12g'):
    """
    Write TDS data stored in `self.np_vars` to the output file

    Parameters
    ----------
    store_format : str
        dump format in ('csv', 'txt', 'hdf5')
    delimiter : str
        delimiter for the `csv` and `txt` format
    fmt : str
        output formatting template

    Returns
    -------
    bool : success flag
    """
    ret = False
    system = self.system

    # compute the total number of columns, excluding time
    if not system.Recorder.n:
        n_vars = system.dae.m + system.dae.n
        # post-computed power flows include:
        # bus  - (Pi, Qi)
        # line - (Pij, Pji, Qij, Qji, Iij_Real, Iij_Imag, Iji_real, Iji_Imag)
        if system.tds.config.compute_flows:
            n_vars += 2 * system.Bus.n + 8 * system.Line.n + \
                2 * system.Area.n_combination
        idx = list(range(n_vars))
    else:
        n_vars = len(system.Recorder.varout_idx)
        idx = system.Recorder.varout_idx

    # prepare data
    t_vars_concatenated = self.concat_t_vars_np(vars_idx=idx)

    try:
        os.makedirs(os.path.abspath(os.path.dirname(system.files.dat)),
                    exist_ok=True)
        with open(system.files.dat, self._mode) as f:
            if store_format in ('csv', 'txt'):
                np.savetxt(f, t_vars_concatenated, fmt=fmt,
                           delimiter=delimiter)
            elif store_format == 'hdf5':
                pass
        ret = True
        logger.info('TDS data dumped to <{}>'.format(system.files.dat))
    except IOError:
        logger.error('I/O Error while writing the dat file.')

    return ret
python
{ "resource": "" }
q17473
VarOut.write_lst
train
def write_lst(self):
    """
    Dump the variable name lst file

    :return: succeed flag
    """
    ret = False
    out = ''
    system = self.system
    dae = self.system.dae
    varname = self.system.varname
    template = '{:>6g}, {:>25s}, {:>35s}\n'

    # header line
    out += template.format(0, 'Time [s]', '$Time\\ [s]$')

    # include line flow variables in algebraic variables
    nflows = 0
    if self.system.tds.config.compute_flows:
        nflows = 2 * self.system.Bus.n + \
            8 * self.system.Line.n + \
            2 * self.system.Area.n_combination

    # output variable indices
    if system.Recorder.n == 0:
        state_idx = list(range(dae.n))
        algeb_idx = list(range(dae.n, dae.n + dae.m + nflows))
        idx = state_idx + algeb_idx
    else:
        idx = system.Recorder.varout_idx

    # variable names concatenated
    uname = varname.unamex + varname.unamey
    fname = varname.fnamex + varname.fnamey

    for e, i in enumerate(idx):
        out += template.format(e + 1, uname[i], fname[i])

    try:
        with open(self.system.files.lst, 'w') as f:
            f.write(out)
        ret = True
    except IOError:
        logger.error('I/O Error while writing the lst file.')

    return ret
python
{ "resource": "" }
q17474
VarOut.vars_to_array
train
def vars_to_array(self):
    """
    Convert `self.vars` to a numpy array

    Returns
    -------
    numpy.array
    """
    logger.warning('This function is deprecated. You can inspect '
                   '`self.np_vars` directly as NumPy arrays without '
                   'conversion.')
    if not self.vars:
        return None

    vars_matrix = matrix(self.vars,
                         size=(self.vars[0].size[0],
                               len(self.vars))).trans()
    self.vars_array = np.array(vars_matrix)
    return self.vars_array
python
{ "resource": "" }
q17475
preamble
train
def preamble():
    """
    Log the Andes command-line preamble at the `logging.INFO` level

    Returns
    -------
    None
    """
    from . import __version__ as version
    logger.info('ANDES {ver} (Build {b}, Python {p} on {os})'
                .format(ver=version[:5], b=version[-8:],
                        p=platform.python_version(),
                        os=platform.system()))
    try:
        username = os.getlogin() + ', '
    except OSError:
        username = ''
    logger.info('Session: {}{}'
                .format(username, strftime("%m/%d/%Y %I:%M:%S %p")))
    logger.info('')
python
{ "resource": "" }
q17476
edit_conf
train
def edit_conf(edit_config=False, load_config=None, **kwargs):
    """
    Edit the Andes config file which occurs first in the search path.

    Parameters
    ----------
    edit_config : bool
        If ``True``, try to open up an editor and edit the config file.
        Otherwise returns.
    load_config : None or str, optional
        Path to the config file, which will be placed first in the
        search order.
    kwargs : dict
        Other keyword arguments.

    Returns
    -------
    bool
        ``True`` if a config file is found and an editor is opened.
        ``False`` if ``edit_config`` is False.
    """
    ret = False

    # no `edit-config` supplied
    if edit_config == '':
        return ret

    conf_path = misc.get_config_load_path(load_config)

    if conf_path is not None:
        logger.info('Editing config file {}'.format(conf_path))
        if edit_config is None:
            # use the following default editors
            if platform.system() == 'Linux':
                editor = os.environ.get('EDITOR', 'gedit')
            elif platform.system() == 'Darwin':
                editor = os.environ.get('EDITOR', 'vim')
            elif platform.system() == 'Windows':
                editor = 'notepad.exe'
        else:
            # use `edit_config` as the editor
            editor = edit_config
        call([editor, conf_path])
        ret = True
    else:
        logger.info('Config file does not exist. Save config with \'andes '
                    '--save-config\'')
        ret = True

    return ret
python
{ "resource": "" }
q17477
remove_output
train
def remove_output(clean=False, **kwargs):
    """
    Remove the outputs generated by Andes, including power flow reports
    ``_out.txt``, time-domain lists ``_out.lst`` and data ``_out.dat``,
    and eigenvalue analysis reports ``_eig.txt``.

    Parameters
    ----------
    clean : bool
        If ``True``, execute the function body. Returns otherwise.
    kwargs : dict
        Other keyword arguments

    Returns
    -------
    bool
        ``True`` if the function body executes with success. ``False``
        otherwise.
    """
    if not clean:
        return False

    found = False
    cwd = os.getcwd()

    for file in os.listdir(cwd):
        if file.endswith('_eig.txt') or \
                file.endswith('_out.txt') or \
                file.endswith('_out.lst') or \
                file.endswith('_out.dat') or \
                file.endswith('_prof.txt'):
            found = True
            try:
                os.remove(file)
                logger.info('<{:s}> removed.'.format(file))
            except IOError:
                logger.error('Error removing file <{:s}>.'.format(file))

    if not found:
        logger.info('No output found in the working directory.')

    return True
python
{ "resource": "" }
q17478
search
train
def search(search, **kwargs):
    """
    Search for models whose names match the given pattern. Print the
    results to stdout.

    .. deprecated:: 1.0.0
        `search` will be moved to ``andeshelp`` in future versions.

    Parameters
    ----------
    search : str
        Partial or full name of the model to search for

    kwargs : dict
        Other keyword arguments.

    Returns
    -------
    list
        The list of model names that match the given pattern.
    """
    from .models import all_models

    out = []

    if not search:
        return out

    keys = sorted(list(all_models.keys()))

    for key in keys:
        vals = all_models[key]
        val = list(vals.keys())
        val = sorted(val)

        for item in val:
            if search.lower() in item.lower():
                out.append(key + '.' + item)

    if out:
        print('Search result: <file.model> containing <{}>'
              .format(search))
        print(' '.join(out))
    else:
        print('No model containing <{:s}> found'.format(search))

    return out
python
{ "resource": "" }
q17479
save_config
train
def save_config(save_config='', **kwargs):
    """
    Save the Andes config to a file at the path specified by
    ``save_config``. The save action will not run if ``save_config == ''``.

    Parameters
    ----------
    save_config : None or str, optional, ('' by default)
        Path to the file to save the config file. If the path is an empty
        string, the save action will not run. Save to
        `~/.andes/andes.conf` if ``None``.

    kwargs : dict, optional
        Other keyword arguments

    Returns
    -------
    bool
        ``True`` if the save action is run. ``False`` otherwise.
    """
    ret = False
    cf_path = save_config

    # no ``--save-config`` supplied
    if cf_path == '':
        return ret

    if cf_path is None:
        cf_path = 'andes.conf'
        home = str(pathlib.Path.home())
        path = os.path.join(home, '.andes')
        if not os.path.exists(path):
            os.makedirs(path)
        cf_path = os.path.join(path, cf_path)

    ps = PowerSystem()
    ps.dump_config(cf_path)
    ret = True

    return ret
python
{ "resource": "" }
q17480
Call.setup
train
def setup(self):
    """
    Set up the call list after the case file is parsed and JIT models
    are loaded
    """
    self.devices = self.system.devman.devices
    self.ndevice = len(self.devices)

    self.gcalls = [''] * self.ndevice
    self.fcalls = [''] * self.ndevice
    self.gycalls = [''] * self.ndevice
    self.fxcalls = [''] * self.ndevice
    self.jac0s = [''] * self.ndevice

    self.build_vec()
    self.build_strings()

    self._compile_newton()
    self._compile_fdpf()
    self._compile_pfload()
    self._compile_pfgen()
    self._compile_seriesflow()
    self._compile_int()
    self._compile_int_f()
    self._compile_int_g()
    self._compile_bus_injection()
python
{ "resource": "" }
q17481
Call.build_vec
train
def build_vec(self): """build call validity vector for each device""" for item in all_calls: self.__dict__[item] = [] for dev in self.devices: for item in all_calls: if self.system.__dict__[dev].n == 0: val = False else: val = self.system.__dict__[dev].calls.get(item, False) self.__dict__[item].append(val)
python
{ "resource": "" }
q17482
Call.build_strings
train
def build_strings(self): """build call string for each device""" for idx, dev in enumerate(self.devices): header = 'system.' + dev self.gcalls[idx] = header + '.gcall(system.dae)\n' self.fcalls[idx] = header + '.fcall(system.dae)\n' self.gycalls[idx] = header + '.gycall(system.dae)\n' self.fxcalls[idx] = header + '.fxcall(system.dae)\n' self.jac0s[idx] = header + '.jac0(system.dae)\n'
python
{ "resource": "" }
q17483
Call._compile_pfgen
train
def _compile_pfgen(self):
    """Post power flow computation for PV and SW"""
    string = '"""\n'
    string += 'system.dae.init_g()\n'
    for gcall, pflow, shunt, series, stagen, call in zip(
            self.gcall, self.pflow, self.shunt, self.series, self.stagen,
            self.gcalls):
        if gcall and pflow and (shunt or series) and not stagen:
            string += call
    string += '\n'
    string += 'system.dae.reset_small_g()\n'
    string += '"""'
    # `string` is a quoted source literal: eval() strips the enclosing
    # triple quotes, and compile() caches the resulting code object
    self.pfgen = compile(eval(string), '', 'exec')
python
{ "resource": "" }
q17484
Call._compile_bus_injection
train
def _compile_bus_injection(self): """Impose injections on buses""" string = '"""\n' for device, series in zip(self.devices, self.series): if series: string += 'system.' + device + '.gcall(system.dae)\n' string += '\n' string += 'system.dae.reset_small_g()\n' string += self.gisland string += '"""' self.bus_injection = compile(eval(string), '', 'exec')
python
{ "resource": "" }
q17485
Call._compile_seriesflow
train
def _compile_seriesflow(self): """Post power flow computation of series device flow""" string = '"""\n' for device, pflow, series in zip(self.devices, self.pflow, self.series): if pflow and series: string += 'system.' + device + '.seriesflow(system.dae)\n' string += '\n' string += '"""' self.seriesflow = compile(eval(string), '', 'exec')
python
{ "resource": "" }
q17486
Call._compile_int_f
train
def _compile_int_f(self): """Time Domain Simulation - update differential equations""" string = '"""\n' string += 'system.dae.init_f()\n' # evaluate differential equations f for fcall, call in zip(self.fcall, self.fcalls): if fcall: string += call string += 'system.dae.reset_small_f()\n' string += '"""' self.int_f = compile(eval(string), '', 'exec')
python
{ "resource": "" }
q17487
Call._compile_int_g
train
def _compile_int_g(self): """Time Domain Simulation - update algebraic equations and Jacobian""" string = '"""\n' # evaluate the algebraic equations g string += 'system.dae.init_g()\n' for gcall, call in zip(self.gcall, self.gcalls): if gcall: string += call string += '\n' string += 'system.dae.reset_small_g()\n' # handle islands string += self.gisland # rebuild constant Jacobian elements if needed string += 'if system.dae.factorize:\n' string += ' system.dae.init_jac0()\n' for jac0, call in zip(self.jac0, self.jac0s): if jac0: string += ' ' + call string += ' system.dae.temp_to_spmatrix(\'jac0\')\n' # evaluate Jacobians Gy string += 'system.dae.setup_Gy()\n' for gycall, call in zip(self.gycall, self.gycalls): if gycall: string += call string += '\n' string += self.gyisland string += 'system.dae.temp_to_spmatrix(\'jac\')\n' string += '"""' self.int_g = compile(eval(string), '', 'exec')
python
{ "resource": "" }
q17488
ModelBase._init
train
def _init(self): """ Convert model metadata to class attributes. This function is called automatically after ``define()`` in new versions. :return: None """ assert self._name assert self._group # self.n = 0 self.u = [] self.name = [] self.idx = [] self.uid = {} if not self._unamey: self._unamey = self._algebs else: assert len(self._unamey) == len(self._algebs) if not self._unamex: self._unamex = self._states else: assert len(self._unamex) == len(self._states) for item in self._data.keys(): self.__dict__[item] = [] for bus in self._ac.keys(): for var in self._ac[bus]: self.__dict__[var] = [] for node in self._dc.keys(): for var in self._dc[node]: self.__dict__[var] = [] for var in self._states + self._algebs + self._service: self.__dict__[var] = [] self._flags['sysbase'] = False self._flags['allocate'] = False self._flags['address'] = False
python
{ "resource": "" }
q17489
ModelBase.param_define
train
def param_define(self, param, default, unit='', descr='', tomatrix=True,
                 nonzero=False, mandatory=False, power=False,
                 voltage=False, current=False, z=False, y=False, r=False,
                 g=False, dccurrent=False, dcvoltage=False, time=False,
                 event_time=False, **kwargs):
    """
    Define a parameter in the model

    :param param: parameter name
    :param default: parameter default value
    :param unit: parameter unit
    :param descr: description
    :param tomatrix: convert this parameter list to matrix
    :param nonzero: is non-zero
    :param mandatory: is mandatory
    :param power: is a power value in the `self.Sn` base
    :param voltage: is a voltage value in the `self.Vn` base
    :param current: is a current value in the device base
    :param z: is an impedance value in the device base
    :param y: is an admittance value in the device base
    :param r: is a dc resistance value in the device base
    :param g: is a dc conductance value in the device base
    :param dccurrent: is a dc current value in the device base
    :param dcvoltage: is a dc voltage value in the device base
    :param time: is a time value in the device base
    :param event_time: is a variable for timed event

    :type param: str
    :type default: str, float
    :type unit: str
    :type descr: str
    :type tomatrix: bool
    :type nonzero: bool
    :type mandatory: bool
    :type power: bool
    :type voltage: bool
    :type current: bool
    :type z: bool
    :type y: bool
    :type r: bool
    :type g: bool
    :type dccurrent: bool
    :type dcvoltage: bool
    :type time: bool
    :type event_time: bool
    """
    assert param not in self._data
    assert param not in self._algebs
    assert param not in self._states
    assert param not in self._service

    self._data.update({param: default})
    if unit:
        self._units.update({param: unit})
    if descr:
        self._descr.update({param: descr})

    if tomatrix:
        self._params.append(param)
    if nonzero:
        self._zeros.append(param)
    if mandatory:
        self._mandatory.append(param)
    if power:
        self._powers.append(param)
    if voltage:
        self._voltages.append(param)
    if current:
        self._currents.append(param)
    if z:
        self._z.append(param)
    if y:
        self._y.append(param)
    if r:
        self._r.append(param)
    if g:
        self._g.append(param)
    if dccurrent:
        self._dccurrents.append(param)
    if dcvoltage:
        self._dcvoltages.append(param)
    if time:
        self._times.append(param)
    if event_time:
        self._event_times.append(param)
python
{ "resource": "" }
q17490
ModelBase.var_define
train
def var_define(self, variable, ty, fname, descr='', uname=''):
    """
    Define a variable in the model

    :param fname: LaTeX-formatted variable name string
    :param uname: unformatted variable name string, `variable` as default
    :param variable: variable name
    :param ty: type code in ``('x', 'y')``
    :param descr: variable description

    :type variable: str
    :type ty: str
    :type descr: str
    :return:
    """
    assert ty in ('x', 'y')
    if not uname:
        uname = variable

    if ty == 'x':
        self._states.append(variable)
        self._fnamex.append(fname)
        self._unamex.append(uname)
        if descr:
            self._states_descr.update({variable: descr})

    elif ty == 'y':
        self._algebs.append(variable)
        self._fnamey.append(fname)
        self._unamey.append(uname)
        if descr:
            self._algebs_descr.update({variable: descr})
python
{ "resource": "" }
q17491
ModelBase.service_define
train
def service_define(self, service, ty): """ Add a service variable of type ``ty`` to this model :param str service: variable name :param type ty: variable type :return: None """ assert service not in self._data assert service not in self._algebs + self._states self._service.append(service) self._service_ty.append(ty)
python
{ "resource": "" }
q17492
ModelBase.get_uid
train
def get_uid(self, idx):
    """
    Return the `uid` of the elements with the given `idx`

    :param idx: external indices
    :type idx: list, matrix
    :return: a list of uid, or a single uid for a scalar `idx`
    """
    assert idx is not None
    if isinstance(idx, (int, float, str)):
        return self.uid[idx]

    ret = []
    for i in idx:
        tmp = self.uid.get(i, None)
        assert tmp is not None, (
            'Model <{}> does not have element <{}>'.format(self._name, i))
        ret.append(tmp)
    return ret
python
{ "resource": "" }
q17493
ModelBase.get_field
train
def get_field(self, field, idx=None, astype=None): """ Return `self.field` for the elements labeled by `idx` :param astype: type cast of the return value :param field: field name of this model :param idx: element indices, will be the whole list if not specified :return: field values """ assert astype in (None, list, matrix) ret = None if idx is None: idx = self.idx # ===================disable warning ============================== # if field in self._service: # logger.warning( # 'Reading service variable <{model}.{field}> could be unsafe.' # .format(field=field, model=self._name) # ) # ================================================================= uid = self.get_uid(idx) field_data = self.__dict__[field] if isinstance(field_data, matrix): ret = field_data[uid] elif isinstance(field_data, list): if isinstance(idx, (float, int, str)): ret = field_data[uid] else: ret = [field_data[x] for x in uid] if astype is not None: ret = astype(ret) return ret
python
{ "resource": "" }
q17494
ModelBase._alloc
train
def _alloc(self): """ Allocate empty memory for dae variable indices. Called in device setup phase. :return: None """ nzeros = [0] * self.n for var in self._states: self.__dict__[var] = nzeros[:] for var in self._algebs: self.__dict__[var] = nzeros[:]
python
{ "resource": "" }
q17495
ModelBase.data_to_dict
train
def data_to_dict(self, sysbase=False): """ Return the loaded model parameters as one dictionary. Each key of the dictionary is a parameter name, and the value is a list of all the parameter values. :param sysbase: use system base quantities :type sysbase: bool """ assert isinstance(sysbase, bool) ret = {} for key in self.data_keys: if (not sysbase) and (key in self._store): val = self._store[key] else: val = self.__dict__[key] ret[key] = val return ret
python
{ "resource": "" }
q17496
ModelBase.data_to_list
train
def data_to_list(self, sysbase=False): """ Return the loaded model data as a list of dictionaries. Each dictionary contains the full parameters of an element. :param sysbase: use system base quantities :type sysbase: bool """ ret = list() # for each element for i in range(self.n): # read the parameter values and put in the temp dict ``e`` e = {} for key in self.data_keys: if sysbase and (key in self._store): val = self._store[key][i] else: val = self.__dict__[key][i] e[key] = val ret.append(e) return ret
python
{ "resource": "" }
q17497
ModelBase.data_from_dict
train
def data_from_dict(self, data):
    """
    Populate model parameters from a dictionary of parameters

    Parameters
    ----------
    data : dict
        Dictionary mapping parameter names to lists of values

    Returns
    -------
    None
    """
    nvars = []
    for key, val in data.items():
        self.__dict__[key].extend(val)

        # assure the same parameter column length, checking from the
        # second parameter onwards (hence `nvars` rather than
        # `len(nvars) > 1`, which would skip the second column)
        if nvars and len(val) != nvars[-1]:
            raise IndexError(
                'Model <{}> parameter <{}> must have the same length'.
                format(self._name, key))
        nvars.append(len(val))

    # assign idx-uid mapping
    for i, idx in zip(range(self.n), self.idx):
        self.uid[idx] = i
python
{ "resource": "" }
q17498
ModelBase.var_to_df
train
def var_to_df(self):
    """
    Return the current values of the model variables as a DataFrame

    :return: pandas.DataFrame
    """
    ret = {}
    self._check_pd()

    if self._flags['address'] is False:
        return pd.DataFrame.from_dict(ret)

    ret.update({'name': self.name})
    ret.update({'idx': self.idx})

    for x in self._states:
        idx = self.__dict__[x]
        ret.update({x: self.system.dae.x[idx]})

    for y in self._algebs:
        idx = self.__dict__[y]
        ret.update({y: self.system.dae.y[idx]})

    var_df = pd.DataFrame.from_dict(ret).set_index('idx')

    return var_df
python
{ "resource": "" }
q17499
ModelBase.param_remove
train
def param_remove(self, param: 'str') -> None: """ Remove a param from this model :param param: name of the parameter to be removed :type param: str """ for attr in self._param_attr_dicts: if param in self.__dict__[attr]: self.__dict__[attr].pop(param) for attr in self._param_attr_lists: if param in self.__dict__[attr]: self.__dict__[attr].remove(param)
python
{ "resource": "" }