_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q20400
MessageReceiver.on_state_changed
train
def on_state_changed(self, previous_state, new_state):
    """Callback invoked each time the underlying Receiver transitions
    between states. This hook may be overridden by subclasses.

    :param previous_state: The state the Receiver is leaving.
    :type previous_state: ~uamqp.constants.MessageReceiverState
    :param new_state: The state the Receiver is entering.
    :type new_state: ~uamqp.constants.MessageReceiverState
    """
    # pylint: disable=protected-access
    _logger.info(
        "Message receiver %r state changed from %r to %r on connection: %r",
        self.name, previous_state, new_state,
        self._session._connection.container_id)
    self._state = new_state
python
{ "resource": "" }
q20401
AMQPClient.open
train
def open(self, connection=None):
    """Open the client. The client can create a new Connection
    or an existing Connection can be passed in. This existing Connection
    may have an existing CBS authentication Session, which will be
    used for this client as well. Otherwise a new Session will be
    created.

    :param connection: An existing Connection that may be shared between
     multiple clients.
    :type connection: ~uamqp.connection.Connection
    """
    # pylint: disable=protected-access
    if self._session:
        return  # already open.
    _logger.debug("Opening client connection.")
    if connection:
        _logger.debug("Using existing connection.")
        self._auth = connection.auth
        self._ext_connection = True
    self._connection = connection or self.connection_type(
        self._hostname,
        self._auth,
        container_id=self._name,
        max_frame_size=self._max_frame_size,
        channel_max=self._channel_max,
        idle_timeout=self._idle_timeout,
        properties=self._properties,
        remote_idle_timeout_empty_frame_send_ratio=self._remote_idle_timeout_empty_frame_send_ratio,
        error_policy=self._error_policy,
        debug=self._debug_trace,
        encoding=self._encoding)
    # Decide where the session comes from: a freshly negotiated CBS
    # channel, an existing CBS channel, or a plain new session.
    needs_cbs = (not self._connection.cbs
                 and isinstance(self._auth, authentication.CBSAuthMixin))
    if needs_cbs:
        self._connection.cbs = self._auth.create_authenticator(
            self._connection,
            debug=self._debug_trace,
            incoming_window=self._incoming_window,
            outgoing_window=self._outgoing_window,
            handle_max=self._handle_max,
            on_attach=self._on_attach)
        self._session = self._auth._session
    elif self._connection.cbs:
        self._session = self._auth._session
    else:
        self._session = self.session_type(
            self._connection,
            incoming_window=self._incoming_window,
            outgoing_window=self._outgoing_window,
            handle_max=self._handle_max,
            on_attach=self._on_attach)
    if self._keep_alive_interval:
        self._keep_alive_thread = threading.Thread(target=self._keep_alive)
        self._keep_alive_thread.start()
python
{ "resource": "" }
q20402
AMQPClient.close
train
def close(self):
    """Close the client. This includes closing the Session and CBS
    authentication layer as well as the Connection.
    If the client was opened using an external Connection,
    this will be left intact.

    No further messages can be sent or received and the client
    cannot be re-opened. All pending, unsent messages will remain
    uncleared to allow them to be inspected and queued to a new client.
    """
    if self.message_handler:
        self.message_handler.destroy()
        self.message_handler = None
    self._shutdown = True
    if self._keep_alive_thread:
        self._keep_alive_thread.join()
        self._keep_alive_thread = None
    if not self._session:
        return  # already closed.
    # A CBS session is owned by the authenticator and is torn down there.
    if self._connection.cbs:
        _logger.debug("CBS session pending.")
    else:
        _logger.debug("Closing non-CBS session.")
        self._session.destroy()
    self._session = None
    # An externally supplied connection may be shared with other clients.
    if self._ext_connection:
        _logger.debug("Shared connection remaining open.")
    else:
        _logger.debug("Closing exclusive connection.")
        self._connection.destroy()
    self._connection = None
python
{ "resource": "" }
q20403
AMQPClient.do_work
train
def do_work(self):
    """Run a single connection iteration.
    Will return `True` if the connection is still open and ready to be
    used for further work, or `False` if it needs to be shut down.

    :rtype: bool
    :raises: TimeoutError or ~uamqp.errors.ClientTimeout if CBS authentication
     timeout reached.
    """
    if self._shutdown:
        return False
    # If the client is not yet ready (e.g. still authenticating), report
    # "keep running" without attempting a work cycle.
    return True if not self.client_ready() else self._client_run()
python
{ "resource": "" }
q20404
SendClient._on_message_sent
train
def _on_message_sent(self, message, result, delivery_state=None):
    """Callback run on a message send operation. If message
    has a user defined callback, it will be called here. If the result
    of the operation is failure, the message state will be reverted to
    'pending' up to the maximum retry count.

    :param message: The message that was sent.
    :type message: ~uamqp.message.Message
    :param result: The result of the send operation.
    :type result: int
    :param delivery_state: The service delivery outcome. An Exception
     instance or an error response if the send failed, otherwise None.
    """
    # pylint: disable=protected-access
    try:
        exception = delivery_state
        result = constants.MessageSendResult(result)
        if result == constants.MessageSendResult.Error:
            # Normalize the failure into a client error carrying a
            # retry action.
            if isinstance(delivery_state, Exception):
                exception = errors.ClientMessageError(delivery_state, info=delivery_state)
                exception.action = errors.ErrorAction(retry=True)
            elif delivery_state:
                error = errors.ErrorResponse(delivery_state)
                exception = errors._process_send_error(
                    self._error_policy,
                    error.condition,
                    error.description,
                    error.info)
            else:
                exception = errors.MessageSendFailed(constants.ErrorCodes.UnknownError)
                exception.action = errors.ErrorAction(retry=True)

            if exception.action.retry == errors.ErrorAction.retry \
                    and message.retries < self._error_policy.max_retries:
                if exception.action.increment_retries:
                    message.retries += 1
                self._backoff = exception.action.backoff
                _logger.debug("Message error, retrying. Attempts: %r, Error: %r",
                              message.retries, exception)
                message.state = constants.MessageState.WaitingToBeSent
                return
            if exception.action.retry == errors.ErrorAction.retry:
                _logger.info("Message error, %r retries exhausted. Error: %r",
                             message.retries, exception)
            else:
                # Fixed: this message string was previously broken across a
                # raw line break, which is a syntax error in a single-quoted
                # string literal.
                _logger.info("Message error, not retrying. Error: %r", exception)
            message.state = constants.MessageState.SendFailed
            message._response = exception
        else:
            _logger.debug("Message sent: %r, %r", result, exception)
            message.state = constants.MessageState.SendComplete
            message._response = errors.MessageAlreadySettled()
        if message.on_send_complete:
            message.on_send_complete(result, exception)
    except KeyboardInterrupt:
        _logger.error("Received shutdown signal while processing message send completion.")
        self.message_handler._error = errors.AMQPClientShutdown()
python
{ "resource": "" }
q20405
SendClient.send_message
train
def send_message(self, messages, close_on_done=False):
    """Send a single message or batched message.

    :param messages: A message to send. This can either be a single instance
     of `Message`, or multiple messages wrapped in an instance of `BatchMessage`.
    :type messages: ~uamqp.message.Message
    :param close_on_done: Close the client once the message is sent. Default is `False`.
    :type close_on_done: bool
    :raises: ~uamqp.errors.MessageException if message fails to send after retry
     policy is exhausted.
    """
    pending_batch = []
    for message in messages.gather():
        message.idle_time = self._counter.get_current_ms()
        self._pending_messages.append(message)
        pending_batch.append(message)
    self.open()
    running = True
    try:
        # Drive the connection until every message in this batch has
        # reached a terminal state or the client stops running.
        while running and any(m.state not in constants.DONE_STATES for m in pending_batch):
            running = self.do_work()
        failed = [m for m in pending_batch if m.state == constants.MessageState.SendFailed]
        if failed:
            details = {
                "total_messages": len(pending_batch),
                "number_failed": len(failed),
                "failed_messages": {},
            }
            exception = None
            for failed_message in failed:
                exception = failed_message._response  # pylint: disable=protected-access
                details['failed_messages'][failed_message] = exception
            raise errors.ClientMessageError(exception, info=details)
    finally:
        if close_on_done or not running:
            self.close()
python
{ "resource": "" }
q20406
SendClient.wait
train
def wait(self):
    """Run the client until all pending messages in the queue have been
    processed. Returns whether the client is still running after the
    messages have been processed, or whether a shutdown has been initiated.

    :rtype: bool
    """
    still_running = True
    while still_running and self.messages_pending():
        still_running = self.do_work()
    return still_running
python
{ "resource": "" }
q20407
SendClient.send_all_messages
train
def send_all_messages(self, close_on_done=True):
    """Send all pending messages in the queue. This will return a list
    of the send result of all the pending messages so it can be
    determined if any messages failed to send.
    This function will open the client if it is not already open.

    :param close_on_done: Close the client once the messages are sent.
     Default is `True`.
    :type close_on_done: bool
    :rtype: list[~uamqp.constants.MessageState]
    """
    self.open()
    running = True
    try:
        # Snapshot the pending list first: the queue is mutated as
        # messages complete.
        snapshot = self._pending_messages[:]
        running = self.wait()
        return [m.state for m in snapshot]
    finally:
        if close_on_done or not running:
            self.close()
python
{ "resource": "" }
q20408
ReceiveClient._message_generator
train
def _message_generator(self): """Iterate over processed messages in the receive queue. :rtype: generator[~uamqp.message.Message] """ self.open() auto_complete = self.auto_complete self.auto_complete = False receiving = True message = None try: while receiving: while receiving and self._received_messages.empty(): receiving = self.do_work() while not self._received_messages.empty(): message = self._received_messages.get() self._received_messages.task_done() yield message self._complete_message(message, auto_complete) finally: self._complete_message(message, auto_complete) self.auto_complete = auto_complete self.close()
python
{ "resource": "" }
q20409
ReceiveClient.receive_message_batch
train
def receive_message_batch(self, max_batch_size=None, on_message_received=None, timeout=0):
    """Receive a batch of messages. Messages returned in the batch have
    already been accepted - if you wish to add logic to accept or reject
    messages based on custom criteria, pass in a callback.
    This method will return as soon as some messages are available rather
    than waiting to achieve a specific batch size, and therefore the
    number of messages returned per call will vary up to the maximum allowed.

    If the receive client is configured with `auto_complete=True` then the
    messages received in the batch returned by this function will already be
    settled. Alternatively, if `auto_complete=False`, then each message will
    need to be explicitly settled before it expires and is released.

    :param max_batch_size: The maximum number of messages that can be returned in
     one call. This value cannot be larger than the prefetch value, and if not
     specified, the prefetch value will be used.
    :type max_batch_size: int
    :param on_message_received: A callback to process messages as they arrive from
     the service. It takes a single argument, a ~uamqp.message.Message object.
    :type on_message_received: callable[~uamqp.message.Message]
    :param timeout: The timeout in milliseconds for which to wait to receive
     any messages. If no messages are received in this time, an empty list will
     be returned. If set to 0, the client will continue to wait until at least
     one message is received. The default is 0.
    :type timeout: int
    """
    self._message_received_callback = on_message_received
    max_batch_size = max_batch_size or self._prefetch
    if max_batch_size > self._prefetch:
        raise ValueError(
            'Maximum batch size cannot be greater than the '
            'connection link credit: {}'.format(self._prefetch))
    # Convert the relative timeout into an absolute deadline (0 = no deadline).
    deadline = self._counter.get_current_ms() + timeout if timeout else 0
    expired = False
    self._received_messages = self._received_messages or compat.queue.Queue()
    self.open()
    receiving = True
    batch = []

    def drain():
        # Move already-queued messages into the batch, up to the cap.
        while not self._received_messages.empty() and len(batch) < max_batch_size:
            batch.append(self._received_messages.get())
            self._received_messages.task_done()

    drain()
    if len(batch) >= max_batch_size:
        return batch
    while receiving and not expired and len(batch) < max_batch_size:
        while receiving and self._received_messages.qsize() < max_batch_size:
            if deadline and self._counter.get_current_ms() > deadline:
                expired = True
                break
            before = self._received_messages.qsize()
            receiving = self.do_work()
            arrived = self._received_messages.qsize() - before
            if self._received_messages.qsize() > 0 and arrived == 0:
                # No new messages arrived, but we have some - so return what we have.
                expired = True
                break
        drain()
    return batch
python
{ "resource": "" }
q20410
ReceiveClient.receive_messages_iter
train
def receive_messages_iter(self, on_message_received=None):
    """Receive messages by generator. Messages returned in the generator
    have already been accepted - if you wish to add logic to accept or
    reject messages based on custom criteria, pass in a callback.

    :param on_message_received: A callback to process messages as they arrive
     from the service. It takes a single argument, a ~uamqp.message.Message object.
    :type on_message_received: callable[~uamqp.message.Message]
    """
    self._message_received_callback = on_message_received
    # Start from a fresh queue so the generator only sees new deliveries.
    self._received_messages = compat.queue.Queue()
    return self._message_generator()
python
{ "resource": "" }
q20411
send_message
train
def send_message(target, data, auth=None, debug=False):
    """Send a single message to an AMQP endpoint.

    :param target: The target AMQP endpoint.
    :type target: str, bytes or ~uamqp.address.Target
    :param data: The contents of the message to send.
    :type data: str, bytes or ~uamqp.message.Message
    :param auth: The authentication credentials for the endpoint.
     This should be one of the subclasses of uamqp.authentication.AMQPAuth. Currently
     this includes:
        - uamqp.authentication.SASLAnonymous
        - uamqp.authentication.SASLPlain
        - uamqp.authentication.SASTokenAuth
     If no authentication is supplied, SASLAnonymous will be used by default.
    :type auth: ~uamqp.authentication.common.AMQPAuth
    :param debug: Whether to turn on network trace logs. If `True`, trace logs
     will be logged at INFO level. Default is `False`.
    :type debug: bool
    :return: A list of states for each message sent.
    :rtype: list[~uamqp.constants.MessageState]
    """
    if isinstance(data, Message):
        message = data
    else:
        message = Message(body=data)
    with SendClient(target, auth=auth, debug=debug) as send_client:
        send_client.queue_message(message)
        return send_client.send_all_messages()
python
{ "resource": "" }
q20412
receive_message
train
def receive_message(source, auth=None, timeout=0, debug=False):
    """Receive a single message from an AMQP endpoint.

    :param source: The AMQP source endpoint to receive from.
    :type source: str, bytes or ~uamqp.address.Source
    :param auth: The authentication credentials for the endpoint.
     This should be one of the subclasses of uamqp.authentication.AMQPAuth. Currently
     this includes:
        - uamqp.authentication.SASLAnonymous
        - uamqp.authentication.SASLPlain
        - uamqp.authentication.SASTokenAuth
     If no authentication is supplied, SASLAnonymous will be used by default.
    :type auth: ~uamqp.authentication.common.AMQPAuth
    :param timeout: The timeout in milliseconds after which to return None if no
     messages are retrieved. If set to `0` (the default), the receiver will not
     timeout and will continue to wait for messages until interrupted.
    :param debug: Whether to turn on network trace logs. If `True`, trace logs
     will be logged at INFO level. Default is `False`.
    :type debug: bool
    :rtype: ~uamqp.message.Message or None
    """
    batch = receive_messages(
        source, auth=auth, max_batch_size=1, timeout=timeout, debug=debug)
    return batch[0] if batch else None
python
{ "resource": "" }
q20413
receive_messages
train
def receive_messages(source, auth=None, max_batch_size=None, timeout=0, debug=False, **kwargs):
    """Receive a batch of messages from an AMQP endpoint.

    :param source: The AMQP source endpoint to receive from.
    :type source: str, bytes or ~uamqp.address.Source
    :param auth: The authentication credentials for the endpoint.
     This should be one of the subclasses of ~uamqp.authentication.AMQPAuth. Currently
     this includes:
        - uamqp.authentication.SASLAnonymous
        - uamqp.authentication.SASLPlain
        - uamqp.authentication.SASTokenAuth
     If no authentication is supplied, SASLAnonymous will be used by default.
    :type auth: ~uamqp.authentication.common.AMQPAuth
    :param max_batch_size: The maximum number of messages to return in a batch. If the
     receiver receives a smaller number than this, it will not wait to return them so
     the actual number returned can be anything up to this value. If the receiver
     reaches a timeout, an empty list will be returned.
    :param timeout: The timeout in milliseconds after which to return if no messages
     are retrieved. If set to `0` (the default), the receiver will not timeout and
     will continue to wait for messages until interrupted.
    :param debug: Whether to turn on network trace logs. If `True`, trace logs
     will be logged at INFO level. Default is `False`.
    :type debug: bool
    :rtype: list[~uamqp.message.Message]
    """
    if max_batch_size:
        # A batch cannot exceed link credit, so raise prefetch to match.
        kwargs['prefetch'] = max_batch_size
    with ReceiveClient(source, auth=auth, debug=debug, **kwargs) as receive_client:
        batch_limit = max_batch_size or receive_client._prefetch  # pylint: disable=protected-access
        return receive_client.receive_message_batch(
            max_batch_size=batch_limit,
            timeout=timeout)
python
{ "resource": "" }
q20414
Address._validate_address
train
def _validate_address(self, address):
    """Confirm that the supplied address is a valid URL and
    has an `amqp` or `amqps` scheme.

    :param address: The endpoint URL.
    :type address: str
    :rtype: ~urllib.parse.ParseResult
    """
    # NOTE(review): only the presence of a path is actually validated here;
    # the scheme check described above is not enforced in this block.
    parsed = compat.urlparse(address)
    if not parsed.path:
        raise ValueError("Invalid {} address: {}".format(
            self.__class__.__name__, parsed))
    return parsed
python
{ "resource": "" }
q20415
Source.get_filter
train
def get_filter(self, name=constants.STRING_FILTER):
    """Get the filter on the source.

    :param name: The name of the filter. This will be encoded as
     an AMQP Symbol. By default this is set to
     b'apache.org:selector-filter:string'.
    :type name: bytes
    """
    try:
        key = c_uamqp.symbol_value(name)
        return self._address.filter_set[key].value
    except (TypeError, KeyError):
        # No filter set, or the filter set is not subscriptable.
        return None
python
{ "resource": "" }
q20416
Source.set_filter
train
def set_filter(self, value, name=constants.STRING_FILTER, descriptor=constants.STRING_FILTER):
    """Set a filter on the endpoint. Only one filter
    can be applied to an endpoint.

    :param value: The filter to apply to the endpoint. Set to None for a NULL filter.
    :type value: bytes or str or None
    :param name: The name of the filter. This will be encoded as
     an AMQP Symbol. By default this is set to
     b'apache.org:selector-filter:string'.
    :type name: bytes
    :param descriptor: The descriptor used if the filter is to be encoded as a
     described value. This will be encoded as an AMQP Symbol. By default this
     is set to b'apache.org:selector-filter:string'.
     Set to None if the filter should not be encoded as a described value.
    :type descriptor: bytes or None
    """
    if isinstance(value, six.text_type):
        value = value.encode(self._encoding)
    filter_key = c_uamqp.symbol_value(name)
    filter_value = utils.data_factory(value, encoding=self._encoding)
    if value is not None and descriptor is not None:
        # Wrap the filter in a described value keyed by the descriptor symbol.
        filter_value = c_uamqp.described_value(
            c_uamqp.symbol_value(descriptor), filter_value)
    filter_set = c_uamqp.dict_value()
    filter_set[filter_key] = filter_value
    self._address.filter_set = filter_set
python
{ "resource": "" }
q20417
CBSAsyncAuthMixin.create_authenticator_async
train
async def create_authenticator_async(self, connection, debug=False, loop=None, **kwargs):
    """Create the async AMQP session and the CBS channel with which
    to negotiate the token.

    :param connection: The underlying AMQP connection on which
     to create the session.
    :type connection: ~uamqp.async_ops.connection_async.ConnectionAsync
    :param debug: Whether to emit network trace logging events for the
     CBS session. Default is `False`. Logging events are set at INFO level.
    :type debug: bool
    :param loop: A user specified event loop.
    :type loop: ~asyncio.AbstractEventLoop
    :rtype: uamqp.c_uamqp.CBSTokenAuth
    """
    self.loop = loop or asyncio.get_event_loop()
    self._connection = connection
    self._session = SessionAsync(connection, loop=self.loop, **kwargs)
    try:
        self._cbs_auth = c_uamqp.CBSTokenAuth(
            self.audience,
            self.token_type,
            self.token,
            int(self.expires_at),
            self._session._session,  # pylint: disable=protected-access
            self.timeout,
            self._connection.container_id)
        self._cbs_auth.set_trace(debug)
    except ValueError:
        # CBS link could not be opened - clean up the session we created.
        await self._session.destroy_async()
        raise errors.AMQPConnectionError(
            "Unable to open authentication session on connection {}.\n"
            "Please confirm target hostname exists: {}".format(
                connection.container_id, connection.hostname)) from None
    return self._cbs_auth
python
{ "resource": "" }
q20418
CBSAsyncAuthMixin.close_authenticator_async
train
async def close_authenticator_async(self):
    """Close the CBS auth channel and session asynchronously."""
    _logger.info("Shutting down CBS session on connection: %r.",
                 self._connection.container_id)
    try:
        self._cbs_auth.destroy()
        _logger.info("Auth closed, destroying session on connection: %r.",
                     self._connection.container_id)
        await self._session.destroy_async()
    finally:
        _logger.info("Finished shutting down CBS session on connection: %r.",
                     self._connection.container_id)
python
{ "resource": "" }
q20419
CBSAsyncAuthMixin.handle_token_async
train
async def handle_token_async(self):
    """This coroutine is called periodically to check the status of the current
    token if there is one, and request a new one if needed.
    If the token request fails, it will be retried according to the retry policy.
    A token refresh will be attempted if the token will expire soon.

    This function will return a tuple of two booleans. The first represents whether
    the token authentication has not completed within its given timeout window. The
    second indicates whether the token negotiation is still in progress.

    :raises: ~uamqp.errors.AuthenticationException if the token authentication fails.
    :raises: ~uamqp.errors.TokenExpired if the token has expired and cannot be
     refreshed.
    :rtype: tuple[bool, bool]
    """
    # pylint: disable=protected-access
    timeout = False
    in_progress = False
    try:
        await self._connection.lock_async()
        if self._connection._closing or self._connection._error:
            return timeout, in_progress
        auth_status = constants.CBSAuthStatus(self._cbs_auth.get_status())
        if auth_status == constants.CBSAuthStatus.Error:
            if self.retries >= self._retry_policy.retries:  # pylint: disable=no-member
                _logger.warning("Authentication Put-Token failed. Retries exhausted.")
                raise errors.TokenAuthFailure(*self._cbs_auth.get_failure_info())
            error_code, error_description = self._cbs_auth.get_failure_info()
            _logger.info("Authentication status: %r, description: %r",
                         error_code, error_description)
            # Fixed: this log message was previously split by a raw line
            # break, which is a syntax error inside a single-quoted string.
            _logger.info("Authentication Put-Token failed. Retrying.")
            self.retries += 1  # pylint: disable=no-member
            await asyncio.sleep(self._retry_policy.backoff)
            self._cbs_auth.authenticate()
            in_progress = True
        elif auth_status == constants.CBSAuthStatus.Failure:
            # Fixed: the exception was previously constructed but never
            # raised, silently swallowing CBS link failures.
            raise errors.AuthenticationException("Failed to open CBS authentication link.")
        elif auth_status == constants.CBSAuthStatus.Expired:
            raise errors.TokenExpired("CBS Authentication Expired.")
        elif auth_status == constants.CBSAuthStatus.Timeout:
            timeout = True
        elif auth_status == constants.CBSAuthStatus.InProgress:
            in_progress = True
        elif auth_status == constants.CBSAuthStatus.RefreshRequired:
            _logger.info("Token on connection %r will expire soon - attempting to refresh.",
                         self._connection.container_id)
            self.update_token()
            self._cbs_auth.refresh(self.token, int(self.expires_at))
        elif auth_status == constants.CBSAuthStatus.Idle:
            self._cbs_auth.authenticate()
            in_progress = True
        elif auth_status != constants.CBSAuthStatus.Ok:
            raise ValueError("Invalid auth state.")
    except asyncio.TimeoutError:
        _logger.debug("CBS auth timed out while waiting for lock acquisition.")
        return None, None
    except ValueError as e:
        raise errors.AuthenticationException(
            "Token authentication failed: {}".format(e))
    finally:
        # NOTE(review): release_async() is not awaited here (matches the
        # original) - confirm it is a synchronous release on ConnectionAsync.
        self._connection.release_async()
    return timeout, in_progress
python
{ "resource": "" }
q20420
SessionAsync.destroy_async
train
async def destroy_async(self):
    """Asynchronously close any open management Links and close the Session.
    Cleans up the C objects for both mgmt Links and Session.
    """
    for link in self._mgmt_links.values():
        await link.destroy_async()
    self._session.destroy()
python
{ "resource": "" }
q20421
MgmtOperationAsync.execute_async
train
async def execute_async(self, operation, op_type, message, timeout=0):
    """Execute a request and wait on a response asynchronously.

    :param operation: The type of operation to be performed. This value will
     be service-specific, but common values include READ, CREATE and UPDATE.
     This value will be added as an application property on the message.
    :type operation: bytes
    :param op_type: The type on which to carry out the operation. This will
     be specific to the entities of the service. This value will be added as
     an application property on the message.
    :type op_type: bytes
    :param message: The message to send in the management request.
    :type message: ~uamqp.message.Message
    :param timeout: Provide an optional timeout in milliseconds within which a
     response to the management request must be received.
    :type timeout: int
    :rtype: ~uamqp.message.Message
    """
    start_time = self._counter.get_current_ms()
    operation_id = str(uuid.uuid4())
    self._responses[operation_id] = None

    def on_complete(operation_result, status_code, description, wrapped_message):
        result = constants.MgmtExecuteResult(operation_result)
        if result != constants.MgmtExecuteResult.Ok:
            _logger.error(
                "Failed to complete mgmt operation.\nStatus code: %r\nMessage: %r",
                status_code, description)
        # Wrap the raw C message (if any) so callers get a Message object.
        response_message = Message(message=wrapped_message) if wrapped_message else None
        self._responses[operation_id] = (status_code, response_message, description)

    self._mgmt_op.execute(operation, op_type, None, message.get_message(), on_complete)
    # Pump the connection until a response lands or an error/timeout occurs.
    while not self._responses[operation_id] and not self.mgmt_error:
        if timeout > 0:
            elapsed = self._counter.get_current_ms() - start_time
            if elapsed >= timeout:
                raise TimeoutException(
                    "Failed to receive mgmt response in {}ms".format(timeout))
        await self.connection.work_async()
    if self.mgmt_error:
        raise self.mgmt_error
    return self._responses.pop(operation_id)
python
{ "resource": "" }
q20422
main
train
def main():
    """Provide the entry point to the subreddit_stats command."""
    parser = arg_parser(usage='usage: %prog [options] SUBREDDIT VIEW')
    parser.add_option('-c', '--commenters', type='int', default=10,
                      help='Number of top commenters to display '
                      '[default %default]')
    # Fixed: help text previously misspelled "submissions" as "subissions".
    parser.add_option('-d', '--distinguished', action='store_true',
                      help=('Include distinguished submissions and '
                            'comments (default: False). Note that regular '
                            'comments of distinguished submissions will still '
                            'be included.'))
    parser.add_option('-s', '--submitters', type='int', default=10,
                      help='Number of top submitters to display '
                      '[default %default]')
    options, args = parser.parse_args()

    # Map the -v count (added by arg_parser) to logging verbosity.
    if options.verbose == 1:
        logger.setLevel(logging.INFO)
    elif options.verbose > 1:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.NOTSET)
    logger.addHandler(logging.StreamHandler())

    if len(args) != 2:
        parser.error('SUBREDDIT and VIEW must be provided')
    subreddit, view = args
    check_for_updates(options)
    srs = SubredditStats(subreddit, options.site, options.distinguished)
    result = srs.run(view, options.submitters, options.commenters)
    if result:
        print(result.permalink)
    return 0
python
{ "resource": "" }
q20423
SubredditStats.basic_stats
train
def basic_stats(self):
    """Return a markdown representation of simple statistics."""
    total_comment_score = sum(c.score for c in self.comments)
    if self.comments:
        comment_span = (self.comments[-1].created_utc
                        - self.comments[0].created_utc)
        comment_rate = self._rate(len(self.comments), comment_span)
    else:
        comment_rate = 0
    submission_duration = self.max_date - self.min_date
    submission_rate = self._rate(len(self.submissions), submission_duration)
    total_submission_score = sum(s.score for s in self.submissions.values())

    rows = [
        ('Total', len(self.submissions), len(self.comments)),
        ('Rate (per day)', '{:.2f}'.format(submission_rate),
         '{:.2f}'.format(comment_rate)),
        ('Unique Redditors', len(self.submitters), len(self.commenters)),
        ('Combined Score', total_submission_score, total_comment_score),
    ]

    parts = ['Period: {:.2f} days\n\n'.format(submission_duration / 86400.),
             '||Submissions|Comments|\n:-:|--:|--:\n']
    parts.extend('__{}__|{}|{}\n'.format(*row) for row in rows)
    parts.append('\n')
    return ''.join(parts)
python
{ "resource": "" }
q20424
SubredditStats.fetch_recent_submissions
train
def fetch_recent_submissions(self, max_duration):
    """Fetch recent submissions in subreddit with boundaries.

    Does not include posts within the last day as their scores may not be
    representative.

    :param max_duration: When set, specifies the number of days to include
    """
    if max_duration:
        self.min_date = self.max_date - SECONDS_IN_A_DAY * max_duration
    for submission in self.subreddit.new(limit=None):
        # Listings are newest-first, so stop once we pass the lower bound.
        if submission.created_utc <= self.min_date:
            break
        if submission.created_utc > self.max_date:
            continue
        self.submissions[submission.id] = MiniSubmission(submission)
python
{ "resource": "" }
q20425
SubredditStats.fetch_submissions
train
def fetch_submissions(self, submissions_callback, *args):
    """Wrap the submissions_callback function."""
    logger.debug('Fetching submissions')
    submissions_callback(*args)
    logger.info('Found {} submissions'.format(len(self.submissions)))
    if not self.submissions:
        return
    created_times = [x.created_utc for x in self.submissions.values()]
    self.min_date = min(created_times)
    self.max_date = max(created_times)
    self.process_submitters()
    self.process_commenters()
python
{ "resource": "" }
q20426
SubredditStats.fetch_top_submissions
train
def fetch_top_submissions(self, top):
    """Fetch top submissions by some top value.

    Fixed: the docstring previously claimed ":returns: True if any
    submissions were found", but the function has no return statement and
    always returns None; results are accumulated in ``self.submissions``.

    :param top: One of week, month, year, all
    """
    for submission in self.subreddit.top(limit=None, time_filter=top):
        self.submissions[submission.id] = MiniSubmission(submission)
python
{ "resource": "" }
q20427
SubredditStats.process_commenters
train
def process_commenters(self):
    """Group comments by author."""
    for index, submission in enumerate(self.submissions.values()):
        if submission.num_comments == 0:
            continue
        real_submission = self.reddit.submission(id=submission.id)
        real_submission.comment_sort = 'top'
        # Retry transient network failures up to three times.
        for attempt in range(3):
            try:
                real_submission.comments.replace_more(limit=0)
                break
            except RequestException:
                if attempt >= 2:
                    raise
                logger.debug('Failed to fetch submission {}, retrying'
                             .format(submission.id))
        self.comments.extend(
            MiniComment(comment, submission)
            for comment in real_submission.comments.list()
            if self.distinguished or comment.distinguished is None)
        if index % 50 == 49:
            logger.debug('Completed: {:4d}/{} submissions'
                         .format(index + 1, len(self.submissions)))
        # Clean up to reduce memory usage
        # NOTE(review): the flattened original is ambiguous about whether
        # this cleanup runs every iteration or only every 50th; the
        # per-iteration reading is preserved here - confirm against upstream.
        submission = None
        gc.collect()
    self.comments.sort(key=lambda x: x.created_utc)
    for comment in self.comments:
        if comment.author:
            self.commenters[comment.author].append(comment)
python
{ "resource": "" }
q20428
SubredditStats.process_submitters
train
def process_submitters(self):
    """Group submissions by author."""
    for submission in self.submissions.values():
        # Skip distinguished submissions unless explicitly included.
        include = self.distinguished or submission.distinguished is None
        if submission.author and include:
            self.submitters[submission.author].append(submission)
python
{ "resource": "" }
q20429
SubredditStats.run
train
def run(self, view, submitters, commenters):
    """Run stats and return the created Submission."""
    logger.info('Analyzing subreddit: {}'.format(self.subreddit))
    if view in TOP_VALUES:
        fetcher = self.fetch_top_submissions
    else:
        # Non-top views are day counts handled by the recent fetcher.
        fetcher = self.fetch_recent_submissions
        view = int(view)
    self.fetch_submissions(fetcher, view)
    if not self.submissions:
        logger.warning('No submissions were found.')
        return None
    return self.publish_results(view, submitters, commenters)
python
{ "resource": "" }
q20430
SubredditStats.top_commenters
train
def top_commenters(self, num):
    """Return a markdown representation of the top commenters."""
    num = min(num, len(self.commenters))
    if num <= 0:
        return ''

    def rank(entry):
        # Highest total score first, then most comments, then name.
        author, comments = entry
        return (-sum(c.score for c in comments), -len(comments), str(author))

    pieces = [self.post_header.format('Top Commenters')]
    for author, comments in sorted(iteritems(self.commenters),
                                   key=rank)[:num]:
        plural = 's' if len(comments) != 1 else ''
        pieces.append('1. {} ({}, {} comment{})\n'.format(
            self._user(author),
            self._points(sum(c.score for c in comments)),
            len(comments), plural))
    return '{}\n'.format(''.join(pieces))
python
{ "resource": "" }
q20431
SubredditStats.top_submitters
train
def top_submitters(self, num):
    """Return a markdown representation of the top submitters."""
    num = min(num, len(self.submitters))
    if num <= 0:
        return ''
    # Rank authors by total score, then submission count, then name.
    top_submitters = sorted(
        iteritems(self.submitters),
        key=lambda x: (-sum(y.score for y in x[1]), -len(x[1]),
                       str(x[0])))[:num]
    retval = self.post_header.format('Top Submitters\' Top Submissions')
    for (author, submissions) in top_submitters:
        retval += '1. {}, {} submission{}: {}\n'.format(
            self._points(sum(x.score for x in submissions)),
            len(submissions), 's' if len(submissions) != 1 else '',
            self._user(author))
        # List each author's ten highest-scoring submissions.
        for sub in sorted(
                submissions, key=lambda x: (-x.score, x.title))[:10]:
            title = self._safe_title(sub)
            if sub.permalink in sub.url:
                # Self post: the URL is the permalink itself.
                retval += tt(' 1. {}').format(title)
            else:
                retval += tt(' 1. [{}]({})').format(title, sub.url)
            retval += ' ({}, [{} comment{}]({}))\n'.format(
                self._points(sub.score), sub.num_comments,
                's' if sub.num_comments != 1 else '',
                self._permalink(sub))
        retval += '\n'
    return retval
python
{ "resource": "" }
q20432
SubredditStats.top_submissions
train
def top_submissions(self):
    """Return a markdown representation of the top submissions."""
    num = min(10, len(self.submissions))
    if num <= 0:
        return ''
    # Rank by score, then comment count, then title, honoring the
    # distinguished filter.
    top_submissions = sorted(
        [x for x in self.submissions.values()
         if self.distinguished or x.distinguished is None],
        key=lambda x: (-x.score, -x.num_comments, x.title))[:num]
    if not top_submissions:
        return ''
    retval = self.post_header.format('Top Submissions')
    for sub in top_submissions:
        title = self._safe_title(sub)
        if sub.permalink in sub.url:
            # Self post: the URL is the permalink itself.
            retval += tt('1. {}').format(title)
        else:
            retval += tt('1. [{}]({})').format(title, sub.url)
        retval += ' by {} ({}, [{} comment{}]({}))\n'.format(
            self._user(sub.author), self._points(sub.score),
            sub.num_comments, 's' if sub.num_comments != 1 else '',
            self._permalink(sub))
    return tt('{}\n').format(retval)
python
{ "resource": "" }
q20433
SubredditStats.top_comments
train
def top_comments(self):
    """Return a markdown representation of the top comments."""
    num = min(10, len(self.comments))
    if num <= 0:
        return ''
    # Rank by score, breaking ties by author name.
    top_comments = sorted(
        self.comments, key=lambda x: (-x.score, str(x.author)))[:num]
    retval = self.post_header.format('Top Comments')
    for comment in top_comments:
        title = self._safe_title(comment.submission)
        retval += tt('1. {}: {}\'s [comment]({}) in {}\n').format(
            self._points(comment.score), self._user(comment.author),
            self._permalink(comment), title)
    return tt('{}\n').format(retval)
python
{ "resource": "" }
q20434
arg_parser
train
def arg_parser(*args, **kwargs):
    """Return a parser with common options used in the prawtools commands."""
    site_help = 'The site to connect to defined in your praw.ini file.'
    update_help = 'Prevent the checking for prawtools package updates.'
    kwargs['version'] = 'BBoe\'s PRAWtools {}'.format(__version__)
    parser = OptionParser(*args, **kwargs)
    parser.add_option('-v', '--verbose', action='count', default=0,
                      help='Increase the verbosity by 1 each time')
    parser.add_option('-U', '--disable-update-check', action='store_true',
                      help=update_help)
    # Authentication-related options live in their own group.
    auth_group = OptionGroup(parser, 'Site/Authentication options')
    auth_group.add_option('-S', '--site', help=site_help)
    parser.add_option_group(auth_group)
    return parser
python
{ "resource": "" }
q20435
ModUtils.add_users
train
def add_users(self, category):
    """Add users to 'banned', 'contributor', or 'moderator'."""
    mapping = {'banned': 'ban',
               'contributor': 'make_contributor',
               'moderator': 'make_moderator'}
    if category not in mapping:
        print('{!r} is not a valid option for --add'.format(category))
        return
    func = getattr(self.sub, mapping[category])
    print('Enter user names (any separation should suffice):')
    # Split stdin on any non-username character to extract names.
    for name in re.split('[^A-Za-z0-9_]+', sys.stdin.read().strip()):
        func(name)
        print('Added {!r} to {}'.format(name, category))
python
{ "resource": "" }
q20436
ModUtils.clear_empty
train
def clear_empty(self):
    """Remove flair that is not visible or has been set to empty."""
    for flair in self.current_flair():
        if not flair['flair_text'] and not flair['flair_css_class']:
            # Flair is managed through the subreddit object (as in
            # current_flair) -- praw.Reddit has no ``flair`` attribute,
            # so the original ``self.reddit.flair.update`` call could
            # never work.  Deleting matches the "Removed" message below.
            print(self.sub.flair.delete(flair['user']))
            print('Removed flair for {0}'.format(flair['user']))
python
{ "resource": "" }
q20437
ModUtils.current_flair
train
def current_flair(self):
    """Generate the flair, by user, for the subreddit.

    The first iteration fetches from reddit and caches each entry in
    ``self._current_flair``; later calls replay the cache.
    """
    if self._current_flair is None:
        self._current_flair = []
        if self.verbose:
            print('Fetching flair list for {}'.format(self.sub))
        for flair in self.sub.flair:
            self._current_flair.append(flair)
            yield flair
        # NOTE(review): abandoning the first iteration early leaves a
        # partial cache -- confirm callers always exhaust the generator.
    else:
        for item in self._current_flair:
            yield item
python
{ "resource": "" }
q20438
ModUtils.flair_template_sync
train
def flair_template_sync(self, editable, limit,  # pylint: disable=R0912
                        static, sort, use_css, use_text):
    """Synchronize templates with flair that already exists on the site.

    :param editable: Indicates that all the options should be editable.
    :param limit: The minimum number of users that must share the flair
        before it is added as a template.
    :param static: A list of flair templates that will always be added.
    :param sort: The order to sort the flair templates ('alpha' or 'size').
    :param use_css: Include css in the templates.
    :param use_text: Include text in the templates.

    """
    # Parameter verification
    if not use_text and not use_css:
        raise Exception('At least one of use_text or use_css must be True')
    sorts = ('alpha', 'size')
    if sort not in sorts:
        raise Exception('Sort must be one of: {}'.format(', '.join(sorts)))

    # Build current flair list along with static values
    counter = {}
    if static:
        for key in static:
            if use_css and use_text:
                parts = tuple(x.strip() for x in key.split(','))
                if len(parts) != 2:
                    raise Exception('--static argument {!r} must have two '
                                    'parts (comma separated) when using '
                                    'both text and css.'.format(parts))
                key = parts
            # Seeding with ``limit`` guarantees static entries are kept.
            counter[key] = limit
    if self.verbose:
        sys.stdout.write('Retrieving current flair\n')
        sys.stdout.flush()
    for flair in self.current_flair():
        if self.verbose:
            sys.stdout.write('.')
            sys.stdout.flush()
        if use_text and use_css:
            key = (flair['flair_text'], flair['flair_css_class'])
        elif use_text:
            key = flair['flair_text']
        else:
            key = flair['flair_css_class']
        if key in counter:
            counter[key] += 1
        else:
            counter[key] = 1
    if self.verbose:
        print()

    # Sort flair list items according to the specified sort
    if sort == 'alpha':
        items = sorted(counter.items())
    else:
        items = sorted(counter.items(), key=lambda x: x[1], reverse=True)

    # Clear current templates and store flair according to the sort
    if self.verbose:
        print('Clearing current flair templates')
    self.sub.flair.templates.clear()
    for key, count in items:
        # Skip empty flair and flair shared by fewer than ``limit`` users.
        # (Removed a stray debug ``print('a')`` that was left here.)
        if not key or count < limit:
            continue
        if use_text and use_css:
            text, css = key
        elif use_text:
            text, css = key, ''
        else:
            text, css = '', key
        if self.verbose:
            print('Adding template: text: {!r} css: {!r}'
                  .format(text, css))
        self.sub.flair.templates.add(text, css, editable)
python
{ "resource": "" }
q20439
ModUtils.message
train
def message(self, category, subject, msg_file):
    """Send message to all users in `category`.

    :param category: Attribute of the subreddit naming the user list
        (e.g. 'banned', 'contributor', 'moderator').
    :param subject: Subject line of the message.
    :param msg_file: Optional path to a file with the message body; when
        falsy the body is read from stdin.
    """
    users = getattr(self.sub, category)
    if not users:
        print('There are no {} users on {}.'.format(category, self.sub))
        return
    if msg_file:
        try:
            # Context manager ensures the handle is closed (the original
            # ``open(msg_file).read()`` leaked it).
            with open(msg_file) as fp:
                msg = fp.read()
        except IOError as error:
            print(str(error))
            return
    else:
        print('Enter message:')
        msg = sys.stdin.read()
    print('You are about to send the following message to the users {}:'
          .format(', '.join([str(x) for x in users])))
    print('---BEGIN MESSAGE---\n{}\n---END MESSAGE---'.format(msg))
    if input('Are you sure? yes/[no]: ').lower() not in ['y', 'yes']:
        print('Message sending aborted.')
        return
    for user in users:
        user.send_message(subject, msg)
        print('Sent to: {}'.format(user))
python
{ "resource": "" }
q20440
ModUtils.output_current_flair
train
def output_current_flair(self, as_json=False):
    """Display the current flair for all users in the subreddit."""
    by_name = sorted(self.current_flair(), key=lambda item: item['user'].name)
    if as_json:
        print(json.dumps(by_name, sort_keys=True, indent=4))
        return
    for item in by_name:
        print(item['user'])
        print(' Text: {}\n CSS: {}'.format(item['flair_text'],
                                           item['flair_css_class']))
python
{ "resource": "" }
q20441
ModUtils.output_list
train
def output_list(self, category):
    """Display the list of users in `category`."""
    print('{} users:'.format(category))
    members = getattr(self.sub, category)
    for member in members:
        print(' {}'.format(member))
python
{ "resource": "" }
q20442
quick_url
train
def quick_url(comment):
    """Return the URL for the comment without fetching its submission."""
    # A fullname like 't3_abc123' carries the submission id after '_'.
    submission_id = comment.link_id.split('_', 1)[1]
    return ('http://www.reddit.com/r/{}/comments/{}/_/{}?context=3'
            .format(comment.subreddit.display_name, submission_id,
                    comment.id))
python
{ "resource": "" }
q20443
main
train
def main():
    """Provide the entry point into the reddit_alert program."""
    usage = 'Usage: %prog [options] KEYWORD...'
    parser = arg_parser(usage=usage)
    parser.add_option('-s', '--subreddit', action='append',
                      help=('When at least one `-s` option is provided '
                            '(multiple can be) only alert for comments in the '
                            'indicated subreddit(s).'))
    parser.add_option('-I', '--ignore-user', action='append', metavar='USER',
                      help=('Ignore comments from the provided user. Can be '
                            'supplied multiple times.'))
    parser.add_option('-m', '--message', metavar='USER',
                      help=('When set, send a reddit message to USER with the '
                            'alert.'))
    options, args = parser.parse_args()
    if not args:
        parser.error('At least one KEYWORD must be provided.')
    session = praw.Reddit(options.site, check_for_updates=False,
                          user_agent=AGENT)
    if options.message:
        msg_to = session.redditor(options.message)
    check_for_updates(options)
    # Build regex matching any keyword surrounded by non-letters.
    args = [x.lower() for x in args]
    reg_prefix = r'(?:^|[^a-z])'  # Any character (or start) can precede
    reg_suffix = r'(?:$|[^a-z])'  # Any character (or end) can follow
    regex = re.compile(r'{}({}){}'.format(reg_prefix, '|'.join(args),
                                          reg_suffix), re.IGNORECASE)
    # Determine subreddit or multireddit
    if options.subreddit:
        subreddit = '+'.join(sorted(options.subreddit))
    else:
        subreddit = 'all'
    print('Alerting on:')
    for item in sorted(args):
        print(' * {}'.format(item))
    print('using the comment stream: https://www.reddit.com/r/{}/comments'
          .format(subreddit))
    # Build ignore set (lowercased usernames)
    if options.ignore_user:
        ignore_users = set(x.lower() for x in options.ignore_user)
    else:
        ignore_users = set()
    try:
        # Stream comments forever, alerting on any keyword match.
        for comment in session.subreddit(subreddit).stream.comments():
            if comment.author and comment.author.name.lower() in ignore_users:
                continue
            match = regex.search(comment.body)
            if match:
                keyword = match.group(1).lower()
                url = quick_url(comment)
                print('{}: {}'.format(keyword, url))
                if options.message:
                    msg_to.message(
                        'Reddit Alert: {}'.format(keyword),
                        '{}\n\nby /u/{}\n\n---\n\n{}'.format(
                            url, comment.author, comment.body))
    except KeyboardInterrupt:
        sys.stderr.write('\n')
        print('Goodbye!\n')
python
{ "resource": "" }
q20444
get_object_executor
train
def get_object_executor(obj, green_mode=None):
    """Returns the proper executor for the given object.

    If the object has *_executors* and *_green_mode* members it returns
    the submit callable for the executor corresponding to the green_mode.
    Otherwise it returns the global executor for the given green_mode.

    Note: *None* is a valid object.

    :returns: submit callable"""
    # Resolve the green mode from the object when not given explicitly.
    mode = green_mode if green_mode is not None else get_object_green_mode(obj)
    # Prefer an executor registered on the object itself.
    if hasattr(obj, '_executors'):
        executor = obj._executors.get(mode, None)
    else:
        executor = None
    if executor is None:
        executor = get_executor(mode)
    return executor
python
{ "resource": "" }
q20445
green
train
def green(fn=None, consume_green_mode=True):
    """Make a function green. Can be used as a decorator.

    :param fn: the function to wrap (None when used as ``@green(...)``).
    :param consume_green_mode: when True the ``green_mode`` keyword is
        removed from the kwargs before calling the wrapped function;
        when False it is only read and still forwarded.
    """

    def decorator(fn):
        @wraps(fn)
        def greener(obj, *args, **kwargs):
            args = (obj,) + args
            # ``wait`` and ``timeout`` are always consumed here and passed
            # to the executor, never to the wrapped function.
            wait = kwargs.pop('wait', None)
            timeout = kwargs.pop('timeout', None)
            # Either consume ('pop') or merely read ('get') green_mode.
            access = kwargs.pop if consume_green_mode else kwargs.get
            green_mode = access('green_mode', None)
            executor = get_object_executor(obj, green_mode)
            return executor.run(fn, args, kwargs, wait=wait, timeout=timeout)
        return greener

    # Support both ``@green`` and ``@green(...)`` usage.
    if fn is None:
        return decorator
    return decorator(fn)
python
{ "resource": "" }
q20446
green_callback
train
def green_callback(fn, obj=None, green_mode=None):
    """Return a green verion of the given callback."""
    # Bind the executor's submit once; every call goes through it.
    submit = get_object_executor(obj, green_mode).submit

    @wraps(fn)
    def wrapper(*args, **kwargs):
        return submit(fn, *args, **kwargs)

    return wrapper
python
{ "resource": "" }
q20447
__struct_params_s
train
def __struct_params_s(obj, separator=', ', f=repr, fmt='%s = %s'):
    """method wrapper for printing all elements of a struct"""
    # Render every includable attribute and join them with the separator.
    rendered = [__single_param(obj, name, f, fmt)
                for name in dir(obj) if __inc_param(obj, name)]
    return separator.join(rendered)
python
{ "resource": "" }
q20448
__struct_params_str
train
def __struct_params_str(obj, fmt, f=repr):
    """method wrapper for printing all elements of a struct."""
    # One attribute per line.
    return __struct_params_s(obj, separator='\n', f=f, fmt=fmt)
python
{ "resource": "" }
q20449
__registerSeqStr
train
def __registerSeqStr():
    """helper function to make internal sequences printable"""
    # Both str() and repr() render the sequence as "[elem, elem, ...]"
    # (or "[]" when empty), using repr() on each element.
    _SeqStr = lambda self: (self and "[%s]" % (", ".join(map(repr, self)))) or "[]"
    _SeqRepr = lambda self: (self and "[%s]" % (", ".join(map(repr, self)))) or "[]"
    seqs = (StdStringVector, StdLongVector, CommandInfoList,
            AttributeInfoList, AttributeInfoListEx, PipeInfoList,
            DeviceDataHistoryList, GroupReplyList, GroupAttrReplyList,
            GroupCmdReplyList, DbData, DbDevInfos, DbDevExportInfos,
            DbDevImportInfos, DbHistoryList)
    for seq in seqs:
        seq.__str__ = _SeqStr
        seq.__repr__ = _SeqRepr
python
{ "resource": "" }
q20450
__registerStructStr
train
def __registerStructStr():
    """helper method to register str and repr methods for structures"""
    structs = (LockerInfo, DevCommandInfo, AttributeDimension, CommandInfo,
               DeviceInfo, DeviceAttributeConfig, AttributeInfo,
               AttributeAlarmInfo, ChangeEventInfo, PeriodicEventInfo,
               ArchiveEventInfo, AttributeEventInfo, AttributeInfoEx,
               PipeInfo, DeviceAttribute, DeviceAttributeHistory,
               DeviceData, DeviceDataHistory, DevicePipe, DbDatum,
               DbDevInfo, DbDevImportInfo, DbDevExportInfo, DbServerInfo,
               GroupReply, GroupAttrReply, GroupCmdReply, DevError,
               EventData, AttrConfEventData, DataReadyEventData,
               AttributeConfig, AttributeConfig_2, AttributeConfig_3,
               AttributeConfig_5, ChangeEventProp, PeriodicEventProp,
               ArchiveEventProp, AttributeAlarm, EventProperties)
    for struct in structs:
        struct.__str__ = __str__Struct
        struct.__repr__ = __repr__Struct
    # special case for TimeVal: it already has a str representation itself
    TimeVal.__repr__ = __repr__Struct
    # special case for DevFailed: we want a better pretty print
    # also, because it is an Exception it has the message attribute which
    # generates a Deprecation warning in python 2.6
    DevFailed.__str__ = __str__DevFailed
    DevFailed.__repr__ = __repr__DevFailed
    DevError.__str__ = __str__DevError
python
{ "resource": "" }
q20451
alias_package
train
def alias_package(package, alias, extra_modules=None):
    """Alias a python package properly.

    It ensures that modules are not duplicated by trying to import
    and alias all the submodules recursively.

    :param package: the (already imported) package object to alias
    :param alias: the alias name to register in ``sys.modules``
    :param extra_modules: optional mapping of attribute name to submodule
        path (relative to the package) that should also be aliased
    """
    # Avoid the shared mutable default argument; None means "no extras".
    if extra_modules is None:
        extra_modules = {}
    path = package.__path__
    alias_prefix = alias + '.'
    prefix = package.__name__ + '.'
    # Alias all importable modules recursively
    for _, name, _ in pkgutil.walk_packages(path, prefix):
        # Skip databaseds backends
        if name.startswith('tango.databaseds.db_access.'):
            continue
        try:
            if name not in sys.modules:
                __import__(name)
        except ImportError:
            # Best effort: submodules that fail to import are skipped.
            continue
        alias_name = name.replace(prefix, alias_prefix)
        sys.modules[alias_name] = sys.modules[name]
    # Alias extra modules
    for key, value in extra_modules.items():
        name = prefix + value
        if name not in sys.modules:
            __import__(name)
        if not hasattr(package, key):
            setattr(package, key, sys.modules[name])
        sys.modules[alias_prefix + key] = sys.modules[name]
    # Alias root module
    sys.modules[alias] = sys.modules[package.__name__]
python
{ "resource": "" }
q20452
AsyncioExecutor.delegate
train
def delegate(self, fn, *args, **kwargs):
    """Return the given operation as an asyncio future."""
    # Run the callback on the subexecutor and wrap the result so it can
    # be awaited through asyncio.
    bound = functools.partial(fn, *args, **kwargs)
    pending = self.loop.run_in_executor(self.subexecutor, bound)
    return asyncio.ensure_future(pending)
python
{ "resource": "" }
q20453
AsyncioExecutor.access
train
def access(self, accessor, timeout=None):
    """Return a result from an asyncio future.

    :param accessor: awaitable to resolve on this executor's loop.
    :param timeout: optional timeout in seconds for ``asyncio.wait_for``.
    :raises RuntimeError: if the loop is already running.
    """
    if self.loop.is_running():
        raise RuntimeError("Loop is already running")
    # The explicit ``loop`` argument to asyncio.wait_for was deprecated
    # in Python 3.8 and removed in 3.10; run_until_complete already binds
    # the coroutine to ``self.loop``.
    coro = asyncio.wait_for(accessor, timeout)
    return self.loop.run_until_complete(coro)
python
{ "resource": "" }
q20454
__EncodedAttribute_encode_jpeg_gray8
train
def __EncodedAttribute_encode_jpeg_gray8(self, gray8, width=0, height=0, quality=100.0):
    """Encode a 8 bit grayscale image as JPEG format

       :param gray8: an object containing image information
       :type gray8: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
       :param width: image width. **MUST** be given if gray8 is a string
                     or if it is a :class:`numpy.ndarray` with ndims != 2.
                     Otherwise it is calculated internally.
       :type width: :py:obj:`int`
       :param height: image height. **MUST** be given if gray8 is a string
                      or if it is a :class:`numpy.ndarray` with ndims != 2.
                      Otherwise it is calculated internally.
       :type height: :py:obj:`int`
       :param quality: Quality of JPEG (0=poor quality 100=max quality)
                       (default is 100.0)
       :type quality: :py:obj:`float`

    .. note::
        When :class:`numpy.ndarray` is given:

            - gray8 **MUST** be CONTIGUOUS, ALIGNED
            - if gray8.ndims != 2, width and height **MUST** be given and
              gray8.nbytes **MUST** match width*height
            - if gray8.ndims == 2, gray8.itemsize **MUST** be 1
              (typically, gray8.dtype is one of `numpy.dtype.byte`,
              `numpy.dtype.ubyte`, `numpy.dtype.int8` or `numpy.dtype.uint8`)

    Example::

        def read_myattr(self, attr):
            enc = tango.EncodedAttribute()
            data = numpy.arange(100, dtype=numpy.byte)
            data = numpy.array((data,data,data))
            enc.encode_jpeg_gray8(data)
            attr.set_value(enc)
    """
    # Thin wrapper: all validation and encoding happen in the shared
    # grayscale helper with the JPEG image format selected.
    self._generic_encode_gray8(gray8, width=width, height=height,
                               quality=quality,
                               format=_ImageFormat.JpegImage)
python
{ "resource": "" }
q20455
__EncodedAttribute_encode_jpeg_rgb24
train
def __EncodedAttribute_encode_jpeg_rgb24(self, rgb24, width=0, height=0, quality=100.0):
    """Encode a 24 bit rgb color image as JPEG format.

       :param rgb24: an object containing image information
       :type rgb24: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
       :param width: image width. **MUST** be given if rgb24 is a string
                     or if it is a :class:`numpy.ndarray` with ndims != 3.
                     Otherwise it is calculated internally.
       :type width: :py:obj:`int`
       :param height: image height. **MUST** be given if rgb24 is a string
                      or if it is a :class:`numpy.ndarray` with ndims != 3.
                      Otherwise it is calculated internally.
       :type height: :py:obj:`int`
       :param quality: Quality of JPEG (0=poor quality 100=max quality)
                       (default is 100.0)
       :type quality: :py:obj:`float`

    .. note::
        When :class:`numpy.ndarray` is given:

            - rgb24 **MUST** be CONTIGUOUS, ALIGNED
            - if rgb24.ndims != 3, width and height **MUST** be given and
              rgb24.nbytes/3 **MUST** match width*height
            - if rgb24.ndims == 3, rgb24.itemsize **MUST** be 1
              (typically, rgb24.dtype is one of `numpy.dtype.byte`,
              `numpy.dtype.ubyte`, `numpy.dtype.int8` or `numpy.dtype.uint8`)
              and shape **MUST** be (height, width, 3)

    Example::

        def read_myattr(self, attr):
            enc = tango.EncodedAttribute()
            # create an 'image' where each pixel is R=0x01, G=0x01, B=0x01
            arr = numpy.ones((10,10,3), dtype=numpy.uint8)
            enc.encode_jpeg_rgb24(data)
            attr.set_value(enc)
    """
    # Thin wrapper: all validation and encoding happen in the shared
    # rgb24 helper with the JPEG image format selected.
    self._generic_encode_rgb24(rgb24, width=width, height=height,
                               quality=quality,
                               format=_ImageFormat.JpegImage)
python
{ "resource": "" }
q20456
__EncodedAttribute_encode_jpeg_rgb32
train
def __EncodedAttribute_encode_jpeg_rgb32(self, rgb32, width=0, height=0, quality=100.0):
    """Encode a 32 bit rgb color image as JPEG format.

       :param rgb32: an object containing image information
       :type rgb32: :py:obj:`str` or :class:`numpy.ndarray` or seq< seq<element> >
       :param width: image width. **MUST** be given if rgb32 is a string
                     or if it is a :class:`numpy.ndarray` with ndims != 2.
                     Otherwise it is calculated internally.
       :type width: :py:obj:`int`
       :param height: image height. **MUST** be given if rgb32 is a string
                      or if it is a :class:`numpy.ndarray` with ndims != 2.
                      Otherwise it is calculated internally.
       :type height: :py:obj:`int`
       :param quality: Quality of JPEG (0=poor quality 100=max quality)
                       (default is 100.0)
       :type quality: :py:obj:`float`

    .. note::
        When :class:`numpy.ndarray` is given:

            - rgb32 **MUST** be CONTIGUOUS, ALIGNED
            - if rgb32.ndims != 2, width and height **MUST** be given and
              rgb32.nbytes/4 **MUST** match width*height
            - if rgb32.ndims == 2, rgb32.itemsize **MUST** be 4
              (typically, rgb32.dtype is one of `numpy.dtype.int32`,
              `numpy.dtype.uint32`)

    Example::

        def read_myattr(self, attr):
            enc = tango.EncodedAttribute()
            data = numpy.arange(100, dtype=numpy.int32)
            data = numpy.array((data,data,data))
            enc.encode_jpeg_rgb32(data)
            attr.set_value(enc)
    """
    if not is_seq(rgb32):
        raise TypeError("Expected sequence (str, numpy.ndarray, list, tuple "
                        "or bytearray) as first argument")
    is_str = is_pure_str(rgb32)
    if is_str:
        if not width or not height:
            raise ValueError("When giving a string as data, you must also "
                             "supply width and height")
    if np and isinstance(rgb32, np.ndarray):
        if rgb32.ndim != 2:
            if not width or not height:
                raise ValueError("When giving a non 2D numpy array, width and "
                                 "height must be supplied")
            if rgb32.nbytes / 4 != width * height:
                raise ValueError("numpy array size mismatch")
        else:
            if rgb32.itemsize != 4:
                raise TypeError("Expected numpy array with itemsize == 4")
        if not rgb32.flags.c_contiguous:
            raise TypeError("Currently, only contiguous, aligned numpy arrays "
                            "are supported")
        if not rgb32.flags.aligned:
            raise TypeError("Currently, only contiguous, aligned numpy arrays "
                            "are supported")
    if not is_str and (not width or not height):
        height = len(rgb32)
        if height < 1:
            raise IndexError("Expected sequence with at least one row")
        row0 = rgb32[0]
        if not is_seq(row0):
            raise IndexError("Expected sequence (str, numpy.ndarray, list, tuple or "
                             "bytearray) inside a sequence")
        width = len(row0)
        if is_pure_str(row0) or type(row0) == bytearray:
            # Each pixel occupies four bytes; use integer division so the
            # pixel count stays an int (``/=`` would produce a float on
            # Python 3).
            width //= 4
    self._encode_jpeg_rgb32(rgb32, width, height, quality)
python
{ "resource": "" }
q20457
is_tango_object
train
def is_tango_object(arg):
    """Return tango data if the argument is a tango object,
    False otherwise.
    """
    if isinstance(arg, (attribute, device_property)):
        return arg
    # Commands expose __tango_command__; anything else is not tango.
    try:
        return arg.__tango_command__
    except AttributeError:
        return False
python
{ "resource": "" }
q20458
inheritance_patch
train
def inheritance_patch(attrs):
    """Patch tango objects before they are processed by the metaclass."""
    for name, value in attrs.items():
        if not isinstance(value, attribute):
            continue
        if getattr(value, 'attr_write', None) != AttrWriteType.READ_WRITE:
            continue
        if not getattr(value, 'fset', None):
            # Bind the write method by its conventional name when the
            # attribute did not define one explicitly.
            method_name = value.write_method_name or "write_" + name
            value.fset = attrs.get(method_name)
python
{ "resource": "" }
q20459
run
train
def run(classes, args=None, msg_stream=sys.stdout,
        verbose=False, util=None, event_loop=None,
        post_init_callback=None, green_mode=None,
        raises=False):
    """
    Provides a simple way to run a tango server. It handles exceptions
    by writting a message to the msg_stream.

    The `classes` parameter can be either a sequence of:

    * :class:`~tango.server.Device` or
    * a sequence of two elements :class:`~tango.DeviceClass`,
      :class:`~tango.DeviceImpl` or
    * a sequence of three elements :class:`~tango.DeviceClass`,
      :class:`~tango.DeviceImpl`, tango class name (str)

    or a dictionary where:

    * key is the tango class name
    * value is either:

        * a :class:`~tango.server.Device` class or
        * a sequence of two elements :class:`~tango.DeviceClass`,
          :class:`~tango.DeviceImpl` or
        * a sequence of three elements :class:`~tango.DeviceClass`,
          :class:`~tango.DeviceImpl`, tango class name (str)

    The optional `post_init_callback` can be a callable (without
    arguments) or a tuple where the first element is the callable,
    the second is a list of arguments (optional) and the third is a
    dictionary of keyword arguments (also optional).

    .. note::
       the order of registration of tango classes defines the order
       tango uses to initialize the corresponding devices.
       if using a dictionary as argument for classes be aware that the
       order of registration becomes arbitrary. If you need a
       predefined order use a sequence or an OrderedDict.

    Example 1: registering and running a PowerSupply inheriting from
    :class:`~tango.server.Device`::

        from tango.server import Device, DeviceMeta, run

        class PowerSupply(Device):
            pass

        run((PowerSupply,))

    Example 2: registering and running a MyServer defined by tango
    classes `MyServerClass` and `MyServer`::

        from tango import Device_4Impl, DeviceClass
        from tango.server import run

        class MyServer(Device_4Impl):
            pass

        class MyServerClass(DeviceClass):
            pass

        run({'MyServer': (MyServerClass, MyServer)})

    Example 3: registering and running a MyServer defined by tango
    classes `MyServerClass` and `MyServer`::

        from tango import Device_4Impl, DeviceClass
        from tango.server import Device, DeviceMeta, run

        class PowerSupply(Device):
            pass

        class MyServer(Device_4Impl):
            pass

        class MyServerClass(DeviceClass):
            pass

        run([PowerSupply, [MyServerClass, MyServer]])
        # or: run({'MyServer': (MyServerClass, MyServer)})

    :param classes: a sequence of :class:`~tango.server.Device` classes or
                    a dictionary where keyword is the tango class name
                    and value is a sequence of Tango Device Class python
                    class, and Tango Device python class
    :type classes: sequence or dict

    :param args: list of command line arguments
                 [default: None, meaning use sys.argv]
    :type args: list

    :param msg_stream: stream where to put messages [default: sys.stdout]

    :param util: PyTango Util object
                 [default: None meaning create a Util instance]
    :type util: :class:`~tango.Util`

    :param event_loop: event_loop callable
    :type event_loop: callable

    :param post_init_callback: an optional callback that is executed
                               between the calls Util.server_init and
                               Util.server_run
    :type post_init_callback: callable or tuple (see description above)

    :param raises: Disable error handling and propagate exceptions from
                   the server
    :type raises: bool

    :return: The Util singleton object
    :rtype: :class:`~tango.Util`

    .. versionadded:: 8.1.2

    .. versionchanged:: 8.1.4
        when classes argument is a sequence, the items can also be
        a sequence <TangoClass, TangoClassClass>[, tango class name]

    .. versionchanged:: 9.2.2
        `raises` argument has been added
    """
    server_run = functools.partial(
        __server_run, classes, args=args, msg_stream=msg_stream,
        util=util, event_loop=event_loop,
        post_init_callback=post_init_callback, green_mode=green_mode)
    # Run the server without error handling
    if raises:
        return server_run()
    # Run the server with error handling
    write = msg_stream.write if msg_stream else lambda msg: None
    try:
        return server_run()
    except KeyboardInterrupt:
        write("Exiting: Keyboard interrupt\n")
    except DevFailed as df:
        write("Exiting: Server exited with tango.DevFailed:\n" +
              str(df) + "\n")
        if verbose:
            write(traceback.format_exc())
    except Exception as e:
        write("Exiting: Server exited with unforseen exception:\n" +
              str(e) + "\n")
        if verbose:
            write(traceback.format_exc())
    write("\nExited\n")
python
{ "resource": "" }
q20460
BaseDevice.run_server
train
def run_server(cls, args=None, **kwargs):
    """Run the class as a device server.
    It is based on the tango.server.run method.

    The difference is that the device class
    and server name are automatically given.

    Args:
        args (iterable): args as given in the tango.server.run method
                         without the server name. If None, the sys.argv
                         list is used
        kwargs: the other keywords argument are as given
                in the tango.server.run method.
    """
    # The server name (first argument) is always the class name.
    extra = sys.argv[1:] if args is None else list(args)
    full_args = [cls.__name__] + extra
    kwargs.setdefault("green_mode", getattr(cls, 'green_mode', None))
    return run((cls,), full_args, **kwargs)
python
{ "resource": "" }
q20461
attribute.setter
train
def setter(self, fset):
    """
    To be used as a decorator. Will define the decorated method
    as a write attribute method to be called when client writes
    the attribute
    """
    self.fset = fset
    if self.attr_write == AttrWriteType.READ:
        # A read-only attribute gaining a setter becomes READ_WRITE when
        # it already has a getter, otherwise WRITE only.
        has_getter = getattr(self, 'fget', None)
        self.attr_write = (AttrWriteType.READ_WRITE if has_getter
                           else AttrWriteType.WRITE)
    return self
python
{ "resource": "" }
q20462
pipe.setter
train
def setter(self, fset):
    """
    To be used as a decorator. Will define the decorated method
    as a write pipe method to be called when client writes to the pipe
    """
    # Registering a setter makes the pipe writable.
    self.fset = fset
    self.pipe_write = PipeWriteType.PIPE_READ_WRITE
    return self
python
{ "resource": "" }
q20463
Server.__prepare
train
def __prepare(self):
    """Update database with existing devices"""
    self.log.debug("prepare")
    if self.__phase > 0:
        raise RuntimeError("Internal error: Can only prepare in phase 0")
    server_instance = self.server_instance
    db = Database()
    # get list of server devices if server was already registered
    server_registered = server_instance in db.get_server_list()
    if server_registered:
        dserver_name = "dserver/{0}".format(server_instance)
        if db.import_device(dserver_name).exported:
            import tango
            dserver = tango.DeviceProxy(dserver_name)
            try:
                dserver.ping()
                raise Exception("Server already running")
            except:
                # NOTE(review): this bare except also swallows the
                # "Server already running" exception raised just above --
                # confirm whether that is intentional.
                self.log.info("Last time server was not properly "
                              "shutdown!")
        _, db_device_map = self.get_devices()
    else:
        db_device_map = {}
    db_devices_add = {}
    # all devices that are registered in database that are not registered
    # as tango objects or for which the tango class changed will be removed
    db_devices_remove = set(db_device_map) - set(self.__objects)
    for local_name, local_object in self.__objects.items():
        local_class_name = local_object.tango_class_name
        db_class_name = db_device_map.get(local_name)
        if db_class_name:
            if local_class_name != db_class_name:
                # Class changed: re-register under the new class.
                db_devices_remove.add(local_name)
                db_devices_add[local_name] = local_object
        else:
            db_devices_add[local_name] = local_object
    for device in db_devices_remove:
        db.delete_device(device)
        try:
            db.delete_device_alias(db.get_alias(device))
        except:
            # Best effort: the device may simply have no alias.
            pass
    # register devices in database
    # add DServer
    db_dev_info = DbDevInfo()
    db_dev_info.server = server_instance
    db_dev_info._class = "DServer"
    db_dev_info.name = "dserver/" + server_instance
    db_dev_infos = [db_dev_info]
    aliases = []
    for obj_name, obj in db_devices_add.items():
        db_dev_info = DbDevInfo()
        db_dev_info.server = server_instance
        db_dev_info._class = obj.tango_class_name
        db_dev_info.name = obj.full_name
        db_dev_infos.append(db_dev_info)
        if obj.alias:
            aliases.append((obj.full_name, obj.alias))
    db.add_server(server_instance, db_dev_infos)
    # add aliases
    for alias_info in aliases:
        db.put_device_alias(*alias_info)
python
{ "resource": "" }
q20464
Server.get_devices
train
def get_devices(self):
    """Return the devices registered in the database for this server.

    :return: a two element tuple:
        - dict mapping tango class name -> list of (lower-cased) device names
        - dict mapping (lower-cased) device name -> tango class name
    :rtype: tuple<dict, dict>
    """
    if self.__util is None:
        import tango
        db = tango.Database()
    else:
        db = self.__util.get_database()
    raw = db.get_device_class_list(self.server_instance)
    # raw alternates: device name, class name, device name, class name, ...
    class_map = {}
    dev_map = {}
    for raw_dev, klass in zip(raw[::2], raw[1::2]):
        dev = raw_dev.lower()
        class_map.setdefault(klass, []).append(dev)
        dev_map[dev] = klass
    return class_map, dev_map
python
{ "resource": "" }
q20465
_set_concurrent_future_state
train
def _set_concurrent_future_state(concurrent, source): """Copy state from a future to a concurrent.futures.Future.""" assert source.done() if source.cancelled(): concurrent.cancel() if not concurrent.set_running_or_notify_cancel(): return exception = source.exception() if exception is not None: concurrent.set_exception(exception) else: result = source.result() concurrent.set_result(result)
python
{ "resource": "" }
q20466
_copy_future_state
train
def _copy_future_state(source, dest): """Internal helper to copy state from another Future. The other Future may be a concurrent.futures.Future. """ assert source.done() if dest.cancelled(): return assert not dest.done() if source.cancelled(): dest.cancel() else: exception = source.exception() if exception is not None: dest.set_exception(exception) else: result = source.result() dest.set_result(result)
python
{ "resource": "" }
q20467
run_coroutine_threadsafe
train
def run_coroutine_threadsafe(coro, loop):
    """Submit a coroutine object to a given event loop.

    Return a concurrent.futures.Future to access the result.
    """
    if not asyncio.iscoroutine(coro):
        raise TypeError('A coroutine object is required')
    waiter = concurrent.futures.Future()

    def _schedule():
        # Runs inside the event-loop thread.
        try:
            task = asyncio.ensure_future(coro, loop=loop)
            _chain_future(task, waiter)
        except Exception as exc:
            if waiter.set_running_or_notify_cancel():
                waiter.set_exception(exc)
            raise

    loop.call_soon_threadsafe(_schedule)
    return waiter
python
{ "resource": "" }
q20468
_init_attr_config
train
def _init_attr_config(attr_cfg):
    """Helper that resets an attribute config object to default values."""
    attr_cfg.writable = AttrWriteType.READ
    attr_cfg.data_format = AttrDataFormat.SCALAR
    attr_cfg.data_type = 0
    attr_cfg.max_dim_x = 0
    attr_cfg.max_dim_y = 0
    attr_cfg.extensions = []
    # every textual field starts out empty
    for field in ('name', 'description', 'label', 'unit', 'standard_unit',
                  'display_unit', 'format', 'min_value', 'max_value',
                  'writable_attr_name'):
        setattr(attr_cfg, field, '')
python
{ "resource": "" }
q20469
GeventExecutor.delegate
train
def delegate(self, fn, *args, **kwargs):
    """Return the given operation as a gevent future."""
    spawn = self.subexecutor.spawn
    return spawn(fn, *args, **kwargs)
python
{ "resource": "" }
q20470
connect
train
def connect(obj, signal, slot, event_type=tango.EventType.CHANGE_EVENT):
    """Experimental function. Not part of the official API"""
    helper = obj._helper
    return helper.connect(signal, slot, event_type=event_type)
python
{ "resource": "" }
q20471
get_readme
train
def get_readme(name='README.rst'):
    """Get readme file contents without the badges.

    Badge lines are the reStructuredText substitution lines that both start
    and end with ``|``; every other line is kept verbatim.

    :param name: path of the readme file to read
    :return: file contents with badge lines removed, joined by newlines
    """
    # Read explicitly as UTF-8 so the result does not depend on the
    # platform's default encoding (README files are UTF-8).
    with open(name, encoding='utf-8') as f:
        return '\n'.join(
            line for line in f.read().splitlines()
            if not (line.startswith('|') and line.endswith('|')))
python
{ "resource": "" }
q20472
abspath
train
def abspath(*path):
    """Resolve *path* components relative to the directory containing this
    setup.py script and return the resulting absolute path."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, *path)
python
{ "resource": "" }
q20473
DbExportDevice
train
def DbExportDevice(self, argin):
    """Export a device to the database.

    :param argin: Str[0] = Device name
                  Str[1] = CORBA IOR
                  Str[2] = Device server process host name
                  Str[3] = Device server process PID or string ``null``
                  Str[4] = Device server process version
    :type argin: tango.DevVarStringArray
    :return: None
    """
    self._log.debug("In DbExportDevice()")
    if len(argin) < 5:
        self.warn_stream("DataBase::DbExportDevice(): insufficient export info for device ")
        th_exc(DB_IncorrectArguments,
               "insufficient export info for device",
               "DataBase::ExportDevice()")
    dev_name, ior, host, pid, version = argin[:5]
    dev_name = dev_name.lower()
    # the string "null" means "no PID known"
    if pid.lower() == 'null':
        pid = "-1"
    self.db.export_device(dev_name, ior, host, pid, version)
python
{ "resource": "" }
q20474
DataBase.DbGetDeviceDomainList
train
def DbGetDeviceDomainList(self, argin):
    """Get the list of device domain names matching the given wildcard.

    :param argin: the wildcard
    :type argin: tango.DevString
    :return: device name domain list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceDomainList()")
    pattern = replace_wildcard(argin)
    return self.db.get_device_domain_list(pattern)
python
{ "resource": "" }
q20475
DataBase.DbUnExportServer
train
def DbUnExportServer(self, argin):
    """Mark every device belonging to the given device server process as
    not exported.

    :param argin: device server name (executable/instance)
    :type argin: tango.DevString
    :return: None
    """
    self._log.debug("In DbUnExportServer()")
    server_name = argin
    self.db.unexport_server(server_name)
python
{ "resource": "" }
q20476
DataBase.DbDeleteAttributeAlias
train
def DbDeleteAttributeAlias(self, argin):
    """Delete an attribute alias.

    :param argin: attribute alias name
    :type argin: tango.DevString
    :return: None
    """
    self._log.debug("In DbDeleteAttributeAlias()")
    alias = argin
    self.db.delete_attribute_alias(alias)
python
{ "resource": "" }
q20477
DataBase.DbGetClassAttributePropertyHist
train
def DbGetClassAttributePropertyHist(self, argin):
    """Retrieve the history of a Tango class attribute property.

    :param argin: Str[0] = Tango class
                  Str[1] = Attribute name
                  Str[2] = Property name
    :type argin: tango.DevVarStringArray
    :return: flattened history records
             (attribute, property, date, value count, values...)
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetClassAttributePropertyHist()")
    klass = argin[0]
    attr_pattern = replace_wildcard(argin[1])
    prop_pattern = replace_wildcard(argin[2])
    return self.db.get_class_attribute_property_hist(
        klass, attr_pattern, prop_pattern)
python
{ "resource": "" }
q20478
DataBase.DbPutDeviceAttributeProperty2
train
def DbPutDeviceAttributeProperty2(self, argin):
    """Put device attribute properties; supports array-valued properties
    (unlike the legacy DbPutDeviceAttributeProperty command, kept for
    compatibility).

    :param argin: Str[0] = device name, Str[1] = attribute count, followed
                  by the flattened attribute/property records
    :type argin: tango.DevVarStringArray
    :return: None
    """
    self._log.debug("In DbPutDeviceAttributeProperty2()")
    dev_name = argin[0]
    attr_count = int(argin[1])
    self.db.put_device_attribute_property2(dev_name, attr_count, argin[2:])
python
{ "resource": "" }
q20479
DataBase.DbGetAttributeAliasList
train
def DbGetAttributeAliasList(self, argin):
    """Get the attribute aliases matching the given filter.

    :param argin: attribute alias filter string (eg: att*); empty matches all
    :type argin: tango.DevString
    :return: matching attribute aliases
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetAttributeAliasList()")
    # an empty filter means "everything"
    pattern = replace_wildcard(argin) if argin else "%"
    return self.db.get_attribute_alias_list(pattern)
python
{ "resource": "" }
q20480
DataBase.DbGetExportdDeviceListForClass
train
def DbGetExportdDeviceListForClass(self, argin):
    """Query the database for devices exported for the specified class.

    :param argin: class name (wildcards allowed)
    :type argin: tango.DevString
    :return: exported device list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetExportdDeviceListForClass()")
    pattern = replace_wildcard(argin)
    return self.db.get_exported_device_list_for_class(pattern)
python
{ "resource": "" }
q20481
DataBase.DbPutAttributeAlias
train
def DbPutAttributeAlias(self, argin):
    """Define an alias for an attribute.

    :param argin: Str[0] = attribute name
                  Str[1] = attribute alias
    :type argin: tango.DevVarStringArray
    :return: None
    """
    self._log.debug("In DbPutAttributeAlias()")
    if len(argin) < 2:
        self.warn_stream("DataBase::DbPutAttributeAlias(): insufficient number of arguments ")
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to put attribute alias",
               "DataBase::DbPutAttributeAlias()")
    attr_name, alias = argin[:2]
    self.db.put_attribute_alias(attr_name, alias)
python
{ "resource": "" }
q20482
DataBase.DbGetServerList
train
def DbGetServerList(self, argin):
    """Get the device server processes whose name matches the filter.

    :param argin: the filter
    :type argin: tango.DevString
    :return: device server process name list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetServerList()")
    pattern = replace_wildcard(argin)
    return self.db.get_server_list(pattern)
python
{ "resource": "" }
q20483
DataBase.DbDeleteDeviceAttributeProperty
train
def DbDeleteDeviceAttributeProperty(self, argin):
    """Delete device attribute properties from the database.

    :param argin: Str[0] = Device name
                  Str[1] = Attribute name
                  Str[2..n] = Property names
    :type argin: tango.DevVarStringArray
    :return: None
    :raises: DevFailed (DB_IncorrectArguments / DB_IncorrectDeviceName)
    """
    self._log.debug("In DbDeleteDeviceAttributeProperty()")
    if len(argin) < 3:
        self.warn_stream("DataBase::db_delete_device_attribute_property(): insufficient number of arguments ")
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to delete device attribute property",
               "DataBase::DeleteDeviceAttributeProperty()")
    dev_name, attr_name = argin[:2]
    # Bug fix: validate the device name itself, not the whole argument
    # list. Passing the list also made the warning below raise TypeError
    # (list + str concatenation) instead of logging.
    ret, dev_name, dfm = check_device_name(dev_name)
    if not ret:
        self.warn_stream("DataBase::db_delete_device_attribute_property(): device name " + dev_name + " incorrect ")
        th_exc(DB_IncorrectDeviceName,
               "failed to delete device attribute property, device name incorrect",
               "DataBase::DeleteDeviceAttributeProperty()")
    for prop_name in argin[2:]:
        self.db.delete_device_attribute_property(dev_name, attr_name,
                                                 prop_name)
python
{ "resource": "" }
q20484
DataBase.DbGetDeviceFamilyList
train
def DbGetDeviceFamilyList(self, argin):
    """Get the device name families matching the given wildcard.

    :param argin: the wildcard
    :type argin: tango.DevString
    :return: family list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceFamilyList()")
    pattern = replace_wildcard(argin)
    return self.db.get_device_family_list(pattern)
python
{ "resource": "" }
q20485
DataBase.DbGetDeviceWideList
train
def DbGetDeviceWideList(self, argin):
    """Get the devices whose names satisfy the filter.

    :param argin: filter
    :type argin: tango.DevString
    :return: list of matching devices
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceWideList()")
    pattern = replace_wildcard(argin)
    return self.db.get_device_wide_list(pattern)
python
{ "resource": "" }
q20486
DataBase.DbDeleteProperty
train
def DbDeleteProperty(self, argin):
    """Delete free-object properties from the database.

    :param argin: Str[0] = Object name, Str[1..n] = Property names
    :type argin: tango.DevVarStringArray
    :return: None
    """
    self._log.debug("In DbDeleteProperty()")
    obj_name, prop_names = argin[0], argin[1:]
    for prop in prop_names:
        self.db.delete_property(obj_name, prop)
python
{ "resource": "" }
q20487
DataBase.DbGetClassAttributeProperty2
train
def DbGetClassAttributeProperty2(self, argin):
    """Get class attribute properties; supports array-valued properties
    (unlike the legacy DbGetClassAttributeProperty command, kept for
    compatibility).

    :param argin: Str[0] = Tango class name, Str[1..n] = Attribute names
    :type argin: tango.DevVarStringArray
    :return: flattened class/attribute/property records
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetClassAttributeProperty2()")
    klass = argin[0]
    attributes = argin[1:]
    return self.db.get_class_attribute_property2(klass, attributes)
python
{ "resource": "" }
q20488
DataBase.DbGetDeviceExportedList
train
def DbGetDeviceExportedList(self, argin):
    """Get the exported devices whose names satisfy the filter.

    :param argin: filter (wildcards allowed)
    :type argin: tango.DevString
    :return: list of exported devices
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceExportedList()")
    pattern = replace_wildcard(argin)
    return self.db.get_device_exported_list(pattern)
python
{ "resource": "" }
q20489
DataBase.DbGetDeviceAlias
train
def DbGetDeviceAlias(self, argin):
    """Return the alias registered for the given device name.

    :param argin: the device name
    :type argin: tango.DevString
    :return: the alias found
    :rtype: tango.DevString
    """
    self._log.debug("In DbGetDeviceAlias()")
    valid, dev_name, _ = check_device_name(argin)
    if not valid:
        th_exc(DB_IncorrectDeviceName,
               "device name (" + argin + ") syntax error (should be [tango:][//instance/]domain/family/member)",
               "DataBase::DbGetDeviceAlias()")
    return self.db.get_device_alias(dev_name)
python
{ "resource": "" }
q20490
DataBase.DbGetClassPropertyList
train
def DbGetClassPropertyList(self, argin):
    """Get the property list of a Tango class matching the filter.

    :param argin: the filter; empty matches all
    :type argin: tango.DevString
    :return: property name list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetClassPropertyList()")
    # an empty filter means "everything"
    pattern = replace_wildcard(argin) if argin else "%"
    return self.db.get_class_property_list(pattern)
python
{ "resource": "" }
q20491
DataBase.DbGetDeviceAliasList
train
def DbGetDeviceAliasList(self, argin):
    """Get the device aliases matching the given filter.

    :param argin: the filter; empty matches all
    :type argin: tango.DevString
    :return: device alias list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetDeviceAliasList()")
    # an empty filter means "everything"
    pattern = replace_wildcard(argin) if argin else "%"
    return self.db.get_device_alias_list(pattern)
python
{ "resource": "" }
q20492
DataBase.DbDeleteClassAttribute
train
def DbDeleteClassAttribute(self, argin):
    """Delete a class attribute and all its properties from the database.

    :param argin: Str[0] = Tango class name
                  Str[1] = Attribute name
    :type argin: tango.DevVarStringArray
    :return: None
    """
    self._log.debug("In DbDeleteClassAttribute()")
    if len(argin) < 2:
        self.warn_stream("DataBase::db_delete_class_attribute(): insufficient number of arguments ")
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to delete class attribute",
               "DataBase::DeleteClassAttribute()")
    klass, attribute = argin[:2]
    self.db.delete_class_attribute(klass, attribute)
python
{ "resource": "" }
q20493
DataBase.DbGetClassPropertyHist
train
def DbGetClassPropertyHist(self, argin):
    """Retrieve the history of a Tango class property.

    :param argin: Str[0] = Tango class, Str[1] = Property name
    :type argin: tango.DevVarStringArray
    :return: flattened history records
             (property, date, value count, values...)
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetClassPropertyHist()")
    klass, prop = argin[0], argin[1]
    return self.db.get_class_property_hist(klass, prop)
python
{ "resource": "" }
q20494
DataBase.DbDeleteDeviceAttribute
train
def DbDeleteDeviceAttribute(self, argin):
    """Delete all properties of a device attribute from the database.

    :param argin: Str[0] = Device name
                  Str[1] = Attribute name
    :type argin: tango.DevVarStringArray
    :return: None
    :raises: DevFailed (DB_IncorrectArguments / DB_IncorrectDeviceName)
    """
    self._log.debug("In DbDeleteDeviceAttribute()")
    if len(argin) < 2:
        self.warn_stream("DataBase::db_delete_device_attribute(): insufficient number of arguments ")
        th_exc(DB_IncorrectArguments,
               "insufficient number of arguments to delete device attribute",
               "DataBase::DeleteDeviceAttribute()")
    dev_name, attr_name = argin[:2]
    # Bug fix: validate the device name itself, not the whole argument
    # list. Passing the list also made the warning below raise TypeError
    # (list + str concatenation) instead of logging.
    ret, dev_name, dfm = check_device_name(dev_name)
    if not ret:
        self.warn_stream("DataBase::db_delete_device_attribute(): device name " + dev_name + " incorrect ")
        th_exc(DB_IncorrectDeviceName,
               "failed to delete device attribute, device name incorrect",
               "DataBase::DeleteDeviceAttribute()")
    self.db.delete_device_attribute(dev_name, attr_name)
python
{ "resource": "" }
q20495
DataBase.DbMySqlSelect
train
def DbMySqlSelect(self, argin):
    """Very low level command: execute the given SELECT on the TANGO
    database and return its unfiltered result.

    :param argin: MySql Select command
    :type argin: tango.DevString
    :return: MySql Select command result
             - svalues: select results
             - lvalue[n]: 0 if svalue[n] is null else 1;
               (last lvalue - 1) is the row count, (last lvalue) the
               field count
    :rtype: tango.DevVarLongStringArray
    :raises: DevFailed (DB_IncorrectArguments) for multi-statement input
    """
    self._log.debug("In DbMySqlSelect()")
    cmd = argin
    # single lowercase copy (the original computed this twice)
    tmp_argin = argin.lower()
    # Check if SELECT keyword is already inside the command
    if tmp_argin.find('select') == -1:
        cmd = "SELECT " + cmd
    # reject anything following a ';' -- only one statement is allowed
    pos = tmp_argin.find(';')
    if pos != -1 and len(tmp_argin) > (pos + 1):
        th_exc(DB_IncorrectArguments,
               "SQL command not valid: " + argin,
               "DataBase::ExportDevice()")
    return self.db.my_sql_select(cmd)
python
{ "resource": "" }
q20496
DataBase.DbGetPropertyList
train
def DbGetPropertyList(self, argin):
    """Get the properties of a free object matching the filter.

    :param argin: Str[0] = Object name
                  Str[1] = filter
    :type argin: tango.DevVarStringArray
    :return: property name list
    :rtype: tango.DevVarStringArray
    """
    self._log.debug("In DbGetPropertyList()")
    obj_name, raw_filter = argin[:2]
    return self.db.get_property_list(obj_name, replace_wildcard(raw_filter))
python
{ "resource": "" }
q20497
DataBase.DbUnExportDevice
train
def DbUnExportDevice(self, argin):
    """Mark a device as not exported in the database.

    :param argin: Device name
    :type argin: tango.DevString
    :return: None
    """
    self._log.debug("In DbUnExportDevice()")
    # Bug fix: argin is a DevString (the device name itself). The previous
    # code did ``argin[0].lower()``, which lowered only the *first
    # character* of the name and unexported a bogus one-character device.
    dev_name = argin.lower()
    self.db.unexport_device(dev_name)
python
{ "resource": "" }
q20498
DataBase.DbGetAliasDevice
train
def DbGetAliasDevice(self, argin):
    """Get a device name from its alias.

    :param argin: alias name; empty matches all
    :type argin: tango.DevString
    :return: device name
    :rtype: tango.DevString
    """
    self._log.debug("In DbGetAliasDevice()")
    # an empty filter means "everything"
    pattern = replace_wildcard(argin) if argin else "%"
    return self.db.get_alias_device(pattern)
python
{ "resource": "" }
q20499
DataBase.DbDeleteDevice
train
def DbDeleteDevice(self, argin):
    """Delete a device from the database.

    :param argin: device name
    :type argin: tango.DevString
    :return: None
    :raises: DevFailed (DB_IncorrectDeviceName) for a malformed name
    """
    self._log.debug("In DbDeleteDevice()")
    valid, dev_name, _ = check_device_name(argin)
    if not valid:
        self.warn_stream("DataBase::db_delete_device(): device name " + argin + " incorrect ")
        th_exc(DB_IncorrectDeviceName,
               "failed to delete device, device name incorrect",
               "DataBase::DeleteDevice()")
    self.db.delete_device(dev_name)
python
{ "resource": "" }