code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
def disconnect(self, callback: Callable) -> None:
    """Disconnect the given callback.

    The callback will no longer receive events from this signal. No action
    is taken if the callback is not on the list of listener callbacks.

    :param callback: the callable to remove
    """
    assert check_argument_types()
    listeners = self.listeners
    if listeners is not None:
        try:
            listeners.remove(callback)
        except ValueError:
            # Not registered; removal is a no-op by contract.
            pass
5.870703
6.293667
0.932795
def dispatch_raw(self, event: Event) -> Awaitable[bool]:
    """Dispatch the given event object to all listeners.

    Creates a new task in which all listener callbacks are called with the
    given event as the only argument. Coroutine callbacks are converted to
    their own respective tasks and waited for concurrently.

    Before the dispatching is done, a snapshot of the listeners is taken and
    the event is only dispatched to those listeners, so adding a listener
    between the call to this method and the actual dispatching will only
    affect future calls to this method.

    :param event: the event object to dispatch
    :returns: an awaitable that completes when all the callbacks have been
        called (and any awaitables waited on) and resolves to ``True`` if
        there were no exceptions raised by the callbacks, ``False`` otherwise
    """
    async def do_dispatch() -> None:
        # Runs as a task: call every listener from the snapshot, collecting
        # any awaitables that the synchronous calls return.
        awaitables = []
        all_successful = True
        for callback in listeners:
            try:
                retval = callback(event)
            except Exception:
                logger.exception('Uncaught exception in event listener')
                all_successful = False
            else:
                if isawaitable(retval):
                    awaitables.append(retval)

        # For any callbacks that returned awaitables, wait for their
        # completion and log any exceptions they raised
        if awaitables:
            done, _ = await wait(awaitables, loop=loop)
            for f in done:
                exc = f.exception()
                if exc is not None:
                    all_successful = False
                    logger.error('Uncaught exception in event listener',
                                 exc_info=exc)

        # The caller may have cancelled the returned future meanwhile.
        if not future.cancelled():
            future.set_result(all_successful)

    if not isinstance(event, self.event_class):
        raise TypeError('event must be of type {}'.format(
            qualified_name(self.event_class)))

    loop = get_event_loop()
    future = loop.create_future()
    if self.listeners:
        # Snapshot the listener list so late registrations don't affect
        # this particular dispatch.
        listeners = list(self.listeners)
        loop.create_task(do_dispatch())
    else:
        future.set_result(True)

    return future
2.719892
2.621584
1.037499
def dispatch(self, *args, **kwargs) -> Awaitable[bool]:
    """Create and dispatch an event.

    This method constructs an event object of ``self.event_class`` and then
    passes it to :meth:`dispatch_raw` for the actual dispatching.

    :param args: positional arguments to the constructor of the associated
        event class
    :param kwargs: keyword arguments to the constructor of the associated
        event class
    :returns: an awaitable that completes when all the callbacks have been
        called (and any awaitables waited on) and resolves to ``True`` if
        there were no exceptions raised by the callbacks, ``False`` otherwise
    """
    event = self.event_class(self.source(), cast(str, self.topic), *args,
                             **kwargs)
    return self.dispatch_raw(event)
6.752778
9.25385
0.729726
def wait_event(self, filter: Callable[[T_Event], bool] = None) -> Awaitable[T_Event]:
    """Shortcut for calling :func:`wait_event` with this signal in the first
    argument.

    :param filter: passed through to the module-level :func:`wait_event`
    :returns: the awaitable returned by the module-level :func:`wait_event`
    """
    return wait_event([self], filter)
22.362467
15.415379
1.45066
def stream_events(self, filter: Callable[[Event], bool] = None, *, max_queue_size: int = 0):
    """Shortcut for calling :func:`stream_events` with this signal in the
    first argument.

    :param filter: passed through to the module-level :func:`stream_events`
    :param max_queue_size: passed through to the module-level
        :func:`stream_events`
    """
    return stream_events([self], filter, max_queue_size=max_queue_size)
6.209915
3.460018
1.794764
num = ord(char) if char == "[": self.state = "escape-lb" elif char == "(": self.state = "charset-g0" elif char == ")": self.state = "charset-g1" elif num in self.escape: self.dispatch(self.escape[num]) self.state = "stream" elif self.fail_on_unknown_esc: raise StreamProcessError("Unexpected character '%c' == '0x%02x'" % (char, ord(char)))
def _escape_sequence(self, char)
Handle characters seen when in an escape sequence. Most non-vt52 commands start with a left-bracket after the escape and then a stream of parameters and a command.
4.976354
4.452121
1.117749
num = ord(char) if num in self.sequence: self.dispatch(self.sequence[num], *self.params) self.state = "stream" self.current_param = "" self.params = []
def _end_escape_sequence(self, char)
Handle the end of an escape sequence. The final character in an escape sequence is the command to execute, which corresponds to the event that is dispatched here.
6.652257
5.499281
1.209659
if char == ";": self.params.append(int(self.current_param)) self.current_param = "" elif char == "?": self.state = "mode" elif not char.isdigit(): if len(self.current_param) > 0: self.params.append(int(self.current_param)) # If we're in parameter parsing mode, but we see a non-numeric # value, it must be the end of the control sequence. self._end_escape_sequence(char) else: self.current_param += char
def _escape_parameters(self, char)
Parse parameters in an escape sequence. Parameters are a list of numbers in ascii (e.g. '12', '4', '42', etc) separated by a semicolon (e.g. "12;4;42"). See the [vt102 user guide](http://vt100.net/docs/vt102-ug/) for more details on the formatting of escape parameters.
3.813037
3.518253
1.083787
num = ord(char) if num in self.basic: self.dispatch(self.basic[num]) elif num == ctrl.ESC: self.state = "escape" elif num == 0x00: # nulls are just ignored. pass else: self.dispatch("print", char)
def _stream(self, char)
Process a character when in the default 'stream' state.
5.604036
4.910083
1.141332
def consume(self, char):
    """Consume a single character and advance the state as necessary."""
    # Map each parser state to the name of its handler method; unknown
    # states are ignored, as in a plain if/elif chain with no else.
    handler_names = {
        "stream": "_stream",
        "escape": "_escape_sequence",
        "escape-lb": "_escape_parameters",
        "mode": "_mode",
        "charset-g0": "_charset_g0",
        "charset-g1": "_charset_g1",
    }
    name = handler_names.get(self.state)
    if name is not None:
        getattr(self, name)(char)
2.784286
2.697542
1.032157
def process(self, chars):
    """Consume a string of characters, advancing the state as necessary.

    :param chars: an iterable of single characters fed to :meth:`consume`
    """
    # Iterate directly instead of repeatedly re-slicing the string
    # (chars = chars[1:]), which made the old implementation quadratic
    # in the length of the input.
    for char in chars:
        self.consume(char)
4.519554
3.977754
1.136208
def add_event_listener(self, event, function):
    """Add an event listener for a particular event.

    Depending on the event there may or may not be parameters passed to
    function. Most escape streams also allow for an empty set of parameters
    (with a default value). Providing these default values and accepting
    variable arguments is the responsibility of function. More than one
    listener may be added for a single event; each listener will be called.

    * **event** The event to listen for.
    * **function** The callable to invoke.
    """
    self.listeners.setdefault(event, []).append(function)
2.679869
2.748874
0.974897
def dispatch(self, event, *args):
    """Dispatch an event where `args` is a tuple of the arguments to send
    to any callbacks. If any callback throws an exception, the subsequent
    callbacks will be aborted.
    """
    # Unpacking an empty args tuple is identical to calling with no
    # arguments, so a single call form covers both cases.
    for listener in self.listeners.get(event, []):
        listener(*args)
3.177264
2.960003
1.073399
def attach(self, events):
    """Attach this screen to an events object that processes commands and
    dispatches events. Sets up the appropriate event handlers so that the
    screen will update itself automatically as the events object processes
    data.
    """
    if events is None:
        return
    # (event name, handler) pairs, registered in the original order.
    handlers = [
        ("print", self._print),
        ("backspace", self._backspace),
        ("tab", self._tab),
        ("linefeed", self._linefeed),
        ("reverse-linefeed", self._reverse_linefeed),
        ("carriage-return", self._carriage_return),
        ("index", self._index),
        ("reverse-index", self._reverse_index),
        ("store-cursor", self._save_cursor),
        ("restore-cursor", self._restore_cursor),
        ("cursor-up", self._cursor_up),
        ("cursor-down", self._cursor_down),
        ("cursor-right", self._cursor_forward),
        ("cursor-left", self._cursor_back),
        ("cursor-move", self._cursor_position),
        ("erase-in-line", self._erase_in_line),
        ("erase-in-display", self._erase_in_display),
        ("delete-characters", self._delete_character),
        ("insert-lines", self._insert_line),
        ("delete-lines", self._delete_line),
        ("select-graphic-rendition", self._select_graphic_rendition),
        ("charset-g0", self._charset_g0),
        ("charset-g1", self._charset_g1),
        ("shift-in", self._shift_in),
        ("shift-out", self._shift_out),
        ("bell", self._bell),
    ]
    for name, handler in handlers:
        events.add_event_listener(name, handler)
1.607422
1.589663
1.011171
def resize(self, shape):
    """Resize the screen.

    If the requested screen size has more rows than the existing screen,
    rows will be added at the bottom. If the requested size has less rows
    than the existing screen, rows will be clipped at the top. Similarly,
    if the existing screen has less columns than the requested size,
    columns will be added at the right, and if it has more, columns will
    be clipped at the right.

    :param shape: a ``(rows, cols)`` tuple with the new dimensions
    :returns: the new ``(rows, cols)`` size tuple
    """
    rows, cols = shape

    # Honestly though, you can't trust anyone these days...
    assert(rows > 0 and cols > 0)

    # First resize the rows
    if self.size[0] < rows:
        # If the current display size is shorter than the requested screen
        # size, then add rows to the bottom. Note that the old column size
        # is used here so these new rows will get expanded/contracted as
        # necessary by the column resize when it happens next.
        self.display += [u" " * self.size[1]] * (rows - self.size[0])
        self.attributes += [[self.default_attributes] * self.size[1]] * \
            (rows - self.size[0])
    elif self.size[0] > rows:
        # If the current display size is taller than the requested display,
        # then take rows off the top.
        self.display = self.display[self.size[0]-rows:]
        self.attributes = self.attributes[self.size[0]-rows:]

    # Next, of course, resize the columns.
    if self.size[1] < cols:
        # If the current display size is thinner than the requested size,
        # expand each row to be the new size.
        self.display = \
            [row + (u" " * (cols - self.size[1])) for row in self.display]
        self.attributes = \
            [row + ([self.default_attributes] * (cols - self.size[1]))
             for row in self.attributes]
    elif self.size[1] > cols:
        # If the current display size is fatter than the requested size,
        # then trim each row from the right to be the new size. The
        # negative slice index cols - self.size[1] keeps exactly the first
        # ``cols`` entries of each row.
        self.display = [row[:cols-self.size[1]] for row in self.display]
        self.attributes = [row[:cols-self.size[1]]
                           for row in self.attributes]

    self.size = (rows, cols)
    return self.size
3.198409
3.075317
1.040026
def _print(self, char):
    """Print a character at the current cursor position and advance the
    cursor.

    :param char: a single character
    """
    # Don't make bugs where we try to print a screen.
    assert len(char) == 1

    try:
        try:
            # Python 3
            char = self.decoder(bytes(char, self.encoding))[0]
        except TypeError:
            # Python 2.x
            char = self.decoder(char)[0]
    except UnicodeDecodeError:
        # Undecodable input is rendered as a placeholder.
        char = "?"

    # Apply the active character-set translation table, if any.
    if self.current_charset == "g0" and self.g0 is not None:
        char = char.translate(self.g0)
    elif self.current_charset == "g1" and self.g1 is not None:
        char = char.translate(self.g1)

    row = self.display[self.y]
    self.display[self.y] = row[:self.x] + char + row[self.x+1:]

    attrs = self.attributes[self.y]
    self.attributes[self.y] = attrs[:self.x] + [self.cursor_attributes] + \
        attrs[self.x+1:]

    self.x += 1
    if self.x >= self.size[1]:
        # If this was the last column in a row, move the cursor to the
        # next row.
        self._linefeed()
3.181571
3.196281
0.995398
if self.y + 1 >= self.size[0]: # If the cursor is currently on the last row, then spawn another # and scroll down (removing the top row). self.display = self.display[1:] + [u" " * self.size[1]] else: # If the cursor is anywhere else, then just move it to the # next line. self.y += 1
def _index(self)
Move the cursor down one row in the same column. If the cursor is at the last row, create a new row at the bottom.
7.178697
5.448712
1.317503
if self.y == 0: # If the cursor is currently at the first row, then scroll the # screen up. self.display = [u" " * self.size[1]] + self.display[:-1] else: # If the cursor is anywhere other than the first row than just move # it up by one row. self.y -= 1
def _reverse_index(self)
Move the cursor up one row in the same column. If the cursor is at the first row, create a new row at the top.
6.455103
4.344891
1.485677
for stop in sorted(self.tabstops): if self.x < stop: return stop return self.size[1] - 1
def _next_tab_stop(self)
Return the x value of the next available tabstop or the x value of the margin if there are no more tabstops.
8.324388
7.487428
1.111782
if len(self.cursor_save_stack): self.x, self.y = self.cursor_save_stack.pop()
def _restore_cursor(self)
Set the current cursor position to whatever cursor is on top of the stack.
5.172084
3.688607
1.402178
trimmed = self.display[:self.y+1] + \ [u" " * self.size[1]] * count + \ self.display[self.y+1:self.y+count+1] self.display = trimmed[:self.size[0]]
def _insert_line(self, count=1)
Inserts lines at line with cursor. Lines displayed below cursor move down. Lines moved past the bottom margin are lost.
4.851087
4.459545
1.087799
self.display = self.display[:self.y] + \ self.display[self.y+1:] self.display.append([u" " * self.size[1]] * count) self.attributes = self.attributes[:self.y] + \ self.attributes[self.y+1:] last_attributes = self.attributes[-1] for _ in xrange(count): self.attributes.append(copy(last_attributes))
def _delete_line(self, count=1)
Deletes count lines, starting at line with cursor. As lines are deleted, lines displayed below cursor move up. Lines added to bottom of screen have spaces with same character attributes as last line moved up.
3.719603
3.360107
1.106989
# First resize the text display row = self.display[self.y] count = min(count, self.size[1] - self.x) row = row[:self.x] + row[self.x+count:] + u" " * count self.display[self.y] = row # Then resize the attribute array too attrs = self.attributes[self.y] attrs = attrs[:self.x] + attrs[self.x+count:] + [self.default_attributes] * count self.attributes[self.y] = attrs
def _delete_character(self, count=1)
Deletes count characters, starting with the character at cursor position. When a character is deleted, all characters to the right of cursor move left.
3.618522
3.444612
1.050488
row = self.display[self.y] attrs = self.attributes[self.y] if type_of == 0: # Erase from the cursor to the end of line, including the cursor row = row[:self.x] + u" " * (self.size[1] - self.x) attrs = attrs[:self.x] + [self.default_attributes] * (self.size[1] - self.x) elif type_of == 1: # Erase from the beginning of the line to the cursor, including it row = u" " * (self.x+1) + row[self.x+1:] attrs = [self.default_attributes] * (self.x+1) + attrs[self.x+1:] elif type_of == 2: # Erase the entire line. row = u" " * self.size[1] attrs = [self.default_attributes] * self.size[1] self.display[self.y] = row self.attributes[self.y] = attrs
def _erase_in_line(self, type_of=0)
Erases the row in a specific way, depending on the type_of.
2.003434
1.962283
1.020971
self.y = min(self.size[0] - 1, self.y + count)
def _cursor_down(self, count=1)
Moves cursor down count lines in same column. Cursor stops at bottom margin.
5.078432
4.677591
1.085694
self.x = min(self.size[1] - 1, self.x + count)
def _cursor_forward(self, count=1)
Moves cursor right count columns. Cursor stops at right margin.
6.970177
5.987589
1.164104
if row == 0: row = 1 if column == 0: column = 1 self.y = min(row - 1, self.size[0] - 1) self.x = min(column - 1, self.size[1] - 1)
def _cursor_position(self, row=0, column=0)
Set the cursor to a specific row and column. Obnoxiously row/column is 1 based, instead of zero based, so we need to compensate. I know I've created bugs in here somehow. Confoundingly, inputs of 0 are still acceptable, and should move to the beginning of the row/column as if they were 1. *sigh*
2.701973
2.358497
1.145633
def _text_attr(self, attr):
    """Given a text attribute code, set the current cursor attributes
    appropriately.
    """
    attr = text[attr]
    if attr == "reset":
        self.cursor_attributes = self.default_attributes
    elif attr in ("underline-off", "blink-off", "reverse-off"):
        # An "<name>-off" attribute clears attribute "<name>".
        self.cursor_attributes = self._remove_text_attr(attr[:-4])
    else:
        self.cursor_attributes = self._add_text_attr(attr)
2.463657
2.158023
1.141627
def _color_attr(self, ground, attr):
    """Given a color attribute, set the current cursor's foreground or
    background color appropriately.
    """
    color = colors[ground][attr]
    current = self.cursor_attributes
    # cursor_attributes is a (text, foreground, background) triple.
    if ground == "foreground":
        self.cursor_attributes = (current[0], color, current[2])
    elif ground == "background":
        self.cursor_attributes = (current[0], current[1], color)
4.054319
3.403581
1.191192
def _set_attr(self, attr):
    """Given some text attribute code, set the current cursor attributes
    appropriately, dispatching to the text-style or color handlers.

    Unknown attribute codes are silently ignored.
    """
    if attr in text:
        self._text_attr(attr)
    elif attr in colors["foreground"]:
        self._color_attr("foreground", attr)
    elif attr in colors["background"]:
        self._color_attr("background", attr)
3.540844
3.287632
1.07702
if len(attrs) == 0: # No arguments means that we're really trying to do a reset. attrs = [0] for attr in attrs: self._set_attr(attr)
def _select_graphic_rendition(self, *attrs)
Set the current text attribute.
6.703663
5.403561
1.240601
def as_dict(self):
    """Return the :class:`~flatdict.FlatDict` as a :class:`dict`

    :rtype: dict
    """
    out = dict({})
    for key in self.keys():
        if self._has_delimiter(key):
            # Split the flattened key into parent and first-level child.
            pk, ck = key.split(self._delimiter, 1)
            if self._has_delimiter(ck):
                ck = ck.split(self._delimiter, 1)[0]
            if isinstance(self._values[pk], FlatDict) and pk not in out:
                out[pk] = dict()
            if isinstance(self._values[pk][ck], FlatDict):
                # Nested FlatDicts are converted recursively.
                out[pk][ck] = self._values[pk][ck].as_dict()
            else:
                out[pk][ck] = self._values[pk][ck]
        else:
            out[key] = self._values[key]
    return out
2.399121
2.290485
1.047429
def keys(self):
    """Return a copy of the flat dictionary's list of keys, sorted.

    See the note for :meth:`flatdict.FlatDict.items`.

    :rtype: list
    """
    keys = []
    for key, value in self._values.items():
        if isinstance(value, (FlatDict, dict)):
            # Flatten one level by joining the parent key with each child
            # key; an empty nested mapping still contributes its own key.
            nested = [self._delimiter.join([key, k]) for k in value.keys()]
            keys += nested if nested else [key]
        else:
            keys.append(key)
    return sorted(keys)
3.53647
4.008951
0.882144
def pop(self, key, default=NO_DEFAULT):
    """If key is in the flat dictionary, remove it and return its value,
    else return default. If default is not given and key is not in the
    dictionary, :exc:`KeyError` is raised.

    :param mixed key: The key name
    :param mixed default: The default value
    :rtype: mixed
    """
    if key not in self and default != NO_DEFAULT:
        return default
    # A missing key with no default falls through to this lookup, which
    # raises KeyError just like dict.pop would.
    value = self[key]
    del self[key]
    return value
3.139938
3.677162
0.853903
def setdefault(self, key, default):
    """If key is in the flat dictionary, return its value. If not, insert
    key with a value of default and return default.

    Mirrors :meth:`dict.setdefault`: an existing value is returned
    unchanged, even when it is falsy.

    :param mixed key: The key name
    :param mixed default: The default value
    :rtype: mixed
    """
    # Only insert when the key is genuinely missing. The old check also
    # overwrote existing falsy values (0, '', {}), contradicting the
    # documented dict.setdefault contract.
    if key not in self:
        self.__setitem__(key, default)
    return self.__getitem__(key)
3.146462
3.744535
0.840281
def set_delimiter(self, delimiter):
    """Override the default or passed in delimiter with a new value.

    :param str delimiter: The delimiter to use
    :raises: ValueError if the requested delimiter already exists in a key
    """
    for key in self.keys():
        if delimiter in key:
            # Format the message explicitly -- the old code passed the key
            # and delimiter as extra ValueError constructor arguments,
            # leaving the {!r} placeholders unfilled.
            raise ValueError(
                'Key {!r} collides with delimiter {!r}'.format(
                    key, delimiter))
    self._delimiter = delimiter
    # Propagate the new delimiter into nested FlatDicts.
    for key in self._values.keys():
        if isinstance(self._values[key], FlatDict):
            self._values[key].set_delimiter(delimiter)
3.694202
3.88234
0.95154
def as_dict(self):
    """Return the :class:`~flatdict.FlatterDict` as a nested :class:`dict`.

    :rtype: dict
    """
    out = dict({})
    for key in self.keys():
        if self._has_delimiter(key):
            # Split the flattened key into parent and first-level child.
            pk, ck = key.split(self._delimiter, 1)
            if self._has_delimiter(ck):
                ck = ck.split(self._delimiter, 1)[0]
            if isinstance(self._values[pk], FlatterDict) and pk not in out:
                out[pk] = dict()
            if isinstance(self._values[pk][ck], FlatterDict):
                # Child FlatterDicts remember the collection type they
                # were built from; restore it on the way out.
                if self._values[pk][ck].original_type == tuple:
                    out[pk][ck] = tuple(self._child_as_list(pk, ck))
                elif self._values[pk][ck].original_type == list:
                    out[pk][ck] = self._child_as_list(pk, ck)
                elif self._values[pk][ck].original_type == set:
                    out[pk][ck] = set(self._child_as_list(pk, ck))
                elif self._values[pk][ck].original_type == dict:
                    out[pk][ck] = self._values[pk][ck].as_dict()
            else:
                out[pk][ck] = self._values[pk][ck]
        else:
            out[key] = self._values[key]
    return out
2.126483
1.991951
1.067538
return [self._values[pk][ck][k] for k in sorted(self._values[pk][ck].keys(), key=lambda x: int(x))]
def _child_as_list(self, pk, ck)
Returns a list of values from the child FlatterDict instance with string based integer keys. :param str pk: The parent key :param str ck: The child key :rtype: list
3.944905
4.052716
0.973398
def gen_query(self):
    """Generate an SQL query for the edge object."""
    if self.dst is None:
        # No destination: select forwards along the relation from src.
        return SQL.forwards_relation(self.src, self.rel)
    # Destination given: select backwards along the relation to dst.
    return SQL.inverse_relation(self.dst, self.rel)
8.482893
6.802137
1.247092
def derived(self, statement, params=(), replace=False):
    """Return a new query object set up correctly with the *statement* and
    *params* appended to the end of the new instance's internal query and
    params, along with the current instance's connection.

    :param statement: The SQL query string to append.
    :param params: The parameters to append.
    :param replace: Whether to replace the entire SQL query.
    """
    sql = (statement,) if replace else self.sql + (statement,)
    return Query(db=self.db, sql=sql, params=self.params + params)
5.269507
5.983434
0.880683
def traverse(self, edge):
    """Traverse the graph, selecting the destination nodes for a particular
    relation that the selected nodes are a source of, i.e. select the
    friends of my friends. You can traverse indefinitely.

    :param edge: The edge query. If the edge's destination node is
        specified then the source nodes will be selected.
    """
    base = self.statement
    if edge.dst is None:
        statement, params = SQL.compound_fwd_query(base, edge.rel)
    else:
        statement, params = SQL.compound_inv_query(base, edge.rel, edge.dst)
    # Replace the accumulated SQL entirely with the compound query.
    return self.derived(statement, params, replace=True)
8.336209
9.789317
0.851562
def setup_sql(self, graphs):
    """Set up the SQL tables for the graph object, and create indexes.

    :param graphs: The graphs to create.
    """
    with closing(self.db.cursor()) as cursor:
        for table in graphs:
            cursor.execute(SQL.CREATE_TABLE % (table))
            # Each table gets the full set of supporting indexes.
            for index in SQL.INDEXES:
                cursor.execute(index % (table))
        self.db.commit()
3.442319
3.492823
0.985541
def remove(src, rel, dst):
    """Return an SQL statement that removes edges from the SQL backing
    store. Either `src` or `dst` may be specified, even both.

    :param src: The source node.
    :param rel: The relation.
    :param dst: The destination node.
    """
    clauses = []
    params = []
    # Build one WHERE clause per endpoint actually supplied.
    for column, value in (('src', src), ('dst', dst)):
        if value is not None:
            clauses.append('%s = ?' % column)
            params.append(value)
    statement = 'DELETE FROM %s' % rel
    if clauses:
        statement = '%s WHERE %s' % (statement, ' AND '.join(clauses))
    return statement, params
2.478147
2.61944
0.94606
def limit(lower, upper):
    """Return a SQLite-compliant LIMIT statement that takes the *lower* and
    *upper* bounds into account.

    :param lower: The lower bound.
    :param upper: The upper bound.
    """
    start = 0 if not lower else lower
    if upper:
        count = upper - start
    else:
        # -1 means "no limit" in SQLite.
        count = -1
    return ('LIMIT %d OFFSET %d' % (count, start), ())
6.080163
9.210752
0.660116
def perform_ops(self):
    """Perform the stored operations on the database connection inside an
    explicit transaction.
    """
    with self.db, closing(self.db.cursor()) as cursor:
        cursor.execute('BEGIN TRANSACTION')
        self._perform_ops(cursor)
4.126709
3.448261
1.196751
def from_url(cls, url, **kwargs):
    """Create a client from a url.

    The url's host, port and scheme are used as defaults (``https``
    selects ``urllib3.HTTPSConnectionPool``); explicit keyword arguments
    take precedence via :meth:`dict.setdefault`.
    """
    url = urllib3.util.parse_url(url)

    if url.host:
        kwargs.setdefault('host', url.host)

    if url.port:
        kwargs.setdefault('port', url.port)

    if url.scheme == 'https':
        kwargs.setdefault('connection_class', urllib3.HTTPSConnectionPool)

    return cls(**kwargs)
2.513372
2.214062
1.135186
def connect(self):
    """Initialize connection to the nsqd.

    :raises errors.NSQException: if the connection was already closed
    """
    if self.state == DISCONNECTED:
        # A closed connection cannot be reused.
        raise errors.NSQException('connection already closed')

    if self.is_connected:
        return

    stream = Stream(self.address, self.port, self.timeout)
    stream.connect()

    self.stream = stream
    self.state = CONNECTED
    # Announce protocol version V2 to the server.
    self.send(nsq.MAGIC_V2)
4.857443
4.293814
1.131266
def close_stream(self):
    """Close the underlying socket and mark the connection disconnected."""
    if not self.is_connected:
        return

    self.stream.close()
    self.state = DISCONNECTED
    # Notify receivers of the on_close signal.
    self.on_close.send(self)
5.698748
4.778883
1.192485
def read_response(self):
    """Read an individual response from nsqd.

    :returns: tuple of the frame type and the processed data.
    :raises errors.NSQFrameError: if the frame type is unknown
    """
    response = self._read_response()
    frame, data = nsq.unpack_response(response)
    self.last_response = time.time()

    if frame not in self._frame_handlers:
        raise errors.NSQFrameError('unknown frame {}'.format(frame))

    # Each frame type has its own handler that post-processes the payload.
    frame_handler = self._frame_handlers[frame]
    processed_data = frame_handler(data)

    return frame, processed_data
4.230399
3.526681
1.199541
def identify(self):
    """Update client metadata on the server and negotiate features.

    :returns: nsqd response data if there was feature negotiation,
        otherwise ``None``
    """
    self.send(nsq.identify({
        # nsqd 0.2.28+
        'client_id': self.client_id,
        'hostname': self.hostname,

        # nsqd 0.2.19+
        'feature_negotiation': True,
        'heartbeat_interval': self.heartbeat_interval,

        # nsqd 0.2.21+
        'output_buffer_size': self.output_buffer_size,
        'output_buffer_timeout': self.output_buffer_timeout,

        # nsqd 0.2.22+
        'tls_v1': self.tls_v1,

        # nsqd 0.2.23+
        'snappy': self.snappy,
        'deflate': self.deflate,
        'deflate_level': self.deflate_level,

        # nsqd 0.2.25+
        'sample_rate': self.sample_rate,
        'user_agent': self.user_agent,
    }))

    frame, data = self.read_response()

    if frame == nsq.FRAME_TYPE_ERROR:
        raise data

    if data == nsq.OK:
        # Server did not negotiate features; nothing more to do.
        return

    try:
        data = json.loads(data.decode('utf-8'))
    except ValueError:
        self.close_stream()
        raise errors.NSQException(
            'failed to parse IDENTIFY response JSON from nsqd: '
            '{!r}'.format(data))

    self.max_ready_count = data.get('max_rdy_count', self.max_ready_count)

    if self.tls_v1 and data.get('tls_v1'):
        self.upgrade_to_tls()

    if self.snappy and data.get('snappy'):
        self.upgrade_to_snappy()
    elif self.deflate and data.get('deflate'):
        self.deflate_level = data.get('deflate_level', self.deflate_level)
        # NOTE(review): 'upgrade_to_defalte' looks like a typo of
        # 'upgrade_to_deflate' -- confirm against the method definition
        # before renaming either side.
        self.upgrade_to_defalte()

    if self.auth_secret and data.get('auth_required'):
        self.auth()

    return data
2.694986
2.533814
1.063609
def auth(self):
    """Send authorization secret to nsqd.

    :returns: the parsed AUTH response
    :raises errors.NSQException: if the response JSON cannot be parsed
    """
    self.send(nsq.auth(self.auth_secret))
    frame, data = self.read_response()

    if frame == nsq.FRAME_TYPE_ERROR:
        raise data

    try:
        response = json.loads(data.decode('utf-8'))
    except ValueError:
        self.close_stream()
        raise errors.NSQException(
            'failed to parse AUTH response JSON from nsqd: '
            '{!r}'.format(data))

    # Notify receivers of the on_auth signal with the server's response.
    self.on_auth.send(self, response=response)
    return response
5.295287
4.231578
1.251374
def subscribe(self, topic, channel):
    """Subscribe to a nsq `topic` and `channel`."""
    self.send(nsq.subscribe(topic, channel))
17.903786
7.504282
2.385809
def publish(self, topic, data, defer=None):
    """Publish a message to the given topic over tcp.

    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: duration in milliseconds to defer before publishing
        (requires nsq 0.3.6)
    """
    if defer is None:
        self.send(nsq.publish(topic, data))
    else:
        # Deferred publish uses a distinct command.
        self.send(nsq.deferpublish(topic, data, defer))
4.170641
3.748152
1.112719
def multipublish(self, topic, messages):
    """Publish an iterable of messages to the given topic over tcp.

    :param topic: the topic to publish to
    :param messages: iterable of bytestrings to publish
    """
    self.send(nsq.multipublish(topic, messages))
14.812643
23.804121
0.622272
def ready(self, count):
    """Indicate you are ready to receive ``count`` messages."""
    # Track the advertised count locally before telling the server.
    self.ready_count = count
    self.send(nsq.ready(count))
12.666507
11.020093
1.149401
def finish(self, message_id):
    """Finish a message (indicate successful processing)."""
    self.send(nsq.finish(message_id))
    self.finish_inflight()
    # Notify receivers of the on_finish signal.
    self.on_finish.send(self, message_id=message_id)
6.085169
6.07461
1.001738
def requeue(self, message_id, timeout=0, backoff=True):
    """Re-queue a message (indicate failure to process).

    :param message_id: the id of the message to requeue
    :param timeout: requeue delay, forwarded to nsqd
    :param backoff: forwarded to receivers of the on_requeue signal
    """
    self.send(nsq.requeue(message_id, timeout))
    self.finish_inflight()
    self.on_requeue.send(
        self,
        message_id=message_id,
        timeout=timeout,
        backoff=backoff
    )
5.003385
4.799942
1.042384
def publish(self, topic, data, defer=None):
    """Publish a message to the given topic over http.

    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: duration in milliseconds to defer before publishing
        (requires nsq 0.3.6)
    """
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}

    if defer is not None:
        # The HTTP API takes the defer duration as a query parameter.
        fields['defer'] = '{}'.format(defer)

    return self._request('POST', '/pub', fields=fields, body=data)
4.922667
4.524709
1.087952
def multipublish(self, topic, messages, binary=False):
    """Publish an iterable of messages to the given topic over http.

    :param topic: the topic to publish to
    :param messages: iterable of bytestrings to publish
    :param binary: enable binary mode. defaults to False
        (requires nsq 1.0.0)

    By default multipublish expects messages to be delimited by ``"\\n"``;
    use the binary flag to enable binary mode, where the POST body is
    expected to be in the nsq wire protocol format.
    """
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}

    if binary:
        fields['binary'] = 'true'
        body = nsq.multipublish_body(messages)
    else:
        # Newline-delimited mode: each message is passed through
        # _validate_mpub_message before joining.
        body = b'\n'.join(self._validate_mpub_message(m) for m in messages)

    return self._request('POST', '/mpub', fields=fields, body=body)
4.928522
4.401577
1.119717
def create_topic(self, topic):
    """Create a topic."""
    nsq.assert_valid_topic_name(topic)
    return self._request('POST', '/topic/create', fields={'topic': topic})
8.117567
7.394241
1.097823
def delete_topic(self, topic):
    """Delete a topic."""
    nsq.assert_valid_topic_name(topic)
    return self._request('POST', '/topic/delete', fields={'topic': topic})
9.111859
7.551358
1.206652
def empty_topic(self, topic):
    """Empty all the queued messages for an existing topic."""
    nsq.assert_valid_topic_name(topic)
    return self._request('POST', '/topic/empty', fields={'topic': topic})
8.538266
7.977348
1.070314
def empty_channel(self, topic, channel):
    """Empty all the queued messages for an existing channel."""
    nsq.assert_valid_topic_name(topic)
    nsq.assert_valid_channel_name(channel)
    return self._request('POST', '/channel/empty',
                         fields={'topic': topic, 'channel': channel})
4.186464
3.984723
1.050628
def pause_topic(self, topic):
    """Pause message flow to all channels on an existing topic.

    Messages will queue at the topic.
    """
    nsq.assert_valid_topic_name(topic)
    return self._request('POST', '/topic/pause', fields={'topic': topic})
9.36878
9.742542
0.961636
def unpause_topic(self, topic):
    """Resume message flow to channels of an existing, paused, topic."""
    nsq.assert_valid_topic_name(topic)
    return self._request('POST', '/topic/unpause', fields={'topic': topic})
8.658853
7.206508
1.201532
def stats(self, topic=None, channel=None, text=False):
    """Return internal instrumented statistics.

    :param topic: (optional) filter to topic
    :param channel: (optional) filter to channel
    :param text: return the stats as a string (default: ``False``)
    """
    fields = {'format': 'text' if text else 'json'}

    if topic:
        nsq.assert_valid_topic_name(topic)
        fields['topic'] = topic

    if channel:
        nsq.assert_valid_channel_name(channel)
        fields['channel'] = channel

    return self._request('GET', '/stats', fields=fields)
2.692087
3.084426
0.8728
def publish_tcp(self, topic, data, **kwargs):
    """Use :meth:`NsqdTCPClient.publish` instead.

    .. deprecated:: 1.0.0
    """
    # Name-mangled attribute, resolved against the enclosing class.
    return self.__tcp_client.publish(topic, data, **kwargs)
5.333361
7.175383
0.743286
def publish_http(self, topic, data, **kwargs):
    """Use :meth:`NsqdHTTPClient.publish` instead.

    .. deprecated:: 1.0.0
    """
    # Return the client's response like the sibling deprecated wrappers
    # (publish_tcp, multipublish_http) do; the old body dropped it.
    return self.__http_client.publish(topic, data, **kwargs)
4.9606
6.214271
0.798259
def multipublish_tcp(self, topic, messages, **kwargs):
    """Use :meth:`NsqdTCPClient.multipublish` instead.

    .. deprecated:: 1.0.0
    """
    return self.__tcp_client.multipublish(topic, messages, **kwargs)
5.318666
6.081954
0.874499
return self.__http_client.multipublish(topic, messages, **kwargs)
def multipublish_http(self, topic, messages, **kwargs)
Use :meth:`NsqdHTTPClient.multipublish` instead. .. deprecated:: 1.0.0
4.968454
5.208436
0.953925
if self._state == CLOSED: raise NSQException('producer already closed') if self.is_running: self.logger.warn('producer already started') return self.logger.debug('starting producer...') self._state = RUNNING for address in self.nsqd_tcp_addresses: address, port = address.split(':') self.connect_to_nsqd(address, int(port))
def start(self)
Start discovering and listing to connections.
4.139413
3.853991
1.074059
if not self.is_running: return self._state = CLOSED self.logger.debug('closing connection(s)') while True: try: conn = self._connections.get(block=False) except Empty: break conn.close_stream() self.on_close.send(self)
def close(self)
Immediately close all connections and stop workers.
4.61198
4.044906
1.140195
self._workers.join(timeout, raise_error)
def join(self, timeout=None, raise_error=False)
Block until all connections have closed and workers stopped.
13.472239
5.574904
2.416587
result = AsyncResult() conn = self._get_connection(block=block, timeout=timeout) try: self._response_queues[conn].append(result) conn.publish(topic, data, defer=defer) finally: self._put_connection(conn) if raise_error: return result.get() return result
def publish(self, topic, data, defer=None, block=True, timeout=None, raise_error=True)
Publish a message to the given topic. :param topic: the topic to publish to :param data: bytestring data to publish :param defer: duration in milliseconds to defer before publishing (requires nsq 0.3.6) :param block: wait for a connection to become available before publishing the message. If block is `False` and no connections are available, :class:`~gnsq.errors.NSQNoConnections` is raised :param timeout: if timeout is a positive number, it blocks at most ``timeout`` seconds before raising :class:`~gnsq.errors.NSQNoConnections` :param raise_error: if ``True``, it blocks until a response is received from the nsqd server, and any error response is raised. Otherwise an :class:`~gevent.event.AsyncResult` is returned
3.546149
3.67052
0.966116
result = AsyncResult() conn = self._get_connection(block=block, timeout=timeout) try: self._response_queues[conn].append(result) conn.multipublish(topic, messages) finally: self._put_connection(conn) if raise_error: return result.get() return result
def multipublish(self, topic, messages, block=True, timeout=None, raise_error=True)
Publish an iterable of messages to the given topic. :param topic: the topic to publish to :param messages: iterable of bytestrings to publish :param block: wait for a connection to become available before publishing the message. If block is `False` and no connections are available, :class:`~gnsq.errors.NSQNoConnections` is raised :param timeout: if timeout is a positive number, it blocks at most ``timeout`` seconds before raising :class:`~gnsq.errors.NSQNoConnections` :param raise_error: if ``True``, it blocks until a response is received from the nsqd server, and any error response is raised. Otherwise an :class:`~gevent.event.AsyncResult` is returned
3.67506
3.924872
0.936351
if self._has_responded: raise NSQException('already responded') self._has_responded = True self.on_finish.send(self)
def finish(self)
Respond to nsqd that you’ve processed this message successfully (or would like to silently discard it).
6.743006
4.968858
1.357054
if self._has_responded: raise NSQException('already responded') self._has_responded = True self.on_requeue.send(self, timeout=time_ms, backoff=backoff)
def requeue(self, time_ms=0, backoff=True)
Respond to nsqd that you’ve failed to process this message successfully (and would like it to be requeued).
5.020082
4.519213
1.110831
nsq.assert_valid_topic_name(topic) return self._request('GET', '/lookup', fields={'topic': topic})
def lookup(self, topic)
Returns producers for a topic.
9.409066
8.312818
1.131874
nsq.assert_valid_topic_name(topic) return self._request('GET', '/channels', fields={'topic': topic})
def channels(self, topic)
Returns all known channels of a topic.
9.557205
7.786727
1.227371
nsq.assert_valid_topic_name(topic) return self._request('POST', '/topic/tombstone', fields={'topic': topic, 'node': node})
def tombstone_topic(self, topic, node)
Tombstones a specific producer of an existing topic.
6.308685
5.538874
1.138983
if self._state == INIT: if not any(self.on_message.receivers_for(blinker.ANY)): raise RuntimeError('no receivers connected to on_message') self.logger.debug('starting %s...', self.name) self._state = RUNNING self.query_nsqd() if self.lookupds: self.query_lookupd() self._killables.add(self._workers.spawn(self._poll_lookupd)) self._killables.add(self._workers.spawn(self._poll_ready)) else: self.logger.warn('%s already started', self.name) if block: self.join()
def start(self, block=True)
Start discovering and listing to connections.
5.629911
5.517345
1.020402
if not self.is_running: return self._state = CLOSED self.logger.debug('killing %d worker(s)', len(self._killables)) self._killables.kill(block=False) self.logger.debug('closing %d connection(s)', len(self._connections)) for conn in self._connections: conn.close_stream() self.on_close.send(self)
def close(self)
Immediately close all connections and stop workers.
4.000921
3.673703
1.08907
if not self.connections: raise NSQNoConnections() conn = random.choice(list(self.connections)) conn.publish(topic, message)
def publish(self, topic, message)
Use :class:`~gnsq.Producer` instead. .. deprecated:: 1.0.0
5.334084
5.458473
0.977212
@functools.wraps(fn) def wrapper(*args, **kwargs): warnings.warn(fn.__doc__.split('\n')[0], category=DeprecationWarning, stacklevel=2) return fn(*args, **kwargs) return wrapper
def deprecated(fn)
Mark a function as deprecated and warn the user on use.
1.795988
1.829571
0.981644
return { lno: _a_if_not_none(instr._stolen_by, instr) for lno, instr in lnotab.items() }
def _new_lnotab(instrs, lnotab)
The updated lnotab after the instructions have been transformed. Parameters ---------- instrs : iterable[Instruction] The new instructions. lnotab : dict[Instruction -> int] The lnotab for the old code object. Returns ------- new_lnotab : dict[Instruction -> int] The post transform lnotab.
10.40419
11.540155
0.901564
return tuple( self.transform(Code.from_pycode(const)).to_pycode() if isinstance(const, CodeType) else const for const in consts )
def transform_consts(self, consts)
transformer for the co_consts field. Override this method to transform the `co_consts` of the code object. Parameters ---------- consts : tuple The co_consts Returns ------- new_consts : tuple The new constants.
7.18805
8.504675
0.845188
# reverse lookups from for constants and names. reversed_consts = {} reversed_names = {} reversed_varnames = {} for instr in code: if isinstance(instr, LOAD_CONST): reversed_consts[instr] = instr.arg if instr.uses_name: reversed_names[instr] = instr.arg if isinstance(instr, (STORE_FAST, LOAD_FAST)): reversed_varnames[instr] = instr.arg instrs, consts = tuple(zip(*reversed_consts.items())) or ((), ()) for instr, const in zip(instrs, self.transform_consts(consts)): instr.arg = const instrs, names = tuple(zip(*reversed_names.items())) or ((), ()) for instr, name_ in zip(instrs, self.transform_names(names)): instr.arg = name_ instrs, varnames = tuple(zip(*reversed_varnames.items())) or ((), ()) for instr, varname in zip(instrs, self.transform_varnames(varnames)): instr.arg = varname with self._new_context(code): post_transform = self.patterndispatcher(code) return Code( post_transform, code.argnames, cellvars=self.transform_cellvars(code.cellvars), freevars=self.transform_freevars(code.freevars), name=name if name is not None else code.name, filename=filename if filename is not None else code.filename, firstlineno=code.firstlineno, lnotab=_new_lnotab(post_transform, code.lnotab), flags=code.flags, )
def transform(self, code, *, name=None, filename=None)
Transform a codetransformer.Code object applying the transforms. Parameters ---------- code : Code The code object to transform. name : str, optional The new name for this code object. filename : str, optional The new filename for this code object. Returns ------- new_code : Code The transformed code object.
2.733959
2.710741
1.008566
def _fmt(node, prefix, level): def with_indent(*strs): return ''.join(((indent * level,) + strs)) with_prefix = partial(with_indent, prefix) if isinstance(node, Name): # Special Case: # Render Name nodes on a single line. yield with_prefix( type(node).__name__, '(id=', repr(node.id), ', ctx=', type(node.ctx).__name__, '()),', ) elif isinstance(node, Num): # Special Case: # Render Num nodes on a single line without names. yield with_prefix( type(node).__name__, '(%r),' % node.n, ) elif isinstance(node, AST): fields_attrs = list( chain( iter_fields(node), iter_attributes(node) if include_attributes else (), ) ) if not fields_attrs: # Special Case: # Render the whole expression on one line if there are no # attributes. yield with_prefix(type(node).__name__, '(),') return yield with_prefix(type(node).__name__, '(') for name, value in fields_attrs: yield from _fmt(value, name + '=', level + 1) # Put a trailing comma if we're not at the top level. yield with_indent(')', ',' if level > 0 else '') elif isinstance(node, list): if not node: # Special Case: # Render empty lists on one line. yield with_prefix('[],') return yield with_prefix('[') yield from chain.from_iterable( map(partial(_fmt, prefix='', level=level + 1), node) ) yield with_indent('],') else: yield with_prefix(repr(node), ',') return '\n'.join(_fmt(node, prefix='', level=0))
def pformat_ast(node, include_attributes=INCLUDE_ATTRIBUTES_DEFAULT, indent=INDENT_DEFAULT)
Pretty-format an AST tree element Parameters ---------- node : ast.AST Top-level node to render. include_attributes : bool, optional Whether to include node attributes. Default False. indent : str, optional. Indentation string for nested expressions. Default is two spaces.
2.98382
2.917113
1.022867
if file is None: file = sys.stdout print( pformat_ast( node, include_attributes=include_attributes, indent=indent ), file=file, )
def pprint_ast(node, include_attributes=INCLUDE_ATTRIBUTES_DEFAULT, indent=INDENT_DEFAULT, file=None)
Pretty-print an AST tree. Parameters ---------- node : ast.AST Top-level node to render. include_attributes : bool, optional Whether to include node attributes. Default False. indent : str, optional. Indentation string for nested expressions. Default is two spaces. file : None or file-like object, optional File to use to print output. If the default of `None` is passed, we use sys.stdout.
2.651577
3.377179
0.785146
name = _prefix + co.co_name yield name, co yield from chain.from_iterable( walk_code(c, _prefix=_extend_name(name, co)) for c in co.co_consts if isinstance(c, CodeType) )
def walk_code(co, _prefix='')
Traverse a code object, finding all consts which are also code objects. Yields pairs of (name, code object).
3.432149
3.798831
0.903475
pprint_ast(parse(text, mode=mode), indent=indent, file=file)
def a(text, mode='exec', indent=' ', file=None)
Interactive convenience for displaying the AST of a code string. Writes a pretty-formatted AST-tree to `file`. Parameters ---------- text : str Text of Python code to render as AST. mode : {'exec', 'eval'}, optional Mode for `ast.parse`. Default is 'exec'. indent : str, optional String to use for indenting nested expressions. Default is two spaces. file : None or file-like object, optional File to use to print output. If the default of `None` is passed, we use sys.stdout.
5.426275
9.312376
0.582695
if file is None: file = sys.stdout for name, co in walk_code(extract_code(obj, compile_mode=mode)): print(name, file=file) print('-' * len(name), file=file) dis.dis(co, file=file) print('', file=file)
def d(obj, mode='exec', file=None)
Interactive convenience for displaying the disassembly of a function, module, or code string. Compiles `text` and recursively traverses the result looking for `code` objects to render with `dis.dis`. Parameters ---------- obj : str, CodeType, or object with __code__ attribute Object to disassemble. If `obj` is an instance of CodeType, we use it unchanged. If `obj` is a string, we compile it with `mode` and then disassemble. Otherwise, we look for a `__code__` attribute on `obj`. mode : {'exec', 'eval'}, optional Mode for `compile`. Default is 'exec'. file : None or file-like object, optional File to use to print output. If the default of `None` is passed, we use sys.stdout.
3.31023
3.404731
0.972244
try: code = obj.__code__ if isinstance(code, CodeType): return code raise ValueError( "{obj} has a `__code__` attribute, " "but it's an instance of {notcode!r}, not CodeType.".format( obj=obj, notcode=type(code).__name__, ) ) except AttributeError: raise ValueError("Don't know how to extract code from %s." % obj)
def extract_code(obj, compile_mode)
Generic function for converting objects into instances of `CodeType`.
3.519889
3.475031
1.012909
if file is None: file = sys.stdout ast_section = StringIO() a(text, mode=mode, file=ast_section) code_section = StringIO() d(text, mode=mode, file=code_section) rendered = _DISPLAY_TEMPLATE.format( text=text, ast=ast_section.getvalue(), code=code_section.getvalue(), ) print(rendered, file=file)
def display(text, mode='exec', file=None)
Show `text`, rendered as AST and as Bytecode. Parameters ---------- text : str Text of Python code to render. mode : {'exec', 'eval'}, optional Mode for `ast.parse` and `compile`. Default is 'exec'. file : None or file-like object, optional File to use to print output. If the default of `None` is passed, we use sys.stdout.
3.189106
3.261183
0.977899
# Emulate standard behavior when match_expr is an exception subclass. if isinstance(match_expr, type) and issubclass(match_expr, BaseException): return issubclass(exc_type, match_expr) # Match on type and args when match_expr is an exception instance. return ( issubclass(exc_type, type(match_expr)) and match_expr.args == exc_value.args )
def match(match_expr, exc_type, exc_value, exc_traceback)
Called to determine whether or not an except block should be matched. True -> enter except block False -> don't enter except block
3.289943
3.356631
0.980132
co = f.__code__ args, kwonly, varargs, varkwargs = paramnames(co) annotations = f.__annotations__ or {} defaults = list(f.__defaults__ or ()) kw_defaults = f.__kwdefaults__ or {} if f.__name__ == '<lambda>': node = ast.Lambda body = pycode_to_body(co, DecompilationContext(in_lambda=True))[0] extra_kwargs = {} else: node = ast.FunctionDef body = pycode_to_body(co, DecompilationContext(in_function_block=True)) extra_kwargs = { 'decorator_list': [], 'returns': annotations.get('return') } return node( name=f.__name__, args=make_function_arguments( args=args, kwonly=kwonly, varargs=varargs, varkwargs=varkwargs, defaults=defaults, kw_defaults=kw_defaults, annotations=annotations, ), body=body, **extra_kwargs )
def decompile(f)
Decompile a function. Parameters ---------- f : function The function to decompile. Returns ------- ast : ast.FunctionDef A FunctionDef node that compiles to f.
3.166505
3.141053
1.008103
code = Code.from_pycode(co) # On each instruction, temporarily store all the jumps to the **next** # instruction. This is used in _make_expr to determine when an expression # is part of a short-circuiting expression. for a, b in sliding_window(2, code.instrs): a._next_target_of = b._target_of b._next_target_of = set() try: body = instrs_to_body(deque(code.instrs), context) if context.in_function_block: return make_global_and_nonlocal_decls(code.instrs) + body return body finally: # Clean up jump target data. for i in code.instrs: del i._next_target_of
def pycode_to_body(co, context)
Convert a Python code object to a list of AST body elements.
6.632893
6.532588
1.015354
stack = [] body = [] process_instrs(instrs, stack, body, context) if stack: raise DecompilationError( "Non-empty stack at the end of instrs_to_body(): %s." % stack ) return body
def instrs_to_body(instrs, context)
Convert a list of Instruction objects to a list of AST body nodes.
4.767778
4.41591
1.079682
next_instr = queue.popleft while queue: newcontext = _process_instr(next_instr(), queue, stack, body, context) if newcontext is not None: context = newcontext
def process_instrs(queue, stack, body, context)
Process instructions from the instruction queue.
4.290635
4.196517
1.022428
test_expr = make_expr(stack) if isinstance(instr, instrs.POP_JUMP_IF_TRUE): test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr) first_block = popwhile(op.is_not(instr.arg), queue, side='left') if isinstance(first_block[-1], instrs.RETURN_VALUE): body = instrs_to_body(first_block, context) return ast.If(test=test_expr, body=body, orelse=[]) jump_to_end = expect( first_block.pop(), instrs.JUMP_FORWARD, "at end of if-block" ) body = instrs_to_body(first_block, context) # First instruction after the whole if-block. end = jump_to_end.arg if instr.arg is jump_to_end.arg: orelse = [] else: orelse = instrs_to_body( popwhile(op.is_not(end), queue, side='left'), context, ) return ast.If(test=test_expr, body=body, orelse=orelse)
def make_if_statement(instr, queue, stack, context)
Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.
3.455534
3.298627
1.047568