_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q6000
|
screen._print
|
train
|
def _print(self, char):
"""
Print a character at the current cursor position and advance the
cursor.
"""
# Don't make bugs where we try to print a screen.
assert len(char) == 1
try:
try:
# Python 3
char = self.decoder(bytes(char, self.encoding))[0]
except TypeError:
# Python 2.x
char = self.decoder(char)[0]
except UnicodeDecodeError:
char = "?"
if self.current_charset == "g0" and self.g0 is not None:
char = char.translate(self.g0)
elif self.current_charset == "g1" and self.g1 is not None:
char = char.translate(self.g1)
row = self.display[self.y]
self.display[self.y] = row[:self.x] + char + row[self.x+1:]
attrs = self.attributes[self.y]
self.attributes[self.y] = attrs[:self.x] + [self.cursor_attributes] + \
attrs[self.x+1:]
self.x += 1
if self.x >= self.size[1]:
# If this was the last column in a row, move the cursor to the
# next row.
self._linefeed()
|
python
|
{
"resource": ""
}
|
q6001
|
screen._index
|
train
|
def _index(self):
"""
Move the cursor down one row in the same column. If the cursor is at
the last row, create a new row at the bottom.
"""
if self.y + 1 >= self.size[0]:
# If the cursor is currently on the last row, then spawn another
# and scroll down (removing the top row).
self.display = self.display[1:] + [u" " * self.size[1]]
else:
# If the cursor is anywhere else, then just move it to the
# next line.
self.y += 1
|
python
|
{
"resource": ""
}
|
q6002
|
screen._reverse_index
|
train
|
def _reverse_index(self):
"""
Move the cursor up one row in the same column. If the cursor is at the
first row, create a new row at the top.
"""
if self.y == 0:
# If the cursor is currently at the first row, then scroll the
# screen up.
self.display = [u" " * self.size[1]] + self.display[:-1]
else:
# If the cursor is anywhere other than the first row than just move
# it up by one row.
self.y -= 1
|
python
|
{
"resource": ""
}
|
q6003
|
screen._next_tab_stop
|
train
|
def _next_tab_stop(self):
"""
Return the x value of the next available tabstop or the x value of the
margin if there are no more tabstops.
"""
for stop in sorted(self.tabstops):
if self.x < stop:
return stop
return self.size[1] - 1
|
python
|
{
"resource": ""
}
|
q6004
|
screen._insert_line
|
train
|
def _insert_line(self, count=1):
"""
Inserts lines at line with cursor. Lines displayed below cursor move
down. Lines moved past the bottom margin are lost.
"""
trimmed = self.display[:self.y+1] + \
[u" " * self.size[1]] * count + \
self.display[self.y+1:self.y+count+1]
self.display = trimmed[:self.size[0]]
|
python
|
{
"resource": ""
}
|
q6005
|
screen._delete_line
|
train
|
def _delete_line(self, count=1):
"""
Deletes count lines, starting at line with cursor. As lines are
deleted, lines displayed below cursor move up. Lines added to bottom of
screen have spaces with same character attributes as last line moved
up.
"""
self.display = self.display[:self.y] + \
self.display[self.y+1:]
self.display.append([u" " * self.size[1]] * count)
self.attributes = self.attributes[:self.y] + \
self.attributes[self.y+1:]
last_attributes = self.attributes[-1]
for _ in xrange(count):
self.attributes.append(copy(last_attributes))
|
python
|
{
"resource": ""
}
|
q6006
|
screen._delete_character
|
train
|
def _delete_character(self, count=1):
"""
Deletes count characters, starting with the character at cursor
position. When a character is deleted, all characters to the right
of cursor move left.
"""
# First resize the text display
row = self.display[self.y]
count = min(count, self.size[1] - self.x)
row = row[:self.x] + row[self.x+count:] + u" " * count
self.display[self.y] = row
# Then resize the attribute array too
attrs = self.attributes[self.y]
attrs = attrs[:self.x] + attrs[self.x+count:] + [self.default_attributes] * count
self.attributes[self.y] = attrs
|
python
|
{
"resource": ""
}
|
q6007
|
screen._erase_in_line
|
train
|
def _erase_in_line(self, type_of=0):
"""
Erases the row in a specific way, depending on the type_of.
"""
row = self.display[self.y]
attrs = self.attributes[self.y]
if type_of == 0:
# Erase from the cursor to the end of line, including the cursor
row = row[:self.x] + u" " * (self.size[1] - self.x)
attrs = attrs[:self.x] + [self.default_attributes] * (self.size[1] - self.x)
elif type_of == 1:
# Erase from the beginning of the line to the cursor, including it
row = u" " * (self.x+1) + row[self.x+1:]
attrs = [self.default_attributes] * (self.x+1) + attrs[self.x+1:]
elif type_of == 2:
# Erase the entire line.
row = u" " * self.size[1]
attrs = [self.default_attributes] * self.size[1]
self.display[self.y] = row
self.attributes[self.y] = attrs
|
python
|
{
"resource": ""
}
|
q6008
|
screen._cursor_down
|
train
|
def _cursor_down(self, count=1):
"""
Moves cursor down count lines in same column. Cursor stops at bottom
margin.
"""
self.y = min(self.size[0] - 1, self.y + count)
|
python
|
{
"resource": ""
}
|
q6009
|
screen._cursor_forward
|
train
|
def _cursor_forward(self, count=1):
"""
Moves cursor right count columns. Cursor stops at right margin.
"""
self.x = min(self.size[1] - 1, self.x + count)
|
python
|
{
"resource": ""
}
|
q6010
|
screen._cursor_position
|
train
|
def _cursor_position(self, row=0, column=0):
"""
Set the cursor to a specific row and column.
Obnoxiously row/column is 1 based, instead of zero based, so we need
to compensate. I know I've created bugs in here somehow.
Confoundingly, inputs of 0 are still acceptable, and should move to
the beginning of the row/column as if they were 1. *sigh*
"""
if row == 0:
row = 1
if column == 0:
column = 1
self.y = min(row - 1, self.size[0] - 1)
self.x = min(column - 1, self.size[1] - 1)
|
python
|
{
"resource": ""
}
|
q6011
|
screen._text_attr
|
train
|
def _text_attr(self, attr):
    """
    Apply a text attribute (looked up in the module-level ``text``
    table) to the current cursor attributes.
    """
    attr = text[attr]
    if attr == "reset":
        self.cursor_attributes = self.default_attributes
        return
    # The "-off" variants strip a previously applied attribute.
    removals = {
        "underline-off": "underline",
        "blink-off": "blink",
        "reverse-off": "reverse",
    }
    if attr in removals:
        self.cursor_attributes = self._remove_text_attr(removals[attr])
    else:
        self.cursor_attributes = self._add_text_attr(attr)
|
python
|
{
"resource": ""
}
|
q6012
|
screen._color_attr
|
train
|
def _color_attr(self, ground, attr):
    """
    Apply a foreground or background color (looked up in the
    module-level ``colors`` table) to the cursor attributes tuple.
    """
    code = colors[ground][attr]
    current = self.cursor_attributes
    if ground == "foreground":
        self.cursor_attributes = (current[0], code, current[2])
    elif ground == "background":
        self.cursor_attributes = (current[0], current[1], code)
|
python
|
{
"resource": ""
}
|
q6013
|
screen._set_attr
|
train
|
def _set_attr(self, attr):
    """
    Dispatch ``attr`` to the text-, foreground- or background-attribute
    handler depending on which lookup table contains it.
    """
    if attr in text:
        self._text_attr(attr)
        return
    # Check foreground before background, mirroring the table priority.
    for ground in ("foreground", "background"):
        if attr in colors[ground]:
            self._color_attr(ground, attr)
            return
|
python
|
{
"resource": ""
}
|
q6014
|
screen._select_graphic_rendition
|
train
|
def _select_graphic_rendition(self, *attrs):
"""
Set the current text attribute.
"""
if len(attrs) == 0:
# No arguments means that we're really trying to do a reset.
attrs = [0]
for attr in attrs:
self._set_attr(attr)
|
python
|
{
"resource": ""
}
|
q6015
|
FlatDict.setdefault
|
train
|
def setdefault(self, key, default):
    """If ``key`` maps to a truthy value, return that value; otherwise
    store ``default`` under ``key`` and return it.
    Note: unlike ``dict.setdefault`` this also replaces values that are
    present but falsy (``None``, ``''``, ``0``, ...).
    :param mixed key: The key name
    :param mixed default: The default value
    :rtype: mixed
    """
    if key in self and self.__getitem__(key):
        return self.__getitem__(key)
    self.__setitem__(key, default)
    return self.__getitem__(key)
|
python
|
{
"resource": ""
}
|
q6016
|
FlatterDict._child_as_list
|
train
|
def _child_as_list(self, pk, ck):
"""Returns a list of values from the child FlatterDict instance
with string based integer keys.
:param str pk: The parent key
:param str ck: The child key
:rtype: list
"""
return [self._values[pk][ck][k]
for k in sorted(self._values[pk][ck].keys(),
key=lambda x: int(x))]
|
python
|
{
"resource": ""
}
|
q6017
|
V.gen_query
|
train
|
def gen_query(self):
    """
    Build the SQL query for this edge: a forward relation query when no
    destination is set, otherwise an inverse relation query.
    """
    if self.dst is None:
        return SQL.forwards_relation(self.src, self.rel)
    return SQL.inverse_relation(self.dst, self.rel)
|
python
|
{
"resource": ""
}
|
q6018
|
Query.traverse
|
train
|
def traverse(self, edge):
    """
    Follow ``edge`` from the currently selected nodes, returning a new
    derived query, i.e. select the friends of my friends.  Traversal
    can be chained indefinitely.
    :param edge: The edge query. If the edge's destination node is
        specified then the source nodes will be selected.
    """
    rel = edge.rel
    dst = edge.dst
    if dst is None:
        statement, params = SQL.compound_fwd_query(self.statement, rel)
    else:
        statement, params = SQL.compound_inv_query(self.statement, rel, dst)
    return self.derived(statement, params, replace=True)
|
python
|
{
"resource": ""
}
|
q6019
|
Graph.setup_sql
|
train
|
def setup_sql(self, graphs):
    """
    Create the backing table and its indexes for every graph name
    given, then commit.
    :param graphs: The graphs to create.
    """
    with closing(self.db.cursor()) as cursor:
        for table in graphs:
            # NOTE(review): the table name is interpolated straight
            # into the DDL, so graph names must come from trusted input.
            cursor.execute(SQL.CREATE_TABLE % (table))
            for index in SQL.INDEXES:
                cursor.execute(index % (table))
    self.db.commit()
|
python
|
{
"resource": ""
}
|
q6020
|
remove
|
train
|
def remove(src, rel, dst):
    """
    Build a DELETE statement (and its parameter list) removing edges
    from the relation's table, optionally filtered by source and/or
    destination node.
    :param src: The source node.
    :param rel: The relation.
    :param dst: The destination node.
    """
    # NOTE(review): ``rel`` is interpolated into the SQL text and must
    # be a trusted identifier; the node values are bound parameters.
    statement = 'DELETE FROM %s' % rel
    conditions = []
    params = []
    for column, node in (('src', src), ('dst', dst)):
        if node is not None:
            conditions.append('%s = ?' % column)
            params.append(node)
    if conditions:
        statement = '%s WHERE %s' % (statement, ' AND '.join(conditions))
    return statement, params
|
python
|
{
"resource": ""
}
|
q6021
|
Transaction.perform_ops
|
train
|
def perform_ops(self):
    """
    Run the queued operations against the database inside one explicit
    transaction; the connection's context manager commits or rolls
    back, and the cursor is always closed.
    """
    with self.db, closing(self.db.cursor()) as cursor:
        cursor.execute('BEGIN TRANSACTION')
        self._perform_ops(cursor)
|
python
|
{
"resource": ""
}
|
q6022
|
HTTPClient.from_url
|
train
|
def from_url(cls, url, **kwargs):
    """Build a client from ``url``; explicit keyword arguments take
    precedence over the host/port/scheme parsed from the url."""
    parsed = urllib3.util.parse_url(url)
    if parsed.host:
        kwargs.setdefault('host', parsed.host)
    if parsed.port:
        kwargs.setdefault('port', parsed.port)
    if parsed.scheme == 'https':
        kwargs.setdefault('connection_class', urllib3.HTTPSConnectionPool)
    return cls(**kwargs)
|
python
|
{
"resource": ""
}
|
q6023
|
NsqdTCPClient.connect
|
train
|
def connect(self):
    """Open the TCP stream to nsqd and send the protocol magic.
    Raises NSQException when the connection has been closed for good;
    does nothing when already connected.
    """
    if self.state == DISCONNECTED:
        raise errors.NSQException('connection already closed')
    if self.is_connected:
        return
    new_stream = Stream(self.address, self.port, self.timeout)
    new_stream.connect()
    self.stream = new_stream
    self.state = CONNECTED
    self.send(nsq.MAGIC_V2)
|
python
|
{
"resource": ""
}
|
q6024
|
NsqdTCPClient.close_stream
|
train
|
def close_stream(self):
    """Close the underlying socket and emit the ``on_close`` signal."""
    if not self.is_connected:
        # Nothing to close.
        return
    self.stream.close()
    self.state = DISCONNECTED
    self.on_close.send(self)
|
python
|
{
"resource": ""
}
|
q6025
|
NsqdTCPClient.read_response
|
train
|
def read_response(self):
    """Read one frame from nsqd and run it through the matching frame
    handler.
    :returns: tuple of the frame type and the processed data.
    """
    frame, data = nsq.unpack_response(self._read_response())
    self.last_response = time.time()
    try:
        frame_handler = self._frame_handlers[frame]
    except KeyError:
        raise errors.NSQFrameError('unknown frame {}'.format(frame))
    return frame, frame_handler(data)
|
python
|
{
"resource": ""
}
|
q6026
|
NsqdTCPClient.identify
|
train
|
def identify(self):
    """Update client metadata on the server and negotiate features.
    Sends IDENTIFY, then applies the negotiated upgrades (TLS, snappy
    or deflate compression) and authenticates when required.
    :returns: nsqd response data if there was feature negotiation,
        otherwise ``None``
    """
    self.send(nsq.identify({
        # nsqd 0.2.28+
        'client_id': self.client_id,
        'hostname': self.hostname,
        # nsqd 0.2.19+
        'feature_negotiation': True,
        'heartbeat_interval': self.heartbeat_interval,
        # nsqd 0.2.21+
        'output_buffer_size': self.output_buffer_size,
        'output_buffer_timeout': self.output_buffer_timeout,
        # nsqd 0.2.22+
        'tls_v1': self.tls_v1,
        # nsqd 0.2.23+
        'snappy': self.snappy,
        'deflate': self.deflate,
        'deflate_level': self.deflate_level,
        # nsqd 0.2.25+
        'sample_rate': self.sample_rate,
        'user_agent': self.user_agent,
    }))
    frame, data = self.read_response()
    if frame == nsq.FRAME_TYPE_ERROR:
        raise data
    if data == nsq.OK:
        # Plain OK: the server did not negotiate any features.
        return
    try:
        data = json.loads(data.decode('utf-8'))
    except ValueError:
        self.close_stream()
        raise errors.NSQException(
            'failed to parse IDENTIFY response JSON from nsqd: '
            '{!r}'.format(data))
    self.max_ready_count = data.get('max_rdy_count', self.max_ready_count)
    if self.tls_v1 and data.get('tls_v1'):
        self.upgrade_to_tls()
    # snappy and deflate are mutually exclusive; snappy takes priority.
    if self.snappy and data.get('snappy'):
        self.upgrade_to_snappy()
    elif self.deflate and data.get('deflate'):
        self.deflate_level = data.get('deflate_level', self.deflate_level)
        # BUG FIX: was ``self.upgrade_to_defalte()`` (misspelled), which
        # raised AttributeError whenever deflate was negotiated.
        self.upgrade_to_deflate()
    if self.auth_secret and data.get('auth_required'):
        self.auth()
    return data
|
python
|
{
"resource": ""
}
|
q6027
|
NsqdTCPClient.auth
|
train
|
def auth(self):
    """Send the authorization secret to nsqd, emit ``on_auth`` and
    return the parsed response."""
    self.send(nsq.auth(self.auth_secret))
    frame, data = self.read_response()
    if frame == nsq.FRAME_TYPE_ERROR:
        raise data
    try:
        parsed = json.loads(data.decode('utf-8'))
    except ValueError:
        self.close_stream()
        raise errors.NSQException(
            'failed to parse AUTH response JSON from nsqd: '
            '{!r}'.format(data))
    self.on_auth.send(self, response=parsed)
    return parsed
|
python
|
{
"resource": ""
}
|
q6028
|
NsqdTCPClient.subscribe
|
train
|
def subscribe(self, topic, channel):
    """Issue a SUB command for the given nsq `topic` and `channel`."""
    command = nsq.subscribe(topic, channel)
    self.send(command)
|
python
|
{
"resource": ""
}
|
q6029
|
NsqdTCPClient.publish
|
train
|
def publish(self, topic, data, defer=None):
    """Publish a message to the given topic over tcp.
    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: duration in milliseconds to defer before publishing
        (requires nsq 0.3.6)
    """
    command = (nsq.publish(topic, data) if defer is None
               else nsq.deferpublish(topic, data, defer))
    self.send(command)
|
python
|
{
"resource": ""
}
|
q6030
|
NsqdTCPClient.ready
|
train
|
def ready(self, count):
    """Tell nsqd we are ready to receive ``count`` messages (RDY)."""
    # Remember the advertised capacity before sending the command.
    self.ready_count = count
    self.send(nsq.ready(count))
|
python
|
{
"resource": ""
}
|
q6031
|
NsqdHTTPClient.publish
|
train
|
def publish(self, topic, data, defer=None):
    """Publish a message to the given topic over http.
    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: duration in milliseconds to defer before publishing
        (requires nsq 0.3.6)
    """
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    if defer is not None:
        # nsqd expects the deferral duration as a string field.
        fields['defer'] = '{}'.format(defer)
    return self._request('POST', '/pub', fields=fields, body=data)
|
python
|
{
"resource": ""
}
|
q6032
|
NsqdHTTPClient.create_topic
|
train
|
def create_topic(self, topic):
    """Create ``topic`` on the nsqd instance."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('POST', '/topic/create', fields=fields)
|
python
|
{
"resource": ""
}
|
q6033
|
NsqdHTTPClient.delete_topic
|
train
|
def delete_topic(self, topic):
    """Delete ``topic`` from the nsqd instance."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('POST', '/topic/delete', fields=fields)
|
python
|
{
"resource": ""
}
|
q6034
|
NsqdHTTPClient.empty_topic
|
train
|
def empty_topic(self, topic):
    """Drop all queued messages for an existing topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('POST', '/topic/empty', fields=fields)
|
python
|
{
"resource": ""
}
|
q6035
|
NsqdHTTPClient.empty_channel
|
train
|
def empty_channel(self, topic, channel):
    """Drop all queued messages for an existing channel."""
    nsq.assert_valid_topic_name(topic)
    nsq.assert_valid_channel_name(channel)
    fields = {'topic': topic, 'channel': channel}
    return self._request('POST', '/channel/empty', fields=fields)
|
python
|
{
"resource": ""
}
|
q6036
|
NsqdHTTPClient.pause_topic
|
train
|
def pause_topic(self, topic):
    """Pause message flow to all channels on an existing topic;
    messages keep queueing at the topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('POST', '/topic/pause', fields=fields)
|
python
|
{
"resource": ""
}
|
q6037
|
NsqdHTTPClient.unpause_topic
|
train
|
def unpause_topic(self, topic):
    """Resume message flow to channels of an existing, paused, topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('POST', '/topic/unpause', fields=fields)
|
python
|
{
"resource": ""
}
|
q6038
|
NsqdHTTPClient.stats
|
train
|
def stats(self, topic=None, channel=None, text=False):
    """Return internal instrumented statistics.
    :param topic: (optional) filter to topic
    :param channel: (optional) filter to channel
    :param text: return the stats as a string (default: ``False``)
    """
    fields = {'format': 'text' if text else 'json'}
    if topic:
        nsq.assert_valid_topic_name(topic)
        fields['topic'] = topic
    if channel:
        nsq.assert_valid_channel_name(channel)
        fields['channel'] = channel
    return self._request('GET', '/stats', fields=fields)
|
python
|
{
"resource": ""
}
|
q6039
|
Producer.join
|
train
|
def join(self, timeout=None, raise_error=False):
    """Block until all connections have closed and workers stopped."""
    workers = self._workers
    workers.join(timeout, raise_error)
|
python
|
{
"resource": ""
}
|
q6040
|
Producer.publish
|
train
|
def publish(self, topic, data, defer=None, block=True, timeout=None,
            raise_error=True):
    """Publish a message to the given topic.
    :param topic: the topic to publish to
    :param data: bytestring data to publish
    :param defer: duration in milliseconds to defer before publishing
        (requires nsq 0.3.6)
    :param block: wait for a connection to become available before
        publishing the message. If block is `False` and no connections
        are available, :class:`~gnsq.errors.NSQNoConnections` is raised
    :param timeout: if timeout is a positive number, it blocks at most
        ``timeout`` seconds before raising
        :class:`~gnsq.errors.NSQNoConnections`
    :param raise_error: if ``True``, it blocks until a response is received
        from the nsqd server, and any error response is raised. Otherwise
        an :class:`~gevent.event.AsyncResult` is returned
    """
    result = AsyncResult()
    conn = self._get_connection(block=block, timeout=timeout)
    try:
        # Register the pending result before sending so the response
        # handler can resolve it.
        self._response_queues[conn].append(result)
        conn.publish(topic, data, defer=defer)
    finally:
        # Always return the connection to the pool.
        self._put_connection(conn)
    return result.get() if raise_error else result
|
python
|
{
"resource": ""
}
|
q6041
|
Producer.multipublish
|
train
|
def multipublish(self, topic, messages, block=True, timeout=None,
                 raise_error=True):
    """Publish an iterable of messages to the given topic.
    :param topic: the topic to publish to
    :param messages: iterable of bytestrings to publish
    :param block: wait for a connection to become available before
        publishing the message. If block is `False` and no connections
        are available, :class:`~gnsq.errors.NSQNoConnections` is raised
    :param timeout: if timeout is a positive number, it blocks at most
        ``timeout`` seconds before raising
        :class:`~gnsq.errors.NSQNoConnections`
    :param raise_error: if ``True``, it blocks until a response is received
        from the nsqd server, and any error response is raised. Otherwise
        an :class:`~gevent.event.AsyncResult` is returned
    """
    result = AsyncResult()
    conn = self._get_connection(block=block, timeout=timeout)
    try:
        # Register the pending result before sending so the response
        # handler can resolve it.
        self._response_queues[conn].append(result)
        conn.multipublish(topic, messages)
    finally:
        # Always return the connection to the pool.
        self._put_connection(conn)
    return result.get() if raise_error else result
|
python
|
{
"resource": ""
}
|
q6042
|
LookupdClient.lookup
|
train
|
def lookup(self, topic):
    """Returns producers for a topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('GET', '/lookup', fields=fields)
|
python
|
{
"resource": ""
}
|
q6043
|
LookupdClient.channels
|
train
|
def channels(self, topic):
    """Returns all known channels of a topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic}
    return self._request('GET', '/channels', fields=fields)
|
python
|
{
"resource": ""
}
|
q6044
|
LookupdClient.tombstone_topic
|
train
|
def tombstone_topic(self, topic, node):
    """Tombstones a specific producer of an existing topic."""
    nsq.assert_valid_topic_name(topic)
    fields = {'topic': topic, 'node': node}
    return self._request('POST', '/topic/tombstone', fields=fields)
|
python
|
{
"resource": ""
}
|
q6045
|
deprecated
|
train
|
def deprecated(fn):
    """Mark a function as deprecated and warn the user on use."""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        # The first line of the docstring doubles as the warning text.
        message = fn.__doc__.split('\n')[0]
        warnings.warn(message, category=DeprecationWarning, stacklevel=2)
        return fn(*args, **kwargs)
    return wrapper
|
python
|
{
"resource": ""
}
|
q6046
|
_new_lnotab
|
train
|
def _new_lnotab(instrs, lnotab):
    """The updated lnotab after the instructions have been transformed.
    Parameters
    ----------
    instrs : iterable[Instruction]
        The new instructions.
    lnotab : dict[Instruction -> int]
        The lnotab for the old code object.
    Returns
    -------
    new_lnotab : dict[Instruction -> int]
        The post transform lnotab.
    """
    new_lnotab = {}
    for lno, instr in lnotab.items():
        # Follow the instruction to whichever instruction replaced it.
        new_lnotab[lno] = _a_if_not_none(instr._stolen_by, instr)
    return new_lnotab
|
python
|
{
"resource": ""
}
|
q6047
|
CodeTransformer.transform_consts
|
train
|
def transform_consts(self, consts):
    """transformer for the co_consts field.
    Override this method to transform the `co_consts` of the code object.
    Parameters
    ----------
    consts : tuple
        The co_consts
    Returns
    -------
    new_consts : tuple
        The new constants.
    """
    transformed = []
    for const in consts:
        if isinstance(const, CodeType):
            # Recursively transform nested code objects.
            const = self.transform(Code.from_pycode(const)).to_pycode()
        transformed.append(const)
    return tuple(transformed)
|
python
|
{
"resource": ""
}
|
q6048
|
CodeTransformer.transform
|
train
|
def transform(self, code, *, name=None, filename=None):
    """Transform a codetransformer.Code object applying the transforms.
    Parameters
    ----------
    code : Code
        The code object to transform.
    name : str, optional
        The new name for this code object.
    filename : str, optional
        The new filename for this code object.
    Returns
    -------
    new_code : Code
        The transformed code object.
    """
    # reverse lookups from for constants and names.
    reversed_consts = {}
    reversed_names = {}
    reversed_varnames = {}
    for instr in code:
        if isinstance(instr, LOAD_CONST):
            reversed_consts[instr] = instr.arg
        if instr.uses_name:
            reversed_names[instr] = instr.arg
        if isinstance(instr, (STORE_FAST, LOAD_FAST)):
            reversed_varnames[instr] = instr.arg
    # Run each transform_* hook over the collected arguments and write
    # the results back onto the owning instructions in place.  The
    # ``or ((), ())`` guards the empty-dict case, where ``zip(*...)``
    # yields nothing.
    instrs, consts = tuple(zip(*reversed_consts.items())) or ((), ())
    for instr, const in zip(instrs, self.transform_consts(consts)):
        instr.arg = const
    instrs, names = tuple(zip(*reversed_names.items())) or ((), ())
    for instr, name_ in zip(instrs, self.transform_names(names)):
        instr.arg = name_
    instrs, varnames = tuple(zip(*reversed_varnames.items())) or ((), ())
    for instr, varname in zip(instrs, self.transform_varnames(varnames)):
        instr.arg = varname
    with self._new_context(code):
        post_transform = self.patterndispatcher(code)
    # NOTE(review): the new Code is built after the dispatch context
    # exits -- the transform_* hooks above already run outside it, so
    # none of the calls below appear to need the context; confirm.
    return Code(
        post_transform,
        code.argnames,
        cellvars=self.transform_cellvars(code.cellvars),
        freevars=self.transform_freevars(code.freevars),
        name=name if name is not None else code.name,
        filename=filename if filename is not None else code.filename,
        firstlineno=code.firstlineno,
        lnotab=_new_lnotab(post_transform, code.lnotab),
        flags=code.flags,
    )
|
python
|
{
"resource": ""
}
|
q6049
|
pformat_ast
|
train
|
def pformat_ast(node,
                include_attributes=INCLUDE_ATTRIBUTES_DEFAULT,
                indent=INDENT_DEFAULT):
    """
    Pretty-format an AST tree element
    Parameters
    ----------
    node : ast.AST
        Top-level node to render.
    include_attributes : bool, optional
        Whether to include node attributes. Default False.
    indent : str, optional.
        Indentation string for nested expressions. Default is two spaces.
    Returns
    -------
    str
        The rendered tree, one node element per line.
    """
    def _fmt(node, prefix, level):
        # Recursive generator: yields formatted lines for ``node`` at
        # nesting depth ``level``; ``prefix`` (e.g. "field=") precedes
        # the first token of the node's rendering.
        def with_indent(*strs):
            return ''.join(((indent * level,) + strs))
        with_prefix = partial(with_indent, prefix)
        if isinstance(node, Name):
            # Special Case:
            # Render Name nodes on a single line.
            yield with_prefix(
                type(node).__name__,
                '(id=',
                repr(node.id),
                ', ctx=',
                type(node.ctx).__name__,
                '()),',
            )
        elif isinstance(node, Num):
            # Special Case:
            # Render Num nodes on a single line without names.
            yield with_prefix(
                type(node).__name__,
                '(%r),' % node.n,
            )
        elif isinstance(node, AST):
            fields_attrs = list(
                chain(
                    iter_fields(node),
                    iter_attributes(node) if include_attributes else (),
                )
            )
            if not fields_attrs:
                # Special Case:
                # Render the whole expression on one line if there are no
                # attributes.
                yield with_prefix(type(node).__name__, '(),')
                return
            yield with_prefix(type(node).__name__, '(')
            for name, value in fields_attrs:
                yield from _fmt(value, name + '=', level + 1)
            # Put a trailing comma if we're not at the top level.
            yield with_indent(')', ',' if level > 0 else '')
        elif isinstance(node, list):
            if not node:
                # Special Case:
                # Render empty lists on one line.
                yield with_prefix('[],')
                return
            yield with_prefix('[')
            yield from chain.from_iterable(
                map(partial(_fmt, prefix='', level=level + 1), node)
            )
            yield with_indent('],')
        else:
            # Fallback: plain values (strings, numbers, None, ...).
            yield with_prefix(repr(node), ',')
    return '\n'.join(_fmt(node, prefix='', level=0))
|
python
|
{
"resource": ""
}
|
q6050
|
pprint_ast
|
train
|
def pprint_ast(node,
               include_attributes=INCLUDE_ATTRIBUTES_DEFAULT,
               indent=INDENT_DEFAULT,
               file=None):
    """
    Pretty-print an AST tree.
    Parameters
    ----------
    node : ast.AST
        Top-level node to render.
    include_attributes : bool, optional
        Whether to include node attributes. Default False.
    indent : str, optional.
        Indentation string for nested expressions. Default is two spaces.
    file : None or file-like object, optional
        File to use to print output. If the default of `None` is passed, we
        use sys.stdout.
    """
    rendered = pformat_ast(
        node,
        include_attributes=include_attributes,
        indent=indent,
    )
    print(rendered, file=sys.stdout if file is None else file)
|
python
|
{
"resource": ""
}
|
q6051
|
walk_code
|
train
|
def walk_code(co, _prefix=''):
    """
    Yield ``(dotted_name, code_object)`` for ``co`` and, recursively,
    for every code object found among its constants.
    """
    name = _prefix + co.co_name
    yield name, co
    for const in co.co_consts:
        if isinstance(const, CodeType):
            yield from walk_code(const, _prefix=_extend_name(name, co))
|
python
|
{
"resource": ""
}
|
q6052
|
a
|
train
|
def a(text, mode='exec', indent='  ', file=None):
    """
    Interactive convenience for displaying the AST of a code string.
    Writes a pretty-formatted AST-tree to `file`.
    Parameters
    ----------
    text : str
        Text of Python code to render as AST.
    mode : {'exec', 'eval'}, optional
        Mode for `ast.parse`. Default is 'exec'.
    indent : str, optional
        String to use for indenting nested expressions. Default is two spaces.
    file : None or file-like object, optional
        File to use to print output. If the default of `None` is passed, we
        use sys.stdout.
    """
    tree = parse(text, mode=mode)
    pprint_ast(tree, indent=indent, file=file)
|
python
|
{
"resource": ""
}
|
q6053
|
d
|
train
|
def d(obj, mode='exec', file=None):
    """
    Interactive convenience for displaying the disassembly of a function,
    module, or code string.
    Compiles `text` and recursively traverses the result looking for `code`
    objects to render with `dis.dis`.
    Parameters
    ----------
    obj : str, CodeType, or object with __code__ attribute
        Object to disassemble.
        If `obj` is an instance of CodeType, we use it unchanged.
        If `obj` is a string, we compile it with `mode` and then disassemble.
        Otherwise, we look for a `__code__` attribute on `obj`.
    mode : {'exec', 'eval'}, optional
        Mode for `compile`. Default is 'exec'.
    file : None or file-like object, optional
        File to use to print output. If the default of `None` is passed, we
        use sys.stdout.
    """
    out = sys.stdout if file is None else file
    for name, co in walk_code(extract_code(obj, compile_mode=mode)):
        # Underlined heading per code object, then its disassembly.
        print(name, file=out)
        print('-' * len(name), file=out)
        dis.dis(co, file=out)
        print('', file=out)
|
python
|
{
"resource": ""
}
|
q6054
|
extract_code
|
train
|
def extract_code(obj, compile_mode):
    """
    Generic function for converting objects into instances of `CodeType`
    by way of their ``__code__`` attribute.
    """
    try:
        code = obj.__code__
    except AttributeError:
        raise ValueError("Don't know how to extract code from %s." % obj)
    if isinstance(code, CodeType):
        return code
    raise ValueError(
        "{obj} has a `__code__` attribute, "
        "but it's an instance of {notcode!r}, not CodeType.".format(
            obj=obj,
            notcode=type(code).__name__,
        )
    )
|
python
|
{
"resource": ""
}
|
q6055
|
display
|
train
|
def display(text, mode='exec', file=None):
    """
    Show `text`, rendered as AST and as Bytecode.
    Parameters
    ----------
    text : str
        Text of Python code to render.
    mode : {'exec', 'eval'}, optional
        Mode for `ast.parse` and `compile`. Default is 'exec'.
    file : None or file-like object, optional
        File to use to print output. If the default of `None` is passed, we
        use sys.stdout.
    """
    out = sys.stdout if file is None else file
    # Render both sections into buffers, then emit them together via
    # the display template.
    ast_section = StringIO()
    a(text, mode=mode, file=ast_section)
    code_section = StringIO()
    d(text, mode=mode, file=code_section)
    rendered = _DISPLAY_TEMPLATE.format(
        text=text,
        ast=ast_section.getvalue(),
        code=code_section.getvalue(),
    )
    print(rendered, file=out)
|
python
|
{
"resource": ""
}
|
q6056
|
match
|
train
|
def match(match_expr, exc_type, exc_value, exc_traceback):
    """
    Decide whether an except block guarded by ``match_expr`` should
    catch the active exception.
    Returns True to enter the except block, False to skip it.
    """
    if isinstance(match_expr, type) and issubclass(match_expr, BaseException):
        # A bare exception class matches exactly like a normal except.
        return issubclass(exc_type, match_expr)
    # An exception *instance* must also match on its args.
    if not issubclass(exc_type, type(match_expr)):
        return False
    return match_expr.args == exc_value.args
|
python
|
{
"resource": ""
}
|
q6057
|
decompile
|
train
|
def decompile(f):
    """
    Decompile a function.
    Parameters
    ----------
    f : function
        The function to decompile.
    Returns
    -------
    ast : ast.FunctionDef
        A FunctionDef node that compiles to f.
    """
    co = f.__code__
    args, kwonly, varargs, varkwargs = paramnames(co)
    annotations = f.__annotations__ or {}
    defaults = list(f.__defaults__ or ())
    kw_defaults = f.__kwdefaults__ or {}
    if f.__name__ == '<lambda>':
        node_type = ast.Lambda
        # A lambda body is a single expression, hence the [0].
        body = pycode_to_body(co, DecompilationContext(in_lambda=True))[0]
        extra_kwargs = {}
    else:
        node_type = ast.FunctionDef
        body = pycode_to_body(
            co, DecompilationContext(in_function_block=True))
        extra_kwargs = {
            'decorator_list': [],
            'returns': annotations.get('return'),
        }
    arguments = make_function_arguments(
        args=args,
        kwonly=kwonly,
        varargs=varargs,
        varkwargs=varkwargs,
        defaults=defaults,
        kw_defaults=kw_defaults,
        annotations=annotations,
    )
    return node_type(
        name=f.__name__,
        args=arguments,
        body=body,
        **extra_kwargs
    )
|
python
|
{
"resource": ""
}
|
q6058
|
pycode_to_body
|
train
|
def pycode_to_body(co, context):
    """
    Convert a Python code object to a list of AST body elements.
    """
    code = Code.from_pycode(co)
    # On each instruction, temporarily store all the jumps to the **next**
    # instruction. This is used in _make_expr to determine when an expression
    # is part of a short-circuiting expression.
    for a, b in sliding_window(2, code.instrs):
        a._next_target_of = b._target_of
        # Each successor starts empty; overwritten by the next window,
        # so only the final instruction keeps the empty set.
        b._next_target_of = set()
    try:
        body = instrs_to_body(deque(code.instrs), context)
        if context.in_function_block:
            # global/nonlocal declarations must lead the function body.
            return make_global_and_nonlocal_decls(code.instrs) + body
        return body
    finally:
        # Clean up jump target data.
        for i in code.instrs:
            del i._next_target_of
|
python
|
{
"resource": ""
}
|
q6059
|
instrs_to_body
|
train
|
def instrs_to_body(instrs, context):
    """
    Convert a list of Instruction objects to a list of AST body nodes.
    """
    pending = []
    nodes = []
    process_instrs(instrs, pending, nodes, context)
    # A fully consumed statement sequence must leave nothing on the
    # simulated stack.
    if pending:
        raise DecompilationError(
            "Non-empty stack at the end of instrs_to_body(): %s." % pending
        )
    return nodes
|
python
|
{
"resource": ""
}
|
q6060
|
process_instrs
|
train
|
def process_instrs(queue, stack, body, context):
    """
    Process instructions from the instruction queue.

    A handler may return a replacement DecompilationContext, which is then
    used for all subsequent instructions.
    """
    while queue:
        maybe_new = _process_instr(queue.popleft(), queue, stack, body, context)
        if maybe_new is not None:
            context = maybe_new
|
python
|
{
"resource": ""
}
|
q6061
|
make_if_statement
|
train
|
def make_if_statement(instr, queue, stack, context):
    """
    Make an ast.If block from a POP_JUMP_IF_TRUE or POP_JUMP_IF_FALSE.

    ``instr.arg`` is the jump target taken when the branch is not entered.
    """
    test_expr = make_expr(stack)
    if isinstance(instr, instrs.POP_JUMP_IF_TRUE):
        # POP_JUMP_IF_TRUE skips the body when the test is true, so the
        # decompiled test is the negation of the compiled one.
        test_expr = ast.UnaryOp(op=ast.Not(), operand=test_expr)
    first_block = popwhile(op.is_not(instr.arg), queue, side='left')
    if isinstance(first_block[-1], instrs.RETURN_VALUE):
        # The body ends in a return, so no jump over an else-block was
        # emitted; there is nothing to parse as an orelse.
        body = instrs_to_body(first_block, context)
        return ast.If(test=test_expr, body=body, orelse=[])
    jump_to_end = expect(
        first_block.pop(), instrs.JUMP_FORWARD, "at end of if-block"
    )
    body = instrs_to_body(first_block, context)
    # First instruction after the whole if-block.
    end = jump_to_end.arg
    if instr.arg is jump_to_end.arg:
        # Both jumps land on the same instruction: the else-block is empty.
        orelse = []
    else:
        orelse = instrs_to_body(
            popwhile(op.is_not(end), queue, side='left'),
            context,
        )
    return ast.If(test=test_expr, body=body, orelse=orelse)
|
python
|
{
"resource": ""
}
|
q6062
|
_process_instr_import_name
|
train
|
def _process_instr_import_name(instr, queue, stack, body, context):
    """
    Process an IMPORT_NAME instruction.

    Side Effects
    ------------
    Pops two instructions from `stack`
    Consumes instructions from `queue` to the end of the import statement.
    Appends an ast.Import or ast.ImportFrom node to `body`.
    """
    # If this is "import module", fromlist is None.
    # If this is "from module import a, b", fromlist will be ('a', 'b').
    fromlist = stack.pop().arg
    # level argument to __import__. Should be 0, 1, or 2.
    level = stack.pop().arg
    # Dotted module name being imported.
    module = instr.arg
    if fromlist is None: # Regular import.
        attr_loads = _pop_import_LOAD_ATTRs(module, queue)
        store = queue.popleft()
        # There are two cases where we should emit an alias:
        # import a as <anything but a>
        # import a.b.c as <anything (including a)>
        if attr_loads or module.split('.')[0] != store.arg:
            asname = store.arg
        else:
            asname = None
        # NOTE(review): stdlib ast.Import has no 'level' field -- confirm
        # this is intentional for the AST flavor used here.
        body.append(
            ast.Import(
                names=[
                    ast.alias(
                        name=module,
                        asname=(asname),
                    ),
                ],
                level=level,
            ),
        )
        return
    elif fromlist == ('*',): # From module import *.
        expect(queue.popleft(), instrs.IMPORT_STAR, "after IMPORT_NAME")
        body.append(
            ast.ImportFrom(
                module=module,
                names=[ast.alias(name='*', asname=None)],
                level=level,
            ),
        )
        return
    # Consume a pair of IMPORT_FROM, STORE_NAME instructions for each entry in
    # fromlist.
    # NOTE(review): make_importfrom_alias is called with only three of its
    # four parameters -- presumably it is curried upstream; verify.
    names = list(map(make_importfrom_alias(queue, body, context), fromlist))
    body.append(ast.ImportFrom(module=module, names=names, level=level))
    # Remove the final POP_TOP of the imported module.
    expect(queue.popleft(), instrs.POP_TOP, "after 'from import'")
|
python
|
{
"resource": ""
}
|
q6063
|
make_importfrom_alias
|
train
|
def make_importfrom_alias(queue, body, context, name):
    """
    Make an ast.alias node for the names list of an ast.ImportFrom.

    Parameters
    ----------
    queue : deque
        Instruction Queue
    body : list
        Current body.
    context : DecompilationContext
    name : str
        Expected name of the IMPORT_FROM node to be popped.

    Returns
    -------
    alias : ast.alias

    Raises
    ------
    DecompilationError
        If the next IMPORT_FROM does not load ``name``.

    Side Effects
    ------------
    Consumes IMPORT_FROM and STORE_NAME instructions from queue.
    """
    import_from, store = queue.popleft(), queue.popleft()
    expect(import_from, instrs.IMPORT_FROM, "after IMPORT_NAME")
    # Idiom fix: use != instead of `not ... ==`.
    if import_from.arg != name:
        raise DecompilationError(
            "IMPORT_FROM name mismatch. Expected %r, but got %s." % (
                name, import_from,
            )
        )
    # Emit an asname only when the bound name differs from the imported one
    # ("from m import a as b").
    return ast.alias(
        name=name,
        asname=store.arg if store.arg != name else None,
    )
|
python
|
{
"resource": ""
}
|
q6064
|
_make_function
|
train
|
def _make_function(instr, queue, stack, body, context):
    """
    Set a make_function_context, then push onto the stack.

    Lambdas get no context update: they are decompiled inline when consumed.
    """
    assert stack, "Empty stack before MAKE_FUNCTION."
    name_load = stack[-1]
    expect(name_load, instrs.LOAD_CONST, "before MAKE_FUNCTION")
    stack.append(instr)
    if is_lambda_name(name_load.arg):
        return None
    return context.update(
        make_function_context=MakeFunctionContext(
            closure=isinstance(instr, instrs.MAKE_CLOSURE),
        )
    )
|
python
|
{
"resource": ""
}
|
q6065
|
make_assignment
|
train
|
def make_assignment(instr, queue, stack):
    """
    Make an ast.Assign node.

    Parameters
    ----------
    instr : Instruction
        The first DUP_TOP or store instruction of the assignment.
    queue : deque
        Remaining unprocessed instructions.
    stack : list
        Instructions that build the assigned value.
    """
    value = make_expr(stack)
    # Make assignment targets.
    # If there are multiple assignments (e.g. 'a = b = c'),
    # each LHS expression except the last is preceded by a DUP_TOP instruction.
    # Thus, we make targets until we don't see a DUP_TOP, and then make one
    # more.
    targets = []
    while isinstance(instr, instrs.DUP_TOP):
        targets.append(make_assign_target(queue.popleft(), queue, stack))
        instr = queue.popleft()
    targets.append(make_assign_target(instr, queue, stack))
    return ast.Assign(targets=targets, value=value)
|
python
|
{
"resource": ""
}
|
q6066
|
pop_with_body_instrs
|
train
|
def pop_with_body_instrs(setup_with_instr, queue):
    """
    Pop instructions from `queue` that form the body of a with block.

    Parameters
    ----------
    setup_with_instr : Instruction
        The instruction that opened the with block; its ``arg`` is the jump
        target just past the block.
    queue : deque
        Remaining unprocessed instructions; consumed up to and including the
        block's END_FINALLY.

    Returns
    -------
    body_instrs : deque
        The instructions forming the body of the with block.
    """
    body_instrs = popwhile(op.is_not(setup_with_instr.arg), queue, side='left')
    # Last two instructions should always be POP_BLOCK, LOAD_CONST(None).
    # These don't correspond to anything in the AST, so remove them here.
    load_none = body_instrs.pop()
    expect(load_none, instrs.LOAD_CONST, "at end of with-block")
    pop_block = body_instrs.pop()
    expect(pop_block, instrs.POP_BLOCK, "at end of with-block")
    if load_none.arg is not None:
        raise DecompilationError(
            "Expected LOAD_CONST(None), but got "
            "%r instead" % (load_none)
        )
    # Target of the setup_with should be a WITH_CLEANUP instruction followed by
    # an END_FINALLY. Neither of these correspond to anything in the AST.
    with_cleanup = queue.popleft()
    expect(with_cleanup, instrs.WITH_CLEANUP, "at end of with-block")
    end_finally = queue.popleft()
    expect(end_finally, instrs.END_FINALLY, "at end of with-block")
    return body_instrs
|
python
|
{
"resource": ""
}
|
q6067
|
make_withitem
|
train
|
def make_withitem(queue, stack):
    """
    Make an ast.withitem node.
    """
    ctx_expr = make_expr(stack)
    # "with <expr>:" emits a POP_TOP here; "with <expr> as <name>:" emits a
    # store instruction instead.
    binder = queue.popleft()
    store_types = (
        instrs.STORE_FAST,
        instrs.STORE_NAME,
        instrs.STORE_DEREF,
        instrs.STORE_GLOBAL,
    )
    if isinstance(binder, store_types):
        target = make_assign_target(binder, queue, stack)
        return ast.withitem(context_expr=ctx_expr, optional_vars=target)
    if isinstance(binder, instrs.POP_TOP):
        return ast.withitem(context_expr=ctx_expr, optional_vars=None)
    raise DecompilationError(
        "Don't know how to make withitem from %s" % binder,
    )
|
python
|
{
"resource": ""
}
|
q6068
|
make_for_loop
|
train
|
def make_for_loop(loop_body_instrs, else_body_instrs, context):
    """
    Make an ast.For node.

    Parameters
    ----------
    loop_body_instrs : deque
        Instructions forming the iterator expression and the loop body.
    else_body_instrs : deque
        Instructions forming the else-block of the loop, if any.
    context : DecompilationContext
    """
    # Instructions from start until GET_ITER are the builders for the iterator
    # expression.
    iterator_expr = make_expr(
        popwhile(not_a(instrs.GET_ITER), loop_body_instrs, side='left')
    )
    # Next is the GET_ITER instruction, which we don't need.
    loop_body_instrs.popleft()
    # Next is FOR_ITER, which is the jump target for Continue nodes.
    top_of_loop = loop_body_instrs.popleft()
    # This can be a STORE_* or an UNPACK_SEQUENCE followed by some number of
    # stores.
    target = make_assign_target(
        loop_body_instrs.popleft(),
        loop_body_instrs,
        stack=[],
    )
    body, orelse_body = make_loop_body_and_orelse(
        top_of_loop, loop_body_instrs, else_body_instrs, context
    )
    return ast.For(
        target=target,
        iter=iterator_expr,
        body=body,
        orelse=orelse_body,
    )
|
python
|
{
"resource": ""
}
|
q6069
|
make_while_loop
|
train
|
def make_while_loop(test_and_body_instrs, else_body_instrs, context):
    """
    Make an ast.While node.

    Parameters
    ----------
    test_and_body_instrs : deque
        Queue of instructions forming the loop test expression and body.
    else_body_instrs : deque
        Queue of instructions forming the else block of the loop.
    context : DecompilationContext

    Returns
    -------
    loop : ast.While
    """
    # The first instruction is the jump target for Continue nodes.
    top_of_loop = test_and_body_instrs[0]
    # The popped elements are the stack_builders for the loop test expression.
    # The top of the loop_body_instrs is either a POP_JUMP_IF_TRUE or a
    # POP_JUMP_IF_FALSE.
    test, body_instrs = make_while_loop_test_expr(test_and_body_instrs)
    body, orelse_body = make_loop_body_and_orelse(
        top_of_loop, body_instrs, else_body_instrs, context,
    )
    # while-else blocks are not yet supported or handled.
    return ast.While(test=test, body=body, orelse=orelse_body)
|
python
|
{
"resource": ""
}
|
q6070
|
pop_loop_instrs
|
train
|
def pop_loop_instrs(setup_loop_instr, queue):
    """
    Determine whether setup_loop_instr is setting up a for-loop or a
    while-loop. Then pop the loop instructions from queue.
    The easiest way to tell the difference is to look at the target of the
    JUMP_ABSOLUTE instruction at the end of the loop. If it jumps to a
    FOR_ITER, then this is a for-loop. Otherwise it's a while-loop.
    The jump we want to inspect is the first JUMP_ABSOLUTE instruction prior to
    the jump target of `setup_loop_instr`.
    Parameters
    ----------
    setup_loop_instr : instructions.SETUP_LOOP
        First instruction of the loop being parsed.
    queue : collections.deque
        Queue of unprocessed instructions.
    Returns
    -------
    loop_type : str, {'for', 'while'}
        The kind of loop being constructed.
    loop_instrs : deque
        The instructions forming body of the loop.
    else_instrs : deque
        The instructions forming the else-block of the loop.
    Side Effects
    ------------
    Pops all returned instructions from `queue`.
    """
    # Grab everything from left side of the queue until the jump target of
    # SETUP_LOOP.
    body = popwhile(op.is_not(setup_loop_instr.arg), queue, side='left')
    # Anything after the last POP_BLOCK instruction is the else-block.
    else_body = popwhile(not_a(instrs.POP_BLOCK), body, side='right')
    # After stripping the else-block, the loop body must end with
    # JUMP_ABSOLUTE, POP_BLOCK.
    jump_to_top, pop_block = body[-2], body[-1]
    if not isinstance(jump_to_top, instrs.JUMP_ABSOLUTE):
        raise DecompilationError(
            "Penultimate instruction of loop body is "
            "%s, not JUMP_ABSOLUTE." % jump_to_top,
        )
    if not isinstance(pop_block, instrs.POP_BLOCK):
        raise DecompilationError(
            "Last instruction of loop body is "
            "%s, not pop_block." % pop_block,
        )
    # The back-jump's target tells us what kind of loop this is.
    loop_expr = jump_to_top.arg
    if isinstance(loop_expr, instrs.FOR_ITER):
        return 'for', body, else_body
    return 'while', body, else_body
|
python
|
{
"resource": ""
}
|
q6071
|
_make_expr
|
train
|
def _make_expr(toplevel, stack_builders):
    """
    Override the single-dispatched make_expr with wrapper logic for handling
    short-circuiting expressions.

    Parameters
    ----------
    toplevel : Instruction
        The instruction that produces the expression's final value.
    stack_builders : list
        Instructions that build the values consumed by ``toplevel``.
    """
    base_expr = _make_expr_internal(toplevel, stack_builders)
    if not toplevel._next_target_of:
        # Nothing jumps to the instruction after `toplevel`, so this is not
        # the tail of a short-circuiting (and/or) expression.
        return base_expr
    subexprs = deque([base_expr])
    ops = deque([])
    # Unwind the jumps targeting the end of this expression; each one marks
    # the boundary of one operand to its left.
    while stack_builders and stack_builders[-1] in toplevel._next_target_of:
        jump = stack_builders.pop()
        if not isinstance(jump, _BOOLOP_JUMP_TYPES):
            raise DecompilationError(
                "Don't know how to decompile %s inside expression." % jump,
            )
        subexprs.appendleft(make_expr(stack_builders))
        ops.appendleft(_BOOLOP_JUMP_TO_AST_OP[type(jump)]())
    if len(subexprs) <= 1:
        raise DecompilationError(
            "Expected at least one JUMP instruction before expression."
        )
    return normalize_boolop(make_boolop(subexprs, ops))
|
python
|
{
"resource": ""
}
|
q6072
|
make_call_keywords
|
train
|
def make_call_keywords(stack_builders, count):
    """
    Make the keywords entry for an ast.Call node.

    Keyword arguments are built onto the stack left-to-right as
    LOAD_CONST(name), <value>; popping yields them in reverse order.
    """
    keywords = []
    for _ in range(count):
        kw_value = make_expr(stack_builders)
        name_load = stack_builders.pop()
        if not isinstance(name_load, instrs.LOAD_CONST):
            raise DecompilationError(
                "Expected a LOAD_CONST, but got %r" % name_load
            )
        if not isinstance(name_load.arg, str):
            raise DecompilationError(
                "Expected LOAD_CONST of a str, but got %r." % name_load,
            )
        keywords.append(ast.keyword(arg=name_load.arg, value=kw_value))
    return keywords[::-1]
|
python
|
{
"resource": ""
}
|
q6073
|
make_call_positionals
|
train
|
def make_call_positionals(stack_builders, count):
    """
    Make the args entry for an ast.Call node.

    Positional arguments pop off the stack in reverse source order, so the
    collected expressions are reversed before returning.
    """
    return [make_expr(stack_builders) for _ in range(count)][::-1]
|
python
|
{
"resource": ""
}
|
q6074
|
_make_expr_empty_dict
|
train
|
def _make_expr_empty_dict(toplevel, stack_builders):
"""
This should only be hit for empty dicts. Anything else should hit the
STORE_MAP handler instead.
"""
if toplevel.arg:
raise DecompilationError(
"make_expr() called with nonzero BUILD_MAP arg %d" % toplevel.arg
)
if stack_builders:
raise DecompilationError(
"Unexpected stack_builders for BUILD_MAP(0): %s" % stack_builders
)
return ast.Dict(keys=[], values=[])
|
python
|
{
"resource": ""
}
|
q6075
|
find_build_map
|
train
|
def find_build_map(stack_builders):
    """
    Find the BUILD_MAP instruction for which the last element of
    ``stack_builders`` is a store.
    """
    assert isinstance(stack_builders[-1], instrs.STORE_MAP)
    # Walk backwards counting STORE_MAPs; each BUILD_MAP(n) accounts for n
    # of them.  The first BUILD_MAP whose count covers every STORE_MAP seen
    # so far owns the trailing store.
    to_consume = 0
    for instr in reversed(stack_builders):
        if isinstance(instr, instrs.STORE_MAP):
            # NOTE: This branch should always be hit on the first iteration.
            to_consume += 1
        elif isinstance(instr, instrs.BUILD_MAP):
            to_consume -= instr.arg
            if to_consume <= 0:
                return instr
    else:
        raise DecompilationError(
            "Couldn't find BUILD_MAP for last element of %s." % stack_builders
        )
|
python
|
{
"resource": ""
}
|
q6076
|
_make_dict_elems
|
train
|
def _make_dict_elems(build_instr, builders):
    """
    Return a list of keys and a list of values for the dictionary literal
    generated by ``build_instr``.

    Parameters
    ----------
    build_instr : instructions.BUILD_MAP
        Instruction whose ``arg`` is the number of key/value pairs.
    builders : list
        Instructions that build the keys and values; each pair is followed
        by a STORE_MAP.

    Returns
    -------
    keys, values : (list, list)
        Key and value expressions in source order.
    """
    keys = []
    values = []
    for _ in range(build_instr.arg):
        popped = builders.pop()
        if not isinstance(popped, instrs.STORE_MAP):
            raise DecompilationError(
                "Expected a STORE_MAP but got %s" % popped
            )
        keys.append(make_expr(builders))
        values.append(make_expr(builders))
    # Keys and values are emitted in reverse order of how they appear in the
    # AST.
    keys.reverse()
    values.reverse()
    return keys, values
|
python
|
{
"resource": ""
}
|
q6077
|
normalize_tuple_slice
|
train
|
def normalize_tuple_slice(node):
"""
Normalize an ast.Tuple node representing the internals of a slice.
Returns the node wrapped in an ast.Index.
Returns an ExtSlice node built from the tuple elements if there are any
slices.
"""
if not any(isinstance(elt, ast.Slice) for elt in node.elts):
return ast.Index(value=node)
return ast.ExtSlice(
[
# Wrap non-Slice nodes in Index nodes.
elt if isinstance(elt, ast.Slice) else ast.Index(value=elt)
for elt in node.elts
]
)
|
python
|
{
"resource": ""
}
|
q6078
|
_binop_handler
|
train
|
def _binop_handler(nodetype):
"""
Factory function for binary operator handlers.
"""
def _handler(toplevel, stack_builders):
right = make_expr(stack_builders)
left = make_expr(stack_builders)
return ast.BinOp(left=left, op=nodetype(), right=right)
return _handler
|
python
|
{
"resource": ""
}
|
q6079
|
make_function_arguments
|
train
|
def make_function_arguments(args,
                            kwonly,
                            varargs,
                            varkwargs,
                            defaults,
                            kw_defaults,
                            annotations):
    """
    Make an ast.arguments from the args parsed out of a code object.
    """
    def as_arg(name):
        # Attach the annotation (if any) to each named parameter.
        return ast.arg(arg=name, annotation=annotations.get(name))

    vararg_node = as_arg(varargs) if varargs is not None else None
    kwarg_node = as_arg(varkwargs) if varkwargs is not None else None
    return ast.arguments(
        args=[as_arg(name) for name in args],
        kwonlyargs=[as_arg(name) for name in kwonly],
        defaults=defaults,
        kw_defaults=[kw_defaults.get(name) for name in kwonly],
        vararg=vararg_node,
        kwarg=kwarg_node,
    )
|
python
|
{
"resource": ""
}
|
q6080
|
make_global_and_nonlocal_decls
|
train
|
def make_global_and_nonlocal_decls(code_instrs):
    """
    Find all STORE_GLOBAL and STORE_DEREF instructions in `instrs` and convert
    them into a canonical list of `ast.Global` and `ast.Nonlocal` declarations.
    """
    global_names = sorted({
        i.arg for i in code_instrs if isinstance(i, instrs.STORE_GLOBAL)
    })
    nonlocal_names = sorted({
        i.arg for i in code_instrs
        if isinstance(i, instrs.STORE_DEREF) and i.vartype == 'free'
    })
    decls = []
    if global_names:
        decls.append(ast.Global(names=global_names))
    if nonlocal_names:
        decls.append(ast.Nonlocal(names=nonlocal_names))
    return decls
|
python
|
{
"resource": ""
}
|
q6081
|
make_defaults_and_annotations
|
train
|
def make_defaults_and_annotations(make_function_instr, builders):
    """
    Get the AST expressions corresponding to the defaults, kwonly defaults, and
    annotations for a function created by `make_function_instr`.

    Returns
    -------
    defaults : list
        Expressions for positional default values.
    kwonlys : dict
        Map from keyword-only parameter name to its default expression.
    annotations : dict
        Map from parameter name to its annotation expression.
    """
    # Integer counts.
    n_defaults, n_kwonlydefaults, n_annotations = unpack_make_function_arg(
        make_function_instr.arg
    )
    if n_annotations:
        # TOS should be a tuple of annotation names.
        # NOTE(review): n_annotations appears to count the names tuple
        # itself, hence the ``- 1`` below -- confirm against the bytecode
        # spec for the targeted Python version.
        load_annotation_names = builders.pop()
        annotations = dict(zip(
            reversed(load_annotation_names.arg),
            (make_expr(builders) for _ in range(n_annotations - 1))
        ))
    else:
        annotations = {}
    kwonlys = {}
    while n_kwonlydefaults:
        # Each kwonly default is built as <value>, LOAD_CONST(name).
        default_expr = make_expr(builders)
        key_instr = builders.pop()
        if not isinstance(key_instr, instrs.LOAD_CONST):
            raise DecompilationError(
                "kwonlydefault key is not a LOAD_CONST: %s" % key_instr
            )
        if not isinstance(key_instr.arg, str):
            raise DecompilationError(
                "kwonlydefault key builder is not a "
                "'LOAD_CONST of a string: %s" % key_instr
            )
        kwonlys[key_instr.arg] = default_expr
        n_kwonlydefaults -= 1
    defaults = make_exprs(builders, n_defaults)
    return defaults, kwonlys, annotations
|
python
|
{
"resource": ""
}
|
q6082
|
_check_make_function_instrs
|
train
|
def _check_make_function_instrs(load_code_instr,
                                load_name_instr,
                                make_function_instr,
                                *,
                                expect_lambda=False):
    """
    Validate the instructions passed to a make_function call.

    Parameters
    ----------
    load_code_instr : instructions.LOAD_CONST
        Must load a code object.
    load_name_instr : instructions.LOAD_CONST
        Must load the function's (fully-qualified) name.
    make_function_instr : instructions.MAKE_FUNCTION or instructions.MAKE_CLOSURE
        The instruction that creates the function.
    expect_lambda : bool, optional
        Whether the function being created should be a lambda.

    Raises
    ------
    TypeError
        If an instruction has the wrong type or loads the wrong kind of value.
    ValueError
        If the name's lambda-ness doesn't match ``expect_lambda``.
    """
    # Validate load_code_instr.
    if not isinstance(load_code_instr, instrs.LOAD_CONST):
        raise TypeError(
            "make_function expected 'load_code_instr` to be a "
            "LOAD_CONST, but got %s" % load_code_instr,
        )
    if not isinstance(load_code_instr.arg, types.CodeType):
        raise TypeError(
            "make_function expected load_code_instr "
            "to load a code object, but got %s" % load_code_instr.arg,
        )
    # Validate load_name_instr
    if not isinstance(load_name_instr, instrs.LOAD_CONST):
        # BUG FIX: this message previously interpolated load_code_instr.
        raise TypeError(
            "make_function expected 'load_name_instr` to be a "
            "LOAD_CONST, but got %s" % load_name_instr,
        )
    if not isinstance(load_name_instr.arg, str):
        raise TypeError(
            "make_function expected load_name_instr "
            "to load a string, but got %r instead" % load_name_instr.arg
        )
    # This is an endswith rather than '==' because the arg is the
    # fully-qualified name.
    is_lambda = is_lambda_name(load_name_instr.arg)
    if expect_lambda and not is_lambda:
        raise ValueError(
            "Expected to make a function named <lambda>, but "
            "got %r instead." % load_name_instr.arg
        )
    if not expect_lambda and is_lambda:
        raise ValueError("Unexpectedly received lambda function.")
    # Validate make_function_instr
    if not isinstance(make_function_instr, (instrs.MAKE_FUNCTION,
                                            instrs.MAKE_CLOSURE)):
        # BUG FIX: added the missing space before "instruction" (was
        # "MAKE_CLOSUREinstruction").
        raise TypeError(
            "make_function expected a MAKE_FUNCTION or MAKE_CLOSURE "
            "instruction, but got %s instead." % make_function_instr
        )
|
python
|
{
"resource": ""
}
|
q6083
|
pop_arguments
|
train
|
def pop_arguments(instr, stack):
    """
    Pop instructions off `stack` until we pop all instructions that will
    produce values popped by `instr`.

    Parameters
    ----------
    instr : Instruction
        The consuming instruction; must have a negative ``stack_effect``.
    stack : list
        The simulated builder stack.  Mutated in place: the popped suffix
        is removed.

    Returns
    -------
    popped : list
        The instructions (in their original order) that build the values
        consumed by ``instr``.

    Raises
    ------
    DecompilationError
        If ``instr`` doesn't consume values, or the stack runs out before
        its inputs are accounted for.
    """
    needed = instr.stack_effect
    if needed >= 0:
        # BUG FIX: message previously read "%s is does not have ...".
        raise DecompilationError(
            "%s does not have a negative stack effect" % instr
        )
    # Walk backwards accumulating stack effects until they cancel out the
    # values `instr` consumes.
    for popcount, to_pop in enumerate(reversed(stack), start=1):
        needed += to_pop.stack_effect
        if not needed:
            break
    else:
        raise DecompilationError(
            "Reached end of stack without finding inputs to %s" % instr,
        )
    popped = stack[-popcount:]
    stack[:] = stack[:-popcount]
    return popped
|
python
|
{
"resource": ""
}
|
q6084
|
_check_stack_for_module_return
|
train
|
def _check_stack_for_module_return(stack):
    """
    Verify that the stack is in the expected state before the dummy
    RETURN_VALUE instruction of a module or class.

    The only valid state is exactly one LOAD_CONST(None).
    """
    ok = (
        len(stack) == 1
        and isinstance(stack[0], instrs.LOAD_CONST)
        and stack[0].arg is None
    )
    if not ok:
        raise DecompilationError(
            "Reached end of non-function code "
            "block with unexpected stack: %s." % stack
        )
|
python
|
{
"resource": ""
}
|
q6085
|
expect
|
train
|
def expect(instr, expected, context):
    """
    Check that an instruction is of the expected type.

    Returns ``instr`` unchanged when it is an instance of ``expected``;
    raises DecompilationError otherwise.
    """
    if isinstance(instr, expected):
        return instr
    raise DecompilationError(
        "Expected a {expected} instruction {context}. Got {instr}.".format(
            instr=instr, expected=expected, context=context,
        )
    )
|
python
|
{
"resource": ""
}
|
q6086
|
overloaded_constants
|
train
|
def overloaded_constants(type_, __doc__=None):
    """A factory for transformers that apply functions to literals.

    Parameters
    ----------
    type_ : type
        The type to overload.
    __doc__ : str, optional
        Docstring for the generated transformer.

    Returns
    -------
    transformer : subclass of CodeTransformer
        A new code transformer class that will overload the provided
        literal types.
    """
    # Pluralize the type name for the generated class name.
    plural = type_.__name__
    if plural.endswith('x'):
        plural = plural + 'es'
    elif not plural.endswith('s'):
        plural = plural + 's'
    docstring = (
        __doc__ if __doc__ is not None else _format_constant_docstring(type_)
    )
    namespace = {
        '_type': type_,
        '__doc__': docstring,
    }
    return type("overloaded_" + plural, (_ConstantTransformerBase,), namespace)
|
python
|
{
"resource": ""
}
|
q6087
|
paramnames
|
train
|
def paramnames(co):
    """
    Get the parameter names from a pycode object.

    Returns a 4-tuple of (args, kwonlyargs, varargs, varkwargs).
    varargs and varkwargs will be None if the function doesn't take *args or
    **kwargs, respectively.
    """
    names = co.co_varnames
    n_positional = co.co_argcount
    n_named = n_positional + co.co_kwonlyargcount

    positional = names[:n_positional]
    kwonly = names[n_positional:n_named]

    # *args and **kwargs, when present, follow the named parameters in
    # co_varnames.
    varargs = None
    varkwargs = None
    cursor = n_named
    if co.co_flags & Flag.CO_VARARGS:
        varargs = names[cursor]
        cursor += 1
    if co.co_flags & Flag.CO_VARKEYWORDS:
        varkwargs = names[cursor]
    return positional, kwonly, varargs, varkwargs
|
python
|
{
"resource": ""
}
|
q6088
|
_freevar_argname
|
train
|
def _freevar_argname(arg, cellvars, freevars):
"""
Get the name of the variable manipulated by a 'uses_free' instruction.
Parameters
----------
arg : int
The raw argument to a uses_free instruction that we want to resolve to
a name.
cellvars : list[str]
The co_cellvars of the function for which we want to resolve `arg`.
freevars : list[str]
The co_freevars of the function for which we want to resolve `arg`.
Notes
-----
From https://docs.python.org/3.5/library/dis.html#opcode-LOAD_CLOSURE:
The name of the variable is co_cellvars[i] if i is less than the length
of co_cellvars. Otherwise it is co_freevars[i - len(co_cellvars)]
"""
len_cellvars = len(cellvars)
if arg < len_cellvars:
return cellvars[arg]
return freevars[arg - len_cellvars]
|
python
|
{
"resource": ""
}
|
q6089
|
pycode
|
train
|
def pycode(argcount,
           kwonlyargcount,
           nlocals,
           stacksize,
           flags,
           codestring,
           constants,
           names,
           varnames,
           filename,
           name,
           firstlineno,
           lnotab,
           freevars=(),
           cellvars=()):
    """types.CodeType constructor that accepts keyword arguments.

    Note
    ----
    The positional order matches the CPython 3.5-3.7 ``CodeType``
    signature.  NOTE(review): 3.8+ inserts ``posonlyargcount`` (and 3.11+
    adds further fields) -- confirm the target interpreter before calling
    this on newer Pythons.

    See Also
    --------
    types.CodeType
    """
    return CodeType(
        argcount,
        kwonlyargcount,
        nlocals,
        stacksize,
        flags,
        codestring,
        constants,
        names,
        varnames,
        filename,
        name,
        firstlineno,
        lnotab,
        freevars,
        cellvars,
    )
|
python
|
{
"resource": ""
}
|
q6090
|
Code.from_pycode
|
train
|
def from_pycode(cls, co):
    """Create a Code object from a python code object.

    Parameters
    ----------
    co : CodeType
        The python code object.

    Returns
    -------
    code : Code
        The codetransformer Code object.
    """
    # Make it sparse to instrs[n] is the instruction at bytecode[n]
    sparse_instrs = tuple(
        _sparse_args(
            Instruction.from_opcode(
                b.opcode,
                Instruction._no_arg if b.arg is None else _RawArg(b.arg),
            ) for b in Bytecode(co)
        ),
    )
    for idx, instr in enumerate(sparse_instrs):
        if instr is None:
            # The sparse value
            continue
        if instr.absjmp:
            # Resolve absolute jump offsets to the target instruction.
            instr.arg = sparse_instrs[instr.arg]
        elif instr.reljmp:
            # Relative jumps are measured from the end of this instruction.
            instr.arg = sparse_instrs[instr.arg + idx + argsize + 1]
        elif isinstance(instr, LOAD_CONST):
            instr.arg = co.co_consts[instr.arg]
        elif instr.uses_name:
            instr.arg = co.co_names[instr.arg]
        elif instr.uses_varname:
            instr.arg = co.co_varnames[instr.arg]
        elif instr.uses_free:
            # BUG FIX: _freevar_argname takes (arg, cellvars, freevars);
            # indices below len(co_cellvars) refer to cell variables, so
            # cellvars must be passed first (the arguments were swapped).
            instr.arg = _freevar_argname(
                instr.arg,
                co.co_cellvars,
                co.co_freevars,
            )
        elif instr.have_arg and isinstance(instr.arg, _RawArg):
            instr.arg = int(instr.arg)
    flags = Flag.unpack(co.co_flags)
    has_vargs = flags['CO_VARARGS']
    has_kwargs = flags['CO_VARKEYWORDS']
    # Here we convert the varnames format into our argnames format.
    paramnames = co.co_varnames[
        :(co.co_argcount +
          co.co_kwonlyargcount +
          has_vargs +
          has_kwargs)
    ]
    # We start with the positional arguments.
    new_paramnames = list(paramnames[:co.co_argcount])
    # Add *args next.
    if has_vargs:
        new_paramnames.append('*' + paramnames[-1 - has_kwargs])
    # Add keyword-only arguments next.
    new_paramnames.extend(paramnames[
        co.co_argcount:co.co_argcount + co.co_kwonlyargcount
    ])
    # Add **kwargs last.
    if has_kwargs:
        new_paramnames.append('**' + paramnames[-1])
    return cls(
        filter(bool, sparse_instrs),
        argnames=new_paramnames,
        cellvars=co.co_cellvars,
        freevars=co.co_freevars,
        name=co.co_name,
        filename=co.co_filename,
        firstlineno=co.co_firstlineno,
        lnotab={
            lno: sparse_instrs[off] for off, lno in findlinestarts(co)
        },
        flags=flags,
    )
|
python
|
{
"resource": ""
}
|
q6091
|
Code.to_pycode
|
train
|
def to_pycode(self):
    """Create a python code object from the more abstract
    codetransformer.Code object.

    Returns
    -------
    co : CodeType
        The python code object.

    Notes
    -----
    Relies on the module-level ``argsize`` and ``WORDCODE`` constants to
    decide how instruction arguments are encoded.
    """
    consts = self.consts
    names = self.names
    varnames = self.varnames
    freevars = self.freevars
    cellvars = self.cellvars
    # Encoded bytecode buffer.
    bc = bytearray()
    for instr in self.instrs:
        bc.append(instr.opcode) # Write the opcode byte.
        if isinstance(instr, LOAD_CONST):
            # Resolve the constant index.
            bc.extend(consts.index(instr.arg).to_bytes(argsize, 'little'))
        elif instr.uses_name:
            # Resolve the name index.
            bc.extend(names.index(instr.arg).to_bytes(argsize, 'little'))
        elif instr.uses_varname:
            # Resolve the local variable index.
            bc.extend(
                varnames.index(instr.arg).to_bytes(argsize, 'little'),
            )
        elif instr.uses_free:
            # uses_free is really "uses freevars **or** cellvars".
            try:
                # look for the name in cellvars
                bc.extend(
                    cellvars.index(instr.arg).to_bytes(argsize, 'little'),
                )
            except ValueError:
                # fall back to freevars, incrementing the length of
                # cellvars.
                bc.extend(
                    (freevars.index(instr.arg) + len(cellvars)).to_bytes(
                        argsize,
                        'little',
                    )
                )
        elif instr.absjmp:
            # Resolve the absolute jump target.
            bc.extend(
                self.bytecode_offset(instr.arg).to_bytes(
                    argsize,
                    'little',
                ),
            )
        elif instr.reljmp:
            # Resolve the relative jump target.
            # We do this by subtracting the current instruction's
            # sparse index from the sparse index of the argument.
            # We then subtract argsize - 1 to account for the bytes the
            # current instruction takes up.
            bytecode_offset = self.bytecode_offset
            bc.extend((
                bytecode_offset(instr.arg) -
                bytecode_offset(instr) -
                argsize -
                1
            ).to_bytes(argsize, 'little',))
        elif instr.have_arg:
            # Write any other arg here.
            bc.extend(instr.arg.to_bytes(argsize, 'little'))
        elif WORDCODE:
            # with wordcode, all instructions are padded to 2 bytes
            bc.append(0)
    return CodeType(
        self.argcount,
        self.kwonlyargcount,
        len(varnames),
        self.stacksize,
        self.py_flags,
        bytes(bc),
        consts,
        names,
        varnames,
        self.filename,
        self.name,
        self.firstlineno,
        self.py_lnotab,
        freevars,
        cellvars,
    )
|
python
|
{
"resource": ""
}
|
q6092
|
Code.consts
|
train
|
def consts(self):
    """The constants referenced in this code object.
    """
    # Constants need not be hashable, so order-preserving deduplication is
    # done with a list membership test rather than a set.
    seen = []
    for instruction in self.instrs:
        if isinstance(instruction, LOAD_CONST) and instruction.arg not in seen:
            seen.append(instruction.arg)
    return tuple(seen)
|
python
|
{
"resource": ""
}
|
q6093
|
Code.names
|
train
|
def names(self):
    """The names referenced in this code object.

    Names come from instructions like LOAD_GLOBAL or STORE_ATTR
    where the name of the global or attribute is needed at runtime.
    """
    # The set drops duplicates; sorting gives a stable order across calls.
    unique = {instr.arg for instr in self.instrs if instr.uses_name}
    return tuple(sorted(unique))
|
python
|
{
"resource": ""
}
|
q6094
|
Code.varnames
|
train
|
def varnames(self):
    """The names of all of the local variables in this code object.
    """
    # Argument names come first, in declaration order; the remaining
    # locals are deduplicated and sorted for a stable order across calls.
    extras = {
        instr.arg
        for instr in self.instrs
        if instr.uses_varname and instr.arg not in self._argnames
    }
    return self._argnames + tuple(sorted(extras))
|
python
|
{
"resource": ""
}
|
q6095
|
Code.py_lnotab
|
train
|
def py_lnotab(self):
    """The encoded lnotab that python uses to compute when lines start.

    Note
    ----
    See Objects/lnotab_notes.txt in the cpython source for more details.

    NOTE(review): negative line deltas and address deltas above 255 are
    not chunked here -- confirm callers never produce them.
    """
    reverse_lnotab = reverse_dict(self.lnotab)
    py_lnotab = []
    prev_instr = 0
    prev_lno = self.firstlineno
    for addr, instr in enumerate(_sparse_args(self.instrs)):
        lno = reverse_lnotab.get(instr)
        if lno is None:
            # This instruction doesn't start a new line.
            continue
        delta = lno - prev_lno
        # Emit (address delta, line delta) pairs; line deltas larger than
        # max_lnotab_increment are split across zero-address entries.
        py_lnotab.append(addr - prev_instr)
        py_lnotab.append(min(delta, max_lnotab_increment))
        delta -= max_lnotab_increment
        while delta > 0:
            py_lnotab.append(0)
            py_lnotab.append(min(delta, max_lnotab_increment))
            delta -= max_lnotab_increment
        prev_lno = lno
        prev_instr = addr
    return bytes(py_lnotab)
|
python
|
{
"resource": ""
}
|
q6096
|
Code.stacksize
|
train
|
def stacksize(self):
    """The maximum amount of stack space used by this code object.

    Computed as the maximum of the running sum of stack effects.
    """
    effects = (instr.stack_effect for instr in self.instrs)
    return max(scanl(op.add, 0, effects))
|
python
|
{
"resource": ""
}
|
q6097
|
initialize_slot
|
train
|
def initialize_slot(obj, name, value):
    """Initialize an uninitialized slot to a value.

    If there is already a value for this slot, this is a nop.

    Parameters
    ----------
    obj : immutable
        An immutable object.
    name : str
        The name of the slot to initialize.
    value : any
        The value to initialize the slot to.
    """
    if hasattr(obj, name):
        # Already initialized; leave the existing value untouched.
        return
    object_setattr(obj, name, value)
|
python
|
{
"resource": ""
}
|
q6098
|
scanl
|
train
|
def scanl(f, n, ns):
    """Reduce ns by f starting with n yielding each intermediate value.

    tuple(scanl(f, n, ns))[-1] == reduce(f, ns, n)

    Parameters
    ----------
    f : callable
        A binary function.
    n : any
        The starting value.
    ns : iterable of any
        The iterable to scan over.

    Yields
    ------
    p : any
        The value of reduce(f, ns[:idx]) where idx is the current index.

    Examples
    --------
    >>> import operator as op
    >>> tuple(scanl(op.add, 0, (1, 2, 3, 4)))
    (0, 1, 3, 6, 10)
    """
    acc = n
    yield acc
    for item in ns:
        acc = f(acc, item)
        yield acc
|
python
|
{
"resource": ""
}
|
q6099
|
ffill
|
train
|
def ffill(iterable):
    """Forward fill non None values in some iterable.

    Parameters
    ----------
    iterable : iterable
        The iterable to forward fill.

    Yields
    ------
    e : any
        The last non None value or None if there has not been a non None value.
    """
    it = iter(iterable)
    try:
        previous = next(it)
    except StopIteration:
        # BUG FIX: an empty iterable used to leak StopIteration out of the
        # generator, which PEP 479 (Python 3.7+) turns into a RuntimeError.
        # An empty input should simply yield nothing.
        return
    yield previous
    for e in it:
        if e is None:
            yield previous
        else:
            previous = e
            yield e
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.