desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Produce text documentation for a given module object.'
def docmodule(self, object, name=None, mod=None):
name = object.__name__ (synop, desc) = splitdoc(getdoc(object)) result = self.section('NAME', (name + (synop and (' - ' + synop)))) try: all = object.__all__ except AttributeError: all = None try: file = inspect.getabsfile(object) except TypeError: file ...
'Produce text documentation for a given class object.'
def docclass(self, object, name=None, mod=None, *ignored):
realname = object.__name__ name = (name or realname) bases = object.__bases__ def makename(c, m=object.__module__): return classname(c, m) if (name == realname): title = ('class ' + self.bold(realname)) else: title = ((self.bold(name) + ' = class ') + realname...
def formatvalue(self, object):
    """Format an argument default value as text, prefixed with '='."""
    return '=' + self.repr(object)
'Produce text documentation for a function or method object.'
def docroutine(self, object, name=None, mod=None, cl=None):
realname = object.__name__ name = (name or realname) note = '' skipdocs = 0 if inspect.ismethod(object): imclass = object.im_class if cl: if (imclass is not cl): note = (' from ' + classname(imclass, mod)) elif (object.im_self is not None): ...
def docproperty(self, object, name=None, mod=None, cl=None):
    """Produce text documentation for a property via the generic descriptor path."""
    return self._docdescriptor(name, object, mod)
def docdata(self, object, name=None, mod=None, cl=None):
    """Produce text documentation for a data descriptor via the generic descriptor path."""
    return self._docdescriptor(name, object, mod)
'Produce text documentation for a data object.'
def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
repr = self.repr(object) if maxlen: line = (((name and (name + ' = ')) or '') + repr) chop = (maxlen - len(line)) if (chop < 0): repr = (repr[:chop] + '...') line = (((name and (self.bold(name) + ' = ')) or '') + repr) if (doc is not None): line +=...
def getline(self, prompt):
    """Read one line, using raw_input when reading from the real stdin."""
    if self.input is not sys.stdin:
        # Custom input stream: echo the prompt ourselves, then read.
        self.output.write(prompt)
        self.output.flush()
        return self.input.readline()
    return raw_input(prompt)
'Return scope of name. The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.'
def check_name(self, name):
if (name in self.globals): return SC_GLOBAL_EXPLICT if (name in self.cells): return SC_CELL if (name in self.defs): return SC_LOCAL if (self.nested and ((name in self.frees) or (name in self.uses))): return SC_FREE if self.nested: return SC_UNKNOWN else: ...
def force_global(self, name):
    """Mark name as global in this scope, remove it from the free set, and push the change down into any child scope that saw it as free."""
    self.globals[name] = 1
    self.frees.pop(name, None)
    for child in self.children:
        if child.check_name(name) == SC_FREE:
            child.force_global(name)
'Process list of free vars from nested scope. Returns a list of names that are either 1) declared global in the parent or 2) undefined in a top-level parent. In either case, the nested scope should treat them as globals.'
def add_frees(self, names):
child_globals = [] for name in names: sc = self.check_name(name) if self.nested: if ((sc == SC_UNKNOWN) or (sc == SC_FREE) or isinstance(self, ClassScope)): self.frees[name] = 1 elif (sc == SC_GLOBAL_IMPLICIT): child_globals.append(name) ...
def visitAssign(self, node, scope):
    """Visit each assignment target with the assign flag set, then visit the assigned expression normally."""
    for target in node.nodes:
        self.visit(target, scope, 1)
    self.visit(node.expr, scope)
'Verify that class is constructed correctly'
def checkClass(self):
try: assert hasattr(self, 'graph') assert getattr(self, 'NameFinder') assert getattr(self, 'FunctionGen') assert getattr(self, 'ClassGen') except AssertionError as msg: intro = ('Bad class construction for %s' % self.__class__.__name__) raise Assertion...
def getCode(self):
    """Return the code object produced by this object's flow graph."""
    return self.graph.getCode()
def _implicitNameOp(self, prefix, name):
    """Emit a name op for a loop-generated implicit name (e.g. '.0' or '$...'): *_FAST in optimized scopes, *_NAME otherwise."""
    suffix = '_FAST' if self.optimized else '_NAME'
    self.emit(prefix + suffix, name)
def set_lineno(self, node, force=False):
    """Emit SET_LINENO for node's lineno when it differs from the last one emitted (or when force is true); return whether it was emitted."""
    lineno = getattr(node, 'lineno', None)
    if lineno is None:
        return False
    if lineno == self.last_lineno and not force:
        return False
    self.emit('SET_LINENO', lineno)
    self.last_lineno = lineno
    return True
def transform(self, tree):
    """Transform an AST into a modified parse tree, tuple-izing it first if needed."""
    if not isinstance(tree, (tuple, list)):
        tree = parser.st2tuple(tree, line_info=1)
    return self.compile_node(tree)
def parsesuite(self, text):
    """Return a modified parse tree for the given suite text."""
    st = parser.suite(text)
    return self.transform(st)
def parseexpr(self, text):
    """Return a modified parse tree for the given expression text."""
    st = parser.expr(text)
    return self.transform(st)
def parsefile(self, file):
    """Return a modified parse tree for the contents of the given file object (or filename string)."""
    if type(file) == type(''):
        file = open(file)
    source = file.read()
    return self.parsesuite(source)
def com_augassign(self, node):
    """Return a node suitable as the lvalue of an augmented assignment.

    Only Name, Slice, Subscript, and Getattr nodes are allowable
    targets; anything else raises SyntaxError.
    """
    l = self.com_node(node)
    if l.__class__ in (Name, Slice, Subscript, Getattr):
        return l
    # Call-form raise is valid on both Python 2 and 3; the original
    # "raise SyntaxError, msg" statement form is Python-2-only syntax.
    raise SyntaxError("can't assign to %s" % l.__class__.__name__)
def com_binary(self, constructor, nodelist):
    """Compile 'NODE (OP NODE)*' into (type, [node1, ..., nodeN])."""
    if len(nodelist) == 1:
        only = nodelist[0]
        return self.lookup_node(only)(only[1:])
    # Operand nodes sit at the even indices; operators in between are skipped.
    items = [self.lookup_node(n)(n[1:]) for n in nodelist[::2]]
    return constructor(items, lineno=extractLineNo(nodelist))
def preorder(self, tree, visitor, *args):
    """Do a preorder walk of tree using visitor.

    The visitor's visit attribute is rebound to our dispatcher before
    the walk starts, so callbacks recurse through us.
    """
    self.visitor = visitor
    visitor.visit = self.dispatch
    self.dispatch(tree, *args)
def get_features(self):
    """Return the features enabled by future statements (keys of the found mapping)."""
    return self.found.keys()
def getBlocksInOrder(self):
    """Return the blocks in reverse postorder, i.e. each block appears before all of its successors."""
    return order_blocks(self.entry, self.exit)
def getRoot(self):
    """Return the entry block, suitable for use as a dominator-tree root."""
    return self.entry
def has_unconditional_transfer(self):
    """True when the block's final instruction unconditionally transfers control, so execution can never run past this block's bytecode.

    Returns None when there is no (op, arg) final instruction to inspect.
    """
    try:
        opname, _ = self.insts[-1]
    except (IndexError, ValueError):
        return
    return opname in self._uncond_transfer
def get_followers(self):
    """Get the whole set of followers, including the next block and any relative-jump targets."""
    followers = set(self.next)
    followers.update(inst[1] for inst in self.insts
                     if inst[0] in PyFlowGraph.hasjrel)
    return followers
def getContainedGraphs(self):
    """Return all graphs referenced by this block's instruction arguments (e.g. a MAKE_FUNCTION argument carrying a function-body graph)."""
    return [inst[1].graph
            for inst in self.insts
            if len(inst) != 1 and hasattr(inst[1], 'graph')]
def getCode(self):
    """Get a Python code object.

    Runs the remaining assembly passes in order, checking the stage
    marker after each one, then builds the final code object.
    """
    assert self.stage == RAW
    self.computeStackDepth()
    for advance, reached in ((self.flattenGraph, FLAT),
                             (self.convertArgs, CONV),
                             (self.makeByteCode, DONE)):
        advance()
        assert self.stage == reached
    return self.newCodeObject()
'Compute the max stack depth. Approach is to compute the stack effect of each basic block. Then find the path through the code with the largest total effect.'
def computeStackDepth(self):
depth = {} exit = None for b in self.getBlocks(): depth[b] = findDepth(b.getInstructions()) seen = {} def max_depth(b, d): if (b in seen): return d seen[b] = 1 d = (d + depth[b]) children = b.get_children() if children: return m...
'Arrange the blocks in order and resolve jumps'
def flattenGraph(self):
assert (self.stage == RAW) self.insts = insts = [] pc = 0 begin = {} end = {} for b in self.getBlocksInOrder(): begin[b] = pc for inst in b.getInstructions(): insts.append(inst) if (len(inst) == 1): pc = (pc + 1) elif (inst[0] !...
'Convert arguments from symbolic to concrete form'
def convertArgs(self):
assert (self.stage == FLAT) self.consts.insert(0, self.docstring) self.sort_cellvars() for i in range(len(self.insts)): t = self.insts[i] if (len(t) == 2): (opname, oparg) = t conv = self._converters.get(opname, None) if conv: self.inst...
def sort_cellvars(self):
    """Sort cellvars in the order of varnames and prune from freevars.

    Cell variables that also appear in varnames come first, in varnames
    order; any remaining cells are appended afterwards.  Also rebuilds
    self.closure as cellvars followed by freevars.
    """
    cells = {}
    for name in self.cellvars:
        cells[name] = 1
    self.cellvars = [name for name in self.varnames if name in cells]
    for name in self.cellvars:
        del cells[name]
    # list(cells) works on both Python 2 and 3; the original
    # "self.cellvars + cells.keys()" is a TypeError under Python 3,
    # where keys() returns a view rather than a list.
    self.cellvars = self.cellvars + list(cells)
    self.closure = self.cellvars + self.freevars
def _lookupName(self, name, list):
    """Return the index of name in list, appending it if absent.

    Membership requires an exact type match before value comparison,
    because constants such as 2 and 2L (or 1 and True) compare equal
    yet must be stored as distinct entries.
    """
    wanted = type(name)
    for index, item in enumerate(list):
        if type(item) == wanted and item == name:
            return index
    list.append(name)
    return len(list) - 1
def getConsts(self):
    """Return a tuple for the const slot of the code object, recursively converting nested flow graphs (MAKE_FUNCTION bodies) into code objects."""
    return tuple(elt.getCode() if isinstance(elt, PyFlowGraph) else elt
                 for elt in self.consts)
def __init__(self, multi=None):
    """Create a new visitor object.

    If multi is not None, print a message for each error rather than
    raising SyntaxError on the first one.
    """
    self.multi = multi
    self.errors = 0
def __init__(self):
    """Create a new mutex -- initially unlocked, with an empty wait queue."""
    self.locked = False
    self.queue = deque()
def test(self):
    """Test the locked bit of the mutex."""
    return self.locked
def testandset(self):
    """Atomic test-and-set: acquire the lock if free and return True, else return False."""
    if self.locked:
        return False
    self.locked = True
    return True
def lock(self, function, argument):
    """Call function(argument) once the mutex is acquired; if it is already held, queue the (function, argument) pair for later."""
    acquired = self.testandset()
    if acquired:
        function(argument)
    else:
        self.queue.append((function, argument))
def unlock(self):
    """Release the mutex: hand it directly to the next queued waiter if any, otherwise mark it unlocked."""
    if not self.queue:
        self.locked = False
        return
    function, argument = self.queue.popleft()
    function(argument)
'Handle pretty printing operations onto a stream using a set of configured parameters. indent Number of spaces to indent for each level of nesting. width Attempted maximum number of columns in the output. depth The maximum depth to print out nested structures. stream The desired output stream. If omitted (or false), t...
def __init__(self, indent=1, width=80, depth=None, stream=None):
indent = int(indent) width = int(width) assert (indent >= 0), 'indent must be >= 0' assert ((depth is None) or (depth > 0)), 'depth must be > 0' assert width, 'width must be != 0' self._depth = depth self._indent_per_level = indent self._width = width ...
def format(self, object, context, maxlevels, level):
    """Format object for a specific context, returning a string plus flags for whether the repr is 'readable' and whether it is recursive, as produced by _safe_repr."""
    return _safe_repr(object, context, maxlevels, level)
'Run the callback unless it has already been called or cancelled'
def __call__(self, wr=None):
try: del _finalizer_registry[self._key] except KeyError: sub_debug('finalizer no longer registered') else: sub_debug('finalizer calling %s with args %s and kwargs %s', self._callback, self._args, self._kwargs) res = self._callback(*self._args,...
def cancel(self):
    """Cancel finalization of the object; a no-op if it was never (or is no longer) registered."""
    try:
        del _finalizer_registry[self._key]
    except KeyError:
        return
    # Drop every reference the finalizer held so nothing stays alive.
    self._weakref = self._callback = None
    self._args = self._kwargs = self._key = None
def still_active(self):
    """Return whether this finalizer is still waiting to invoke its callback (i.e. its key is still registered)."""
    return self._key in _finalizer_registry
def run(self):
    """Method to be run in the sub-process; can be overridden in a sub-class. Invokes the configured target, if any, with the stored args/kwargs."""
    target = self._target
    if target:
        target(*self._args, **self._kwargs)
'Start child process'
def start(self):
assert (self._popen is None), 'cannot start a process twice' assert (self._parent_pid == os.getpid()), 'can only start a process object created by current process' assert (not _current_process._daemonic), 'daemonic processes are not allowed to have ...
def terminate(self):
    """Terminate the process; sends SIGTERM or uses TerminateProcess() via the popen helper."""
    self._popen.terminate()
def join(self, timeout=None):
    """Wait until the child process terminates (or the timeout elapses); once it has exited, discard it from the parent's children set."""
    assert self._parent_pid == os.getpid(), 'can only join a child process'
    assert self._popen is not None, 'can only join a started process'
    res = self._popen.wait(timeout)
    if res is not None:
        _current_process._children.discard(self)
def is_alive(self):
    """Return whether the process is alive; the current process is always alive, an unstarted one never is."""
    if self is _current_process:
        return True
    assert self._parent_pid == os.getpid(), 'can only test a child process'
    if self._popen is None:
        return False
    # Reap the child if it has exited, then consult the return code.
    self._popen.poll()
    return self._popen.returncode is None
@property
def daemon(self):
    """Return whether the process is a daemon."""
    return self._daemonic
@daemon.setter
def daemon(self, daemonic):
    """Set whether the process is a daemon; only allowed before the process has been started."""
    assert self._popen is None, 'process has already started'
    self._daemonic = daemonic
@authkey.setter
def authkey(self, authkey):
    """Set the authorization key of the process, coercing it to an AuthenticationString."""
    self._authkey = AuthenticationString(authkey)
@property
def exitcode(self):
    """Return the exit code of the process, or None if it has yet to stop."""
    if self._popen is None:
        return None
    return self._popen.poll()
@property
def ident(self):
    """Return the identifier (PID) of the process, or None if it has yet to start."""
    if self is _current_process:
        return os.getpid()
    return self._popen and self._popen.pid
def accept(self):
    """Accept a connection on the bound socket or named pipe of self and return a Connection object, running the two-way authentication handshake when an authkey is configured."""
    conn = self._listener.accept()
    if self._authkey:
        deliver_challenge(conn, self._authkey)
        answer_challenge(conn, self._authkey)
    return conn
def close(self):
    """Close the bound socket or named pipe of self."""
    return self._listener.close()
'Run the server forever'
def serve_forever(self):
current_process()._manager_server = self try: while 1: try: c = self.listener.accept() except (OSError, IOError): continue t = threading.Thread(target=self.handle_request, args=(c,)) t.daemon = True t.start() ...
'Handle a new connection'
def handle_request(self, c):
funcname = result = request = None try: connection.deliver_challenge(c, self.authkey) connection.answer_challenge(c, self.authkey) request = c.recv() (ignore, funcname, args, kwds) = request assert (funcname in self.public), ('%r unrecognized' % funcname) func ...
'Handle requests from the proxies in a particular process/thread'
def serve_client(self, conn):
util.debug('starting server thread to service %r', threading.current_thread().name) recv = conn.recv send = conn.send id_to_obj = self.id_to_obj while (not self.stop): try: methodname = obj = None request = recv() (ident, methodname, args, k...
'Return some info --- useful to spot problems with refcounting'
def debug_info(self, c):
self.mutex.acquire() try: result = [] keys = self.id_to_obj.keys() keys.sort() for ident in keys: if (ident != '0'): result.append((' %s: refcount=%s\n %s' % (ident, self.id_to_refcount[ident], str(self....
def number_of_objects(self, c):
    """Return the number of shared objects, excluding the special '0' entry."""
    return len(self.id_to_obj) - 1
'Shutdown this process'
def shutdown(self, c):
try: util.debug('manager received shutdown message') c.send(('#RETURN', None)) if (sys.stdout != sys.__stdout__): util.debug('resetting stdout, stderr') sys.stdout = sys.__stdout__ sys.stderr = sys.__stderr__ util._run_finalizers(0) ...
'Create a new shared object and return its id'
def create(self, c, typeid, *args, **kwds):
self.mutex.acquire() try: (callable, exposed, method_to_typeid, proxytype) = self.registry[typeid] if (callable is None): assert ((len(args) == 1) and (not kwds)) obj = args[0] else: obj = callable(*args, **kwds) if (exposed is None): ...
def get_methods(self, c, token):
    """Return the exposed method names of the shared object indicated by token."""
    entry = self.id_to_obj[token.id]
    return tuple(entry[1])
def accept_connection(self, c, name):
    """Thread entry point for a newly accepted connection: label the current thread, acknowledge the client, then serve it."""
    threading.current_thread().name = name
    c.send(('#RETURN', None))
    self.serve_client(c)
def get_server(self):
    """Return a Server object with a serve_forever() method and an address attribute; only valid before the manager has been started."""
    assert self._state.value == State.INITIAL
    return Server(self._registry, self._address,
                  self._authkey, self._serializer)
def connect(self):
    """Connect the manager object to the server process and mark its state as started."""
    Listener, Client = listener_client[self._serializer]
    conn = Client(self._address, authkey=self._authkey)
    # A 'dummy' dispatch just verifies that connection and
    # authentication succeed.
    dispatch(conn, None, 'dummy')
    self._state.value = State.STARTED
'Spawn a server process for this manager object'
def start(self, initializer=None, initargs=()):
assert (self._state.value == State.INITIAL) if ((initializer is not None) and (not hasattr(initializer, '__call__'))): raise TypeError('initializer must be a callable') (reader, writer) = connection.Pipe(duplex=False) self._process = Process(target=type(self)._run_server, args=(self....
@classmethod
def _run_server(cls, registry, address, authkey, serializer, writer,
                initializer=None, initargs=()):
    """Create a server, report its address back through writer, and run it forever."""
    if initializer is not None:
        initializer(*initargs)
    server = cls._Server(registry, address, authkey, serializer)
    # Tell the parent process where we are listening, then drop the pipe.
    writer.send(server.address)
    writer.close()
    util.info('manager serving at %r', server.address)
    server.serve_forever()
'Create a new shared object; return the token and exposed tuple'
def _create(self, typeid, *args, **kwds):
assert (self._state.value == State.STARTED), 'server not yet started' conn = self._Client(self._address, authkey=self._authkey) try: (id, exposed) = dispatch(conn, None, 'create', ((typeid,) + args), kwds) finally: conn.close() return (Token(typeid, self._address, id), expos...
def join(self, timeout=None):
    """Join the manager process (if it has been spawned)."""
    self._process.join(timeout)
def _debug_info(self):
    """Return some info about the server's shared objects and connections."""
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'debug_info')
    finally:
        conn.close()
def _number_of_objects(self):
    """Return the number of shared objects held by the server."""
    conn = self._Client(self._address, authkey=self._authkey)
    try:
        return dispatch(conn, None, 'number_of_objects')
    finally:
        conn.close()
'Shutdown the manager process; will be registered as a finalizer'
@staticmethod def _finalize_manager(process, address, authkey, state, _Client):
if process.is_alive(): util.info('sending shutdown message to manager') try: conn = _Client(address, authkey=authkey) try: dispatch(conn, None, 'shutdown') finally: conn.close() except Exception: pass...
'Register a typeid with the manager type'
@classmethod def register(cls, typeid, callable=None, proxytype=None, exposed=None, method_to_typeid=None, create_method=True):
if ('_registry' not in cls.__dict__): cls._registry = cls._registry.copy() if (proxytype is None): proxytype = AutoProxy exposed = (exposed or getattr(proxytype, '_exposed_', None)) method_to_typeid = (method_to_typeid or getattr(proxytype, '_method_to_typeid_', None)) if method_to_t...
'Try to call a method of the referrent and return a copy of the result'
def _callmethod(self, methodname, args=(), kwds={}):
try: conn = self._tls.connection except AttributeError: util.debug('thread %r does not own a connection', threading.current_thread().name) self._connect() conn = self._tls.connection conn.send((self._id, methodname, args, kwds)) (kind, result) = conn.rec...
def _getvalue(self):
    """Get a copy of the value of the referent via the special '#GETVALUE' method."""
    return self._callmethod('#GETVALUE')
def __str__(self):
    """Return the representation of the referent, or a fall-back string if fetching it fails."""
    try:
        return self._callmethod('__repr__')
    except Exception:
        # Reuse the proxy's own repr, patched to note the failure.
        return repr(self)[:-1] + "; '__str__()' failed>"
def _join_exited_workers(self):
    """Reap worker processes that have exited (e.g. after reaching their task limit); return True if any were cleaned up."""
    cleaned = False
    # Walk backwards so deleting by index stays valid.
    for i in reversed(range(len(self._pool))):
        worker = self._pool[i]
        if worker.exitcode is None:
            continue
        debug('cleaning up worker %d' % i)
        worker.join()
        cleaned = True
        del self._pool[i]
    return cleaned
'Bring the number of pool processes up to the specified number, for use after reaping workers which have exited.'
def _repopulate_pool(self):
for i in range((self._processes - len(self._pool))): w = self.Process(target=worker, args=(self._inqueue, self._outqueue, self._initializer, self._initargs, self._maxtasksperchild)) self._pool.append(w) w.name = w.name.replace('Process', 'PoolWorker') w.daemon = True w.start(...
def _maintain_pool(self):
    """Clean up any exited workers and start replacements for them."""
    if self._join_exited_workers():
        self._repopulate_pool()
def apply(self, func, args=(), kwds={}):
    """Equivalent of the apply() builtin: run func(*args, **kwds) in a worker and block for the result."""
    assert self._state == RUN
    # NOTE(review): the mutable default kwds={} is tolerable here only
    # because it is never mutated before being handed to apply_async.
    return self.apply_async(func, args, kwds).get()
def map(self, func, iterable, chunksize=None):
    """Equivalent of the map() builtin: parallel map that blocks until the full result list is ready."""
    assert self._state == RUN
    return self.map_async(func, iterable, chunksize).get()
'Equivalent of `itertools.imap()` -- can be MUCH slower than `Pool.map()`'
def imap(self, func, iterable, chunksize=1):
assert (self._state == RUN) if (chunksize == 1): result = IMapIterator(self._cache) self._taskqueue.put((((result._job, i, func, (x,), {}) for (i, x) in enumerate(iterable)), result._set_length)) return result else: assert (chunksize > 1) task_batches = Pool._get_task...
'Like `imap()` method but ordering of results is arbitrary'
def imap_unordered(self, func, iterable, chunksize=1):
assert (self._state == RUN) if (chunksize == 1): result = IMapUnorderedIterator(self._cache) self._taskqueue.put((((result._job, i, func, (x,), {}) for (i, x) in enumerate(iterable)), result._set_length)) return result else: assert (chunksize > 1) task_batches = Pool....
def apply_async(self, func, args=(), kwds={}, callback=None):
    """Asynchronous equivalent of the apply() builtin; returns an ApplyResult immediately."""
    assert self._state == RUN
    result = ApplyResult(self._cache, callback)
    # A single one-task batch; the trailing None means there is no
    # imap-style set_length hook to call.
    self._taskqueue.put(([(result._job, None, func, args, kwds)], None))
    return result
'Asynchronous equivalent of `map()` builtin'
def map_async(self, func, iterable, chunksize=None, callback=None):
assert (self._state == RUN) if (not hasattr(iterable, '__len__')): iterable = list(iterable) if (chunksize is None): (chunksize, extra) = divmod(len(iterable), (len(self._pool) * 4)) if extra: chunksize += 1 if (len(iterable) == 0): chunksize = 0 task_batc...
def __init__(self):
    """Initialize and reset this instance."""
    self.reset()
def reset(self):
    """Reset this instance. Loses all unprocessed data."""
    self.rawdata = ''
    self.lasttag = '???'
    self.interesting = interesting_normal
    markupbase.ParserBase.reset(self)
def feed(self, data):
    """Feed data to the parser; may be called repeatedly with chunks of any size."""
    self.rawdata += data
    self.goahead(0)
def close(self):
    """Handle any remaining buffered data as if at end of input."""
    self.goahead(1)
def get_starttag_text(self):
    """Return the full source text of the most recent start tag ('<...>')."""
    return self.__starttag_text
def addheader(self, *args):
    """Add a header to be used by the HTTP interface only, e.g. u.addheader('Accept', 'sound/basic')."""
    self.addheaders.append(args)
'Use URLopener().open(file) instead of open(file, \'r\').'
def open(self, fullurl, data=None):
fullurl = unwrap(toBytes(fullurl)) fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|") if (self.tempcache and (fullurl in self.tempcache)): (filename, headers) = self.tempcache[fullurl] fp = open(filename, 'rb') return addinfourl(fp, headers, fullurl) (urltype, url) = splittyp...
def open_unknown(self, fullurl, data=None):
    """Overridable interface to open an unknown URL type; always raises IOError."""
    type, url = splittype(fullurl)
    # Call-form raise is valid on both Python 2 and 3; the original
    # "raise IOError, (...)" statement form is Python-2-only syntax
    # (a tuple after the comma is expanded to the exception's args).
    raise IOError('url error', 'unknown url type', type)
def open_unknown_proxy(self, proxy, fullurl, data=None):
    """Overridable interface to report an invalid proxy for an unknown URL type; always raises IOError."""
    type, url = splittype(fullurl)
    # Call-form raise replaces the Python-2-only "raise IOError, (...)"
    # statement syntax; behavior (exception type and args) is unchanged.
    raise IOError('url error', 'invalid proxy for %s' % type, proxy)
'retrieve(url) returns (filename, headers) for a local object or (tempfilename, headers) for a remote object.'
def retrieve(self, url, filename=None, reporthook=None, data=None):
url = unwrap(toBytes(url)) if (self.tempcache and (url in self.tempcache)): return self.tempcache[url] (type, url1) = splittype(url) if ((filename is None) and ((not type) or (type == 'file'))): try: fp = self.open_local_file(url1) hdrs = fp.info() fp....