_id stringlengths 2 7 | title stringlengths 1 88 | partition stringclasses 3 values | text stringlengths 75 19.8k | language stringclasses 1 value | meta_information dict |
|---|---|---|---|---|---|
q31000 | are_exclusive | train | def are_exclusive(
stmt1, stmt2, exceptions=None
): # pylint: disable=redefined-outer-name
"""return true if the two given statements are mutually exclusive
`exceptions` may be a list of exception names. If specified, discard If
branches and check one of the statement is in an exception handler catching
one of the given exceptions.
algorithm :
1) index stmt1's parents
2) climb among stmt2's parents until we find a common parent
3) if the common parent is a If or TryExcept statement, look if nodes are
in exclusive branches
"""
# NOTE(review): leading indentation was stripped in this dump; nesting must
# be reconstructed from the logic before this code can be executed.
# index stmt1's parents
stmt1_parents = {}
children = {}
node = stmt1.parent
previous = stmt1
# Record every ancestor of stmt1 and, for each ancestor, which child we
# arrived from, so the climb from stmt2 can identify the diverging branch.
while node:
stmt1_parents[node] = 1
children[node] = previous
previous = node
node = node.parent
# climb among stmt2's parents until we find a common parent
node = stmt2.parent
previous = stmt2
while node:
if node in stmt1_parents:
# if the common parent is a If or TryExcept statement, look if
# nodes are in exclusive branches
if isinstance(node, If) and exceptions is None:
if (
node.locate_child(previous)[1]
is not node.locate_child(children[node])[1]
):
return True
elif isinstance(node, TryExcept):
c2attr, c2node = node.locate_child(previous)
c1attr, c1node = node.locate_child(children[node])
if c1node is not c2node:
first_in_body_caught_by_handlers = (
c2attr == "handlers"
and c1attr == "body"
and previous.catch(exceptions)
)
second_in_body_caught_by_handlers = (
c2attr == "body"
and c1attr == "handlers"
and children[node].catch(exceptions)
)
first_in_else_other_in_handlers = (
c2attr == "handlers" and c1attr == "orelse"
)
second_in_else_other_in_handlers = (
c2attr == "orelse" and c1attr == "handlers"
)
if any(
(
first_in_body_caught_by_handlers,
second_in_body_caught_by_handlers,
first_in_else_other_in_handlers,
second_in_else_other_in_handlers,
)
):
return True
# Two distinct handlers of the same TryExcept never both run.
elif c2attr == "handlers" and c1attr == "handlers":
return previous is not children[node]
return False
previous = node
node = node.parent
return False | python | {
"resource": ""
} |
q31001 | _slice_value | train | def _slice_value(index, context=None):
"""Get the value of the given slice index.

Returns an int, None (an unspecified bound), or the _SLICE_SENTINEL
module constant when no concrete value can be determined.
"""
if isinstance(index, Const):
if isinstance(index.value, (int, type(None))):
return index.value
elif index is None:
return None
else:
# Try to infer what the index actually is.
# Since we can't return all the possible values,
# we'll stop at the first possible value.
try:
inferred = next(index.infer(context=context))
except exceptions.InferenceError:
pass
else:
if isinstance(inferred, Const):
if isinstance(inferred.value, (int, type(None))):
return inferred.value
# Use a sentinel, because None can be a valid
# value that this function can return,
# as it is the case for unspecified bounds.
return _SLICE_SENTINEL | python | {
"resource": ""
} |
q31002 | _update_const_classes | train | def _update_const_classes():
"""update constant classes, so the keys of CONST_CLS can be reused"""
# Map every builtin constant type onto the Const node class so that
# lookups by value.__class__ in CONST_CLS succeed for all of them.
klasses = (bool, int, float, complex, str, bytes)
for kls in klasses:
CONST_CLS[kls] = Const | python | {
"resource": ""
} |
q31003 | const_factory | train | def const_factory(value):
"""return an astroid node for a python value"""
# XXX we should probably be stricter here and only consider stuff in
# CONST_CLS or do better treatment: in case where value is not in CONST_CLS,
# we should rather recall the builder on this value than returning an empty
# node (another option being that const_factory shouldn't be called with something
# not in CONST_CLS)
assert not isinstance(value, NodeNG)
# Hack for ignoring elements of a sequence
# or a mapping, in order to avoid transforming
# each element to an AST. This is fixed in 2.0
# and this approach is a temporary hack.
if isinstance(value, (list, set, tuple, dict)):
elts = []
else:
elts = value
try:
# Look up the node class by the value's concrete type, then call the
# matching constructor registered for that node class.
initializer_cls = CONST_CLS[value.__class__]
initializer = _CONST_CLS_CONSTRUCTORS[initializer_cls]
return initializer(initializer_cls, elts)
except (KeyError, AttributeError):
# Unknown value type: fall back to an EmptyNode carrying the raw object.
node = EmptyNode()
node.object = value
return node | python | {
"resource": ""
} |
q31004 | is_from_decorator | train | def is_from_decorator(node):
"""Return True if the given node is the child of a decorator"""
# Walk up the ancestor chain looking for a Decorators node.
parent = node.parent
while parent is not None:
if isinstance(parent, Decorators):
return True
parent = parent.parent
return False | python | {
"resource": ""
} |
q31005 | NodeNG.infer | train | def infer(self, context=None, **kwargs):
"""Get a generator of the inferred values.
This is the main entry point to the inference system.
.. seealso:: :ref:`inference`
If the instance has some explicit inference function set, it will be
called instead of the default interface.
:returns: The inferred values.
:rtype: iterable
"""
if context is not None:
context = context.extra_context.get(self, context)
if self._explicit_inference is not None:
# explicit_inference is not bound, give it self explicitly
try:
# pylint: disable=not-callable
return self._explicit_inference(self, context, **kwargs)
except exceptions.UseInferenceDefault:
pass
if not context:
return self._infer(context, **kwargs)
# Cache results per (node, lookup name, call context, bound node) so
# repeated inference of the same node in the same context is reused.
key = (self, context.lookupname, context.callcontext, context.boundnode)
if key in context.inferred:
return iter(context.inferred[key])
# Cap the number of yielded values at MANAGER.max_inferable_values.
gen = context.cache_generator(key, self._infer(context, **kwargs))
return util.limit_inference(gen, MANAGER.max_inferable_values) | python | {
"resource": ""
} |
q31006 | NodeNG._repr_name | train | def _repr_name(self):
"""Get a name for nice representation.
This is either :attr:`name`, :attr:`attrname`, or the empty string.
:returns: The nice name.
:rtype: str
"""
# Only use name/attrname when neither is a child-node field of this
# node type; otherwise it would be an AST node, not a string.
names = {"name", "attrname"}
if all(name not in self._astroid_fields for name in names):
return getattr(self, "name", getattr(self, "attrname", ""))
return "" | python | {
"resource": ""
} |
q31007 | NodeNG.accept | train | def accept(self, visitor):
"""Visit this node using the given visitor."""
# Dispatch to visitor.visit_<lowercased class name>, e.g. visit_if.
func = getattr(visitor, "visit_" + self.__class__.__name__.lower())
return func(self) | python | {
"resource": ""
} |
q31008 | NodeNG.parent_of | train | def parent_of(self, node):
"""Check if this node is the parent of the given node.
:param node: The node to check if it is the child.
:type node: NodeNG
:returns: True if this node is the parent of the given node,
False otherwise.
:rtype: bool
"""
# "Parent" here means any ancestor, not just the direct parent.
parent = node.parent
while parent is not None:
if self is parent:
return True
parent = parent.parent
return False | python | {
"resource": ""
} |
q31009 | NodeNG.child_sequence | train | def child_sequence(self, child):
"""Search for the sequence that contains this child.
:param child: The child node to search sequences for.
:type child: NodeNG
:returns: The sequence containing the given child node.
:rtype: iterable(NodeNG)
:raises AstroidError: If no sequence could be found that contains
the given child.
"""
for field in self._astroid_fields:
node_or_sequence = getattr(self, field)
# A field holding the child directly is wrapped in a one-element list
# so callers always get a sequence back.
if node_or_sequence is child:
return [node_or_sequence]
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
if (
isinstance(node_or_sequence, (tuple, list))
and child in node_or_sequence
):
return node_or_sequence
msg = "Could not find %s in %s's children"
raise exceptions.AstroidError(msg % (repr(child), repr(self))) | python | {
"resource": ""
} |
q31010 | NodeNG.nearest | train | def nearest(self, nodes):
"""Get the node closest to this one from the given list of nodes.
:param nodes: The list of nodes to search. All of these nodes must
belong to the same module as this one. The list should be
sorted by the line number of the nodes, smallest first.
:type nodes: iterable(NodeNG)
:returns: The node closest to this one in the source code,
or None if one could not be found.
:rtype: NodeNG or None
"""
myroot = self.root()
mylineno = self.fromlineno
nearest = None, 0
# Track the candidate with the highest line number that is still at or
# before this node's own line; the input being sorted lets us break early.
for node in nodes:
assert node.root() is myroot, (
"nodes %s and %s are not from the same module" % (self, node)
)
lineno = node.fromlineno
if node.fromlineno > mylineno:
break
if lineno > nearest[1]:
nearest = node, lineno
# FIXME: raise an exception if nearest is None ?
return nearest[0] | python | {
"resource": ""
} |
q31011 | NodeNG.tolineno | train | def tolineno(self):
"""The last line that this node appears on in the source code.
:type: int or None
"""
if not self._astroid_fields:
# can't have children
lastchild = None
else:
lastchild = self.last_child()
if lastchild is None:
return self.fromlineno
# Recurse into the last child: its end line is also ours.
return lastchild.tolineno | python | {
"resource": ""
} |
q31012 | NodeNG._fixed_source_line | train | def _fixed_source_line(self):
"""Attempt to find the line that this node appears on.
We need this method since not all nodes have :attr:`lineno` set.
:returns: The line number of this node,
or None if this could not be determined.
:rtype: int or None
"""
line = self.lineno
_node = self
try:
# First try descending: take the first child repeatedly until one
# carries a line number.
while line is None:
_node = next(_node.get_children())
line = _node.lineno
except StopIteration:
# No children carried a lineno; fall back to climbing the parents.
_node = self.parent
while _node and line is None:
line = _node.lineno
_node = _node.parent
return line | python | {
"resource": ""
} |
q31013 | Statement.next_sibling | train | def next_sibling(self):
"""The next sibling statement node.
:returns: The next sibling statement node.
:rtype: NodeNG or None
"""
stmts = self.parent.child_sequence(self)
index = stmts.index(self)
try:
return stmts[index + 1]
except IndexError:
# Last statement in the sequence: implicitly return None.
pass | python | {
"resource": ""
} |
q31014 | Statement.previous_sibling | train | def previous_sibling(self):
"""The previous sibling statement.
:returns: The previous sibling statement node.
:rtype: NodeNG or None
"""
stmts = self.parent.child_sequence(self)
index = stmts.index(self)
# index 0 means this is the first statement, so there is no predecessor.
if index >= 1:
return stmts[index - 1]
return None | python | {
"resource": ""
} |
q31015 | _BaseContainer.from_elements | train | def from_elements(cls, elts=None):
"""Create a node of this type from the given list of elements.
:param elts: The list of elements that the node should contain.
:type elts: list(NodeNG)
:returns: A new node containing the given elements.
:rtype: NodeNG
"""
node = cls()
if elts is None:
node.elts = []
else:
# Wrap raw constant values in Const nodes; pass nodes through as-is.
node.elts = [const_factory(e) if _is_const(e) else e for e in elts]
return node | python | {
"resource": ""
} |
q31016 | LookupMixIn.ilookup | train | def ilookup(self, name):
"""Lookup the inferred values of the given variable.
:param name: The variable name to find values for.
:type name: str
:returns: The inferred values of the statements returned from
:meth:`lookup`.
:rtype: iterable
"""
# Scope lookup first, then infer each resulting statement in a fresh
# inference context bound to the owning frame.
frame, stmts = self.lookup(name)
context = contextmod.InferenceContext()
return bases._infer_stmts(stmts, context, frame) | python | {
"resource": ""
} |
q31017 | LookupMixIn._filter_stmts | train | def _filter_stmts(self, stmts, frame, offset):
"""Filter the given list of statements to remove ignorable statements.
If self is not a frame itself and the name is found in the inner
frame locals, statements will be filtered to remove ignorable
statements according to self's location.
:param stmts: The statements to filter.
:type stmts: list(NodeNG)
:param frame: The frame that all of the given statements belong to.
:type frame: NodeNG
:param offset: The line offset to filter statements up to.
:type offset: int
:returns: The filtered statements.
:rtype: list(NodeNG)
"""
# if offset == -1, my actual frame is not the inner frame but its parent
#
# class A(B): pass
#
# we need this to resolve B correctly
if offset == -1:
myframe = self.frame().parent.frame()
else:
myframe = self.frame()
# If the frame of this node is the same as the statement
# of this node, then the node is part of a class or
# a function definition and the frame of this node should be the
# the upper frame, not the frame of the definition.
# For more information why this is important,
# see Pylint issue #295.
# For example, for 'b', the statement is the same
# as the frame / scope:
#
# def test(b=1):
# ...
if self.statement() is myframe and myframe.parent:
myframe = myframe.parent.frame()
mystmt = self.statement()
# line filtering if we are in the same frame
#
# take care node may be missing lineno information (this is the case for
# nodes inserted for living objects)
if myframe is frame and mystmt.fromlineno is not None:
assert mystmt.fromlineno is not None, mystmt
mylineno = mystmt.fromlineno + offset
else:
# disabling lineno filtering
mylineno = 0
# _stmts collects surviving candidate nodes; _stmt_parents keeps, in
# lockstep, the parent of each surviving node's statement (its block).
_stmts = []
_stmt_parents = []
statements = self._get_filtered_node_statements(stmts)
for node, stmt in statements:
# line filtering is on and we have reached our location, break
if stmt.fromlineno > mylineno > 0:
break
# Ignore decorators with the same name as the
# decorated function
# Fixes issue #375
if mystmt is stmt and is_from_decorator(self):
continue
assert hasattr(node, "assign_type"), (
node,
node.scope(),
node.scope().locals,
)
assign_type = node.assign_type()
if node.has_base(self):
break
_stmts, done = assign_type._get_filtered_stmts(self, node, _stmts, mystmt)
if done:
break
optional_assign = assign_type.optional_assign
if optional_assign and assign_type.parent_of(self):
# we are inside a loop, loop var assignment is hiding previous
# assignment
_stmts = [node]
_stmt_parents = [stmt.parent]
continue
# XXX comment various branches below!!!
try:
pindex = _stmt_parents.index(stmt.parent)
except ValueError:
pass
else:
# we got a parent index, this means the currently visited node
# is at the same block level as a previously visited node
if _stmts[pindex].assign_type().parent_of(assign_type):
# both statements are not at the same block level
continue
# if currently visited node is following previously considered
# assignment and both are not exclusive, we can drop the
# previous one. For instance in the following code ::
#
# if a:
# x = 1
# else:
# x = 2
# print x
#
# we can't remove neither x = 1 nor x = 2 when looking for 'x'
# of 'print x'; while in the following ::
#
# x = 1
# x = 2
# print x
#
# we can remove x = 1 when we see x = 2
#
# moreover, on loop assignment types, assignment won't
# necessarily be done if the loop has no iteration, so we don't
# want to clear previous assignments if any (hence the test on
# optional_assign)
if not (optional_assign or are_exclusive(_stmts[pindex], node)):
if (
# In case of partial function node, if the statement is different
# from the origin function then it can be deleted otherwise it should
# remain to be able to correctly infer the call to origin function.
not node.is_function
or node.qname() != "PartialFunction"
or node.name != _stmts[pindex].name
):
del _stmt_parents[pindex]
del _stmts[pindex]
# An assignment in the same block as our own statement shadows
# everything seen so far; a deletion discards the name entirely.
if isinstance(node, AssignName):
if not optional_assign and stmt.parent is mystmt.parent:
_stmts = []
_stmt_parents = []
elif isinstance(node, DelName):
_stmts = []
_stmt_parents = []
continue
if not are_exclusive(self, node):
_stmts.append(node)
_stmt_parents.append(stmt.parent)
return _stmts | python | {
"resource": ""
} |
q31018 | Arguments.format_args | train | def format_args(self):
"""Get the arguments formatted as string.
:returns: The formatted arguments.
:rtype: str
"""
result = []
if self.args:
result.append(
_format_args(
self.args, self.defaults, getattr(self, "annotations", None)
)
)
if self.vararg:
result.append("*%s" % self.vararg)
if self.kwonlyargs:
# A bare "*" separator is required before keyword-only arguments
# when there is no *args to introduce them.
if not self.vararg:
result.append("*")
result.append(
_format_args(
self.kwonlyargs, self.kw_defaults, self.kwonlyargs_annotations
)
)
if self.kwarg:
result.append("**%s" % self.kwarg)
return ", ".join(result) | python | {
"resource": ""
} |
q31019 | Arguments.default_value | train | def default_value(self, argname):
"""Get the default value for an argument.
:param argname: The name of the argument to get the default value for.
:type argname: str
:raises NoDefault: If there is no default value defined for the
given argument.
"""
i = _find_arg(argname, self.args)[0]
if i is not None:
# self.defaults aligns with the *trailing* positional arguments,
# so shift the index by the number of defaultless leading args.
idx = i - (len(self.args) - len(self.defaults))
if idx >= 0:
return self.defaults[idx]
# Keyword-only defaults align one-to-one with kwonlyargs (None = no default).
i = _find_arg(argname, self.kwonlyargs)[0]
if i is not None and self.kw_defaults[i] is not None:
return self.kw_defaults[i]
raise exceptions.NoDefault(func=self.parent, name=argname) | python | {
"resource": ""
} |
q31020 | Arguments.is_argument | train | def is_argument(self, name):
"""Check if the given name is defined in the arguments.
:param name: The name to check for.
:type name: str
:returns: True if the given name is defined in the arguments,
False otherwise.
:rtype: bool
"""
if name == self.vararg:
return True
if name == self.kwarg:
return True
# Check positional arguments (recursively, hence rec=True), then
# keyword-only arguments.
return (
self.find_argname(name, True)[1] is not None
or self.kwonlyargs
and _find_arg(name, self.kwonlyargs, True)[1] is not None
) | python | {
"resource": ""
} |
q31021 | Call.starargs | train | def starargs(self):
"""The positional arguments that unpack something.
:type: list(Starred)
"""
# e.g. f(*xs): the Starred entries among the positional arguments.
args = self.args or []
return [arg for arg in args if isinstance(arg, Starred)] | python | {
"resource": ""
} |
q31022 | Call.kwargs | train | def kwargs(self):
"""The keyword arguments that unpack something.
:type: list(Keyword)
"""
# e.g. f(**d): a Keyword node with arg=None represents a ** unpacking.
keywords = self.keywords or []
return [keyword for keyword in keywords if keyword.arg is None] | python | {
"resource": ""
} |
q31023 | Const.getitem | train | def getitem(self, index, context=None):
"""Get an item from this node if subscriptable.
:param index: The node to use as a subscript index.
:type index: Const or Slice
:raises AstroidTypeError: When the given index cannot be used as a
subscript index, or if this node is not subscriptable.
"""
if isinstance(index, Const):
index_value = index.value
elif isinstance(index, Slice):
# Turn the Slice node into a concrete slice() object.
index_value = _infer_slice(index, context=context)
else:
raise exceptions.AstroidTypeError(
"Could not use type {} as subscript index".format(type(index))
)
try:
# Only str/bytes constants are subscriptable here; other constant
# types fall through to the final AstroidTypeError below.
if isinstance(self.value, (str, bytes)):
return Const(self.value[index_value])
except IndexError as exc:
raise exceptions.AstroidIndexError(
message="Index {index!r} out of range",
node=self,
index=index,
context=context,
) from exc
except TypeError as exc:
raise exceptions.AstroidTypeError(
message="Type error {error!r}", node=self, index=index, context=context
) from exc
raise exceptions.AstroidTypeError("%r (value=%s)" % (self, self.value)) | python | {
"resource": ""
} |
q31024 | ExceptHandler.blockstart_tolineno | train | def blockstart_tolineno(self):
"""The line on which the beginning of this block ends.
:type: int
"""
# Prefer the bound name ("except E as name:"), then the exception type
# ("except E:"), then the bare "except:" line itself.
if self.name:
return self.name.tolineno
if self.type:
return self.type.tolineno
return self.lineno | python | {
"resource": ""
} |
q31025 | ExceptHandler.catch | train | def catch(self, exceptions): # pylint: disable=redefined-outer-name
"""Check if this node handles any of the given exceptions.
If ``exceptions`` is empty, this will default to ``True``.
:param exceptions: The name of the exceptions to check for.
:type exceptions: list(str)
"""
# A bare "except:" (type is None) catches everything; exceptions=None
# means the caller does not care which exceptions are caught.
if self.type is None or exceptions is None:
return True
for node in self.type._get_name_nodes():
if node.name in exceptions:
return True
return False | python | {
"resource": ""
} |
q31026 | Slice._wrap_attribute | train | def _wrap_attribute(self, attr):
"""Wrap the empty attributes of the Slice in a Const node."""
# Falsy attributes (e.g. an unspecified bound) are converted to a
# Const node parented to this Slice; real nodes pass through untouched.
if not attr:
const = const_factory(attr)
const.parent = self
return const
return attr | python | {
"resource": ""
} |
q31027 | Slice.igetattr | train | def igetattr(self, attrname, context=None):
"""Infer the possible values of the given attribute on the slice.
:param attrname: The name of the attribute to infer.
:type attrname: str
:returns: The inferred possible values.
:rtype: iterable(NodeNG)
"""
# start/stop/step map onto the node's lower/upper/step children;
# any other attribute falls back to the regular getattr machinery.
if attrname == "start":
yield self._wrap_attribute(self.lower)
elif attrname == "stop":
yield self._wrap_attribute(self.upper)
elif attrname == "step":
yield self._wrap_attribute(self.step)
else:
yield from self.getattr(attrname, context=context) | python | {
"resource": ""
} |
q31028 | _gi_build_stub | train | def _gi_build_stub(parent):
"""
Inspect the passed module recursively and build stubs for functions,
classes, etc.

Returns the stub as Python source text; classes are stubbed recursively
with their bodies indented.
"""
classes = {}
functions = {}
constants = {}
methods = {}
# Classify every public, valid-identifier attribute of `parent` into one
# of the four buckets above.
for name in dir(parent):
if name.startswith("__"):
continue
# Check if this is a valid name in python
if not re.match(_identifier_re, name):
continue
try:
obj = getattr(parent, name)
except:
continue
if inspect.isclass(obj):
classes[name] = obj
elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
functions[name] = obj
elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
methods[name] = obj
elif (
str(obj).startswith("<flags")
or str(obj).startswith("<enum ")
or str(obj).startswith("<GType ")
or inspect.isdatadescriptor(obj)
):
constants[name] = 0
elif isinstance(obj, (int, str)):
constants[name] = obj
elif callable(obj):
# Fall back to a function for anything callable
functions[name] = obj
else:
# Assume everything else is some manner of constant
constants[name] = 0
ret = ""
if constants:
ret += "# %s constants\n\n" % parent.__name__
for name in sorted(constants):
if name[0].isdigit():
# GDK has some busted constant names like
# Gdk.EventType.2BUTTON_PRESS
continue
val = constants[name]
strval = str(val)
if isinstance(val, str):
strval = '"%s"' % str(val).replace("\\", "\\\\")
ret += "%s = %s\n" % (name, strval)
if ret:
ret += "\n\n"
if functions:
ret += "# %s functions\n\n" % parent.__name__
for name in sorted(functions):
ret += "def %s(*args, **kwargs):\n" % name
ret += " pass\n"
if ret:
ret += "\n\n"
if methods:
ret += "# %s methods\n\n" % parent.__name__
for name in sorted(methods):
ret += "def %s(self, *args, **kwargs):\n" % name
ret += " pass\n"
if ret:
ret += "\n\n"
if classes:
ret += "# %s classes\n\n" % parent.__name__
for name, obj in sorted(classes.items()):
base = "object"
if issubclass(obj, Exception):
base = "Exception"
ret += "class %s(%s):\n" % (name, base)
# Recurse to stub the class body, then indent it one level.
classret = _gi_build_stub(obj)
if not classret:
classret = "pass\n"
for line in classret.splitlines():
ret += " " + line + "\n"
ret += "\n"
return ret | python | {
"resource": ""
} |
q31029 | object_type | train | def object_type(node, context=None):
"""Obtain the type of the given node
This is used to implement the ``type`` builtin, which means that it's
used for inferring type calls, as well as used in a couple of other places
in the inference.
The node will be inferred first, so this function can support all
sorts of objects, as long as they support inference.

Returns util.Uninferable when inference fails or is ambiguous.
"""
try:
# Deduplicate: _object_type may yield the same type several times.
types = set(_object_type(node, context))
except exceptions.InferenceError:
return util.Uninferable
if len(types) > 1 or not types:
return util.Uninferable
return list(types)[0] | python | {
"resource": ""
} |
q31030 | object_isinstance | train | def object_isinstance(node, class_or_seq, context=None):
"""Check if a node 'isinstance' any node in class_or_seq
:param node: A given node
:param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
:rtype: bool
:raises AstroidTypeError: if the given ``classes_or_seq`` are not types
"""
# isinstance(x, C) is implemented as issubclass(type(x), C).
obj_type = object_type(node, context)
if obj_type is util.Uninferable:
return util.Uninferable
return _object_type_is_subclass(obj_type, class_or_seq, context=context) | python | {
"resource": ""
} |
q31031 | object_issubclass | train | def object_issubclass(node, class_or_seq, context=None):
"""Check if a type is a subclass of any node in class_or_seq
:param node: A given node
:param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]]
:rtype: bool
:raises AstroidTypeError: if the given ``classes_or_seq`` are not types
:raises AstroidError: if the type of the given node cannot be inferred
or its type's mro doesn't work
"""
# Unlike object_isinstance, the node itself must already be a class.
if not isinstance(node, nodes.ClassDef):
raise TypeError("{node} needs to be a ClassDef node".format(node=node))
return _object_type_is_subclass(node, class_or_seq, context=context) | python | {
"resource": ""
} |
q31032 | safe_infer | train | def safe_infer(node, context=None):
"""Return the inferred value for the given node.
Return None if inference failed or if there is some ambiguity (more than
one node has been inferred).
"""
try:
inferit = node.infer(context=context)
value = next(inferit)
except exceptions.InferenceError:
return None
# Pull a second value: only StopIteration (exactly one result) lets us
# return the first value; anything else means ambiguity.
try:
next(inferit)
return None # None if there is ambiguity on the inferred node
except exceptions.InferenceError:
return None # there is some kind of ambiguity
except StopIteration:
return value | python | {
"resource": ""
} |
q31033 | has_known_bases | train | def has_known_bases(klass, context=None):
"""Return true if all base classes of a class could be inferred."""
# Result is memoized on the class node itself as _all_bases_known.
try:
return klass._all_bases_known
except AttributeError:
pass
for base in klass.bases:
result = safe_infer(base, context=context)
# TODO: check for A->B->A->B pattern in class structure too?
if (
not isinstance(result, scoped_nodes.ClassDef)
or result is klass
or not has_known_bases(result, context=context)
):
klass._all_bases_known = False
return False
klass._all_bases_known = True
return True | python | {
"resource": ""
} |
q31034 | class_instance_as_index | train | def class_instance_as_index(node):
"""Get the value as an index for the given instance.
If an instance provides an __index__ method, then it can
be used in some scenarios where an integer is expected,
for instance when multiplying or subscripting a list.

Returns a Const int node, or None when __index__ cannot be resolved
to an int constant.
"""
context = contextmod.InferenceContext()
context.callcontext = contextmod.CallContext(args=[node])
try:
for inferred in node.igetattr("__index__", context=context):
# Only bound methods are acceptable; skip anything else.
if not isinstance(inferred, bases.BoundMethod):
continue
for result in inferred.infer_call_result(node, context=context):
if isinstance(result, nodes.Const) and isinstance(result.value, int):
return result
except exceptions.InferenceError:
pass
return None | python | {
"resource": ""
} |
q31035 | object_len | train | def object_len(node, context=None):
"""Infer length of given node object
:param Union[nodes.ClassDef, nodes.Instance] node:
:param node: Node to infer length of
:raises AstroidTypeError: If an invalid node is returned
from __len__ method or no __len__ method exists
:raises InferenceError: If the given node cannot be inferred
or if multiple nodes are inferred
:rtype int: Integer length of node
"""
from astroid.objects import FrozenSet
inferred_node = safe_infer(node, context=context)
if inferred_node is None or inferred_node is util.Uninferable:
raise exceptions.InferenceError(node=node)
if isinstance(inferred_node, nodes.Const) and isinstance(
inferred_node.value, (bytes, str)
):
return len(inferred_node.value)
if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
return len(inferred_node.elts)
if isinstance(inferred_node, nodes.Dict):
return len(inferred_node.items)
try:
node_type = object_type(inferred_node, context=context)
len_call = next(node_type.igetattr("__len__", context=context))
except exceptions.AttributeInferenceError:
raise exceptions.AstroidTypeError(
"object of type '{}' has no len()".format(len_call.pytype())
)
result_of_len = next(len_call.infer_call_result(node, context))
if (
isinstance(result_of_len, nodes.Const)
and result_of_len.pytype() == "builtins.int"
):
return result_of_len.value
raise exceptions.AstroidTypeError(
"'{}' object cannot be interpreted as an integer".format(result_of_len)
) | python | {
"resource": ""
} |
q31036 | _indent | train | def _indent(text, prefix, predicate=None):
"""Adds 'prefix' to the beginning of selected lines in 'text'.
If 'predicate' is provided, 'prefix' will only be added to the lines
where 'predicate(line)' is True. If 'predicate' is not provided,
it will default to adding 'prefix' to all non-empty lines that do not
consist solely of whitespace characters.
"""
if predicate is None:
predicate = lambda line: line.strip()
def prefixed_lines():
for line in text.splitlines(True):
yield prefix + line if predicate(line) else line
return "".join(prefixed_lines()) | python | {
"resource": ""
} |
q31037 | _six_fail_hook | train | def _six_fail_hook(modname):
"""Fix six.moves imports due to the dynamic nature of this
class.
Construct a pseudo-module which contains all the necessary imports
for six
:param modname: Name of failed module
:type modname: str
:return: An astroid module
:rtype: nodes.Module
"""
# Accept "six.moves" itself or any dotted attribute under it.
attribute_of = modname != "six.moves" and modname.startswith("six.moves")
if modname != "six.moves" and not attribute_of:
raise AstroidBuildingError(modname=modname)
# Build a synthetic module from the module-level _IMPORTS source text.
module = AstroidBuilder(MANAGER).string_build(_IMPORTS)
module.name = "six.moves"
if attribute_of:
# Facilitate import of submodules in Moves
start_index = len(module.name)
attribute = modname[start_index:].lstrip(".").replace(".", "_")
try:
import_attr = module.getattr(attribute)[0]
except AttributeInferenceError:
raise AstroidBuildingError(modname=modname)
if isinstance(import_attr, nodes.Import):
submodule = MANAGER.ast_from_module_name(import_attr.names[0][0])
return submodule
# Let dummy submodule imports pass through
# This will cause an Uninferable result, which is okay
return module | python | {
"resource": ""
} |
def find_spec(modpath, path=None):
    """Find a spec for the given module.

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.'), with leading empty strings for explicit relative import

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :rtype: ModuleSpec
    :return: A module spec, which describes how the module was
             found and where.
    """
    _path = path or sys.path

    # Need a copy for not mutating the argument.
    modpath = modpath[:]

    submodule_path = None
    module_parts = modpath[:]
    processed = []

    # Resolve the dotted name one component at a time; each resolved
    # component contributes the search path for the next lookup.
    while modpath:
        modname = modpath.pop(0)
        finder, spec = _find_spec_with_path(
            _path, modname, module_parts, processed, submodule_path or path
        )
        processed.append(modname)
        if modpath:
            submodule_path = finder.contribute_to_path(spec, processed)

    if spec.type == ModuleType.PKG_DIRECTORY:
        # Packages carry the path in which their submodules will be searched.
        spec = spec._replace(submodule_search_locations=submodule_path)

    return spec
"resource": ""
} |
def super_mro(self):
    """Get the MRO which will be used to lookup attributes in this super."""
    # First argument must be a class.
    if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
        raise exceptions.SuperError(
            "The first argument to super must be a subtype of "
            "type, not {mro_pointer}.",
            super_=self,
        )

    if isinstance(self.type, scoped_nodes.ClassDef):
        # `super(type, type)`, most likely in a class method.
        self._class_based = True
        mro_type = self.type
    else:
        # Instance-based super: resolve the instance back to its class.
        mro_type = getattr(self.type, "_proxied", None)
        if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
            raise exceptions.SuperError(
                "The second argument to super must be an "
                "instance or subtype of type, not {type}.",
                super_=self,
            )

    if not mro_type.newstyle:
        raise exceptions.SuperError(
            "Unable to call super on old-style classes.", super_=self
        )

    mro = mro_type.mro()
    if self.mro_pointer not in mro:
        raise exceptions.SuperError(
            "The second argument to super must be an "
            "instance or subtype of type, not {type}.",
            super_=self,
        )

    # Lookup starts *after* the class given as the first argument.
    index = mro.index(self.mro_pointer)
    return mro[index + 1 :]
"resource": ""
} |
def igetattr(self, name, context=None):
    """Retrieve the inferred values of the given attribute name."""
    if name in self.special_attributes:
        yield self.special_attributes.lookup(name)
        return

    try:
        mro = self.super_mro()
    # Don't let invalid MROs or invalid super calls
    # leak out as is from this function.
    except exceptions.SuperError as exc:
        raise exceptions.AttributeInferenceError(
            (
                "Lookup for {name} on {target!r} because super call {super!r} "
                "is invalid."
            ),
            target=self,
            attribute=name,
            context=context,
            super_=exc.super_,
        ) from exc
    except exceptions.MroError as exc:
        raise exceptions.AttributeInferenceError(
            (
                "Lookup for {name} on {target!r} failed because {cls!r} has an "
                "invalid MRO."
            ),
            target=self,
            attribute=name,
            context=context,
            mros=exc.mros,
            cls=exc.cls,
        ) from exc

    # Walk the remaining MRO, yielding every definition of `name`.
    found = False
    for cls in mro:
        if name not in cls.locals:
            continue

        found = True
        for inferred in bases._infer_stmts([cls[name]], context, frame=self):
            if not isinstance(inferred, scoped_nodes.FunctionDef):
                yield inferred
                continue

            # We can obtain different descriptors from a super depending
            # on what we are accessing and where the super call is.
            if inferred.type == "classmethod":
                yield bases.BoundMethod(inferred, cls)
            elif self._scope.type == "classmethod" and inferred.type == "method":
                yield inferred
            elif self._class_based or inferred.type == "staticmethod":
                yield inferred
            elif bases._is_property(inferred):
                # TODO: support other descriptors as well.
                try:
                    yield from inferred.infer_call_result(self, context)
                except exceptions.InferenceError:
                    yield util.Uninferable
            else:
                yield bases.BoundMethod(inferred, cls)

    if not found:
        raise exceptions.AttributeInferenceError(
            target=self, attribute=name, context=context
        )
"resource": ""
} |
q31041 | TransformVisitor._transform | train | def _transform(self, node):
"""Call matching transforms for the given node if any and return the
transformed node.
"""
cls = node.__class__
if cls not in self.transforms:
# no transform registered for this class of node
return node
transforms = self.transforms[cls]
for transform_func, predicate in transforms:
if predicate is None or predicate(node):
ret = transform_func(node)
# if the transformation function returns something, it's
# expected to be a replacement for the node
if ret is not None:
node = ret
if ret.__class__ != cls:
# Can no longer apply the rest of the transforms.
break
return node | python | {
"resource": ""
} |
def unregister_transform(self, node_class, transform, predicate=None):
    """Unregister the given transform.

    The (transform, predicate) pair must match the one originally
    registered; otherwise ``list.remove`` raises ``ValueError``.
    """
    entry = (transform, predicate)
    self.transforms[node_class].remove(entry)
"resource": ""
} |
def _looks_like_lru_cache(node):
    """Check if the given function node is decorated with lru_cache."""
    if not node.decorators:
        return False
    # Only Call decorators can be functools.lru_cache(...) applications.
    return any(
        isinstance(decorator, astroid.Call)
        and _looks_like_functools_member(decorator, "lru_cache")
        for decorator in node.decorators.nodes
    )
"resource": ""
} |
def _looks_like_functools_member(node, member):
    """Check if the given Call node is a functools.*member* call.

    Matches both a bare ``member(...)`` call (name imported directly) and
    a qualified ``functools.member(...)`` call.

    :param node: The Call node whose ``func`` is inspected.
    :param member: The functools attribute name to match, e.g. "lru_cache".
    :returns: True if the call targets functools.<member>, False otherwise.
    :rtype: bool
    """
    func = node.func
    if isinstance(func, astroid.Name):
        return func.name == member
    if isinstance(func, astroid.Attribute):
        return (
            func.attrname == member
            and isinstance(func.expr, astroid.Name)
            and func.expr.name == "functools"
        )
    # Previously this fell through and implicitly returned None for any
    # other callee node type; return an explicit False so the predicate
    # always has a boolean result, matching _looks_like_lru_cache.
    return False
"resource": ""
} |
q31045 | FilterStmtsMixin._get_filtered_stmts | train | def _get_filtered_stmts(self, _, node, _stmts, mystmt):
"""method used in _filter_stmts to get statements and trigger break"""
if self.statement() is mystmt:
# original node's statement is the assignment, only keep
# current node (gen exp, list comp)
return [node], True
return _stmts, False | python | {
"resource": ""
} |
q31046 | ImportFromMixin.real_name | train | def real_name(self, asname):
"""get name from 'as' name"""
for name, _asname in self.names:
if name == "*":
return asname
if not _asname:
name = name.split(".", 1)[0]
_asname = name
if asname == _asname:
return name
raise exceptions.AttributeInferenceError(
"Could not find original name for {attribute} in {target!r}",
target=self,
attribute=asname,
) | python | {
"resource": ""
} |
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    # The typed-ast parser is unavailable on this interpreter; nothing to do.
    if _ast_py3 is None:
        return None

    # "func_type" mode parses "(arg_types) -> return_type" comments.
    func_type = _ast_py3.parse(type_comment, "<type_comment>", "func_type")
    return FunctionType(argtypes=func_type.argtypes, returns=func_type.returns)
"resource": ""
} |
def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
    """Return True if a decorated node has
    an attr decorator applied."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        target = decorator
        if isinstance(target, astroid.Call):  # decorator with arguments
            # Compare against the callee, not the whole call expression.
            target = target.func
        if target.as_string() in decorator_names:
            return True
    return False
"resource": ""
} |
def attr_attributes_transform(node):
    """Given that the ClassNode has an attr decorator,
    rewrite class attributes as instance attributes
    """
    # Astroid can't infer this attribute properly
    # Prevents https://github.com/PyCQA/pylint/issues/1884
    node.locals["__attrs_attrs__"] = [astroid.Unknown(parent=node)]

    for body_node in node.body:
        # Only assignments of the form `name = attr.ib(...)` are rewritten.
        if not isinstance(body_node, astroid.Assign):
            continue
        if not isinstance(body_node.value, astroid.Call):
            continue
        if body_node.value.func.as_string() not in ATTRIB_NAMES:
            continue
        for target in body_node.targets:
            rhs_node = astroid.Unknown(
                lineno=body_node.lineno,
                col_offset=body_node.col_offset,
                parent=body_node,
            )
            node.locals[target.name] = [rhs_node]
"resource": ""
} |
def proxy_alias(alias_name, node_type):
    """Get a Proxy from the given name to the given node type."""
    proxy = type(
        alias_name,
        (lazy_object_proxy.Proxy,),
        {
            # Borrow object's __class__ descriptor so the proxy reports the
            # wrapped type rather than Proxy itself.
            "__class__": object.__dict__["__class__"],
            "__instancecheck__": _instancecheck,
        },
    )
    # The factory is called lazily on first attribute access.
    return proxy(lambda: node_type)
"resource": ""
} |
def limit_inference(iterator, size):
    """Limit inference amount.

    Limit inference amount to help with performance issues with
    exponentially exploding possible results.

    :param iterator: Inference generator to limit
    :type iterator: Iterator(NodeNG)

    :param size: Maximum mount of nodes yielded plus an
        Uninferable at the end if limit reached
    :type size: int

    :yields: A possibly modified generator
    :rtype param: Iterable
    """
    yield from islice(iterator, size)
    # Peek one element past the limit: if the source is not exhausted,
    # signal the truncation with a single Uninferable sentinel.
    if next(iterator, False) is not False:
        yield Uninferable
"resource": ""
} |
def builtin_lookup(name):
    """lookup a name into the builtin module
    return the list of matching statements and the astroid for the builtin
    module
    """
    builtin_astroid = MANAGER.ast_from_module(builtins)
    if name == "__dict__":
        # __dict__ has no statement-level definition in the builtins module.
        return builtin_astroid, ()
    # Missing names simply resolve to an empty statement list.
    return builtin_astroid, builtin_astroid.locals.get(name, ())
"resource": ""
} |
def _infer_decorator_callchain(node):
    """Detect decorator call chaining and see if the end result is a
    static or a classmethod.
    """
    if not isinstance(node, FunctionDef):
        return None
    if not node.parent:
        return None
    try:
        # Infer what the decorator factory returns when applied.
        result = next(node.infer_call_result(node.parent))
    except exceptions.InferenceError:
        return None
    if isinstance(result, bases.Instance):
        result = result._proxied
    if isinstance(result, ClassDef):
        if result.is_subtype_of("%s.classmethod" % BUILTINS):
            return "classmethod"
        if result.is_subtype_of("%s.staticmethod" % BUILTINS):
            return "staticmethod"
    # Not a recognised descriptor type.
    return None
"resource": ""
} |
def _rec_get_names(args, names=None):
    """return a list of all argument names"""
    collected = [] if names is None else names
    for arg in args:
        if isinstance(arg, node_classes.Tuple):
            # Tuple parameters nest further names; recurse into them.
            _rec_get_names(arg.elts, collected)
        else:
            collected.append(arg.name)
    return collected
"resource": ""
} |
def _is_metaclass(klass, seen=None):
    """ Return if the given class can be
    used as a metaclass.
    """
    if klass.name == "type":
        return True
    # `seen` tracks visited qualified names to survive inheritance cycles.
    if seen is None:
        seen = set()
    for base in klass.bases:
        try:
            for baseobj in base.infer():
                baseobj_name = baseobj.qname()
                if baseobj_name in seen:
                    continue
                else:
                    seen.add(baseobj_name)
                if isinstance(baseobj, bases.Instance):
                    # not abstract
                    return False
                if baseobj is util.Uninferable:
                    continue
                if baseobj is klass:
                    continue
                if not isinstance(baseobj, ClassDef):
                    continue
                if baseobj._type == "metaclass":
                    return True
                # Recurse: a class deriving from a metaclass is a metaclass.
                if _is_metaclass(baseobj, seen):
                    return True
        except exceptions.InferenceError:
            continue
    return False
"resource": ""
} |
def _class_type(klass, ancestors=None):
    """return a ClassDef node type to differ metaclass and exception
    from 'regular' classes
    """
    # XXX we have to store ancestors in case we have an ancestor loop
    # The computed type is memoized on klass._type.
    if klass._type is not None:
        return klass._type
    if _is_metaclass(klass):
        klass._type = "metaclass"
    elif klass.name.endswith("Exception"):
        klass._type = "exception"
    else:
        if ancestors is None:
            ancestors = set()
        klass_name = klass.qname()
        if klass_name in ancestors:
            # XXX we are in loop ancestors, and have found no type
            klass._type = "class"
            return "class"
        ancestors.add(klass_name)
        # Otherwise inherit the first non-"class" type found among bases.
        for base in klass.ancestors(recurs=False):
            name = _class_type(base, ancestors)
            if name != "class":
                if name == "metaclass" and not _is_metaclass(klass):
                    # don't propagate it if the current class
                    # can't be a metaclass
                    continue
                klass._type = base.type
                break
    if klass._type is None:
        klass._type = "class"
    return klass._type
"resource": ""
} |
def get_wrapping_class(node):
    """Get the class that wraps the given node.

    We consider that a class wraps a node if the class
    is a parent for the said node.

    :returns: The class that wraps the given node
    :rtype: ClassDef or None
    """
    # Climb the chain of enclosing frames until a ClassDef (or the top) is hit.
    current = node.frame()
    while current is not None and not isinstance(current, ClassDef):
        parent = current.parent
        current = None if parent is None else parent.frame()
    return current
"resource": ""
} |
def qname(self):
    """Get the 'qualified' name of the node.

    For example: module.name, module.class.name ...

    :returns: The qualified name.
    :rtype: str
    """
    # pylint: disable=no-member; github.com/pycqa/astroid/issues/278
    parent = self.parent
    if parent is None:
        # A module (or a detached node) is its own qualified name.
        return self.name
    return "%s.%s" % (parent.frame().qname(), self.name)
"resource": ""
} |
def _scope_lookup(self, node, name, offset=0):
    """XXX method for interfacing the scope lookup"""
    try:
        stmts = node._filter_stmts(self.locals[name], self, offset)
    except KeyError:
        # Name not defined in this scope at all.
        stmts = ()
    if stmts:
        return self, stmts
    if self.parent:  # i.e. not Module
        # nested scope: if parent scope is a function, that's fine
        # else jump to the module
        pscope = self.parent.scope()
        if not pscope.is_function:
            pscope = pscope.root()
        return pscope.scope_lookup(node, name)
    # At module level, fall back to the builtins module.
    return builtin_lookup(name)
"resource": ""
} |
def set_local(self, name, stmt):
    """Define that the given name is declared in the given statement node.

    .. seealso:: :meth:`scope`

    :param name: The name that is being defined.
    :type name: str

    :param stmt: The statement that defines the given name.
    :type stmt: NodeNG
    """
    # Each name maps to the list of statements that (re)define it.
    bucket = self.locals.setdefault(name, [])
    bucket.append(stmt)
"resource": ""
} |
q31061 | LocalsDictNodeNG._append_node | train | def _append_node(self, child):
"""append a child, linking it in the tree"""
# pylint: disable=no-member; depending by the class
# which uses the current class as a mixin or base class.
# It's rewritten in 2.0, so it makes no sense for now
# to spend development time on it.
self.body.append(child)
child.parent = self | python | {
"resource": ""
} |
def add_local_node(self, child_node, name=None):
    """Append a child that should alter the locals of this scope node.

    :param child_node: The child node that will alter locals.
    :type child_node: NodeNG

    :param name: The name of the local that will be altered by
        the given child node.
    :type name: str or None
    """
    # Adding a __class__ node as a child would cause infinite recursion
    # later, so __class__ only updates the locals mapping.
    if name != "__class__":
        self._append_node(child_node)
    self.set_local(name or child_node.name, child_node)
"resource": ""
} |
def scope_lookup(self, node, name, offset=0):
    """Lookup where the given variable is assigned.

    :param node: The node to look for assignments up to.
        Any assignments after the given node are ignored.
    :type node: NodeNG

    :param name: The name of the variable to find assignments for.
    :type name: str

    :param offset: The line offset to filter statements up to.
    :type offset: int

    :returns: This scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin).
    :rtype: tuple(str, list(NodeNG))
    """
    # Module magic attributes (in scope_attrs) that are not shadowed by a
    # real assignment resolve through getattr() instead of locals.
    if name in self.scope_attrs and name not in self.locals:
        try:
            return self, self.getattr(name)
        except exceptions.AttributeInferenceError:
            return self, ()
    return self._scope_lookup(node, name, offset)
"resource": ""
} |
def import_module(self, modname, relative_only=False, level=None):
    """Get the ast for a given module as if imported from this module.

    :param modname: The name of the module to "import".
    :type modname: str

    :param relative_only: Whether to only consider relative imports.
    :type relative_only: bool

    :param level: The level of relative import.
    :type level: int or None

    :returns: The imported module ast.
    :rtype: NodeNG
    """
    # relative_only with no explicit level means "relative to this package".
    if relative_only and level is None:
        level = 0
    absmodname = self.relative_to_absolute_name(modname, level)

    try:
        return MANAGER.ast_from_module_name(absmodname)
    except exceptions.AstroidBuildingError:
        # we only want to import a sub module or package of this module,
        # skip here
        if relative_only:
            raise
    # Fall back to treating the name as absolute.
    return MANAGER.ast_from_module_name(modname)
"resource": ""
} |
def relative_to_absolute_name(self, modname, level):
    """Get the absolute module name for a relative import.

    The relative import can be implicit or explicit.

    :param modname: The module name to convert.
    :type modname: str

    :param level: The level of relative import.
    :type level: int

    :returns: The absolute module name.
    :rtype: str

    :raises TooManyLevelsError: When the relative import refers to a
        module too far above this one.
    """
    # XXX this returns non sens when called on an absolute import
    # like 'pylint.checkers.astroid.utils'
    # XXX doesn't return absolute name if self.name isn't absolute name
    if self.absolute_import_activated() and level is None:
        return modname
    if level:
        # Inside a package, one level refers to the package itself.
        if self.package:
            level = level - 1
        if level and self.name.count(".") < level:
            raise exceptions.TooManyLevelsError(level=level, name=self.name)

        package_name = self.name.rsplit(".", level)[0]
    elif self.package:
        package_name = self.name
    else:
        # Plain module: the containing package is everything before the
        # last dot.
        package_name = self.name.rsplit(".", 1)[0]

    if package_name:
        if not modname:
            return package_name
        return "%s.%s" % (package_name, modname)
    return modname
"resource": ""
} |
def wildcard_import_names(self):
    """The list of imported names when this module is 'wildcard imported'.

    It doesn't include the '__builtins__' name which is added by the
    current CPython implementation of wildcard imports.

    :returns: The list of imported names.
    :rtype: list(str)
    """
    # We separate the different steps of lookup in try/excepts
    # to avoid catching too many Exceptions
    # Default: every public (non-underscore) name defined in the module.
    default = [name for name in self.keys() if not name.startswith("_")]
    try:
        all_values = self["__all__"]
    except KeyError:
        return default

    try:
        explicit = next(all_values.assigned_stmts())
    except exceptions.InferenceError:
        return default
    except AttributeError:
        # not an assignment node
        # XXX infer?
        return default

    # Try our best to detect the exported name.
    inferred = []
    try:
        explicit = next(explicit.infer())
    except exceptions.InferenceError:
        return default
    if not isinstance(explicit, (node_classes.Tuple, node_classes.List)):
        return default

    str_const = lambda node: (
        isinstance(node, node_classes.Const) and isinstance(node.value, str)
    )
    for node in explicit.elts:
        if str_const(node):
            inferred.append(node.value)
        else:
            # Non-literal entries get one round of inference each.
            try:
                inferred_node = next(node.infer())
            except exceptions.InferenceError:
                continue
            if str_const(inferred_node):
                inferred.append(inferred_node.value)
    return inferred
"resource": ""
} |
def type(self):
    """Whether this is a method or function.

    :returns: 'method' if this is a method, 'function' otherwise.
    :rtype: str
    """
    # pylint: disable=no-member
    args = self.args.args
    # A lambda is a method when its first parameter is "self" and it is
    # defined directly inside a class body.
    first_is_self = bool(args) and args[0].name == "self"
    if first_is_self and isinstance(self.parent.scope(), ClassDef):
        return "method"
    return "function"
"resource": ""
} |
def argnames(self):
    """Get the names of each of the arguments.

    :returns: The names of the arguments.
    :rtype: list(str)
    """
    # pylint: disable=no-member; github.com/pycqa/astroid/issues/291
    # args is in fact redefined later on by postinit. Can't be changed
    # to None due to a strong interaction between Lambda and FunctionDef.
    # args may be empty/None for builtin functions.
    names = _rec_get_names(self.args.args) if self.args.args else []
    for extra in (self.args.vararg, self.args.kwarg):
        if extra:
            names.append(extra)
    return names
"resource": ""
} |
def scope_lookup(self, node, name, offset=0):
    """Lookup where the given names is assigned.

    :param node: The node to look for assignments up to.
        Any assignments after the given node are ignored.
    :type node: NodeNG

    :param name: The name to find assignments for.
    :type name: str

    :param offset: The line offset to filter statements up to.
    :type offset: int

    :returns: This scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin).
    :rtype: tuple(str, list(NodeNG))
    """
    # pylint: disable=no-member; github.com/pycqa/astroid/issues/291
    # args is in fact redefined later on by postinit. Can't be changed
    # to None due to a strong interaction between Lambda and FunctionDef.
    # Default values are evaluated in the *enclosing* scope, not in the
    # function's own scope.
    if node in self.args.defaults or node in self.args.kw_defaults:
        frame = self.parent.frame()
        # line offset to avoid that def func(f=func) resolve the default
        # value to the defined function
        offset = -1
    else:
        # check this is not used in function decorators
        frame = self
    return frame._scope_lookup(node, name, offset)
"resource": ""
} |
def extra_decorators(self):
    """The extra decorators that this function can have.

    Additional decorators are considered when they are used as
    assignments, as in ``method = staticmethod(method)``.
    The property will return all the callables that are used for
    decoration.

    :type: list(NodeNG)
    """
    # Only methods can be decorated this way; plain functions have no
    # enclosing class frame to scan.
    frame = self.parent.frame()
    if not isinstance(frame, ClassDef):
        return []

    decorators = []
    for assign in frame._get_assign_nodes():
        if isinstance(assign.value, node_classes.Call) and isinstance(
            assign.value.func, node_classes.Name
        ):
            for assign_node in assign.targets:
                if not isinstance(assign_node, node_classes.AssignName):
                    # Support only `name = callable(name)`
                    continue

                if assign_node.name != self.name:
                    # Interested only in the assignment nodes that
                    # decorates the current method.
                    continue

                try:
                    meth = frame[self.name]
                except KeyError:
                    continue
                else:
                    # Must be a function and in the same frame as the
                    # original method.
                    if (
                        isinstance(meth, FunctionDef)
                        and assign_node.frame() == frame
                    ):
                        decorators.append(assign.value)
    return decorators
"resource": ""
} |
def type(self):
    """The function type for this node.

    Possible values are: method, function, staticmethod, classmethod.

    :type: str
    """
    builtin_descriptors = {"classmethod", "staticmethod"}

    # Assignment-style decorators (name = staticmethod(name)) win first.
    for decorator in self.extra_decorators:
        if decorator.func.name in builtin_descriptors:
            return decorator.func.name

    frame = self.parent.frame()
    type_name = "function"
    if isinstance(frame, ClassDef):
        # __new__ and __init_subclass__ behave as implicit classmethods.
        if self.name == "__new__":
            return "classmethod"
        if sys.version_info >= (3, 6) and self.name == "__init_subclass__":
            return "classmethod"

        type_name = "method"

    if not self.decorators:
        return type_name

    for node in self.decorators.nodes:
        if isinstance(node, node_classes.Name):
            if node.name in builtin_descriptors:
                return node.name

        if isinstance(node, node_classes.Call):
            # Handle the following case:
            # @some_decorator(arg1, arg2)
            # def func(...)
            #
            try:
                current = next(node.func.infer())
            except exceptions.InferenceError:
                continue
            _type = _infer_decorator_callchain(current)
            if _type is not None:
                return _type

        try:
            for inferred in node.infer():
                # Check to see if this returns a static or a class method.
                _type = _infer_decorator_callchain(inferred)
                if _type is not None:
                    return _type

                if not isinstance(inferred, ClassDef):
                    continue
                # A decorator class subclassing classmethod/staticmethod
                # also determines the method type.
                for ancestor in inferred.ancestors():
                    if not isinstance(ancestor, ClassDef):
                        continue
                    if ancestor.is_subtype_of("%s.classmethod" % BUILTINS):
                        return "classmethod"
                    if ancestor.is_subtype_of("%s.staticmethod" % BUILTINS):
                        return "staticmethod"
        except exceptions.InferenceError:
            pass
    return type_name
"resource": ""
} |
def getattr(self, name, context=None):
    """this method doesn't look in the instance_attrs dictionary since it's
    done by an Instance proxy at inference time.
    """
    if name in self.instance_attrs:
        return self.instance_attrs[name]
    if name not in self.special_attributes:
        raise exceptions.AttributeInferenceError(target=self, attribute=name)
    return [self.special_attributes.lookup(name)]
"resource": ""
} |
def igetattr(self, name, context=None):
    """Inferred getattr, which returns an iterator of inferred statements."""
    try:
        return bases._infer_stmts(self.getattr(name, context), context, frame=self)
    except exceptions.AttributeInferenceError as error:
        # Re-raise as InferenceError so callers of inference APIs only
        # need to handle a single failure type.
        raise exceptions.InferenceError(
            error.message, target=self, attribute=name, context=context
        ) from error
"resource": ""
} |
def decoratornames(self):
    """Get the qualified names of each of the decorators on this function.

    :returns: The names of the decorators.
    :rtype: set(str)
    """
    names = set()

    # Consider both syntactic decorators and assignment-style extras.
    decorator_nodes = list(self.extra_decorators)
    if self.decorators is not None:
        decorator_nodes.extend(self.decorators.nodes)

    for decorator in decorator_nodes:
        try:
            for inferred in decorator.infer():
                names.add(inferred.qname())
        except exceptions.InferenceError:
            continue
    return names
"resource": ""
} |
def is_abstract(self, pass_is_abstract=True):
    """Check if the method is abstract.

    A method is considered abstract if any of the following is true:
    * The only statement is 'raise NotImplementedError'
    * The only statement is 'pass' and pass_is_abstract is True
    * The method is annotated with abc.astractproperty/abc.abstractmethod

    :returns: True if the method is abstract, False otherwise.
    :rtype: bool
    """
    if self.decorators:
        for node in self.decorators.nodes:
            try:
                inferred = next(node.infer())
            except exceptions.InferenceError:
                continue
            if inferred and inferred.qname() in (
                "abc.abstractproperty",
                "abc.abstractmethod",
            ):
                return True

    for child_node in self.body:
        if isinstance(child_node, node_classes.Raise):
            if child_node.raises_not_implemented():
                return True
        # NOTE: this returns on the first iteration, so in effect only the
        # first statement of the body is ever examined.
        return pass_is_abstract and isinstance(child_node, node_classes.Pass)
    # empty function is the same as function with a single "pass" statement
    # (implicitly returns None when the body is empty and pass_is_abstract
    # is False)
    if pass_is_abstract:
        return True
"resource": ""
} |
def infer_call_result(self, caller=None, context=None):
    """Infer what the function returns when called.

    :returns: What the function returns.
    :rtype: iterable(NodeNG or Uninferable) or None
    """
    if self.is_generator():
        # Calling a generator function yields a generator object, never
        # the returned values themselves.
        if isinstance(self, AsyncFunctionDef):
            generator_cls = bases.AsyncGenerator
        else:
            generator_cls = bases.Generator
        result = generator_cls(self)
        yield result
        return
    # This is really a gigantic hack to work around metaclass generators
    # that return transient class-generating functions. Pylint's AST structure
    # cannot handle a base class object that is only used for calling __new__,
    # but does not contribute to the inheritance structure itself. We inject
    # a fake class into the hierarchy here for several well-known metaclass
    # generators, and filter it out later.
    if (
        self.name == "with_metaclass"
        and len(self.args.args) == 1
        and self.args.vararg is not None
    ):
        metaclass = next(caller.args[0].infer(context))
        if isinstance(metaclass, ClassDef):
            class_bases = [next(arg.infer(context)) for arg in caller.args[1:]]
            new_class = ClassDef(name="temporary_class")
            new_class.hide = True
            new_class.parent = self
            new_class.postinit(
                bases=[base for base in class_bases if base != util.Uninferable],
                body=[],
                decorators=[],
                metaclass=metaclass,
            )
            yield new_class
            return
    returns = self._get_return_nodes_skip_functions()

    first_return = next(returns, None)
    if not first_return:
        # No return statement at all: nothing can be inferred.
        raise exceptions.InferenceError("Empty return iterator")

    for returnnode in itertools.chain((first_return,), returns):
        if returnnode.value is None:
            # A bare `return` yields None.
            yield node_classes.Const(None)
        else:
            try:
                yield from returnnode.value.infer(context)
            except exceptions.InferenceError:
                yield util.Uninferable
"resource": ""
} |
def implicit_locals(self):
    """Get implicitly defined class definition locals.

    :returns: the the name and Const pair for each local
    :rtype: tuple(tuple(str, node_classes.Const), ...)
    """
    implicit = [("__module__", self.special_attributes.attr___module__)]
    if sys.version_info >= (3, 3):
        # __qualname__ is defined in PEP3155
        implicit.append(("__qualname__", self.special_attributes.attr___qualname__))
    return tuple(implicit)
"resource": ""
} |
def is_subtype_of(self, type_name, context=None):
    """Whether this class is a subtype of the given type.

    :param type_name: The name of the type of check against.
    :type type_name: str

    :returns: True if this class is a subtype of the given type,
        False otherwise.
    :rtype: bool
    """
    # A class is trivially a subtype of itself.
    if self.qname() == type_name:
        return True
    return any(
        ancestor.qname() == type_name
        for ancestor in self.ancestors(context=context)
    )
"resource": ""
} |
def infer_call_result(self, caller, context=None):
    """infer what a class is returning when called"""
    # type(name, bases, dict) three-argument form creates a new class.
    if (
        self.is_subtype_of("%s.type" % (BUILTINS,), context)
        and len(caller.args) == 3
    ):
        result = self._infer_type_call(caller, context)
        yield result
        return

    # A metaclass with a custom __call__ controls what instantiation yields.
    dunder_call = None
    try:
        metaclass = self.metaclass(context=context)
        if metaclass is not None:
            dunder_call = next(metaclass.igetattr("__call__", context))
    except exceptions.AttributeInferenceError:
        pass

    if dunder_call and dunder_call.qname() != "builtins.type.__call__":
        context = contextmod.bind_context_to_node(context, self)
        yield from dunder_call.infer_call_result(caller, context)
    else:
        # Call type.__call__ if not set metaclass
        # (since type is the default metaclass)
        yield bases.Instance(self)
"resource": ""
} |
def scope_lookup(self, node, name, offset=0):
    """Lookup where the given name is assigned.

    :param node: The node to look for assignments up to.
        Any assignments after the given node are ignored.
    :type node: NodeNG

    :param name: The name to find assignments for.
    :type name: str

    :param offset: The line offset to filter statements up to.
    :type offset: int

    :returns: This scope node and the list of assignments associated to the
        given name according to the scope where it has been found (locals,
        globals or builtin).
    :rtype: tuple(str, list(NodeNG))
    """
    # If the name looks like a builtin name, just try to look
    # into the upper scope of this class. We might have a
    # decorator that it's poorly named after a builtin object
    # inside this class.
    lookup_upper_frame = (
        isinstance(node.parent, node_classes.Decorators)
        and name in MANAGER.builtins_module
    )
    if (
        any(node == base or base.parent_of(node) for base in self.bases)
        or lookup_upper_frame
    ):
        # Handle the case where we have either a name
        # in the bases of a class, which exists before
        # the actual definition or the case where we have
        # a Getattr node, with that name.
        #
        # name = ...
        # class A(name):
        #     def name(self): ...
        #
        # import name
        # class A(name.Name):
        #     def name(self): ...

        frame = self.parent.frame()
        # line offset to avoid that class A(A) resolve the ancestor to
        # the defined class
        offset = -1
    else:
        frame = self
    return frame._scope_lookup(node, name, offset)
"resource": ""
} |
q31081 | ClassDef.ancestors | train | def ancestors(self, recurs=True, context=None):
"""Iterate over the base classes in prefixed depth first order.
:param recurs: Whether to recurse or return direct ancestors only.
:type recurs: bool
:returns: The base classes
:rtype: iterable(NodeNG)
"""
# FIXME: should be possible to choose the resolution order
# FIXME: inference make infinite loops possible here
yielded = {self}
if context is None:
context = contextmod.InferenceContext()
if not self.bases and self.qname() != "builtins.object":
yield builtin_lookup("object")[1][0]
return
for stmt in self.bases:
with context.restore_path():
try:
for baseobj in stmt.infer(context):
if not isinstance(baseobj, ClassDef):
if isinstance(baseobj, bases.Instance):
baseobj = baseobj._proxied
else:
continue
if not baseobj.hide:
if baseobj in yielded:
continue
yielded.add(baseobj)
yield baseobj
if not recurs:
continue
for grandpa in baseobj.ancestors(recurs=True, context=context):
if grandpa is self:
# This class is the ancestor of itself.
break
if grandpa in yielded:
continue
yielded.add(grandpa)
yield grandpa
except exceptions.InferenceError:
continue | python | {
"resource": ""
} |
q31082 | ClassDef.local_attr_ancestors | train | def local_attr_ancestors(self, name, context=None):
"""Iterate over the parents that define the given name.
:param name: The name to find definitions for.
:type name: str
:returns: The parents that define the given name.
:rtype: iterable(NodeNG)
"""
# Look up in the mro if we can. This will result in the
# attribute being looked up just as Python does it.
try:
ancestors = self.mro(context)[1:]
except exceptions.MroError:
# Fallback to use ancestors, we can't determine
# a sane MRO.
ancestors = self.ancestors(context=context)
for astroid in ancestors:
if name in astroid:
yield astroid | python | {
"resource": ""
} |
q31083 | ClassDef.instance_attr_ancestors | train | def instance_attr_ancestors(self, name, context=None):
"""Iterate over the parents that define the given name as an attribute.
:param name: The name to find definitions for.
:type name: str
:returns: The parents that define the given name as
an instance attribute.
:rtype: iterable(NodeNG)
"""
for astroid in self.ancestors(context=context):
if name in astroid.instance_attrs:
yield astroid | python | {
"resource": ""
} |
q31084 | ClassDef.local_attr | train | def local_attr(self, name, context=None):
"""Get the list of assign nodes associated to the given name.
Assignments are looked for in both this class and in parents.
:returns: The list of assignments to the given name.
:rtype: list(NodeNG)
:raises AttributeInferenceError: If no attribute with this name
can be found in this class or parent classes.
"""
result = []
if name in self.locals:
result = self.locals[name]
else:
class_node = next(self.local_attr_ancestors(name, context), None)
if class_node:
result = class_node.locals[name]
result = [n for n in result if not isinstance(n, node_classes.DelAttr)]
if result:
return result
raise exceptions.AttributeInferenceError(
target=self, attribute=name, context=context
) | python | {
"resource": ""
} |
q31085 | ClassDef.instance_attr | train | def instance_attr(self, name, context=None):
"""Get the list of nodes associated to the given attribute name.
Assignments are looked for in both this class and in parents.
:returns: The list of assignments to the given name.
:rtype: list(NodeNG)
:raises AttributeInferenceError: If no attribute with this name
can be found in this class or parent classes.
"""
# Return a copy, so we don't modify self.instance_attrs,
# which could lead to infinite loop.
values = list(self.instance_attrs.get(name, []))
# get all values from parents
for class_node in self.instance_attr_ancestors(name, context):
values += class_node.instance_attrs[name]
values = [n for n in values if not isinstance(n, node_classes.DelAttr)]
if values:
return values
raise exceptions.AttributeInferenceError(
target=self, attribute=name, context=context
) | python | {
"resource": ""
} |
q31086 | ClassDef.getattr | train | def getattr(self, name, context=None, class_context=True):
"""Get an attribute from this class, using Python's attribute semantic.
This method doesn't look in the :attr:`instance_attrs` dictionary
since it is done by an :class:`Instance` proxy at inference time.
It may return an :class:`Uninferable` object if
the attribute has not been
found, but a ``__getattr__`` or ``__getattribute__`` method is defined.
If ``class_context`` is given, then it is considered that the
attribute is accessed from a class context,
e.g. ClassDef.attribute, otherwise it might have been accessed
from an instance as well. If ``class_context`` is used in that
case, then a lookup in the implicit metaclass and the explicit
metaclass will be done.
:param name: The attribute to look for.
:type name: str
:param class_context: Whether the attribute can be accessed statically.
:type class_context: bool
:returns: The attribute.
:rtype: list(NodeNG)
:raises AttributeInferenceError: If the attribute cannot be inferred.
"""
values = self.locals.get(name, [])
if name in self.special_attributes and class_context and not values:
result = [self.special_attributes.lookup(name)]
if name == "__bases__":
# Need special treatment, since they are mutable
# and we need to return all the values.
result += values
return result
# don't modify the list in self.locals!
values = list(values)
for classnode in self.ancestors(recurs=True, context=context):
values += classnode.locals.get(name, [])
if class_context:
values += self._metaclass_lookup_attribute(name, context)
if not values:
raise exceptions.AttributeInferenceError(
target=self, attribute=name, context=context
)
# Look for AnnAssigns, which are not attributes in the purest sense.
for value in values:
if isinstance(value, node_classes.AssignName):
stmt = value.statement()
if isinstance(stmt, node_classes.AnnAssign) and stmt.value is None:
raise exceptions.AttributeInferenceError(
target=self, attribute=name, context=context
)
return values | python | {
"resource": ""
} |
q31087 | ClassDef._metaclass_lookup_attribute | train | def _metaclass_lookup_attribute(self, name, context):
"""Search the given name in the implicit and the explicit metaclass."""
attrs = set()
implicit_meta = self.implicit_metaclass()
metaclass = self.metaclass()
for cls in {implicit_meta, metaclass}:
if cls and cls != self and isinstance(cls, ClassDef):
cls_attributes = self._get_attribute_from_metaclass(cls, name, context)
attrs.update(set(cls_attributes))
return attrs | python | {
"resource": ""
} |
q31088 | ClassDef.igetattr | train | def igetattr(self, name, context=None, class_context=True):
"""Infer the possible values of the given variable.
:param name: The name of the variable to infer.
:type name: str
:returns: The inferred possible values.
:rtype: iterable(NodeNG or Uninferable)
"""
# set lookup name since this is necessary to infer on import nodes for
# instance
context = contextmod.copy_context(context)
context.lookupname = name
try:
attr = self.getattr(name, context, class_context=class_context)[0]
for inferred in bases._infer_stmts([attr], context, frame=self):
# yield Uninferable object instead of descriptors when necessary
if not isinstance(inferred, node_classes.Const) and isinstance(
inferred, bases.Instance
):
try:
inferred._proxied.getattr("__get__", context)
except exceptions.AttributeInferenceError:
yield inferred
else:
yield util.Uninferable
else:
yield function_to_method(inferred, self)
except exceptions.AttributeInferenceError as error:
if not name.startswith("__") and self.has_dynamic_getattr(context):
# class handle some dynamic attributes, return a Uninferable object
yield util.Uninferable
else:
raise exceptions.InferenceError(
error.message, target=self, attribute=name, context=context
) | python | {
"resource": ""
} |
q31089 | ClassDef.getitem | train | def getitem(self, index, context=None):
"""Return the inference of a subscript.
This is basically looking up the method in the metaclass and calling it.
:returns: The inferred value of a subscript to this class.
:rtype: NodeNG
:raises AstroidTypeError: If this class does not define a
``__getitem__`` method.
"""
try:
methods = dunder_lookup.lookup(self, "__getitem__")
except exceptions.AttributeInferenceError as exc:
raise exceptions.AstroidTypeError(node=self, context=context) from exc
method = methods[0]
# Create a new callcontext for providing index as an argument.
new_context = contextmod.bind_context_to_node(context, self)
new_context.callcontext = contextmod.CallContext(args=[index])
try:
return next(method.infer_call_result(self, new_context))
except exceptions.InferenceError:
return util.Uninferable | python | {
"resource": ""
} |
q31090 | ClassDef.methods | train | def methods(self):
"""Iterate over all of the method defined in this class and its parents.
:returns: The methods defined on the class.
:rtype: iterable(FunctionDef)
"""
done = {}
for astroid in itertools.chain(iter((self,)), self.ancestors()):
for meth in astroid.mymethods():
if meth.name in done:
continue
done[meth.name] = None
yield meth | python | {
"resource": ""
} |
q31091 | ClassDef.declared_metaclass | train | def declared_metaclass(self, context=None):
"""Return the explicit declared metaclass for the current class.
An explicit declared metaclass is defined
either by passing the ``metaclass`` keyword argument
in the class definition line (Python 3) or (Python 2) by
having a ``__metaclass__`` class attribute, or if there are
no explicit bases but there is a global ``__metaclass__`` variable.
:returns: The metaclass of this class,
or None if one could not be found.
:rtype: NodeNG or None
"""
for base in self.bases:
try:
for baseobj in base.infer(context=context):
if isinstance(baseobj, ClassDef) and baseobj.hide:
self._metaclass = baseobj._metaclass
self._metaclass_hack = True
break
except exceptions.InferenceError:
pass
if self._metaclass:
# Expects this from Py3k TreeRebuilder
try:
return next(
node
for node in self._metaclass.infer(context=context)
if node is not util.Uninferable
)
except (exceptions.InferenceError, StopIteration):
return None
return None | python | {
"resource": ""
} |
q31092 | ClassDef._islots | train | def _islots(self):
""" Return an iterator with the inferred slots. """
if "__slots__" not in self.locals:
return None
for slots in self.igetattr("__slots__"):
# check if __slots__ is a valid type
for meth in ITER_METHODS:
try:
slots.getattr(meth)
break
except exceptions.AttributeInferenceError:
continue
else:
continue
if isinstance(slots, node_classes.Const):
# a string. Ignore the following checks,
# but yield the node, only if it has a value
if slots.value:
yield slots
continue
if not hasattr(slots, "itered"):
# we can't obtain the values, maybe a .deque?
continue
if isinstance(slots, node_classes.Dict):
values = [item[0] for item in slots.items]
else:
values = slots.itered()
if values is util.Uninferable:
continue
if not values:
# Stop the iteration, because the class
# has an empty list of slots.
return values
for elt in values:
try:
for inferred in elt.infer():
if inferred is util.Uninferable:
continue
if not isinstance(
inferred, node_classes.Const
) or not isinstance(inferred.value, str):
continue
if not inferred.value:
continue
yield inferred
except exceptions.InferenceError:
continue
return None | python | {
"resource": ""
} |
q31093 | ClassDef.slots | train | def slots(self):
"""Get all the slots for this node.
:returns: The names of slots for this class.
If the class doesn't define any slot, through the ``__slots__``
variable, then this function will return a None.
Also, it will return None in the case the slots were not inferred.
:rtype: list(str) or None
"""
def grouped_slots():
# Not interested in object, since it can't have slots.
for cls in self.mro()[:-1]:
try:
cls_slots = cls._slots()
except NotImplementedError:
continue
if cls_slots is not None:
yield from cls_slots
else:
yield None
if not self.newstyle:
raise NotImplementedError(
"The concept of slots is undefined for old-style classes."
)
slots = list(grouped_slots())
if not all(slot is not None for slot in slots):
return None
return sorted(slots, key=lambda item: item.value) | python | {
"resource": ""
} |
q31094 | infer_dict | train | def infer_dict(node, context=None):
"""Try to infer a dict call to a Dict node.
The function treats the following cases:
* dict()
* dict(mapping)
* dict(iterable)
* dict(iterable, **kwargs)
* dict(mapping, **kwargs)
* dict(**kwargs)
If a case can't be inferred, we'll fallback to default inference.
"""
call = arguments.CallSite.from_call(node)
if call.has_invalid_arguments() or call.has_invalid_keywords():
raise UseInferenceDefault
args = call.positional_arguments
kwargs = list(call.keyword_arguments.items())
if not args and not kwargs:
# dict()
return nodes.Dict()
elif kwargs and not args:
# dict(a=1, b=2, c=4)
items = [(nodes.Const(key), value) for key, value in kwargs]
elif len(args) == 1 and kwargs:
# dict(some_iterable, b=2, c=4)
elts = _get_elts(args[0], context)
keys = [(nodes.Const(key), value) for key, value in kwargs]
items = elts + keys
elif len(args) == 1:
items = _get_elts(args[0], context)
else:
raise UseInferenceDefault()
value = nodes.Dict(
col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
)
value.postinit(items)
return value | python | {
"resource": ""
} |
q31095 | infer_super | train | def infer_super(node, context=None):
"""Understand super calls.
There are some restrictions for what can be understood:
* unbounded super (one argument form) is not understood.
* if the super call is not inside a function (classmethod or method),
then the default inference will be used.
* if the super arguments can't be inferred, the default inference
will be used.
"""
if len(node.args) == 1:
# Ignore unbounded super.
raise UseInferenceDefault
scope = node.scope()
if not isinstance(scope, nodes.FunctionDef):
# Ignore non-method uses of super.
raise UseInferenceDefault
if scope.type not in ("classmethod", "method"):
# Not interested in staticmethods.
raise UseInferenceDefault
cls = scoped_nodes.get_wrapping_class(scope)
if not len(node.args):
mro_pointer = cls
# In we are in a classmethod, the interpreter will fill
# automatically the class as the second argument, not an instance.
if scope.type == "classmethod":
mro_type = cls
else:
mro_type = cls.instantiate_class()
else:
try:
mro_pointer = next(node.args[0].infer(context=context))
except InferenceError:
raise UseInferenceDefault
try:
mro_type = next(node.args[1].infer(context=context))
except InferenceError:
raise UseInferenceDefault
if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
# No way we could understand this.
raise UseInferenceDefault
super_obj = objects.Super(
mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
)
super_obj.parent = node
return super_obj | python | {
"resource": ""
} |
q31096 | infer_getattr | train | def infer_getattr(node, context=None):
"""Understand getattr calls
If one of the arguments is an Uninferable object, then the
result will be an Uninferable object. Otherwise, the normal attribute
lookup will be done.
"""
obj, attr = _infer_getattr_args(node, context)
if (
obj is util.Uninferable
or attr is util.Uninferable
or not hasattr(obj, "igetattr")
):
return util.Uninferable
try:
return next(obj.igetattr(attr, context=context))
except (StopIteration, InferenceError, AttributeInferenceError):
if len(node.args) == 3:
# Try to infer the default and return it instead.
try:
return next(node.args[2].infer(context=context))
except InferenceError:
raise UseInferenceDefault
raise UseInferenceDefault | python | {
"resource": ""
} |
q31097 | infer_hasattr | train | def infer_hasattr(node, context=None):
"""Understand hasattr calls
This always guarantees three possible outcomes for calling
hasattr: Const(False) when we are sure that the object
doesn't have the intended attribute, Const(True) when
we know that the object has the attribute and Uninferable
when we are unsure of the outcome of the function call.
"""
try:
obj, attr = _infer_getattr_args(node, context)
if (
obj is util.Uninferable
or attr is util.Uninferable
or not hasattr(obj, "getattr")
):
return util.Uninferable
obj.getattr(attr, context=context)
except UseInferenceDefault:
# Can't infer something from this function call.
return util.Uninferable
except AttributeInferenceError:
# Doesn't have it.
return nodes.Const(False)
return nodes.Const(True) | python | {
"resource": ""
} |
q31098 | infer_callable | train | def infer_callable(node, context=None):
"""Understand callable calls
This follows Python's semantics, where an object
is callable if it provides an attribute __call__,
even though that attribute is something which can't be
called.
"""
if len(node.args) != 1:
# Invalid callable call.
raise UseInferenceDefault
argument = node.args[0]
try:
inferred = next(argument.infer(context=context))
except InferenceError:
return util.Uninferable
if inferred is util.Uninferable:
return util.Uninferable
return nodes.Const(inferred.callable()) | python | {
"resource": ""
} |
q31099 | infer_bool | train | def infer_bool(node, context=None):
"""Understand bool calls."""
if len(node.args) > 1:
# Invalid bool call.
raise UseInferenceDefault
if not node.args:
return nodes.Const(False)
argument = node.args[0]
try:
inferred = next(argument.infer(context=context))
except InferenceError:
return util.Uninferable
if inferred is util.Uninferable:
return util.Uninferable
bool_value = inferred.bool_value()
if bool_value is util.Uninferable:
return util.Uninferable
return nodes.Const(bool_value) | python | {
"resource": ""
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.