id int64 0 458k | file_name stringlengths 4 119 | file_path stringlengths 14 227 | content stringlengths 24 9.96M | size int64 24 9.96M | language stringclasses 1 value | extension stringclasses 14 values | total_lines int64 1 219k | avg_line_length float64 2.52 4.63M | max_line_length int64 5 9.91M | alphanum_fraction float64 0 1 | repo_name stringlengths 7 101 | repo_stars int64 100 139k | repo_forks int64 0 26.4k | repo_open_issues int64 0 2.27k | repo_license stringclasses 12 values | repo_extraction_date stringclasses 433 values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
28,300 | cli.py | DamnWidget_anaconda/anaconda_lib/linting/pydocstyle/cli.py | """Command line interface for pydocstyle."""
import logging
import sys
from .utils import log
from .violations import Error
from .config import ConfigurationParser, IllegalConfiguration
from .checker import check
# Only `main` is the supported public entry point of this module.
__all__ = ('main', )
class ReturnCode(object):
    """Process exit codes returned by `run_pydocstyle`."""

    # No docstring violations were found.
    no_violations_found = 0
    # At least one violation was found and reported.
    violations_found = 1
    # The command line or a configuration file was invalid.
    invalid_options = 2
def run_pydocstyle():
    """Run the checks, print violations, and return a `ReturnCode` value."""
    # Log everything; the stream handlers installed below do the filtering.
    log.setLevel(logging.DEBUG)
    conf = ConfigurationParser()
    setup_stream_handlers(conf.get_default_run_configuration())
    try:
        conf.parse()
    except IllegalConfiguration:
        # The configuration parser already reported the problem to the user.
        return ReturnCode.invalid_options
    run_conf = conf.get_user_run_configuration()
    # Reset the logger according to the command line arguments
    setup_stream_handlers(run_conf)
    log.debug("starting in debug mode.")
    # Propagate output options to the Error class, which renders itself.
    Error.explain = run_conf.explain
    Error.source = run_conf.source
    errors = []
    try:
        for filename, checked_codes, ignore_decorators in \
                conf.get_files_to_check():
            errors.extend(check((filename,), select=checked_codes,
                                ignore_decorators=ignore_decorators))
    except IllegalConfiguration as error:
        # An illegal configuration file was found during file generation.
        log.error(error.args[0])
        return ReturnCode.invalid_options
    count = 0
    for error in errors:
        # Parse failures yield plain exceptions without a `code` attribute;
        # they are counted but not printed here.
        if hasattr(error, 'code'):
            sys.stdout.write('%s\n' % error)
        count += 1
    if count == 0:
        exit_code = ReturnCode.no_violations_found
    else:
        exit_code = ReturnCode.violations_found
    if run_conf.count:
        print(count)
    return exit_code
def main():
    """Run pydocstyle as a script."""
    # Turn Ctrl-C into a quiet exit instead of a traceback; any other
    # outcome is converted into a process exit code.
    try:
        exit_code = run_pydocstyle()
    except KeyboardInterrupt:
        return
    sys.exit(exit_code)
def setup_stream_handlers(conf):
    """Setup logging stream handlers according to the options.

    DEBUG/INFO records go to stdout (gated by `conf.debug`/`conf.verbose`),
    while WARNING and above always go to stderr.
    """
    class StdoutFilter(logging.Filter):
        # Let only DEBUG and INFO through to stdout; higher levels are
        # emitted by the unfiltered stderr handler below.
        def filter(self, record):
            return record.levelno in (logging.DEBUG, logging.INFO)

    # Drop any handlers from a previous call so they are not duplicated.
    log.handlers = []

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(StdoutFilter())
    # Pick the stdout verbosity from the run configuration. (The original
    # code set WARNING here and then immediately overwrote it in the
    # branches below; that redundant call has been removed.)
    if conf.debug:
        stdout_handler.setLevel(logging.DEBUG)
    elif conf.verbose:
        stdout_handler.setLevel(logging.INFO)
    else:
        stdout_handler.setLevel(logging.WARNING)
    log.addHandler(stdout_handler)

    # Warnings and errors always appear on stderr, unfiltered.
    stderr_handler = logging.StreamHandler(sys.stderr)
    stderr_handler.setLevel(logging.WARNING)
    log.addHandler(stderr_handler)
| 2,570 | Python | .py | 73 | 28.767123 | 73 | 0.689015 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,301 | wordlists.py | DamnWidget_anaconda/anaconda_lib/linting/pydocstyle/wordlists.py | """Wordlists loaded from package data.
We can treat them as part of the code for the imperative mood check, and
therefore we load them at import time, rather than on-demand.
"""
import re
import pkgutil
import snowballstemmer
#: Regular expression for stripping #-prefixed comments from the wordlists.
COMMENT_RE = re.compile(r'\s*#.*')

#: Stemmer function for stemming words in English; used so that different
#: inflections of a verb map to the same dictionary key.
stem = snowballstemmer.stemmer('english').stemWord
def load_wordlist(name):
    """Iterate over lines of a wordlist data file.

    `name` should be the name of a package data file within the data/
    directory.

    Whitespace and #-prefixed comments are stripped from each line.
    """
    raw = pkgutil.get_data('pydocstyle', 'data/' + name).decode('utf8')
    cleaned = (COMMENT_RE.sub('', entry).strip()
               for entry in raw.splitlines())
    for word in cleaned:
        if word:
            yield word
#: A dict mapping stemmed verbs to the imperative form
IMPERATIVE_VERBS = {stem(v): v for v in load_wordlist('imperatives.txt')}

#: Words that are forbidden to appear as the first word in a docstring
IMPERATIVE_BLACKLIST = set(load_wordlist('imperatives_blacklist.txt'))
| 1,143 | Python | .py | 26 | 40.307692 | 73 | 0.737319 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,302 | utils.py | DamnWidget_anaconda/anaconda_lib/linting/pydocstyle/utils.py | """General shared utilities."""
import logging
from itertools import tee
try:
from itertools import zip_longest
except ImportError:
from itertools import izip_longest as zip_longest
# Do not update the version manually - it is managed by `bumpversion`.
__version__ = '2.0.1rc'

#: Shared logger used across the pydocstyle package.
log = logging.getLogger(__name__)
def is_blank(string):
    """Return True iff the string contains only whitespaces."""
    stripped = string.strip()
    return stripped == ''
def pairwise(iterable, default_value):
    """Return pairs of items from `iterable`.

    pairwise([1, 2, 3], default_value=None) -> (1, 2), (2, 3), (3, None)
    """
    first, second = tee(iterable)
    # Advance the second iterator one step so it lags the first by one;
    # the last pair is padded with `default_value`.
    next(second, default_value)
    return zip_longest(first, second, fillvalue=default_value)
| 717 | Python | .py | 20 | 32.5 | 71 | 0.7 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,303 | __init__.py | DamnWidget_anaconda/anaconda_lib/linting/pydocstyle/__init__.py | from .checker import check
from .violations import Error, conventions
from .utils import __version__
# Temporary hotfix for flake8-docstrings
from .checker import ConventionChecker, tokenize_open
from .parser import AllError
| 226 | Python | .py | 6 | 36.5 | 53 | 0.840183 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,304 | parser.py | DamnWidget_anaconda/anaconda_lib/linting/pydocstyle/parser.py | """Python code parser."""
import logging
import six
import textwrap
import tokenize as tk
from itertools import chain, dropwhile
from re import compile as re
from .utils import log
# Compatibility shims for very old interpreters.
try:
    from StringIO import StringIO
except ImportError:  # Python 3.0 and later
    from io import StringIO

try:
    next
except NameError:  # Python 2.5 and earlier
    # Minimal backport of the builtin next() for ancient interpreters.
    nothing = object()

    def next(obj, default=nothing):
        # NOTE(review): `default == nothing` should arguably be
        # `default is nothing` -- an argument with a permissive __eq__
        # could compare equal to the sentinel. Harmless on any Python
        # >= 2.6, where this branch never executes.
        if default == nothing:
            return obj.next()
        else:
            try:
                return obj.next()
            except StopIteration:
                return default
#: Names re-exported by `from pydocstyle.parser import *`.
__all__ = ('Parser', 'Definition', 'Module', 'Package', 'Function',
           'NestedFunction', 'Method', 'Class', 'NestedClass', 'AllError',
           'StringIO', 'ParseError')


class ParseError(Exception):
    """Raised when a module cannot be compiled at all."""

    def __str__(self):
        return "Cannot parse file."
def humanize(string):
    """Convert a CamelCase name into lower-case words.

    For example, 'NestedClass' becomes 'nested class'.
    """
    spaced = re(r'(.)([A-Z]+)').sub(r'\1 \2', string)
    return spaced.lower()
class Value(object):
    """A generic object with a list of preset fields.

    Subclasses define `_fields`; the constructor takes exactly one
    positional argument per field, in order.
    """

    def __init__(self, *args):
        if len(args) != len(self._fields):
            raise ValueError('got {} arguments for {} fields for {}: {}'
                             .format(len(args), len(self._fields),
                                     self.__class__.__name__, self._fields))
        vars(self).update(zip(self._fields, args))

    def __hash__(self):
        # Hash on the repr so equal values hash equally.
        return hash(repr(self))

    def __eq__(self, other):
        if not other:
            return other
        return vars(self) == vars(other)

    def __repr__(self):
        parts = ['{}={!r}'.format(field_name, getattr(self, field_name))
                 for field_name in self._fields]
        return '{}({})'.format(self.__class__.__name__, ', '.join(parts))
class Definition(Value):
    """A Python source code definition (could be class, function, etc)."""

    _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring',
               'children', 'parent', 'skipped_error_codes')

    # Human-readable kind, e.g. 'nested class' for NestedClass.
    _human = property(lambda self: humanize(type(self).__name__))
    # Last word of the humanized name: 'function', 'class', ...
    kind = property(lambda self: self._human.split()[-1])
    # Walk up the parent chain to the enclosing Module.
    module = property(lambda self: self.parent.module)
    # The module's __all__ value (or None when absent).
    all = property(lambda self: self.module.all)
    # Slice of the module source lines covered by this definition
    # (start/end are 1-based, inclusive).
    _slice = property(lambda self: slice(self.start - 1, self.end))
    is_class = False

    def __iter__(self):
        # Depth-first iteration over this definition and all descendants.
        return chain([self], *self.children)

    @property
    def _publicity(self):
        return {True: 'public', False: 'private'}[self.is_public]

    @property
    def source(self):
        """Return the source code for the definition."""
        full_src = self._source[self._slice]

        def is_empty_or_comment(line):
            return line.strip() == '' or line.strip().startswith('#')

        # Strip trailing blank/comment lines from the definition body.
        filtered_src = dropwhile(is_empty_or_comment, reversed(full_src))
        return ''.join(reversed(list(filtered_src)))

    def __str__(self):
        out = 'in {} {} `{}`'.format(self._publicity, self._human, self.name)
        if self.skipped_error_codes:
            out += ' (skipping {})'.format(self.skipped_error_codes)
        return out
class Module(Definition):
    """A Python source code module."""

    _fields = ('name', '_source', 'start', 'end', 'decorators', 'docstring',
               'children', 'parent', '_all', 'future_imports',
               'skipped_error_codes')
    # Map a keyword to the class used for definitions nested in a module.
    _nest = staticmethod(lambda s: {'def': Function, 'class': Class}[s])
    # A module is its own enclosing module.
    module = property(lambda self: self)
    all = property(lambda self: self._all)

    @property
    def is_public(self):
        # Public unless the name starts with a single underscore; dunder
        # modules such as `__init__` stay public.
        return not self.name.startswith('_') or self.name.startswith('__')

    def __str__(self):
        return 'at module level'
# Packages behave exactly like modules for parsing purposes; the subclass
# exists so checks can distinguish `__init__.py` files.
class Package(Module):
    """A package is a __init__.py module."""
class Function(Definition):
    """A Python source code function."""

    _nest = staticmethod(lambda s: {'def': NestedFunction,
                                    'class': NestedClass}[s])

    @property
    def is_public(self):
        """Return True iff this function should be considered public."""
        if self.all is not None:
            # __all__ is authoritative when present.
            return self.name in self.all
        else:
            # Otherwise fall back to the underscore naming convention.
            return not self.name.startswith('_')

    @property
    def is_test(self):
        """Return True if this function is a test function/method.

        We exclude tests from the imperative mood check, because to phrase
        their docstring in the imperative mood, they would have to start with
        a highly redundant "Test that ...".
        """
        return self.name.startswith('test') or self.name == 'runTest'
class NestedFunction(Function):
    """A Python source code nested function."""

    # Nested functions are never part of a module's public API.
    is_public = False
class Method(Function):
    """A Python source code method."""

    @property
    def is_magic(self):
        """Return True iff this method is a magic method (e.g., `__str__`)."""
        return (self.name.startswith('__') and
                self.name.endswith('__') and
                self.name not in VARIADIC_MAGIC_METHODS)

    @property
    def is_public(self):
        """Return True iff this method should be considered public."""
        # Check if we are a setter/deleter method, and mark as private if so.
        for decorator in self.decorators:
            # Given 'foo', match 'foo.bar' but not 'foobar' or 'sfoo'
            if re(r"^{}\.".format(self.name)).match(decorator.name):
                return False
        # A method is public if it does not start with an underscore, or is
        # a recognized magic/variadic-magic method.
        name_is_public = (not self.name.startswith('_') or
                          self.name in VARIADIC_MAGIC_METHODS or
                          self.is_magic)
        # A method of a private class is private too.
        return self.parent.is_public and name_is_public
class Class(Definition):
    """A Python source code class."""

    _nest = staticmethod(lambda s: {'def': Method, 'class': NestedClass}[s])
    # Top-level classes follow the same publicity rules as functions.
    is_public = Function.is_public
    is_class = True
class NestedClass(Class):
    """A Python source code nested class."""

    @property
    def is_public(self):
        """Return True iff this class should be considered public."""
        # Only classes nested inside public classes can be public; classes
        # nested in functions never are.
        return (not self.name.startswith('_') and
                self.parent.is_class and
                self.parent.is_public)
class Decorator(Value):
    """A decorator for function, method or class."""

    # `name` is the dotted decorator expression, `arguments` its call args.
    _fields = 'name arguments'.split()


# Magic methods whose presence should not be hidden by the underscore
# naming convention when deciding publicity.
VARIADIC_MAGIC_METHODS = ('__init__', '__call__', '__new__')
class AllError(Exception):
    """Raised when there is a problem with __all__ when parsing."""

    def __init__(self, message):
        """Initialize the error with a more specific message."""
        # NOTE(review): the stray "')" at the end of the URL line below
        # looks like a leftover typo in the user-facing text -- confirm
        # against upstream before changing it.
        Exception.__init__(
            self, message + textwrap.dedent("""
                That means pydocstyle cannot decide which definitions are
                public. Variable __all__ should be present at most once in
                each file, in form
                `__all__ = ('a_public_function', 'APublicClass', ...)`.
                More info on __all__: http://stackoverflow.com/q/44834/. ')
                """))
class TokenStream(object):
    """A one-token-lookahead wrapper around `tokenize`."""

    # A logical newline is where a new expression or statement begins. When
    # there is a physical new line, but not a logical one, for example:
    # (x +
    # y)
    # The token will be tk.NL, not tk.NEWLINE.
    LOGICAL_NEWLINES = {tk.NEWLINE, tk.INDENT, tk.DEDENT}

    def __init__(self, filelike):
        self._generator = tk.generate_tokens(filelike.readline)
        # NOTE(review): if the generator yields nothing, next() returns
        # None here and Token(*None) raises TypeError -- presumably
        # tokenize always produces at least an ENDMARKER; confirm.
        self.current = Token(*next(self._generator, None))
        self.line = self.current.start[0]
        self.log = log
        self.got_logical_newline = True

    def move(self):
        # Advance by one token; return the token that was current.
        previous = self.current
        current = self._next_from_generator()
        self.current = None if current is None else Token(*current)
        # Keep the last seen line number once the stream is exhausted.
        self.line = self.current.start[0] if self.current else self.line
        self.got_logical_newline = (previous.kind in self.LOGICAL_NEWLINES)
        return previous

    def _next_from_generator(self):
        try:
            return next(self._generator, None)
        except (SyntaxError, tk.TokenError):
            # Tokenization errors are logged, not fatal; the stream simply
            # ends early.
            self.log.warning('error generating tokens', exc_info=True)
            return None

    def __iter__(self):
        while True:
            if self.current is not None:
                yield self.current
            else:
                return
            self.move()
class TokenKind(int):
    """An int subclass whose repr shows the symbolic token name.

    For example, ``repr(TokenKind(tokenize.NAME))`` is ``'tk.NAME'``.
    """

    def __repr__(self):
        return 'tk.' + tk.tok_name[self]
class Token(Value):
    """A single lexical token as produced by the `tokenize` module."""

    _fields = 'kind value start end source'.split()

    def __init__(self, *args):
        super(Token, self).__init__(*args)
        # Wrap the raw int kind so debugging output shows symbolic names.
        self.kind = TokenKind(self.kind)
class Parser(object):
    """A Python source code parser."""

    def parse(self, filelike, filename):
        """Parse the given file-like object and return its Module object."""
        self.log = log
        self.source = filelike.readlines()
        src = ''.join(self.source)
        # Compile first so genuine syntax errors surface as ParseError
        # instead of confusing the token-based parser below.
        try:
            compile(src, filename, 'exec')
        except SyntaxError as error:
            six.raise_from(ParseError(), error)
        self.stream = TokenStream(StringIO(src))
        self.filename = filename
        self.all = None
        self.future_imports = set()
        self._accumulated_decorators = []
        return self.parse_module()

    # TODO: remove
    def __call__(self, *args, **kwargs):
        """Call the parse method."""
        return self.parse(*args, **kwargs)

    # Convenience accessors for the underlying token stream.
    current = property(lambda self: self.stream.current)
    line = property(lambda self: self.stream.line)

    def consume(self, kind):
        """Consume one token and verify it is of the expected kind."""
        next_token = self.stream.move()
        assert next_token.kind == kind

    def leapfrog(self, kind, value=None):
        """Skip tokens in the stream until a certain token kind is reached.

        If `value` is specified, tokens whose values are different will also
        be skipped.
        """
        while self.current is not None:
            if (self.current.kind == kind and
                    (value is None or self.current.value == value)):
                self.consume(kind)
                return
            self.stream.move()

    def parse_docstring(self):
        """Parse a single docstring and return its value."""
        self.log.debug("parsing docstring, token is %r (%s)",
                       self.current.kind, self.current.value)
        # Skip comments and (logical or physical) newlines that may precede
        # the docstring.
        while self.current.kind in (tk.COMMENT, tk.NEWLINE, tk.NL):
            self.stream.move()
            self.log.debug("parsing docstring, token is %r (%s)",
                           self.current.kind, self.current.value)
        if self.current.kind == tk.STRING:
            docstring = self.current.value
            self.stream.move()
            return docstring
        return None

    def parse_decorators(self):
        """Called after first @ is found.

        Parse decorators into self._accumulated_decorators.
        Continue to do so until encountering the 'def' or 'class' start token.
        """
        name = []
        arguments = []
        at_arguments = False
        while self.current is not None:
            self.log.debug("parsing decorators, current token is %r (%s)",
                           self.current.kind, self.current.value)
            if (self.current.kind == tk.NAME and
                    self.current.value in ['def', 'class']):
                # Done with decorators - found function or class proper
                break
            elif self.current.kind == tk.OP and self.current.value == '@':
                # New decorator found. Store the decorator accumulated so far:
                self._accumulated_decorators.append(
                    Decorator(''.join(name), ''.join(arguments)))
                # Now reset to begin accumulating the new decorator:
                name = []
                arguments = []
                at_arguments = False
            elif self.current.kind == tk.OP and self.current.value == '(':
                at_arguments = True
            elif self.current.kind == tk.OP and self.current.value == ')':
                # Ignore close parenthesis
                pass
            elif self.current.kind == tk.NEWLINE or self.current.kind == tk.NL:
                # Ignore newlines
                pass
            else:
                # Keep accumulating current decorator's name or argument.
                if not at_arguments:
                    name.append(self.current.value)
                else:
                    arguments.append(self.current.value)
            self.stream.move()
        # Add decorator accumulated so far
        self._accumulated_decorators.append(
            Decorator(''.join(name), ''.join(arguments)))

    def parse_definitions(self, class_, all=False):
        """Parse multiple definitions and yield them."""
        while self.current is not None:
            self.log.debug("parsing definition list, current token is %r (%s)",
                           self.current.kind, self.current.value)
            self.log.debug('got_newline: %s', self.stream.got_logical_newline)
            if all and self.current.value == '__all__':
                self.parse_all()
            elif (self.current.kind == tk.OP and
                    self.current.value == '@' and
                    self.stream.got_logical_newline):
                # An '@' at the start of a logical line is a decorator; an
                # '@' elsewhere is the matrix-multiplication operator.
                self.consume(tk.OP)
                self.parse_decorators()
            elif self.current.value in ['def', 'class']:
                yield self.parse_definition(class_._nest(self.current.value))
            elif self.current.kind == tk.INDENT:
                self.consume(tk.INDENT)
                for definition in self.parse_definitions(class_):
                    yield definition
            elif self.current.kind == tk.DEDENT:
                self.consume(tk.DEDENT)
                return
            elif self.current.value == 'from':
                self.parse_from_import_statement()
            else:
                self.stream.move()

    def parse_all(self):
        """Parse the __all__ definition in a module."""
        assert self.current.value == '__all__'
        self.consume(tk.NAME)
        if self.current.value != '=':
            raise AllError('Could not evaluate contents of __all__. ')
        self.consume(tk.OP)
        if self.current.value not in '([':
            raise AllError('Could not evaluate contents of __all__. ')
        self.consume(tk.OP)
        self.all = []
        all_content = "("
        # Re-assemble the literal from string tokens and commas only; any
        # other token (a name, a call, ...) makes __all__ non-evaluable.
        while self.current.kind != tk.OP or self.current.value not in ")]":
            if self.current.kind in (tk.NL, tk.COMMENT):
                pass
            elif (self.current.kind == tk.STRING or
                    self.current.value == ','):
                all_content += self.current.value
            else:
                raise AllError('Unexpected token kind in __all__: {!r}. '
                               .format(self.current.kind))
            self.stream.move()
        self.consume(tk.OP)
        all_content += ")"
        try:
            # eval is restricted to string literals and commas by the loop
            # above, so this cannot execute arbitrary code.
            self.all = eval(all_content, {})
        except BaseException as e:
            # NOTE(review): '\b' below looks like a typo for '\n' in the
            # user-facing message -- confirm against upstream.
            raise AllError('Could not evaluate contents of __all__.'
                           '\bThe value was {}. The exception was:\n{}'
                           .format(all_content, e))

    def parse_module(self):
        """Parse a module (and its children) and return a Module object."""
        self.log.debug("parsing module.")
        start = self.line
        docstring = self.parse_docstring()
        children = list(self.parse_definitions(Module, all=True))
        assert self.current is None, self.current
        end = self.line
        # `__init__.py` files are represented by the Package subclass.
        cls = Module
        if self.filename.endswith('__init__.py'):
            cls = Package
        module = cls(self.filename, self.source, start, end,
                     [], docstring, children, None, self.all, None, '')
        for child in module.children:
            child.parent = module
        module.future_imports = self.future_imports
        self.log.debug("finished parsing module.")
        return module

    def parse_definition(self, class_):
        """Parse a definition and return its value in a `class_` object."""
        start = self.line
        self.consume(tk.NAME)
        name = self.current.value
        self.log.debug("parsing %s '%s'", class_.__name__, name)
        self.stream.move()
        if self.current.kind == tk.OP and self.current.value == '(':
            # Skip the (possibly nested) parenthesized signature/bases.
            parenthesis_level = 0
            while True:
                if self.current.kind == tk.OP:
                    if self.current.value == '(':
                        parenthesis_level += 1
                    elif self.current.value == ')':
                        parenthesis_level -= 1
                        if parenthesis_level == 0:
                            break
                self.stream.move()
        if self.current.kind != tk.OP or self.current.value != ':':
            # e.g. a return annotation follows; skip to the body colon.
            self.leapfrog(tk.OP, value=":")
        else:
            self.consume(tk.OP)
        if self.current.kind in (tk.NEWLINE, tk.COMMENT):
            # Multi-line definition: the body starts on the next line.
            skipped_error_codes = self.parse_skip_comment()
            self.leapfrog(tk.INDENT)
            assert self.current.kind != tk.INDENT
            docstring = self.parse_docstring()
            decorators = self._accumulated_decorators
            self.log.debug("current accumulated decorators: %s", decorators)
            self._accumulated_decorators = []
            self.log.debug("parsing nested definitions.")
            children = list(self.parse_definitions(class_))
            self.log.debug("finished parsing nested definitions for '%s'",
                           name)
            end = self.line - 1
        else:  # one-liner definition
            skipped_error_codes = ''
            docstring = self.parse_docstring()
            decorators = []  # TODO
            children = []
            end = self.line
            self.leapfrog(tk.NEWLINE)
        definition = class_(name, self.source, start, end,
                            decorators, docstring, children, None,
                            skipped_error_codes)
        for child in definition.children:
            child.parent = definition
        self.log.debug("finished parsing %s '%s'. Next token is %r (%s)",
                       class_.__name__, name, self.current.kind,
                       self.current.value)
        return definition

    def parse_skip_comment(self):
        """Parse a definition comment for noqa skips."""
        skipped_error_codes = ''
        if self.current.kind == tk.COMMENT:
            # '# noqa: D101,D102' skips specific codes; bare '# noqa'
            # skips all checks for the definition.
            if 'noqa: ' in self.current.value:
                skipped_error_codes = ''.join(
                    self.current.value.split('noqa: ')[1:])
            elif self.current.value.startswith('# noqa'):
                skipped_error_codes = 'all'
        return skipped_error_codes

    def check_current(self, kind=None, value=None):
        """Verify the current token is of type `kind` and equals `value`."""
        msg = textwrap.dedent("""
        Unexpected token at line {self.line}:
        In file: {self.filename}
        Got kind {self.current.kind!r}
        Got value {self.current.value}
        """.format(self=self))
        kind_valid = self.current.kind == kind if kind else True
        value_valid = self.current.value == value if value else True
        assert kind_valid and value_valid, msg

    def parse_from_import_statement(self):
        """Parse a 'from x import y' statement.

        The purpose is to find __future__ statements.
        """
        self.log.debug('parsing from/import statement.')
        is_future_import = self._parse_from_import_source()
        self._parse_from_import_names(is_future_import)

    def _parse_from_import_source(self):
        """Parse the 'from x import' part in a 'from x import y' statement.

        Return true iff `x` is __future__.
        """
        assert self.current.value == 'from', self.current.value
        self.stream.move()
        is_future_import = self.current.value == '__future__'
        self.stream.move()
        # Skip over dotted module paths until the 'import' keyword.
        while (self.current is not None and
               self.current.kind in (tk.DOT, tk.NAME, tk.OP) and
               self.current.value != 'import'):
            self.stream.move()
        if self.current is None or self.current.value != 'import':
            return False
        self.check_current(value='import')
        assert self.current.value == 'import', self.current.value
        self.stream.move()
        return is_future_import

    def _parse_from_import_names(self, is_future_import):
        """Parse the 'y' part in a 'from x import y' statement."""
        if self.current.value == '(':
            self.consume(tk.OP)
            expected_end_kinds = (tk.OP, )
        else:
            expected_end_kinds = (tk.NEWLINE, tk.ENDMARKER)
        # Collect names until the closing paren / end of statement / ';'.
        while self.current.kind not in expected_end_kinds and not (
                self.current.kind == tk.OP and self.current.value == ';'):
            if self.current.kind != tk.NAME:
                self.stream.move()
                continue
            self.log.debug("parsing import, token is %r (%s)",
                           self.current.kind, self.current.value)
            if is_future_import:
                self.log.debug('found future import: %s', self.current.value)
                self.future_imports.add(self.current.value)
            self.consume(tk.NAME)
            self.log.debug("parsing import, token is %r (%s)",
                           self.current.kind, self.current.value)
            # Skip over 'as alias' renames; the alias is irrelevant here.
            if self.current.kind == tk.NAME and self.current.value == 'as':
                self.consume(tk.NAME)  # as
                if self.current.kind == tk.NAME:
                    self.consume(tk.NAME)  # new name, irrelevant
            if self.current.value == ',':
                self.consume(tk.OP)
            self.log.debug("parsing import, token is %r (%s)",
                           self.current.kind, self.current.value)
| 21,969 | Python | .py | 494 | 33.433198 | 79 | 0.572304 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,305 | messages.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/messages.py | """
Provide the class Message and its subclasses.
"""
class Message(object):
    """Base class for all pyflakes diagnostics.

    Subclasses set `message` (a %-format template) and fill in
    `message_args`; `__str__` renders 'file:line:col text'.
    """

    message = ''
    message_args = ()

    def __init__(self, filename, loc):
        self.filename = filename
        self.lineno = loc.lineno
        # AST nodes before Python 3 exposed no col_offset on some nodes,
        # hence the permissive lookup.
        self.col = getattr(loc, 'col_offset', 0)

    def __str__(self):
        detail = self.message % self.message_args
        # Columns are 0-based internally but reported 1-based.
        return '%s:%s:%s %s' % (self.filename, self.lineno,
                                self.col + 1, detail)
class UnusedImport(Message):
    """A name was imported but never used."""

    message = '%r imported but unused'

    def __init__(self, filename, loc, name):
        Message.__init__(self, filename, loc)
        self.message_args = (name,)


class RedefinedWhileUnused(Message):
    """A binding was redefined before the original was ever used."""

    message = 'redefinition of unused %r from line %r'

    def __init__(self, filename, loc, name, orig_loc):
        Message.__init__(self, filename, loc)
        self.message_args = (name, orig_loc.lineno)


class RedefinedInListComp(Message):
    """A list comprehension variable shadows a name from an outer scope."""

    message = 'list comprehension redefines %r from line %r'

    def __init__(self, filename, loc, name, orig_loc):
        Message.__init__(self, filename, loc)
        self.message_args = (name, orig_loc.lineno)


class ImportShadowedByLoopVar(Message):
    """A loop variable shadows an imported name."""

    message = 'import %r from line %r shadowed by loop variable'

    def __init__(self, filename, loc, name, orig_loc):
        Message.__init__(self, filename, loc)
        self.message_args = (name, orig_loc.lineno)


class ImportStarNotPermitted(Message):
    """A `from x import *` appeared somewhere other than module level."""

    message = "'from %s import *' only allowed at module level"

    def __init__(self, filename, loc, modname):
        Message.__init__(self, filename, loc)
        self.message_args = (modname,)


class ImportStarUsed(Message):
    """A star import makes undefined-name detection unreliable."""

    message = "'from %s import *' used; unable to detect undefined names"

    def __init__(self, filename, loc, modname):
        Message.__init__(self, filename, loc)
        self.message_args = (modname,)


class ImportStarUsage(Message):
    """A name may be undefined or come from one of several star imports."""

    message = "%r may be undefined, or defined from star imports: %s"

    def __init__(self, filename, loc, name, from_list):
        Message.__init__(self, filename, loc)
        self.message_args = (name, from_list)


class UndefinedName(Message):
    """A name is used but never defined."""

    message = 'undefined name %r'

    def __init__(self, filename, loc, name):
        Message.__init__(self, filename, loc)
        self.message_args = (name,)
class DoctestSyntaxError(Message):
    """A doctest embedded in a docstring could not be parsed."""

    message = 'syntax error in doctest'

    def __init__(self, filename, loc, position=None):
        Message.__init__(self, filename, loc)
        # Doctest locations are relative to the docstring; callers may
        # supply a corrected (line, column) pair.
        if position:
            (self.lineno, self.col) = position
        self.message_args = ()


class UndefinedExport(Message):
    """__all__ lists a name that is not defined in the module."""

    message = 'undefined name %r in __all__'

    def __init__(self, filename, loc, name):
        Message.__init__(self, filename, loc)
        self.message_args = (name,)


class UndefinedLocal(Message):
    """A local name is read before it is assigned in the current scope."""

    # The {0} placeholder is filled with `default` or `builtin` below,
    # depending on where the shadowed binding came from.
    message = 'local variable %r {0} referenced before assignment'
    default = 'defined in enclosing scope on line %r'
    builtin = 'defined as a builtin'

    def __init__(self, filename, loc, name, orig_loc):
        Message.__init__(self, filename, loc)
        if orig_loc is None:
            self.message = self.message.format(self.builtin)
            # NOTE(review): `name` is assigned bare rather than as the
            # one-tuple `(name,)` used everywhere else. This works for a
            # plain string operand of `%`, but confirm against upstream
            # before relying on it.
            self.message_args = name
        else:
            self.message = self.message.format(self.default)
            self.message_args = (name, orig_loc.lineno)


class DuplicateArgument(Message):
    """The same argument name appears twice in a function definition."""

    message = 'duplicate argument %r in function definition'

    def __init__(self, filename, loc, name):
        Message.__init__(self, filename, loc)
        self.message_args = (name,)
class MultiValueRepeatedKeyLiteral(Message):
    """A literal dictionary key occurs twice with different values."""

    message = 'dictionary key %r repeated with different values'

    def __init__(self, filename, loc, key):
        Message.__init__(self, filename, loc)
        self.message_args = (key,)


class MultiValueRepeatedKeyVariable(Message):
    """A variable dictionary key occurs twice with different values."""

    message = 'dictionary key variable %s repeated with different values'

    def __init__(self, filename, loc, key):
        Message.__init__(self, filename, loc)
        self.message_args = (key,)


class LateFutureImport(Message):
    """A __future__ import appears after other statements."""

    message = 'from __future__ imports must occur at the beginning of the file'

    def __init__(self, filename, loc, names):
        Message.__init__(self, filename, loc)
        # `names` is accepted for API compatibility but the message text
        # does not use it.
        self.message_args = ()


class FutureFeatureNotDefined(Message):
    """An undefined __future__ feature name was imported."""

    message = 'future feature %s is not defined'

    def __init__(self, filename, loc, name):
        Message.__init__(self, filename, loc)
        self.message_args = (name,)


class UnusedVariable(Message):
    """
    Indicates that a variable has been explicitly assigned to but not actually
    used.
    """

    message = 'local variable %r is assigned to but never used'

    def __init__(self, filename, loc, names):
        Message.__init__(self, filename, loc)
        self.message_args = (names,)
# Statement-level diagnostics. These classes carry no extra arguments:
# the message text is fully determined by the class itself.
class ReturnWithArgsInsideGenerator(Message):
    """
    Indicates a return statement with arguments inside a generator.
    """
    message = '\'return\' with argument inside generator'


class ReturnOutsideFunction(Message):
    """
    Indicates a return statement outside of a function/method.
    """
    message = '\'return\' outside function'


class YieldOutsideFunction(Message):
    """
    Indicates a yield or yield from statement outside of a function/method.
    """
    message = '\'yield\' outside function'


# For whatever reason, Python gives different error messages for these two. We
# match the Python error message exactly.
class ContinueOutsideLoop(Message):
    """
    Indicates a continue statement outside of a while or for loop.
    """
    message = '\'continue\' not properly in loop'


class BreakOutsideLoop(Message):
    """
    Indicates a break statement outside of a while or for loop.
    """
    message = '\'break\' outside loop'


class ContinueInFinally(Message):
    """
    Indicates a continue statement in a finally block in a while or for loop.
    """
    message = '\'continue\' not supported inside \'finally\' clause'


class DefaultExceptNotLast(Message):
    """
    Indicates an except: block as not the last exception handler.
    """
    message = 'default \'except:\' must be last'


class TwoStarredExpressions(Message):
    """
    Two or more starred expressions in an assignment (a, *b, *c = d).
    """
    message = 'two starred expressions in assignment'


class TooManyExpressionsInStarredAssignment(Message):
    """
    Too many expressions in an assignment with star-unpacking
    """
    message = 'too many expressions in star-unpacking assignment'


class IfTuple(Message):
    """
    Conditional test is a non-empty tuple literal, which are always True.
    """
    message = '\'if tuple literal\' is always true, perhaps remove accidental comma?'


class AssertTuple(Message):
    """
    Assertion test is a non-empty tuple literal, which are always True.
    """
    message = 'assertion is always true, perhaps remove parentheses?'
class ForwardAnnotationSyntaxError(Message):
    """A string (forward) annotation is not valid Python."""

    message = 'syntax error in forward annotation %r'

    def __init__(self, filename, loc, annotation):
        Message.__init__(self, filename, loc)
        self.message_args = (annotation,)


class CommentAnnotationSyntaxError(Message):
    """A `# type: ...` comment is not a valid type annotation."""

    message = 'syntax error in type comment %r'

    def __init__(self, filename, loc, annotation):
        Message.__init__(self, filename, loc)
        self.message_args = (annotation,)


class RaiseNotImplemented(Message):
    """`raise NotImplemented` used where NotImplementedError was meant."""

    message = "'raise NotImplemented' should be 'raise NotImplementedError'"


class InvalidPrintSyntax(Message):
    """Python 2 style `print >> f` used with the print function."""

    message = 'use of >> is invalid with print function'


class IsLiteral(Message):
    """`is`/`is not` used to compare against a constant literal."""

    message = 'use ==/!= to compare constant literals (str, bytes, int, float, tuple)'


class FStringMissingPlaceholders(Message):
    """An f-string contains no replacement fields."""

    message = 'f-string is missing placeholders'
class StringDotFormatExtraPositionalArguments(Message):
    """str.format() received positional arguments it never uses."""

    message = "'...'.format(...) has unused arguments at position(s): %s"

    def __init__(self, filename, loc, extra_positions):
        Message.__init__(self, filename, loc)
        self.message_args = (extra_positions,)


class StringDotFormatExtraNamedArguments(Message):
    """str.format() received keyword arguments it never uses."""

    message = "'...'.format(...) has unused named argument(s): %s"

    def __init__(self, filename, loc, extra_keywords):
        Message.__init__(self, filename, loc)
        self.message_args = (extra_keywords,)


class StringDotFormatMissingArgument(Message):
    """str.format() placeholders have no matching argument."""

    message = "'...'.format(...) is missing argument(s) for placeholder(s): %s"

    def __init__(self, filename, loc, missing_arguments):
        Message.__init__(self, filename, loc)
        self.message_args = (missing_arguments,)


class StringDotFormatMixingAutomatic(Message):
    """str.format() mixes automatic `{}` and manual `{0}` numbering."""

    message = "'...'.format(...) mixes automatic and manual numbering"


class StringDotFormatInvalidFormat(Message):
    """The format string passed to str.format() cannot be parsed."""

    message = "'...'.format(...) has invalid format string: %s"

    def __init__(self, filename, loc, error):
        Message.__init__(self, filename, loc)
        self.message_args = (error,)


class PercentFormatInvalidFormat(Message):
    """A %-format string cannot be parsed."""

    message = "'...' %% ... has invalid format string: %s"

    def __init__(self, filename, loc, error):
        Message.__init__(self, filename, loc)
        self.message_args = (error,)


class PercentFormatMixedPositionalAndNamed(Message):
    """A %-format string mixes %(name)s and %s placeholders."""

    message = "'...' %% ... has mixed positional and named placeholders"


class PercentFormatUnsupportedFormatCharacter(Message):
    """A %-format string uses an unsupported conversion character."""

    message = "'...' %% ... has unsupported format character %r"

    def __init__(self, filename, loc, c):
        Message.__init__(self, filename, loc)
        self.message_args = (c,)


class PercentFormatPositionalCountMismatch(Message):
    """Placeholder count does not match the substitution count."""

    message = "'...' %% ... has %d placeholder(s) but %d substitution(s)"

    def __init__(self, filename, loc, n_placeholders, n_substitutions):
        Message.__init__(self, filename, loc)
        self.message_args = (n_placeholders, n_substitutions)


class PercentFormatExtraNamedArguments(Message):
    """A %-format mapping contains keys the format string never uses."""

    message = "'...' %% ... has unused named argument(s): %s"

    def __init__(self, filename, loc, extra_keywords):
        Message.__init__(self, filename, loc)
        self.message_args = (extra_keywords,)


class PercentFormatMissingArgument(Message):
    """A %-format string has placeholders with no matching argument."""

    message = "'...' %% ... is missing argument(s) for placeholder(s): %s"

    def __init__(self, filename, loc, missing_arguments):
        Message.__init__(self, filename, loc)
        self.message_args = (missing_arguments,)


class PercentFormatExpectedMapping(Message):
    """Named placeholders were used but a sequence was supplied."""

    message = "'...' %% ... expected mapping but got sequence"


class PercentFormatExpectedSequence(Message):
    """Positional placeholders were used but a mapping was supplied."""

    message = "'...' %% ... expected sequence but got mapping"


class PercentFormatStarRequiresSequence(Message):
    """A `*` width/precision specifier requires a sequence operand."""

    message = "'...' %% ... `*` specifier requires sequence"
| 10,908 | Python | .py | 242 | 39.128099 | 86 | 0.672962 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,306 | __main__.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/__main__.py | from pyflakes.api import main
# python -m pyflakes
# Module entry point: delegates to pyflakes.api.main.  The prog= argument
# presumably sets the program name shown by the CLI — defined in pyflakes.api.
if __name__ == '__main__':
    main(prog='pyflakes')
| 105 | Python | .py | 4 | 24 | 29 | 0.66 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,307 | checker.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/checker.py | """
Main module.
Implement the central Checker class.
Also, it models the Bindings and Scopes.
"""
import __future__
import ast
import bisect
import collections
import contextlib
import doctest
import functools
import os
import re
import string
import sys
import tokenize
from pyflakes import messages
# Interpreter feature flags used throughout to select version-appropriate
# AST and tokenizer handling.
PY2 = sys.version_info < (3, 0)
PY35_PLUS = sys.version_info >= (3, 5)      # Python 3.5 and above
PY36_PLUS = sys.version_info >= (3, 6)      # Python 3.6 and above
PY38_PLUS = sys.version_info >= (3, 8)

try:
    # Accessing this attribute raises AttributeError everywhere but PyPy.
    sys.pypy_version_info
    PYPY = True
except AttributeError:
    PYPY = False

# All names provided by the running interpreter's builtins module.
builtin_vars = dir(__import__('__builtin__' if PY2 else 'builtins'))

# Parser for str.format()-style format strings.
parse_format_string = string.Formatter().parse

# Unified tokenizer entry point: generate_tokens (text readline) on Python 2,
# tokenize (bytes readline) on Python 3.
if PY2:
    tokenize_tokenize = tokenize.generate_tokens
else:
    tokenize_tokenize = tokenize.tokenize
if PY2:
    def getNodeType(node_class):
        """Return the upper-cased node class name, used for handler lookup."""
        # workaround str.upper() which is locale-dependent
        return str(unicode(node_class.__name__).upper())

    def get_raise_argument(node):
        # On Python 2 an ast.Raise node stores the raised exception in `type`.
        return node.type

else:
    def getNodeType(node_class):
        """Return the upper-cased node class name, used for handler lookup."""
        return node_class.__name__.upper()

    def get_raise_argument(node):
        # On Python 3 an ast.Raise node stores the raised exception in `exc`.
        return node.exc

    # Silence `pyflakes` from reporting `undefined name 'unicode'` in Python 3.
    unicode = str
# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally)
if PY2:
    def getAlternatives(n):
        # Return lists of statements forming mutually exclusive branches of
        # an if/try node; implicitly returns None for other node types.
        if isinstance(n, (ast.If, ast.TryFinally)):
            return [n.body]
        if isinstance(n, ast.TryExcept):
            return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
else:
    def getAlternatives(n):
        # Return lists of statements forming mutually exclusive branches of
        # an if/try node; implicitly returns None for other node types.
        if isinstance(n, ast.If):
            return [n.body]
        if isinstance(n, ast.Try):
            return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
# Node-class tuples for isinstance() checks; async variants exist on 3.5+.
if PY35_PLUS:
    FOR_TYPES = (ast.For, ast.AsyncFor)
    LOOP_TYPES = (ast.While, ast.For, ast.AsyncFor)
    FUNCTION_TYPES = (ast.FunctionDef, ast.AsyncFunctionDef)
else:
    FOR_TYPES = (ast.For,)
    LOOP_TYPES = (ast.While, ast.For)
    FUNCTION_TYPES = (ast.FunctionDef,)

# Annotated assignment (PEP 526) only exists on Python 3.6+.
if PY36_PLUS:
    ANNASSIGN_TYPES = (ast.AnnAssign,)
else:
    ANNASSIGN_TYPES = ()
# True for the singleton literals True/False/None/Ellipsis, whose AST
# representation differs across Python versions.
if PY38_PLUS:
    def _is_singleton(node):  # type: (ast.AST) -> bool
        return (
            isinstance(node, ast.Constant) and
            isinstance(node.value, (bool, type(Ellipsis), type(None)))
        )
elif not PY2:
    def _is_singleton(node):  # type: (ast.AST) -> bool
        return isinstance(node, (ast.NameConstant, ast.Ellipsis))
else:
    def _is_singleton(node):  # type: (ast.AST) -> bool
        # On Python 2 these singletons parse as plain Name nodes.
        return (
            isinstance(node, ast.Name) and
            node.id in {'True', 'False', 'Ellipsis', 'None'}
        )
def _is_tuple_constant(node): # type: (ast.AST) -> bool
return (
isinstance(node, ast.Tuple) and
all(_is_constant(elt) for elt in node.elts)
)
# True for any constant literal (including constant tuples).  Before 3.8
# constants are spread over several per-type node classes that ast.Constant
# later replaced.
if PY38_PLUS:
    def _is_constant(node):
        return isinstance(node, ast.Constant) or _is_tuple_constant(node)
else:
    _const_tps = (ast.Str, ast.Num)
    if not PY2:
        _const_tps += (ast.Bytes,)

    def _is_constant(node):
        return (
            isinstance(node, _const_tps) or
            _is_singleton(node) or
            _is_tuple_constant(node)
        )
def _is_const_non_singleton(node):  # type: (ast.AST) -> bool
    """Return True for constants other than True/False/None/Ellipsis."""
    if not _is_constant(node):
        return False
    return not _is_singleton(node)
def _is_name_or_attr(node, name): # type: (ast.Ast, str) -> bool
return (
(isinstance(node, ast.Name) and node.id == name) or
(isinstance(node, ast.Attribute) and node.attr == name)
)
# Regexes for recognizing type comments, copied from typed_ast:
# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L102-L104
TYPE_COMMENT_RE = re.compile(r'^#\s*type:\s*')
# https://github.com/python/typed_ast/blob/1.4.0/ast27/Parser/tokenizer.c#L1408-L1413
ASCII_NON_ALNUM = ''.join([chr(i) for i in range(128) if not chr(i).isalnum()])
TYPE_IGNORE_RE = re.compile(
    TYPE_COMMENT_RE.pattern + r'ignore([{}]|$)'.format(ASCII_NON_ALNUM))
# https://github.com/python/typed_ast/blob/1.4.0/ast27/Grammar/Grammar#L147
TYPE_FUNC_RE = re.compile(r'^(\(.*?\))\s*->\s*(.*)$')

# Regexes for the components of a printf-style '%' placeholder, used by
# parse_percent_format below.
MAPPING_KEY_RE = re.compile(r'\(([^()]*)\)')
CONVERSION_FLAG_RE = re.compile('[#0+ -]*')
WIDTH_RE = re.compile(r'(?:\*|\d*)')
PRECISION_RE = re.compile(r'(?:\.(?:\*|\d*))?')
LENGTH_RE = re.compile('[hlL]?')
# https://docs.python.org/3/library/stdtypes.html#old-string-formatting
VALID_CONVERSIONS = frozenset('diouxXeEfFgGcrsa%')
def _must_match(regex, string, pos):
# type: (Pattern[str], str, int) -> Match[str]
match = regex.match(string, pos)
assert match is not None
return match
def parse_percent_format(s):  # type: (str) -> Tuple[PercentFormat, ...]
    """Parses the string component of a `'...' % ...` format call

    Copied from https://github.com/asottile/pyupgrade at v1.20.1

    Returns a tuple of (literal_text, placeholder) pairs.  `placeholder` is
    None for the trailing literal, otherwise a 5-tuple of
    (mapping key, conversion flags, width, precision, conversion char).
    Raises ValueError on a truncated placeholder.
    """
    def _parse_inner():
        # type: () -> Generator[PercentFormat, None, None]
        string_start = 0
        string_end = 0
        in_fmt = False          # True while scanning a '%...' placeholder
        i = 0
        while i < len(s):
            if not in_fmt:
                try:
                    i = s.index('%', i)
                except ValueError:  # no more % fields!
                    yield s[string_start:], None
                    return
                else:
                    string_end = i
                    i += 1
                    in_fmt = True
            else:
                # Placeholder grammar: %(key)?flags?width?.precision?[hlL]?conv
                key_match = MAPPING_KEY_RE.match(s, i)
                if key_match:
                    key = key_match.group(1)  # type: Optional[str]
                    i = key_match.end()
                else:
                    key = None
                conversion_flag_match = _must_match(CONVERSION_FLAG_RE, s, i)
                conversion_flag = conversion_flag_match.group() or None
                i = conversion_flag_match.end()
                width_match = _must_match(WIDTH_RE, s, i)
                width = width_match.group() or None
                i = width_match.end()
                precision_match = _must_match(PRECISION_RE, s, i)
                precision = precision_match.group() or None
                i = precision_match.end()
                # length modifier is ignored
                i = _must_match(LENGTH_RE, s, i).end()
                try:
                    conversion = s[i]
                except IndexError:
                    raise ValueError('end-of-string while parsing format')
                i += 1
                fmt = (key, conversion_flag, width, precision, conversion)
                yield s[string_start:string_end], fmt
                in_fmt = False
                string_start = i
        if in_fmt:
            raise ValueError('end-of-string while parsing format')
    return tuple(_parse_inner())
class _FieldsOrder(dict):
"""Fix order of AST node fields."""
def _get_fields(self, node_class):
# handle iter before target, and generators before element
fields = node_class._fields
if 'iter' in fields:
key_first = 'iter'.find
elif 'generators' in fields:
key_first = 'generators'.find
else:
key_first = 'value'.find
return tuple(sorted(fields, key=key_first, reverse=True))
def __missing__(self, node_class):
self[node_class] = fields = self._get_fields(node_class)
return fields
def counter(items):
    """Return a dict mapping each item to its number of occurrences.

    Minimal stand-in for collections.Counter, kept because Python 2.6
    does not provide Counter in the collections module.
    """
    counts = {}
    for element in items:
        counts[element] = counts.get(element, 0) + 1
    return counts
def iter_child_nodes(node, omit=None, _fields_order=_FieldsOrder()):
    """
    Yield every direct child node of *node*: each field that is itself a
    node, plus each node found inside list-valued fields.

    :param node: AST node to be iterated upon
    :param omit: String or tuple of strings denoting the
                 attributes of the node to be omitted from
                 further parsing
    :param _fields_order: Order of AST node fields
    """
    for field_name in _fields_order[node.__class__]:
        if omit and field_name in omit:
            continue
        value = getattr(node, field_name, None)
        if isinstance(value, ast.AST):
            yield value
        elif isinstance(value, list):
            for element in value:
                if isinstance(element, ast.AST):
                    yield element
def convert_to_value(item):
    """Convert an AST expression node into the Python value it denotes.

    Strings, bytes, numbers and tuples convert directly; names become
    VariableKey instances (except True/False/None, which map to the real
    singletons); anything unrecognized maps to an UnhandledKeyType marker.
    """
    if isinstance(item, ast.Str):
        return item.s
    if hasattr(ast, 'Bytes') and isinstance(item, ast.Bytes):
        return item.s
    if isinstance(item, ast.Tuple):
        return tuple(convert_to_value(elt) for elt in item.elts)
    if isinstance(item, ast.Num):
        return item.n
    if isinstance(item, ast.Name):
        result = VariableKey(item=item)
        return {
            'True': True,
            'False': False,
            'None': None,
        }.get(result.name, result)
    if (not PY2) and isinstance(item, ast.NameConstant):
        # None, True, False are nameconstants in python3, but names in 2
        return item.value
    return UnhandledKeyType()
def is_notimplemented_name_node(node):
    """Return True if *node* is a Name referring to ``NotImplemented``."""
    if not isinstance(node, ast.Name):
        return False
    return getNodeName(node) == 'NotImplemented'
class Binding(object):
    """
    The association of a value with a name.

    The checker uses this to keep track of which names have been bound and
    which names have not. See L{Assignment} for a special type of binding
    that is checked with stricter rules.

    @ivar used: pair of (L{Scope}, node) indicating the scope and
                the node that this binding was last used.
    """

    def __init__(self, name, source):
        self.name = name
        self.source = source
        self.used = False

    def __str__(self):
        return self.name

    def __repr__(self):
        return '<%s object %r from line %r at 0x%x>' % (
            self.__class__.__name__,
            self.name,
            self.source.lineno,
            id(self),
        )

    def redefines(self, other):
        """Return True if this binding shadows the Definition *other*."""
        if not isinstance(other, Definition):
            return False
        return self.name == other.name
class Definition(Binding):
    """
    A binding that defines a function or a class.
    """
    # Base class for FunctionDefinition, ClassDefinition, Importation, etc.
class Builtin(Definition):
    """A definition created for all Python builtins."""

    def __init__(self, name):
        # Builtins have no defining source node, hence source=None.
        super(Builtin, self).__init__(name, None)

    def __repr__(self):
        # Unlike Binding.__repr__, omits the line number (source is None).
        return '<%s object %r at 0x%x>' % (self.__class__.__name__,
                                           self.name,
                                           id(self))
class UnhandledKeyType(object):
    """
    A dictionary key of a type that we cannot or do not check for duplicates.
    """
    # Uses default identity equality, so distinct instances never collide.
class VariableKey(object):
    """Hashable dictionary key wrapping a variable's identifier.

    Constructed from an AST node carrying an ``id`` attribute; only the
    identifier string (``self.name``) is retained.
    """
    def __init__(self, item):
        self.name = item.id

    def __eq__(self, compare):
        if compare.__class__ != self.__class__:
            return False
        return compare.name == self.name

    def __hash__(self):
        return hash(self.name)
class Importation(Definition):
    """
    A binding created by an import statement.

    @ivar fullName: The complete name given to the import statement,
        possibly including multiple dotted components.
    @type fullName: C{str}
    """

    def __init__(self, name, source, full_name=None):
        self.fullName = full_name or name
        self.redefined = []
        super(Importation, self).__init__(name, source)

    def redefines(self, other):
        if isinstance(other, SubmoduleImportation):
            # See note in SubmoduleImportation about RedefinedWhileUnused
            return self.fullName == other.fullName
        if not isinstance(other, Definition):
            return False
        return self.name == other.name

    def _has_alias(self):
        """Return whether importation needs an as clause."""
        return self.fullName.split('.')[-1] != self.name

    @property
    def source_statement(self):
        """Generate a source statement equivalent to the import."""
        if not self._has_alias():
            return 'import %s' % self.fullName
        return 'import %s as %s' % (self.fullName, self.name)

    def __str__(self):
        """Return import full name with alias."""
        if not self._has_alias():
            return self.fullName
        return self.fullName + ' as ' + self.name
class SubmoduleImportation(Importation):
    """
    A binding created by a submodule import statement.

    A submodule import is a special case where the root module is implicitly
    imported, without an 'as' clause, and the submodule is also imported.
    Python does not restrict which attributes of the root module may be used.

    This class is only used when the submodule import is without an 'as' clause.

    pyflakes handles this case by registering the root module name in the scope,
    allowing any attribute of the root module to be accessed.

    RedefinedWhileUnused is suppressed in `redefines` unless the submodule
    name is also the same, to avoid false positives.
    """

    def __init__(self, name, source):
        # A dot should only appear in the name when it is a submodule import
        assert '.' in name and (not source or isinstance(source, ast.Import))
        # Bind the root package name; fullName keeps the dotted path.
        package_name = name.split('.')[0]
        super(SubmoduleImportation, self).__init__(package_name, source)
        self.fullName = name

    def redefines(self, other):
        if isinstance(other, Importation):
            return self.fullName == other.fullName
        return super(SubmoduleImportation, self).redefines(other)

    def __str__(self):
        return self.fullName

    @property
    def source_statement(self):
        return 'import ' + self.fullName
class ImportationFrom(Importation):
    """A binding created by a `from ... import ...` statement."""

    def __init__(self, name, source, module, real_name=None):
        self.module = module
        self.real_name = real_name or name
        # A module spelled with a trailing dot needs no extra separator.
        separator = '' if module.endswith('.') else '.'
        full_name = module + separator + self.real_name
        super(ImportationFrom, self).__init__(name, source, full_name)

    def __str__(self):
        """Return import full name with alias."""
        if self.real_name == self.name:
            return self.fullName
        return self.fullName + ' as ' + self.name

    @property
    def source_statement(self):
        if self.real_name == self.name:
            return 'from %s import %s' % (self.module, self.name)
        return 'from %s import %s as %s' % (self.module,
                                            self.real_name,
                                            self.name)
class StarImportation(Importation):
    """A binding created by a 'from x import *' statement."""

    def __init__(self, name, source):
        super(StarImportation, self).__init__('*', source)
        # Each star importation needs a unique name, and
        # may not be the module name otherwise it will be deemed imported
        self.name = name + '.*'
        self.fullName = name

    @property
    def source_statement(self):
        return 'from ' + self.fullName + ' import *'

    def __str__(self):
        if self.fullName.endswith('.'):
            # A module name ending in '.' would render as the ambiguous
            # '..*'; show the full source statement instead.
            return self.source_statement
        return self.name
class FutureImportation(ImportationFrom):
    """
    A binding created by a from `__future__` import statement.

    `__future__` imports are implicitly used.
    """

    def __init__(self, name, source, scope):
        super(FutureImportation, self).__init__(name, source, '__future__')
        # Marked used immediately so it is never reported as an unused import.
        self.used = (scope, source)
class Argument(Binding):
    """
    Represents binding a name as an argument (a function parameter).
    """
class Assignment(Binding):
    """
    Represents binding a name with an explicit assignment.

    The checker will raise warnings for any Assignment that isn't used. Also,
    the checker does not consider assignments in tuple/list unpacking to be
    Assignments, rather it treats them as simple Bindings.
    """
class Annotation(Binding):
    """
    Represents binding a name to a type without an associated value.

    As long as this name is not assigned a value in another binding, it is
    considered undefined for most purposes. One notable exception is using
    the name as a type annotation.
    """

    def redefines(self, other):
        """An Annotation doesn't define any name, so it cannot redefine one."""
        return False
class FunctionDefinition(Definition):
    """A Definition marking a name bound by a function definition."""
    pass


class ClassDefinition(Definition):
    """A Definition marking a name bound by a class definition."""
    pass
class ExportBinding(Binding):
    """
    A binding created by an C{__all__} assignment. If the names in the list
    can be determined statically, they will be treated as names for export and
    additional checking applied to them.

    The only recognized C{__all__} assignment via list/tuple concatenation is in the
    following format:

        __all__ = ['a'] + ['b'] + ['c']

    Names which are imported and not otherwise used but appear in the value of
    C{__all__} will not have an unused import warning reported for them.
    """

    def __init__(self, name, source, scope):
        # For augmented assignment (`__all__ += [...]`), seed with the names
        # already recorded on the existing __all__ binding.
        if '__all__' in scope and isinstance(source, ast.AugAssign):
            self.names = list(scope['__all__'].names)
        else:
            self.names = []

        def _add_to_names(container):
            # Only statically-known string elements are collected.
            for node in container.elts:
                if isinstance(node, ast.Str):
                    self.names.append(node.s)

        if isinstance(source.value, (ast.List, ast.Tuple)):
            _add_to_names(source.value)
        # If concatenating lists or tuples
        elif isinstance(source.value, ast.BinOp):
            # Walk the left-leaning BinOp chain: each right operand is a leaf
            # list/tuple; the left operand is another BinOp or the final leaf.
            currentValue = source.value
            while isinstance(currentValue.right, (ast.List, ast.Tuple)):
                left = currentValue.left
                right = currentValue.right
                _add_to_names(right)
                # If more lists are being added
                if isinstance(left, ast.BinOp):
                    currentValue = left
                # If just two lists are being added
                elif isinstance(left, (ast.List, ast.Tuple)):
                    _add_to_names(left)
                    # All lists accounted for - done
                    break
                # If not list concatenation
                else:
                    break
        super(ExportBinding, self).__init__(name, source)
class Scope(dict):
    """A lexical scope: maps bound names to their Binding objects."""

    importStarred = False       # set to True when import * is found

    def __repr__(self):
        scope_cls = self.__class__.__name__
        return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self))
class ClassScope(Scope):
    """Scope for the body of a class definition."""
    pass
class FunctionScope(Scope):
    """
    I represent a name scope for a function.

    @ivar globals: Names declared 'global' in this function.
    """
    usesLocals = False
    # Names that are always considered used when bound in a function.
    alwaysUsed = {'__tracebackhide__', '__traceback_info__',
                  '__traceback_supplement__'}

    def __init__(self):
        super(FunctionScope, self).__init__()
        # Simplify: manage the special locals as globals
        self.globals = self.alwaysUsed.copy()
        self.returnValue = None     # First non-empty return
        self.isGenerator = False    # Detect a generator

    def unusedAssignments(self):
        """
        Return a generator for the assignments which have not been used.
        """
        for name, binding in self.items():
            if (not binding.used and
                    name != '_' and  # see issue #202
                    name not in self.globals and
                    not self.usesLocals and
                    isinstance(binding, Assignment)):
                yield name, binding
class GeneratorScope(Scope):
    """Scope for comprehensions and generator expressions."""
    pass
class ModuleScope(Scope):
    """Scope for a module."""
    # Whether a `from __future__ import ...` is still legal at this point.
    _futures_allowed = True
    # True once `from __future__ import annotations` is in effect.
    _annotations_future_enabled = False
class DoctestScope(ModuleScope):
    """Scope for a doctest."""
class DummyNode(object):
    """Used in place of an `ast.AST` to set error message positions"""
    def __init__(self, lineno, col_offset):
        # The same position attributes real AST nodes carry.
        self.lineno = lineno
        self.col_offset = col_offset
class DetectClassScopedMagic:
    # dir() here captures the names present in this class namespace at this
    # point (the implicitly-defined ones, since nothing is defined above).
    # NOTE: deliberately no docstring — one would add '__doc__' to the list.
    names = dir()
# Globally defined names which are not attributes of the builtins module, or
# are only present on some platforms.
_MAGIC_GLOBALS = ['__file__', '__builtins__', 'WindowsError']

# module scope annotation will store in `__annotations__`, see also PEP 526.
if PY36_PLUS:
    _MAGIC_GLOBALS.append('__annotations__')
def getNodeName(node):
    """Return the name carried by *node*, or None.

    Checks, in order: ``id`` (the many nodes with an id), ``name``
    (e.g. an ExceptHandler node), and ``rest`` (a MatchMapping node).
    """
    for attr in ('id', 'name', 'rest'):
        if hasattr(node, attr):
            return getattr(node, attr)
    return None
# Modules whose members count as "typing" for annotation-context detection.
TYPING_MODULES = frozenset(('typing', 'typing_extensions'))


def _is_typing_helper(node, is_name_match_fn, scope_stack):
    """
    Internal helper to determine whether or not something is a member of a
    typing module. This is used as part of working out whether we are within a
    type annotation context.

    Note: you probably don't want to use this function directly. Instead see the
    utils below which wrap it (`_is_typing` and `_is_any_typing_member`).
    """
    def _bare_name_is_attr(name):
        # True if `name` was imported via `from typing import ...` (or a
        # typing_extensions equivalent) and its real name satisfies the match.
        for scope in reversed(scope_stack):
            if name in scope:
                return (
                    isinstance(scope[name], ImportationFrom) and
                    scope[name].module in TYPING_MODULES and
                    is_name_match_fn(scope[name].real_name)
                )
        return False

    def _module_scope_is_typing(name):
        # True if `name` resolves to an `import typing`-style binding.
        for scope in reversed(scope_stack):
            if name in scope:
                return (
                    isinstance(scope[name], Importation) and
                    scope[name].fullName in TYPING_MODULES
                )
        return False

    return (
        (
            isinstance(node, ast.Name) and
            _bare_name_is_attr(node.id)
        ) or (
            isinstance(node, ast.Attribute) and
            isinstance(node.value, ast.Name) and
            _module_scope_is_typing(node.value.id) and
            is_name_match_fn(node.attr)
        )
    )
def _is_typing(node, typing_attr, scope_stack):
    """
    Determine whether `node` represents the member of a typing module specified
    by `typing_attr`.

    This is used as part of working out whether we are within a type annotation
    context.
    """
    def _matches(attr):
        return attr == typing_attr
    return _is_typing_helper(node, _matches, scope_stack)
def _is_any_typing_member(node, scope_stack):
    """
    Determine whether `node` represents any member of a typing module.

    This is used as part of working out whether we are within a type annotation
    context.
    """
    def _always(_attr):
        return True
    return _is_typing_helper(node, _always, scope_stack)
def is_typing_overload(value, scope_stack):
    """Return True if *value* binds a function decorated with typing.overload."""
    if not isinstance(value.source, FUNCTION_TYPES):
        return False
    for decorator in value.source.decorator_list:
        if _is_typing(decorator, 'overload', scope_stack):
            return True
    return False
class AnnotationState:
    """Tri-state flag describing the checker's current annotation context."""
    NONE = 0        # not inside an annotation
    STRING = 1      # inside a string (forward-reference) annotation
    BARE = 2        # inside an ordinary annotation expression
def in_annotation(func):
    """Decorator: run a handler method with the checker in annotation context.

    Relies on `self._enter_annotation()` — presumably defined on Checker
    (not visible in this chunk); confirm against the full class.
    """
    @functools.wraps(func)
    def in_annotation_func(self, *args, **kwargs):
        with self._enter_annotation():
            return func(self, *args, **kwargs)
    return in_annotation_func
def in_string_annotation(func):
    """Decorator: like `in_annotation`, but enters the STRING annotation state.

    Relies on `self._enter_annotation()` — presumably defined on Checker
    (not visible in this chunk); confirm against the full class.
    """
    @functools.wraps(func)
    def in_annotation_func(self, *args, **kwargs):
        with self._enter_annotation(AnnotationState.STRING):
            return func(self, *args, **kwargs)
    return in_annotation_func
def make_tokens(code):
    """Tokenize *code* (text or bytes) and return the token stream as a tuple."""
    # PY3: tokenize.tokenize requires readline of bytes
    if not isinstance(code, bytes):
        code = code.encode('UTF-8')
    line_iter = iter(code.splitlines(True))

    def _readline():
        # The b'' default prevents an error in pypy3 at end of input.
        return next(line_iter, b'')

    return tuple(tokenize_tokenize(_readline))
class _TypeableVisitor(ast.NodeVisitor):
"""Collect the line number and nodes which are deemed typeable by
PEP 484
https://www.python.org/dev/peps/pep-0484/#type-comments
"""
def __init__(self):
self.typeable_lines = [] # type: List[int]
self.typeable_nodes = {} # type: Dict[int, ast.AST]
def _typeable(self, node):
# if there is more than one typeable thing on a line last one wins
self.typeable_lines.append(node.lineno)
self.typeable_nodes[node.lineno] = node
self.generic_visit(node)
visit_Assign = visit_For = visit_FunctionDef = visit_With = _typeable
visit_AsyncFor = visit_AsyncFunctionDef = visit_AsyncWith = _typeable
def _collect_type_comments(tree, tokens):
    """Associate `# type:` comments with the typeable node they annotate.

    Returns a defaultdict(list) mapping AST node -> [(start_pos, text), ...],
    skipping `# type: ignore` comments.
    """
    visitor = _TypeableVisitor()
    visitor.visit(tree)

    type_comments = collections.defaultdict(list)
    for tp, text, start, _, _ in tokens:
        if (
                tp != tokenize.COMMENT or  # skip non comments
                not TYPE_COMMENT_RE.match(text) or  # skip non-type comments
                TYPE_IGNORE_RE.match(text)  # skip ignores
        ):
            continue

        # search for the typeable node at or before the line number of the
        # type comment.
        # if the bisection insertion point is before any nodes this is an
        # invalid type comment which is ignored.
        lineno, _ = start
        idx = bisect.bisect_right(visitor.typeable_lines, lineno)
        if idx == 0:
            continue
        node = visitor.typeable_nodes[visitor.typeable_lines[idx - 1]]
        type_comments[node].append((start, text))

    return type_comments
class Checker(object):
"""
I check the cleanliness and sanity of Python code.
@ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements
of the list are two-tuples. The first element is the callable passed
to L{deferFunction}. The second element is a copy of the scope stack
at the time L{deferFunction} was called.
@ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for
callables which are deferred assignment checks.
"""
_ast_node_scope = {
ast.Module: ModuleScope,
ast.ClassDef: ClassScope,
ast.FunctionDef: FunctionScope,
ast.Lambda: FunctionScope,
ast.ListComp: GeneratorScope,
ast.SetComp: GeneratorScope,
ast.GeneratorExp: GeneratorScope,
ast.DictComp: GeneratorScope,
}
if PY35_PLUS:
_ast_node_scope[ast.AsyncFunctionDef] = FunctionScope
nodeDepth = 0
offset = None
_in_annotation = AnnotationState.NONE
_in_deferred = False
builtIns = set(builtin_vars).union(_MAGIC_GLOBALS)
_customBuiltIns = os.environ.get('PYFLAKES_BUILTINS')
if _customBuiltIns:
builtIns.update(_customBuiltIns.split(','))
del _customBuiltIns
# TODO: file_tokens= is required to perform checks on type comments,
# eventually make this a required positional argument. For now it
# is defaulted to `()` for api compatibility.
def __init__(self, tree, filename='(none)', builtins=None,
withDoctest='PYFLAKES_DOCTEST' in os.environ, file_tokens=()):
self._nodeHandlers = {}
self._deferredFunctions = []
self._deferredAssignments = []
self.deadScopes = []
self.messages = []
self.filename = filename
if builtins:
self.builtIns = self.builtIns.union(builtins)
self.withDoctest = withDoctest
try:
self.scopeStack = [Checker._ast_node_scope[type(tree)]()]
except KeyError:
raise RuntimeError('No scope implemented for the node %r' % tree)
self.exceptHandlers = [()]
self.root = tree
self._type_comments = _collect_type_comments(tree, file_tokens)
for builtin in self.builtIns:
self.addBinding(None, Builtin(builtin))
self.handleChildren(tree)
self._in_deferred = True
self.runDeferred(self._deferredFunctions)
# Set _deferredFunctions to None so that deferFunction will fail
# noisily if called after we've run through the deferred functions.
self._deferredFunctions = None
self.runDeferred(self._deferredAssignments)
# Set _deferredAssignments to None so that deferAssignment will fail
# noisily if called after we've run through the deferred assignments.
self._deferredAssignments = None
del self.scopeStack[1:]
self.popScope()
self.checkDeadScopes()
def deferFunction(self, callable):
"""
Schedule a function handler to be called just before completion.
This is used for handling function bodies, which must be deferred
because code later in the file might modify the global scope. When
`callable` is called, the scope at the time this is called will be
restored, however it will contain any new bindings added to it.
"""
self._deferredFunctions.append((callable, self.scopeStack[:], self.offset))
def deferAssignment(self, callable):
"""
Schedule an assignment handler to be called just after deferred
function handlers.
"""
self._deferredAssignments.append((callable, self.scopeStack[:], self.offset))
def runDeferred(self, deferred):
"""
Run the callables in C{deferred} using their associated scope stack.
"""
for handler, scope, offset in deferred:
self.scopeStack = scope
self.offset = offset
handler()
def _in_doctest(self):
return (len(self.scopeStack) >= 2 and
isinstance(self.scopeStack[1], DoctestScope))
@property
def futuresAllowed(self):
if not all(isinstance(scope, ModuleScope)
for scope in self.scopeStack):
return False
return self.scope._futures_allowed
@futuresAllowed.setter
def futuresAllowed(self, value):
assert value is False
if isinstance(self.scope, ModuleScope):
self.scope._futures_allowed = False
@property
def annotationsFutureEnabled(self):
scope = self.scopeStack[0]
if not isinstance(scope, ModuleScope):
return False
return scope._annotations_future_enabled
@annotationsFutureEnabled.setter
def annotationsFutureEnabled(self, value):
assert value is True
assert isinstance(self.scope, ModuleScope)
self.scope._annotations_future_enabled = True
@property
def scope(self):
return self.scopeStack[-1]
def popScope(self):
self.deadScopes.append(self.scopeStack.pop())
def checkDeadScopes(self):
"""
Look at scopes which have been fully examined and report names in them
which were imported but unused.
"""
for scope in self.deadScopes:
# imports in classes are public members
if isinstance(scope, ClassScope):
continue
all_binding = scope.get('__all__')
if all_binding and not isinstance(all_binding, ExportBinding):
all_binding = None
if all_binding:
all_names = set(all_binding.names)
undefined = [
name for name in all_binding.names
if name not in scope
]
else:
all_names = undefined = []
if undefined:
if not scope.importStarred and \
os.path.basename(self.filename) != '__init__.py':
# Look for possible mistakes in the export list
for name in undefined:
self.report(messages.UndefinedExport,
scope['__all__'].source, name)
# mark all import '*' as used by the undefined in __all__
if scope.importStarred:
from_list = []
for binding in scope.values():
if isinstance(binding, StarImportation):
binding.used = all_binding
from_list.append(binding.fullName)
# report * usage, with a list of possible sources
from_list = ', '.join(sorted(from_list))
for name in undefined:
self.report(messages.ImportStarUsage,
scope['__all__'].source, name, from_list)
# Look for imported names that aren't used.
for value in scope.values():
if isinstance(value, Importation):
used = value.used or value.name in all_names
if not used:
messg = messages.UnusedImport
self.report(messg, value.source, str(value))
for node in value.redefined:
if isinstance(self.getParent(node), FOR_TYPES):
messg = messages.ImportShadowedByLoopVar
elif used:
continue
else:
messg = messages.RedefinedWhileUnused
self.report(messg, node, value.name, value.source)
def pushScope(self, scopeClass=FunctionScope):
self.scopeStack.append(scopeClass())
def report(self, messageClass, *args, **kwargs):
self.messages.append(messageClass(self.filename, *args, **kwargs))
def getParent(self, node):
# Lookup the first parent which is not Tuple, List or Starred
while True:
node = node._pyflakes_parent
if not hasattr(node, 'elts') and not hasattr(node, 'ctx'):
return node
def getCommonAncestor(self, lnode, rnode, stop):
if (
stop in (lnode, rnode) or
not (
hasattr(lnode, '_pyflakes_parent') and
hasattr(rnode, '_pyflakes_parent')
)
):
return None
if lnode is rnode:
return lnode
if (lnode._pyflakes_depth > rnode._pyflakes_depth):
return self.getCommonAncestor(lnode._pyflakes_parent, rnode, stop)
if (lnode._pyflakes_depth < rnode._pyflakes_depth):
return self.getCommonAncestor(lnode, rnode._pyflakes_parent, stop)
return self.getCommonAncestor(
lnode._pyflakes_parent,
rnode._pyflakes_parent,
stop,
)
def descendantOf(self, node, ancestors, stop):
for a in ancestors:
if self.getCommonAncestor(node, a, stop):
return True
return False
def _getAncestor(self, node, ancestor_type):
parent = node
while True:
if parent is self.root:
return None
parent = self.getParent(parent)
if isinstance(parent, ancestor_type):
return parent
def getScopeNode(self, node):
return self._getAncestor(node, tuple(Checker._ast_node_scope.keys()))
def differentForks(self, lnode, rnode):
"""True, if lnode and rnode are located on different forks of IF/TRY"""
ancestor = self.getCommonAncestor(lnode, rnode, self.root)
parts = getAlternatives(ancestor)
if parts:
for items in parts:
if self.descendantOf(lnode, items, ancestor) ^ \
self.descendantOf(rnode, items, ancestor):
return True
return False
def addBinding(self, node, value):
"""
Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the new value, a Binding instance
"""
# assert value.source in (node, node._pyflakes_parent):
for scope in self.scopeStack[::-1]:
if value.name in scope:
break
existing = scope.get(value.name)
if (existing and not isinstance(existing, Builtin) and
not self.differentForks(node, existing.source)):
parent_stmt = self.getParent(value.source)
if isinstance(existing, Importation) and isinstance(parent_stmt, FOR_TYPES):
self.report(messages.ImportShadowedByLoopVar,
node, value.name, existing.source)
elif scope is self.scope:
if (isinstance(parent_stmt, ast.comprehension) and
not isinstance(self.getParent(existing.source),
(FOR_TYPES, ast.comprehension))):
self.report(messages.RedefinedInListComp,
node, value.name, existing.source)
elif not existing.used and value.redefines(existing):
if value.name != '_' or isinstance(existing, Importation):
if not is_typing_overload(existing, self.scopeStack):
self.report(messages.RedefinedWhileUnused,
node, value.name, existing.source)
elif isinstance(existing, Importation) and value.redefines(existing):
existing.redefined.append(node)
if value.name in self.scope:
# then assume the rebound name is used as a global or within a loop
value.used = self.scope[value.name].used
# don't treat annotations as assignments if there is an existing value
# in scope
if value.name not in self.scope or not isinstance(value, Annotation):
self.scope[value.name] = value
def _unknown_handler(self, node):
# this environment variable configures whether to error on unknown
# ast types.
#
# this is silent by default but the error is enabled for the pyflakes
# testsuite.
#
# this allows new syntax to be added to python without *requiring*
# changes from the pyflakes side. but will still produce an error
# in the pyflakes testsuite (so more specific handling can be added if
# needed).
if os.environ.get('PYFLAKES_ERROR_UNKNOWN'):
raise NotImplementedError('Unexpected type: {}'.format(type(node)))
else:
self.handleChildren(node)
    def getNodeHandler(self, node_class):
        """Return the visitor method for ``node_class``, caching the lookup.

        The handler attribute name comes from getNodeType (e.g. ``CALL`` for
        ``ast.Call``); missing handlers fall back to ``_unknown_handler``.
        """
        try:
            # fast path: handler already resolved for this node class
            return self._nodeHandlers[node_class]
        except KeyError:
            nodeType = getNodeType(node_class)
        self._nodeHandlers[node_class] = handler = getattr(
            self, nodeType, self._unknown_handler,
        )
        return handler
    def handleNodeLoad(self, node):
        """Resolve a name being read (Load context).

        Walks the scope stack from innermost to outermost, marks the first
        matching binding as used, and reports UndefinedName when nothing
        matches (unless a ``*`` import or a NameError handler could supply it).
        """
        name = getNodeName(node)
        if not name:
            return
        in_generators = None
        importStarred = None
        # try enclosing function scopes and global scope
        for scope in self.scopeStack[-1::-1]:
            if isinstance(scope, ClassScope):
                if not PY2 and name == '__class__':
                    return
                elif in_generators is False:
                    # only generators used in a class scope can access the
                    # names of the class. this is skipped during the first
                    # iteration
                    continue
            binding = scope.get(name, None)
            # a bare annotation (``x: int``) is not a value; skip it unless
            # annotations are postponed (string / __future__ annotations)
            if isinstance(binding, Annotation) and not self._in_postponed_annotation:
                continue
            if name == 'print' and isinstance(binding, Builtin):
                # ``print >> f`` on py3 means the builtin was shifted — the
                # classic forgotten-py2-print-syntax mistake
                parent = self.getParent(node)
                if (isinstance(parent, ast.BinOp) and
                        isinstance(parent.op, ast.RShift)):
                    self.report(messages.InvalidPrintSyntax, node)
            try:
                scope[name].used = (self.scope, node)
                # if the name of SubImportation is same as
                # alias of other Importation and the alias
                # is used, SubImportation also should be marked as used.
                n = scope[name]
                if isinstance(n, Importation) and n._has_alias():
                    try:
                        scope[n.fullName].used = (self.scope, node)
                    except KeyError:
                        pass
            except KeyError:
                pass
            else:
                # binding found and marked used; resolution complete
                return
            importStarred = importStarred or scope.importStarred
            if in_generators is not False:
                in_generators = isinstance(scope, GeneratorScope)
        if importStarred:
            from_list = []
            for scope in self.scopeStack[-1::-1]:
                for binding in scope.values():
                    if isinstance(binding, StarImportation):
                        # mark '*' imports as used for each scope
                        binding.used = (self.scope, node)
                        from_list.append(binding.fullName)
            # report * usage, with a list of possible sources
            from_list = ', '.join(sorted(from_list))
            self.report(messages.ImportStarUsage, node, name, from_list)
            return
        if name == '__path__' and os.path.basename(self.filename) == '__init__.py':
            # the special name __path__ is valid only in packages
            return
        if name in DetectClassScopedMagic.names and isinstance(self.scope, ClassScope):
            return
        # protected with a NameError handler?
        if 'NameError' not in self.exceptHandlers[-1]:
            self.report(messages.UndefinedName, node, name)
    def handleNodeStore(self, node):
        """Record a name being bound (Store context).

        Chooses the Binding subclass based on the statement doing the
        binding, and warns about locals that shadow a name already read
        from an enclosing scope (UndefinedLocal).
        """
        name = getNodeName(node)
        if not name:
            return
        # if the name hasn't already been defined in the current scope
        if isinstance(self.scope, FunctionScope) and name not in self.scope:
            # for each function or module scope above us
            for scope in self.scopeStack[:-1]:
                if not isinstance(scope, (FunctionScope, ModuleScope)):
                    continue
                # if the name was defined in that scope, and the name has
                # been accessed already in the current scope, and hasn't
                # been declared global
                used = name in scope and scope[name].used
                if used and used[0] is self.scope and name not in self.scope.globals:
                    # then it's probably a mistake
                    self.report(messages.UndefinedLocal,
                                scope[name].used[1], name, scope[name].source)
                    break
        parent_stmt = self.getParent(node)
        if isinstance(parent_stmt, ANNASSIGN_TYPES) and parent_stmt.value is None:
            # annotation without a value (``x: int``) — not an assignment
            binding = Annotation(name, node)
        elif isinstance(parent_stmt, (FOR_TYPES, ast.comprehension)) or (
                parent_stmt != node._pyflakes_parent and
                not self.isLiteralTupleUnpacking(parent_stmt)):
            binding = Binding(name, node)
        elif name == '__all__' and isinstance(self.scope, ModuleScope):
            binding = ExportBinding(name, node._pyflakes_parent, self.scope)
        elif PY2 and isinstance(getattr(node, 'ctx', None), ast.Param):
            binding = Argument(name, self.getScopeNode(node))
        else:
            binding = Assignment(name, node)
        self.addBinding(node, binding)
    def handleNodeDelete(self, node):
        """Handle a name being deleted (``del name``).

        Unbinds the name in the current scope (or removes it from the
        scope's declared globals); reports UndefinedName if it was never
        bound. Deletions inside conditional branches are ignored because
        their execution cannot be predicted statically.
        """
        def on_conditional_branch():
            """
            Return `True` if node is part of a conditional body.
            """
            current = getattr(node, '_pyflakes_parent', None)
            while current:
                if isinstance(current, (ast.If, ast.While, ast.IfExp)):
                    return True
                current = getattr(current, '_pyflakes_parent', None)
            return False
        name = getNodeName(node)
        if not name:
            return
        if on_conditional_branch():
            # We cannot predict if this conditional branch is going to
            # be executed.
            return
        if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
            self.scope.globals.remove(name)
        else:
            try:
                del self.scope[name]
            except KeyError:
                self.report(messages.UndefinedName, node, name)
@contextlib.contextmanager
def _enter_annotation(self, ann_type=AnnotationState.BARE):
orig, self._in_annotation = self._in_annotation, ann_type
try:
yield
finally:
self._in_annotation = orig
    @property
    def _in_postponed_annotation(self):
        """True when the current annotation is evaluated lazily: either a
        string annotation or ``from __future__ import annotations``."""
        return (
            self._in_annotation == AnnotationState.STRING or
            self.annotationsFutureEnabled
        )
    def _handle_type_comments(self, node):
        """Defer checking of any ``# type: ...`` comments attached to *node*.

        Function-style comments (``# type: (int) -> str``) are split into
        argument and return parts; each part is later parsed and visited as
        a string annotation.
        """
        for (lineno, col_offset), comment in self._type_comments.get(node, ()):
            # strip the leading "# type:" prefix
            comment = comment.split(':', 1)[1].strip()
            func_match = TYPE_FUNC_RE.match(comment)
            if func_match:
                parts = (
                    func_match.group(1).replace('*', ''),
                    func_match.group(2).strip(),
                )
            else:
                parts = (comment,)
            for part in parts:
                if PY2:
                    # py2's parser has no `...` literal
                    part = part.replace('...', 'Ellipsis')
                self.deferFunction(functools.partial(
                    self.handleStringAnnotation,
                    part, DummyNode(lineno, col_offset), lineno, col_offset,
                    messages.CommentAnnotationSyntaxError,
                ))
    def handleChildren(self, tree, omit=None):
        """Visit every child of *tree*, skipping the fields named in *omit*;
        also processes any type comments attached to *tree*."""
        self._handle_type_comments(tree)
        for node in iter_child_nodes(tree, omit=omit):
            self.handleNode(node, tree)
def isLiteralTupleUnpacking(self, node):
if isinstance(node, ast.Assign):
for child in node.targets + [node.value]:
if not hasattr(child, 'elts'):
return False
return True
def isDocstring(self, node):
"""
Determine if the given node is a docstring, as long as it is at the
correct place in the node tree.
"""
return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
isinstance(node.value, ast.Str))
    def getDocstring(self, node):
        """Return ``(text, lineno)`` for a docstring node, or ``(None, None)``.

        *lineno* is the 0-based line on which the docstring starts, used to
        offset doctest positions.
        """
        if isinstance(node, ast.Expr):
            node = node.value
        if not isinstance(node, ast.Str):
            return (None, None)
        if PYPY or PY38_PLUS:
            # these report the string's first line directly
            doctest_lineno = node.lineno - 1
        else:
            # Computed incorrectly if the docstring has backslash
            doctest_lineno = node.lineno - node.s.count('\n') - 1
        return (node.s, doctest_lineno)
    def handleNode(self, node, parent):
        """Dispatch *node* to its handler, tracking depth and parent links.

        Also applies the current doctest offset to the node's position and
        closes the ``__future__`` import window once a non-docstring,
        non-import statement is seen.
        """
        if node is None:
            return
        if self.offset and getattr(node, 'lineno', None) is not None:
            # shift positions for code embedded in doctests
            node.lineno += self.offset[0]
            node.col_offset += self.offset[1]
        if self.futuresAllowed and not (isinstance(node, ast.ImportFrom) or
                self.isDocstring(node)):
            self.futuresAllowed = False
        self.nodeDepth += 1
        node._pyflakes_depth = self.nodeDepth
        node._pyflakes_parent = parent
        try:
            handler = self.getNodeHandler(node.__class__)
            handler(node)
        finally:
            self.nodeDepth -= 1
    # bound helper: extract doctest Examples from a docstring
    _getDoctestExamples = doctest.DocTestParser().get_examples
    def handleDoctests(self, node):
        """Parse and check the doctest examples in *node*'s docstring.

        Each example is parsed as its own module and visited inside a
        DoctestScope placed directly on top of the module scope; syntax
        errors in examples are reported with positions adjusted back into
        the host file.
        """
        try:
            if hasattr(node, 'docstring'):
                docstring = node.docstring
                # This is just a reasonable guess. In Python 3.7, docstrings no
                # longer have line numbers associated with them. This will be
                # incorrect if there are empty lines between the beginning
                # of the function and the docstring.
                node_lineno = node.lineno
                if hasattr(node, 'args'):
                    node_lineno = max([node_lineno] +
                                      [arg.lineno for arg in node.args.args])
            else:
                (docstring, node_lineno) = self.getDocstring(node.body[0])
            examples = docstring and self._getDoctestExamples(docstring)
        except (ValueError, IndexError):
            # e.g. line 6 of the docstring for <string> has inconsistent
            # leading whitespace: ...
            return
        if not examples:
            return
        # Place doctest in module scope
        saved_stack = self.scopeStack
        self.scopeStack = [self.scopeStack[0]]
        node_offset = self.offset or (0, 0)
        self.pushScope(DoctestScope)
        if '_' not in self.scopeStack[0]:
            # the doctest convention binds `_` to the last result
            self.addBinding(None, Builtin('_'))
        for example in examples:
            try:
                tree = ast.parse(example.source, "<doctest>")
            except SyntaxError:
                e = sys.exc_info()[1]
                if PYPY:
                    e.offset += 1
                position = (node_lineno + example.lineno + e.lineno,
                            example.indent + 4 + (e.offset or 0))
                self.report(messages.DoctestSyntaxError, node, position)
            else:
                # offset by the ">>> " prompt (4 cols) and docstring position
                self.offset = (node_offset[0] + node_lineno + example.lineno,
                               node_offset[1] + example.indent + 4)
                self.handleChildren(tree)
                self.offset = node_offset
        self.popScope()
        self.scopeStack = saved_stack
    @in_string_annotation
    def handleStringAnnotation(self, s, node, ref_lineno, ref_col_offset, err):
        """Parse the string annotation *s* and visit the expression inside.

        Reports *err* (a message class) when *s* is not a single valid
        expression. Positions of the parsed nodes are rewritten to
        *ref_lineno*/*ref_col_offset* so diagnostics point at the original
        annotation, not at the re-parsed snippet.
        """
        try:
            tree = ast.parse(s)
        except SyntaxError:
            self.report(err, node, s)
            return
        body = tree.body
        if len(body) != 1 or not isinstance(body[0], ast.Expr):
            # a forward reference must be exactly one expression
            self.report(err, node, s)
            return
        parsed_annotation = tree.body[0].value
        for descendant in ast.walk(parsed_annotation):
            if (
                'lineno' in descendant._attributes and
                'col_offset' in descendant._attributes
            ):
                descendant.lineno = ref_lineno
                descendant.col_offset = ref_col_offset
        self.handleNode(parsed_annotation, node)
    @in_annotation
    def handleAnnotation(self, annotation, node):
        """Visit an annotation expression.

        String annotations (and all annotations when postponed evaluation is
        enabled) are deferred so that forward references resolve against the
        fully-built scopes.
        """
        if isinstance(annotation, ast.Str):
            # Defer handling forward annotation.
            self.deferFunction(functools.partial(
                self.handleStringAnnotation,
                annotation.s,
                node,
                annotation.lineno,
                annotation.col_offset,
                messages.ForwardAnnotationSyntaxError,
            ))
        elif self.annotationsFutureEnabled:
            # PEP 563: all annotations behave as if they were strings
            fn = in_annotation(Checker.handleNode)
            self.deferFunction(lambda: fn(self, annotation, node))
        else:
            self.handleNode(annotation, node)
    def ignore(self, node):
        """No-op handler for node types that need no checking."""
        pass
    # "stmt" type nodes
    DELETE = PRINT = FOR = ASYNCFOR = WHILE = WITH = WITHITEM = \
        ASYNCWITH = ASYNCWITHITEM = TRYFINALLY = EXEC = \
        EXPR = ASSIGN = handleChildren
    PASS = ignore
    # "expr" type nodes
    BOOLOP = UNARYOP = SET = \
        REPR = ATTRIBUTE = \
        STARRED = NAMECONSTANT = NAMEDEXPR = handleChildren
    def SUBSCRIPT(self, node):
        """Visit a subscript, with special handling for typing constructs.

        ``Literal[...]`` arguments are not annotations; ``Annotated[T, ...]``
        treats only its first argument as a type; other typing members put
        the whole subscript into annotation context.
        """
        if _is_name_or_attr(node.value, 'Literal'):
            with self._enter_annotation(AnnotationState.NONE):
                self.handleChildren(node)
        elif _is_name_or_attr(node.value, 'Annotated'):
            self.handleNode(node.value, node)
            # py39+
            if isinstance(node.slice, ast.Tuple):
                slice_tuple = node.slice
            # <py39
            elif (
                isinstance(node.slice, ast.Index) and
                isinstance(node.slice.value, ast.Tuple)
            ):
                slice_tuple = node.slice.value
            else:
                slice_tuple = None
            # not a multi-arg `Annotated`
            if slice_tuple is None or len(slice_tuple.elts) < 2:
                self.handleNode(node.slice, node)
            else:
                # the first argument is the type
                self.handleNode(slice_tuple.elts[0], node)
                # the rest of the arguments are not
                with self._enter_annotation(AnnotationState.NONE):
                    for arg in slice_tuple.elts[1:]:
                        self.handleNode(arg, node)
            self.handleNode(node.ctx, node)
        else:
            if _is_any_typing_member(node.value, self.scopeStack):
                with self._enter_annotation():
                    self.handleChildren(node)
            else:
                self.handleChildren(node)
    def _handle_string_dot_format(self, node):
        """Check a literal ``"...".format(...)`` call.

        Collects the positional and named placeholders from the format
        string (including nested format-spec placeholders), then compares
        them with the supplied arguments, reporting invalid formats, mixing
        of automatic and manual numbering, and extra/missing arguments.
        The comparison is skipped when ``*args``/``**kwargs`` are present.
        """
        try:
            placeholders = tuple(parse_format_string(node.func.value.s))
        except ValueError as e:
            self.report(messages.StringDotFormatInvalidFormat, node, e)
            return
        class state:  # py2-compatible `nonlocal`
            auto = None       # True: auto-numbered `{}`, False: manual `{0}`
            next_auto = 0     # next index assigned to an auto placeholder
        placeholder_positional = set()
        placeholder_named = set()
        def _add_key(fmtkey):
            """Returns True if there is an error which should early-exit"""
            if fmtkey is None:  # end of string or `{` / `}` escapes
                return False
            # attributes / indices are allowed in `.format(...)`
            fmtkey, _, _ = fmtkey.partition('.')
            fmtkey, _, _ = fmtkey.partition('[')
            try:
                fmtkey = int(fmtkey)
            except ValueError:
                pass
            else:  # fmtkey was an integer
                if state.auto is True:
                    self.report(messages.StringDotFormatMixingAutomatic, node)
                    return True
                else:
                    state.auto = False
            if fmtkey == '':
                if state.auto is False:
                    self.report(messages.StringDotFormatMixingAutomatic, node)
                    return True
                else:
                    state.auto = True
                fmtkey = state.next_auto
                state.next_auto += 1
            if isinstance(fmtkey, int):
                placeholder_positional.add(fmtkey)
            else:
                placeholder_named.add(fmtkey)
            return False
        for _, fmtkey, spec, _ in placeholders:
            if _add_key(fmtkey):
                return
            # spec can also contain format specifiers
            if spec is not None:
                try:
                    spec_placeholders = tuple(parse_format_string(spec))
                except ValueError as e:
                    self.report(messages.StringDotFormatInvalidFormat, node, e)
                    return
                for _, spec_fmtkey, spec_spec, _ in spec_placeholders:
                    # can't recurse again
                    if spec_spec is not None and '{' in spec_spec:
                        self.report(
                            messages.StringDotFormatInvalidFormat,
                            node,
                            'Max string recursion exceeded',
                        )
                        return
                    if _add_key(spec_fmtkey):
                        return
        # bail early if there is *args or **kwargs
        if (
            # python 2.x *args / **kwargs
            getattr(node, 'starargs', None) or
            getattr(node, 'kwargs', None) or
            # python 3.x *args
            any(
                isinstance(arg, getattr(ast, 'Starred', ()))
                for arg in node.args
            ) or
            # python 3.x **kwargs
            any(kwd.arg is None for kwd in node.keywords)
        ):
            return
        substitution_positional = set(range(len(node.args)))
        substitution_named = {kwd.arg for kwd in node.keywords}
        extra_positional = substitution_positional - placeholder_positional
        extra_named = substitution_named - placeholder_named
        missing_arguments = (
            (placeholder_positional | placeholder_named) -
            (substitution_positional | substitution_named)
        )
        if extra_positional:
            self.report(
                messages.StringDotFormatExtraPositionalArguments,
                node,
                ', '.join(sorted(str(x) for x in extra_positional)),
            )
        if extra_named:
            self.report(
                messages.StringDotFormatExtraNamedArguments,
                node,
                ', '.join(sorted(extra_named)),
            )
        if missing_arguments:
            self.report(
                messages.StringDotFormatMissingArgument,
                node,
                ', '.join(sorted(str(x) for x in missing_arguments)),
            )
    def CALL(self, node):
        """Visit a call expression.

        Literal ``"...".format(...)`` calls get format-string checking;
        ``typing.cast``/``TypeVar``/``TypedDict``/``NamedTuple`` calls have
        specific arguments visited in annotation context while the rest are
        visited as plain values.
        """
        if (
            isinstance(node.func, ast.Attribute) and
            isinstance(node.func.value, ast.Str) and
            node.func.attr == 'format'
        ):
            self._handle_string_dot_format(node)
        # `omit` lists node fields handled manually below; `annotated` /
        # `not_annotated` collect sub-nodes to visit in / out of annotation
        # context respectively.
        omit = []
        annotated = []
        not_annotated = []
        if (
            _is_typing(node.func, 'cast', self.scopeStack) and
            len(node.args) >= 1
        ):
            with self._enter_annotation():
                self.handleNode(node.args[0], node)
        elif _is_typing(node.func, 'TypeVar', self.scopeStack):
            # TypeVar("T", "int", "str")
            omit += ["args"]
            annotated += [arg for arg in node.args[1:]]
            # TypeVar("T", bound="str")
            omit += ["keywords"]
            annotated += [k.value for k in node.keywords if k.arg == "bound"]
            not_annotated += [
                (k, ["value"] if k.arg == "bound" else None)
                for k in node.keywords
            ]
        elif _is_typing(node.func, "TypedDict", self.scopeStack):
            # TypedDict("a", {"a": int})
            if len(node.args) > 1 and isinstance(node.args[1], ast.Dict):
                omit += ["args"]
                annotated += node.args[1].values
                not_annotated += [
                    (arg, ["values"] if i == 1 else None)
                    for i, arg in enumerate(node.args)
                ]
            # TypedDict("a", a=int)
            omit += ["keywords"]
            annotated += [k.value for k in node.keywords]
            not_annotated += [(k, ["value"]) for k in node.keywords]
        elif _is_typing(node.func, "NamedTuple", self.scopeStack):
            # NamedTuple("a", [("a", int)])
            if (
                len(node.args) > 1 and
                isinstance(node.args[1], (ast.Tuple, ast.List)) and
                all(isinstance(x, (ast.Tuple, ast.List)) and
                    len(x.elts) == 2 for x in node.args[1].elts)
            ):
                omit += ["args"]
                annotated += [elt.elts[1] for elt in node.args[1].elts]
                not_annotated += [(elt.elts[0], None) for elt in node.args[1].elts]
                not_annotated += [
                    (arg, ["elts"] if i == 1 else None)
                    for i, arg in enumerate(node.args)
                ]
                not_annotated += [(elt, "elts") for elt in node.args[1].elts]
            # NamedTuple("a", a=int)
            omit += ["keywords"]
            annotated += [k.value for k in node.keywords]
            not_annotated += [(k, ["value"]) for k in node.keywords]
        if omit:
            with self._enter_annotation(AnnotationState.NONE):
                for na_node, na_omit in not_annotated:
                    self.handleChildren(na_node, omit=na_omit)
                self.handleChildren(node, omit=omit)
            with self._enter_annotation():
                for annotated_node in annotated:
                    self.handleNode(annotated_node, node)
        else:
            self.handleChildren(node)
    def _handle_percent_format(self, node):
        """Check a literal ``"..." % ...`` formatting expression.

        Parses the format string's conversion specifiers, then — when the
        right-hand side is a literal tuple/list or a dict with string keys —
        compares placeholders with substitutions and reports mismatches
        (unsupported conversions, mixed positional/named, count and key
        mismatches).
        """
        try:
            placeholders = parse_percent_format(node.left.s)
        except ValueError:
            self.report(
                messages.PercentFormatInvalidFormat,
                node,
                'incomplete format',
            )
            return
        named = set()
        positional_count = 0
        # None until the first real conversion decides the style:
        # True -> positional (`%s`), False -> named (`%(name)s`)
        positional = None
        for _, placeholder in placeholders:
            if placeholder is None:
                continue
            name, _, width, precision, conversion = placeholder
            if conversion == '%':
                # literal percent escape, consumes no argument
                continue
            if conversion not in VALID_CONVERSIONS:
                self.report(
                    messages.PercentFormatUnsupportedFormatCharacter,
                    node,
                    conversion,
                )
            if positional is None and conversion:
                positional = name is None
            for part in (width, precision):
                if part is not None and '*' in part:
                    # `*` width/precision consumes an extra positional arg
                    if not positional:
                        self.report(
                            messages.PercentFormatStarRequiresSequence,
                            node,
                        )
                    else:
                        positional_count += 1
            if positional and name is not None:
                self.report(
                    messages.PercentFormatMixedPositionalAndNamed,
                    node,
                )
                return
            elif not positional and name is None:
                self.report(
                    messages.PercentFormatMixedPositionalAndNamed,
                    node,
                )
                return
            if positional:
                positional_count += 1
            else:
                named.add(name)
        if (
            isinstance(node.right, (ast.List, ast.Tuple)) and
            # does not have any *splats (py35+ feature)
            not any(
                isinstance(elt, getattr(ast, 'Starred', ()))
                for elt in node.right.elts
            )
        ):
            substitution_count = len(node.right.elts)
            if positional and positional_count != substitution_count:
                self.report(
                    messages.PercentFormatPositionalCountMismatch,
                    node,
                    positional_count,
                    substitution_count,
                )
            elif not positional:
                self.report(messages.PercentFormatExpectedMapping, node)
        if (
            isinstance(node.right, ast.Dict) and
            all(isinstance(k, ast.Str) for k in node.right.keys)
        ):
            if positional and positional_count > 1:
                self.report(messages.PercentFormatExpectedSequence, node)
                return
            substitution_keys = {k.s for k in node.right.keys}
            extra_keys = substitution_keys - named
            missing_keys = named - substitution_keys
            if not positional and extra_keys:
                self.report(
                    messages.PercentFormatExtraNamedArguments,
                    node,
                    ', '.join(sorted(extra_keys)),
                )
            if not positional and missing_keys:
                self.report(
                    messages.PercentFormatMissingArgument,
                    node,
                    ', '.join(sorted(missing_keys)),
                )
def BINOP(self, node):
if (
isinstance(node.op, ast.Mod) and
isinstance(node.left, ast.Str)
):
self._handle_percent_format(node)
self.handleChildren(node)
    def STR(self, node):
        """Visit a string literal.

        Inside an annotation, the string is a forward reference: parse and
        check its contents (immediately when already in the deferred phase,
        otherwise deferred until scopes are complete).
        """
        if self._in_annotation:
            fn = functools.partial(
                self.handleStringAnnotation,
                node.s,
                node,
                node.lineno,
                node.col_offset,
                messages.ForwardAnnotationSyntaxError,
            )
            if self._in_deferred:
                fn()
            else:
                self.deferFunction(fn)
    if PY38_PLUS:
        # py3.8+ folds Str/Num/Bytes/etc. into ast.Constant; route string
        # constants through STR so forward annotations still work
        def CONSTANT(self, node):
            if isinstance(node.value, str):
                return self.STR(node)
    else:
        NUM = BYTES = ELLIPSIS = CONSTANT = ignore
    # "slice" type nodes
    SLICE = EXTSLICE = INDEX = handleChildren
    # expression contexts are node instances too, though being constants
    LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
    # same for operators
    AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \
        BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \
        EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = \
        MATMULT = ignore
    def RAISE(self, node):
        """Visit a ``raise``; warn when NotImplemented (the comparison
        sentinel) is raised instead of NotImplementedError."""
        self.handleChildren(node)
        arg = get_raise_argument(node)
        if isinstance(arg, ast.Call):
            if is_notimplemented_name_node(arg.func):
                # Handle "raise NotImplemented(...)"
                self.report(messages.RaiseNotImplemented, node)
        elif is_notimplemented_name_node(arg):
            # Handle "raise NotImplemented"
            self.report(messages.RaiseNotImplemented, node)
    # additional node types
    COMPREHENSION = KEYWORD = FORMATTEDVALUE = handleChildren
    # True while visiting the inside of an f-string (set by JOINEDSTR)
    _in_fstring = False
    def JOINEDSTR(self, node):
        """Visit an f-string; warn when the outermost one has no
        placeholders at all (``f"plain text"``)."""
        if (
            # the conversion / etc. flags are parsed as f-strings without
            # placeholders
            not self._in_fstring and
            not any(isinstance(x, ast.FormattedValue) for x in node.values)
        ):
            self.report(messages.FStringMissingPlaceholders, node)
        self._in_fstring, orig = True, self._in_fstring
        try:
            self.handleChildren(node)
        finally:
            self._in_fstring = orig
    def DICT(self, node):
        """Visit a dict literal; report keys that repeat with *different*
        values (repeats with identical values are harmless and ignored)."""
        # Complain if there are duplicate keys with different values
        # If they have the same value it's not going to cause potentially
        # unexpected behaviour so we'll not complain.
        keys = [
            convert_to_value(key) for key in node.keys
        ]
        key_counts = counter(keys)
        duplicate_keys = [
            key for key, count in key_counts.items()
            if count > 1
        ]
        for key in duplicate_keys:
            key_indices = [i for i, i_key in enumerate(keys) if i_key == key]
            values = counter(
                convert_to_value(node.values[index])
                for index in key_indices
            )
            # any value occurring exactly once means the duplicates differ
            if any(count == 1 for value, count in values.items()):
                for key_index in key_indices:
                    key_node = node.keys[key_index]
                    if isinstance(key, VariableKey):
                        self.report(messages.MultiValueRepeatedKeyVariable,
                                    key_node,
                                    key.name)
                    else:
                        self.report(
                            messages.MultiValueRepeatedKeyLiteral,
                            key_node,
                            key,
                        )
        self.handleChildren(node)
def IF(self, node):
if isinstance(node.test, ast.Tuple) and node.test.elts != []:
self.report(messages.IfTuple, node)
self.handleChildren(node)
IFEXP = IF
def ASSERT(self, node):
if isinstance(node.test, ast.Tuple) and node.test.elts != []:
self.report(messages.AssertTuple, node)
self.handleChildren(node)
    def GLOBAL(self, node):
        """
        Keep track of globals declarations.

        Binds each declared name in the global (or doctest) scope as already
        used, retracts UndefinedName messages previously reported for it,
        and mirrors the binding into every scope below the global one.
        """
        global_scope_index = 1 if self._in_doctest() else 0
        global_scope = self.scopeStack[global_scope_index]
        # Ignore 'global' statement in global scope.
        if self.scope is not global_scope:
            # One 'global' statement can bind multiple (comma-delimited) names.
            for node_name in node.names:
                node_value = Assignment(node_name, node)
                # Remove UndefinedName messages already reported for this name.
                # TODO: if the global is not used in this scope, it does not
                # become a globally defined name. See test_unused_global.
                self.messages = [
                    m for m in self.messages if not
                    isinstance(m, messages.UndefinedName) or
                    m.message_args[0] != node_name]
                # Bind name to global scope if it doesn't exist already.
                global_scope.setdefault(node_name, node_value)
                # Bind name to non-global scopes, but as already "used".
                node_value.used = (global_scope, node)
                for scope in self.scopeStack[global_scope_index + 1:]:
                    scope[node_name] = node_value
    NONLOCAL = GLOBAL
    def GENERATOREXP(self, node):
        """Visit a comprehension in its own GeneratorScope."""
        self.pushScope(GeneratorScope)
        self.handleChildren(node)
        self.popScope()
    # py2 list comprehensions leak into the enclosing scope; py3 ones don't
    LISTCOMP = handleChildren if PY2 else GENERATOREXP
    DICTCOMP = SETCOMP = GENERATOREXP
    def NAME(self, node):
        """
        Handle occurrence of Name (which can be a load/store/delete access.)
        """
        # Locate the name in locals / function / globals scopes.
        if isinstance(node.ctx, ast.Load):
            self.handleNodeLoad(node)
            if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and
                    isinstance(node._pyflakes_parent, ast.Call)):
                # we are doing locals() call in current scope
                self.scope.usesLocals = True
        elif isinstance(node.ctx, ast.Store):
            self.handleNodeStore(node)
        elif PY2 and isinstance(node.ctx, ast.Param):
            self.handleNodeStore(node)
        elif isinstance(node.ctx, ast.Del):
            self.handleNodeDelete(node)
        else:
            # Unknown context
            raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
    def CONTINUE(self, node):
        """Check that ``continue``/``break`` appear inside a loop, and that
        ``continue`` is not inside a ``finally`` block (pre-3.8)."""
        # Walk the tree up until we see a loop (OK), a function or class
        # definition (not OK), for 'continue', a finally block (not OK), or
        # the top module scope (not OK)
        n = node
        while hasattr(n, '_pyflakes_parent'):
            n, n_child = n._pyflakes_parent, n
            if isinstance(n, LOOP_TYPES):
                # Doesn't apply unless it's in the loop itself
                if n_child not in n.orelse:
                    return
            if isinstance(n, (ast.FunctionDef, ast.ClassDef)):
                break
            # Handle Try/TryFinally difference in Python < and >= 3.3
            if hasattr(n, 'finalbody') and isinstance(node, ast.Continue):
                if n_child in n.finalbody and not PY38_PLUS:
                    self.report(messages.ContinueInFinally, node)
                    return
        if isinstance(node, ast.Continue):
            self.report(messages.ContinueOutsideLoop, node)
        else:  # ast.Break
            self.report(messages.BreakOutsideLoop, node)
    BREAK = CONTINUE
    def RETURN(self, node):
        """Visit a ``return``; report it outside a function and remember the
        first value-carrying return (used by the py2 generator check)."""
        if isinstance(self.scope, (ClassScope, ModuleScope)):
            self.report(messages.ReturnOutsideFunction, node)
            return
        if (
            node.value and
            hasattr(self.scope, 'returnValue') and
            not self.scope.returnValue
        ):
            self.scope.returnValue = node.value
        self.handleNode(node.value, node)
    def YIELD(self, node):
        """Visit ``yield``/``yield from``/``await``; report use outside a
        function and mark the enclosing scope as a generator."""
        if isinstance(self.scope, (ClassScope, ModuleScope)):
            self.report(messages.YieldOutsideFunction, node)
            return
        self.scope.isGenerator = True
        self.handleNode(node.value, node)
    AWAIT = YIELDFROM = YIELD
    def FUNCTIONDEF(self, node):
        """Visit a (possibly async) function definition: decorators first,
        then the shared LAMBDA logic, then bind the function's name."""
        for deco in node.decorator_list:
            self.handleNode(deco, node)
        self.LAMBDA(node)
        self.addBinding(node, FunctionDefinition(node.name, node))
        # doctest does not process doctest within a doctest,
        # or in nested functions.
        if (self.withDoctest and
                not self._in_doctest() and
                not isinstance(self.scope, FunctionScope)):
            self.deferFunction(lambda: self.handleDoctests(node))
    ASYNCFUNCTIONDEF = FUNCTIONDEF
    def LAMBDA(self, node):
        """Shared logic for lambdas and function definitions.

        Collects argument names and annotations, reports duplicate
        arguments, visits annotations and defaults in the *enclosing*
        scope, and defers the body so it is checked after the current
        scope is fully populated.
        """
        args = []
        annotations = []
        if PY2:
            # py2 allows tuple-unpacking parameters; flatten them
            def addArgs(arglist):
                for arg in arglist:
                    if isinstance(arg, ast.Tuple):
                        addArgs(arg.elts)
                    else:
                        args.append(arg.id)
            addArgs(node.args.args)
            defaults = node.args.defaults
        else:
            if PY38_PLUS:
                for arg in node.args.posonlyargs:
                    args.append(arg.arg)
                    annotations.append(arg.annotation)
            for arg in node.args.args + node.args.kwonlyargs:
                args.append(arg.arg)
                annotations.append(arg.annotation)
            defaults = node.args.defaults + node.args.kw_defaults
        # Only for Python3 FunctionDefs
        is_py3_func = hasattr(node, 'returns')
        for arg_name in ('vararg', 'kwarg'):
            wildcard = getattr(node.args, arg_name)
            if not wildcard:
                continue
            args.append(wildcard if PY2 else wildcard.arg)
            if is_py3_func:
                if PY2:  # Python 2.7
                    argannotation = arg_name + 'annotation'
                    annotations.append(getattr(node.args, argannotation))
                else:  # Python >= 3.4
                    annotations.append(wildcard.annotation)
        if is_py3_func:
            annotations.append(node.returns)
        if len(set(args)) < len(args):
            for (idx, arg) in enumerate(args):
                if arg in args[:idx]:
                    self.report(messages.DuplicateArgument, node, arg)
        for annotation in annotations:
            self.handleAnnotation(annotation, node)
        for default in defaults:
            self.handleNode(default, node)
        def runFunction():
            # executed later (deferred): check the body in its own scope
            self.pushScope()
            self.handleChildren(node, omit=['decorator_list', 'returns'])
            def checkUnusedAssignments():
                """
                Check to see if any assignments have not been used.
                """
                for name, binding in self.scope.unusedAssignments():
                    self.report(messages.UnusedVariable, binding.source, name)
            self.deferAssignment(checkUnusedAssignments)
            if PY2:
                def checkReturnWithArgumentInsideGenerator():
                    """
                    Check to see if there is any return statement with
                    arguments but the function is a generator.
                    """
                    if self.scope.isGenerator and self.scope.returnValue:
                        self.report(messages.ReturnWithArgsInsideGenerator,
                                    self.scope.returnValue)
                self.deferAssignment(checkReturnWithArgumentInsideGenerator)
            self.popScope()
        self.deferFunction(runFunction)
    def ARGUMENTS(self, node):
        """Visit a function's arguments node; defaults are skipped here
        because LAMBDA already visited them in the enclosing scope."""
        self.handleChildren(node, omit=('defaults', 'kw_defaults'))
        if PY2:
            # py2 stores *args/**kwargs as bare strings, not arg nodes
            scope_node = self.getScopeNode(node)
            if node.vararg:
                self.addBinding(node, Argument(node.vararg, scope_node))
            if node.kwarg:
                self.addBinding(node, Argument(node.kwarg, scope_node))
    def ARG(self, node):
        """Bind a py3 function parameter as an Argument in its scope."""
        self.addBinding(node, Argument(node.arg, self.getScopeNode(node)))
    def CLASSDEF(self, node):
        """
        Check names used in a class definition, including its decorators, base
        classes, and the body of its definition. Additionally, add its name to
        the current scope.
        """
        # decorators, bases, and keywords evaluate in the enclosing scope
        for deco in node.decorator_list:
            self.handleNode(deco, node)
        for baseNode in node.bases:
            self.handleNode(baseNode, node)
        if not PY2:
            for keywordNode in node.keywords:
                self.handleNode(keywordNode, node)
        self.pushScope(ClassScope)
        # doctest does not process doctest within a doctest
        # classes within classes are processed.
        if (self.withDoctest and
                not self._in_doctest() and
                not isinstance(self.scope, FunctionScope)):
            self.deferFunction(lambda: self.handleDoctests(node))
        for stmt in node.body:
            self.handleNode(stmt, node)
        self.popScope()
        # the class name only becomes visible after the body is done
        self.addBinding(node, ClassDefinition(node.name, node))
    def AUGASSIGN(self, node):
        """Visit ``x += ...``: the target is read first (so an unbound name
        is reported), then the value, then the target's store."""
        self.handleNodeLoad(node.target)
        self.handleNode(node.value, node)
        self.handleNode(node.target, node)
    def TUPLE(self, node):
        """Visit a tuple/list; in store context, validate starred-assignment
        limits (one ``*`` target, CPython's positional limits)."""
        if not PY2 and isinstance(node.ctx, ast.Store):
            # Python 3 advanced tuple unpacking: a, *b, c = d.
            # Only one starred expression is allowed, and no more than 1<<8
            # assignments are allowed before a stared expression. There is
            # also a limit of 1<<24 expressions after the starred expression,
            # which is impossible to test due to memory restrictions, but we
            # add it here anyway
            has_starred = False
            star_loc = -1
            for i, n in enumerate(node.elts):
                if isinstance(n, ast.Starred):
                    if has_starred:
                        self.report(messages.TwoStarredExpressions, node)
                        # The SyntaxError doesn't distinguish two from more
                        # than two.
                        break
                    has_starred = True
                    star_loc = i
            if star_loc >= 1 << 8 or len(node.elts) - star_loc - 1 >= 1 << 24:
                self.report(messages.TooManyExpressionsInStarredAssignment, node)
        self.handleChildren(node)
    LIST = TUPLE
def IMPORT(self, node):
for alias in node.names:
if '.' in alias.name and not alias.asname:
importation = SubmoduleImportation(alias.name, node)
else:
name = alias.asname or alias.name
importation = Importation(name, node, alias.name)
self.addBinding(node, importation)
    def IMPORTFROM(self, node):
        """Visit ``from ... import ...``, handling ``__future__`` imports,
        star imports, and ordinary from-imports."""
        if node.module == '__future__':
            if not self.futuresAllowed:
                self.report(messages.LateFutureImport,
                            node, [n.name for n in node.names])
        else:
            # any non-future from-import closes the __future__ window
            self.futuresAllowed = False
        module = ('.' * node.level) + (node.module or '')
        for alias in node.names:
            name = alias.asname or alias.name
            if node.module == '__future__':
                importation = FutureImportation(name, node, self.scope)
                if alias.name not in __future__.all_feature_names:
                    self.report(messages.FutureFeatureNotDefined,
                                node, alias.name)
                if alias.name == 'annotations':
                    self.annotationsFutureEnabled = True
            elif alias.name == '*':
                # Only Python 2, local import * is a SyntaxWarning
                if not PY2 and not isinstance(self.scope, ModuleScope):
                    self.report(messages.ImportStarNotPermitted,
                                node, module)
                    continue
                self.scope.importStarred = True
                self.report(messages.ImportStarUsed, node, module)
                importation = StarImportation(module, node)
            else:
                importation = ImportationFrom(name, node,
                                              module, alias.name)
            self.addBinding(node, importation)
    def TRY(self, node):
        """Visit ``try``: record handled exception names while checking the
        body (so NameError handlers suppress UndefinedName), and report a
        bare ``except:`` that is not last."""
        handler_names = []
        # List the exception handlers
        for i, handler in enumerate(node.handlers):
            if isinstance(handler.type, ast.Tuple):
                for exc_type in handler.type.elts:
                    handler_names.append(getNodeName(exc_type))
            elif handler.type:
                handler_names.append(getNodeName(handler.type))
            if handler.type is None and i < len(node.handlers) - 1:
                self.report(messages.DefaultExceptNotLast, handler)
        # Memorize the except handlers and process the body
        self.exceptHandlers.append(handler_names)
        for child in node.body:
            self.handleNode(child, node)
        self.exceptHandlers.pop()
        # Process the other nodes: "except:", "else:", "finally:"
        self.handleChildren(node, omit='body')
    TRYEXCEPT = TRY
    def EXCEPTHANDLER(self, node):
        """Visit an except handler.

        On py3 the ``as name`` is a plain string scoped to the handler
        block only: bind it temporarily, report it if unused, and restore
        whatever binding the name had before.
        """
        if PY2 or node.name is None:
            self.handleChildren(node)
            return
        # If the name already exists in the scope, modify state of existing
        # binding.
        if node.name in self.scope:
            self.handleNodeStore(node)
        # 3.x: the name of the exception, which is not a Name node, but a
        # simple string, creates a local that is only bound within the scope of
        # the except: block. As such, temporarily remove the existing binding
        # to more accurately determine if the name is used in the except:
        # block.
        try:
            prev_definition = self.scope.pop(node.name)
        except KeyError:
            prev_definition = None
        self.handleNodeStore(node)
        self.handleChildren(node)
        # See discussion on https://github.com/PyCQA/pyflakes/pull/59
        # We're removing the local name since it's being unbound after leaving
        # the except: block and it's always unbound if the except: block is
        # never entered. This will cause an "undefined name" error raised if
        # the checked code tries to use the name afterwards.
        #
        # Unless it's been removed already. Then do nothing.
        try:
            binding = self.scope.pop(node.name)
        except KeyError:
            pass
        else:
            if not binding.used:
                self.report(messages.UnusedVariable, node, node.name)
        # Restore.
        if prev_definition:
            self.scope[node.name] = prev_definition
    def ANNASSIGN(self, node):
        """Visit an annotated assignment: target, then annotation (in
        annotation context), then the value if present."""
        self.handleNode(node.target, node)
        self.handleAnnotation(node.annotation, node)
        if node.value:
            # If the assignment has value, handle the *value* now.
            self.handleNode(node.value, node)
    def COMPARE(self, node):
        """Visit a comparison chain; warn on ``is``/``is not`` against
        literals that are not interned singletons (str/num constants)."""
        left = node.left
        for op, right in zip(node.ops, node.comparators):
            if (
                isinstance(op, (ast.Is, ast.IsNot)) and (
                    _is_const_non_singleton(left) or
                    _is_const_non_singleton(right)
                )
            ):
                self.report(messages.IsLiteral, node)
            # comparisons chain: each operand becomes the next left side
            left = right
        self.handleChildren(node)
    # py3.10+ structural pattern matching: most pattern nodes just recurse
    MATCH = MATCH_CASE = MATCHCLASS = MATCHOR = MATCHSEQUENCE = handleChildren
    MATCHSINGLETON = MATCHVALUE = handleChildren
    def _match_target(self, node):
        """Handler for match patterns that bind a name (``as``, mapping
        rest, ``*rest``): store the name, then visit sub-patterns."""
        self.handleNodeStore(node)
        self.handleChildren(node)
    MATCHAS = MATCHMAPPING = MATCHSTAR = _match_target
| 84,639 | Python | .py | 1,989 | 30.698844 | 88 | 0.57389 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,308 | api.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/api.py | """
API for the command-line I{pyflakes} tool.
"""
import ast
import os
import platform
import re
import sys
from pyflakes import checker, __version__
from pyflakes import reporter as modReporter
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
PYTHON_SHEBANG_REGEX = re.compile(br'^#!.*\bpython([23](\.\d+)?|w)?[dmu]?\s')
def check(codeString, filename, reporter=None):
    """
    Check the Python source given by C{codeString} for flakes.

    @param codeString: The Python source to check.
    @type codeString: C{str}
    @param filename: The name of the file the source came from, used to report
        errors.
    @type filename: C{str}
    @param reporter: A L{Reporter} instance, where errors and warnings will be
        reported.
    @return: The number of warnings emitted.
    @rtype: C{int}
    """
    if reporter is None:
        reporter = modReporter._makeDefaultReporter()
    # First, compile into an AST and handle syntax errors.
    try:
        tree = ast.parse(codeString, filename=filename)
    except SyntaxError:
        value = sys.exc_info()[1]
        msg = value.args[0]
        (lineno, offset, text) = value.lineno, value.offset, value.text
        if checker.PYPY:
            # PyPy's SyntaxError differs from CPython's: the source text may
            # be missing and the column offset is 1 higher — recover the line
            # from codeString and normalise the offset.
            if text is None:
                lines = codeString.splitlines()
                if len(lines) >= lineno:
                    text = lines[lineno - 1]
                    if sys.version_info >= (3, ) and isinstance(text, bytes):
                        try:
                            text = text.decode('ascii')
                        except UnicodeDecodeError:
                            text = None
            offset -= 1
        # If there's an encoding problem with the file, the text is None.
        if text is None:
            # Avoid using msg, since for the only known case, it contains a
            # bogus message that claims the encoding the file declared was
            # unknown.
            reporter.unexpectedError(filename, 'problem decoding source')
        else:
            reporter.syntaxError(filename, msg, lineno, offset, text)
        return 1
    except Exception:
        reporter.unexpectedError(filename, 'problem decoding source')
        return 1
    # Okay, it's syntactically valid.  Now check it.
    file_tokens = checker.make_tokens(codeString)
    w = checker.Checker(tree, file_tokens=file_tokens, filename=filename)
    # Report warnings in source order.
    w.messages.sort(key=lambda m: m.lineno)
    for warning in w.messages:
        reporter.flake(warning)
    return len(w.messages)
def checkPath(filename, reporter=None):
    """
    Check the given path, printing out any warnings detected.

    @param reporter: A L{Reporter} instance, where errors and warnings will be
        reported.
    @return: the number of warnings printed
    """
    if reporter is None:
        reporter = modReporter._makeDefaultReporter()
    try:
        with open(filename, 'rb') as source_file:
            source = source_file.read()
    except IOError as exc:
        # Unreadable file: report it (strerror portion) and count one warning.
        reporter.unexpectedError(filename, exc.args[1])
        return 1
    return check(source, filename, reporter)
def isPythonFile(filename):
    """Return True if filename points to a Python file.

    A file qualifies either through its ``.py`` extension or by beginning
    with a recognised Python shebang (only the first 128 bytes are read).
    Unreadable or empty files are never Python files.
    """
    if filename.endswith('.py'):
        return True
    # Avoid obvious Emacs backup files
    if filename.endswith("~"):
        return False
    max_bytes = 128
    try:
        with open(filename, 'rb') as f:
            text = f.read(max_bytes)
            if not text:
                return False
    except IOError:
        return False
    # bool() so the function honours its documented True/False contract
    # instead of leaking an re.Match object (or None) to callers.
    return bool(PYTHON_SHEBANG_REGEX.match(text))
def iterSourceCode(paths):
    """
    Iterate over all Python source files in C{paths}.

    @param paths: A list of paths.  Directories will be recursed into and
        any .py files found will be yielded.  Any non-directories will be
        yielded as-is.
    """
    for path in paths:
        # Non-directories are passed through untouched, even if they are
        # not Python files.
        if not os.path.isdir(path):
            yield path
            continue
        for dirpath, _dirnames, filenames in os.walk(path):
            for name in filenames:
                candidate = os.path.join(dirpath, name)
                if isPythonFile(candidate):
                    yield candidate
def checkRecursive(paths, reporter):
    """
    Recursively check all source files in C{paths}.

    @param paths: A list of paths to Python source files and directories
        containing Python source files.
    @param reporter: A L{Reporter} where all of the warnings and errors
        will be reported to.
    @return: The number of warnings found.
    """
    # checkPath returns a per-file warning count; total them up.
    return sum(checkPath(sourcePath, reporter)
               for sourcePath in iterSourceCode(paths))
def _exitOnSignal(sigName, message):
"""Handles a signal with sys.exit.
Some of these signals (SIGPIPE, for example) don't exist or are invalid on
Windows. So, ignore errors that might arise.
"""
import signal
try:
sigNumber = getattr(signal, sigName)
except AttributeError:
# the signal constants defined in the signal module are defined by
# whether the C library supports them or not. So, SIGPIPE might not
# even be defined.
return
def handler(sig, f):
sys.exit(message)
try:
signal.signal(sigNumber, handler)
except ValueError:
# It's also possible the signal is defined, but then it's invalid. In
# this case, signal.signal raises ValueError.
pass
def _get_version():
    """
    Retrieve and format package version along with python version & OS used
    """
    return '{} Python {} on {}'.format(
        __version__, platform.python_version(), platform.system())
def main(prog=None, args=None):
    """Entry point for the script "pyflakes"."""
    import argparse

    # Handle "Keyboard Interrupt" and "Broken pipe" gracefully
    _exitOnSignal('SIGINT', '... stopped')
    _exitOnSignal('SIGPIPE', 1)

    parser = argparse.ArgumentParser(
        prog=prog, description='Check Python source files for errors')
    parser.add_argument('-V', '--version', action='version',
                        version=_get_version())
    parser.add_argument('path', nargs='*',
                        help='Path(s) of Python file(s) to check. STDIN if not given.')
    paths = parser.parse_args(args=args).path

    reporter = modReporter._makeDefaultReporter()
    if paths:
        warnings = checkRecursive(paths, reporter)
    else:
        warnings = check(sys.stdin.read(), '<stdin>', reporter)
    # Exit status 1 when any warning was emitted, 0 otherwise.
    raise SystemExit(warnings > 0)
| 6,608 | Python | .py | 171 | 30.643275 | 88 | 0.635454 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,309 | reporter.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/reporter.py | """
Provide the Reporter class.
"""
import re
import sys
class Reporter(object):
    """
    Formats the results of pyflakes checks to users.
    """

    def __init__(self, warningStream, errorStream):
        """
        Construct a L{Reporter}.

        @param warningStream: A file-like object where warnings will be
            written to.  The stream's C{write} method must accept unicode.
            C{sys.stdout} is a good value.
        @param errorStream: A file-like object where error output will be
            written to.  The stream's C{write} method must accept unicode.
            C{sys.stderr} is a good value.
        """
        self._stdout = warningStream
        self._stderr = errorStream

    def unexpectedError(self, filename, msg):
        """
        Report an unexpected problem while processing C{filename}.

        @param filename: The path to a file that we could not process.
        @param msg: A message explaining the problem.
        """
        self._stderr.write("%s: %s\n" % (filename, msg))

    def syntaxError(self, filename, msg, lineno, offset, text):
        """
        Report a syntax error in C{filename}: location line, the offending
        source line and — when a column is known — a caret marking it.

        @param filename: The path to the file with the syntax error.
        @param msg: An explanation of the syntax error.
        @param lineno: The line number where the syntax error occurred.
        @param offset: The column on which the syntax error occurred, or None.
        @param text: The source code containing the syntax error.
        """
        # Only the last line of a multi-line error is echoed.
        line = text.splitlines()[-1]

        if offset is None:
            self._stderr.write('%s:%d: %s\n' % (filename, lineno, msg))
        else:
            if sys.version_info < (3, 8):
                # Pre-3.8 interpreters give the offset relative to the whole
                # (possibly multi-line) text; rebase it onto the last line.
                offset = offset - (len(text) - len(line)) + 1
            self._stderr.write('%s:%d:%d: %s\n' %
                               (filename, lineno, offset, msg))

        self._stderr.write(line + '\n')

        if offset is not None:
            # Whitespace padding that preserves the line's layout, then '^'.
            padding = re.sub(r'\S', ' ', line[:offset - 1])
            self._stderr.write(padding + "^\n")

    def flake(self, message):
        """
        Report a pyflakes code warning; the output is exactly
        str() of a L{pyflakes.messages.Message}.
        """
        self._stdout.write(str(message) + '\n')
def _makeDefaultReporter():
    """
    Make a reporter that can be used when no reporter is specified:
    warnings go to stdout, errors to stderr.
    """
    warning_stream, error_stream = sys.stdout, sys.stderr
    return Reporter(warning_stream, error_stream)
| 2,715 | Python | .py | 69 | 30.362319 | 78 | 0.589821 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,310 | test_type_annotations.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_type_annotations.py | """
Tests for behaviour related to type annotations.
"""
from sys import version_info
from pyflakes import messages as m
from pyflakes.test.harness import TestCase, skipIf
class TestTypeAnnotations(TestCase):
def test_typingOverload(self):
"""Allow intentional redefinitions via @typing.overload"""
self.flakes("""
import typing
from typing import overload
@overload
def f(s): # type: (None) -> None
pass
@overload
def f(s): # type: (int) -> int
pass
def f(s):
return s
@typing.overload
def g(s): # type: (None) -> None
pass
@typing.overload
def g(s): # type: (int) -> int
pass
def g(s):
return s
""")
def test_typingExtensionsOverload(self):
"""Allow intentional redefinitions via @typing_extensions.overload"""
self.flakes("""
import typing_extensions
from typing_extensions import overload
@overload
def f(s): # type: (None) -> None
pass
@overload
def f(s): # type: (int) -> int
pass
def f(s):
return s
@typing_extensions.overload
def g(s): # type: (None) -> None
pass
@typing_extensions.overload
def g(s): # type: (int) -> int
pass
def g(s):
return s
""")
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_typingOverloadAsync(self):
"""Allow intentional redefinitions via @typing.overload (async)"""
self.flakes("""
from typing import overload
@overload
async def f(s): # type: (None) -> None
pass
@overload
async def f(s): # type: (int) -> int
pass
async def f(s):
return s
""")
def test_overload_with_multiple_decorators(self):
self.flakes("""
from typing import overload
dec = lambda f: f
@dec
@overload
def f(x): # type: (int) -> int
pass
@dec
@overload
def f(x): # type: (str) -> str
pass
@dec
def f(x): return x
""")
def test_overload_in_class(self):
self.flakes("""
from typing import overload
class C:
@overload
def f(self, x): # type: (int) -> int
pass
@overload
def f(self, x): # type: (str) -> str
pass
def f(self, x): return x
""")
def test_aliased_import(self):
"""Detect when typing is imported as another name"""
self.flakes("""
import typing as t
@t.overload
def f(s): # type: (None) -> None
pass
@t.overload
def f(s): # type: (int) -> int
pass
def f(s):
return s
""")
def test_not_a_typing_overload(self):
"""regression test for @typing.overload detection bug in 2.1.0"""
self.flakes("""
def foo(x):
return x
@foo
def bar():
pass
def bar():
pass
""", m.RedefinedWhileUnused)
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_variable_annotations(self):
self.flakes('''
name: str
age: int
''')
self.flakes('''
name: str = 'Bob'
age: int = 18
''')
self.flakes('''
class C:
name: str
age: int
''')
self.flakes('''
class C:
name: str = 'Bob'
age: int = 18
''')
self.flakes('''
def f():
name: str
age: int
''')
self.flakes('''
def f():
name: str = 'Bob'
age: int = 18
foo: not_a_real_type = None
''', m.UnusedVariable, m.UnusedVariable, m.UnusedVariable, m.UndefinedName)
self.flakes('''
def f():
name: str
print(name)
''', m.UndefinedName)
self.flakes('''
from typing import Any
def f():
a: Any
''')
self.flakes('''
foo: not_a_real_type
''', m.UndefinedName)
self.flakes('''
foo: not_a_real_type = None
''', m.UndefinedName)
self.flakes('''
class C:
foo: not_a_real_type
''', m.UndefinedName)
self.flakes('''
class C:
foo: not_a_real_type = None
''', m.UndefinedName)
self.flakes('''
def f():
class C:
foo: not_a_real_type
''', m.UndefinedName)
self.flakes('''
def f():
class C:
foo: not_a_real_type = None
''', m.UndefinedName)
self.flakes('''
from foo import Bar
bar: Bar
''')
self.flakes('''
from foo import Bar
bar: 'Bar'
''')
self.flakes('''
import foo
bar: foo.Bar
''')
self.flakes('''
import foo
bar: 'foo.Bar'
''')
self.flakes('''
from foo import Bar
def f(bar: Bar): pass
''')
self.flakes('''
from foo import Bar
def f(bar: 'Bar'): pass
''')
self.flakes('''
from foo import Bar
def f(bar) -> Bar: return bar
''')
self.flakes('''
from foo import Bar
def f(bar) -> 'Bar': return bar
''')
self.flakes('''
bar: 'Bar'
''', m.UndefinedName)
self.flakes('''
bar: 'foo.Bar'
''', m.UndefinedName)
self.flakes('''
from foo import Bar
bar: str
''', m.UnusedImport)
self.flakes('''
from foo import Bar
def f(bar: str): pass
''', m.UnusedImport)
self.flakes('''
def f(a: A) -> A: pass
class A: pass
''', m.UndefinedName, m.UndefinedName)
self.flakes('''
def f(a: 'A') -> 'A': return a
class A: pass
''')
self.flakes('''
a: A
class A: pass
''', m.UndefinedName)
self.flakes('''
a: 'A'
class A: pass
''')
self.flakes('''
T: object
def f(t: T): pass
''', m.UndefinedName)
self.flakes('''
T: object
def g(t: 'T'): pass
''')
self.flakes('''
a: 'A B'
''', m.ForwardAnnotationSyntaxError)
self.flakes('''
a: 'A; B'
''', m.ForwardAnnotationSyntaxError)
self.flakes('''
a: '1 + 2'
''')
self.flakes('''
a: 'a: "A"'
''', m.ForwardAnnotationSyntaxError)
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_annotating_an_import(self):
self.flakes('''
from a import b, c
b: c
print(b)
''')
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_unused_annotation(self):
# Unused annotations are fine in module and class scope
self.flakes('''
x: int
class Cls:
y: int
''')
# TODO: this should print a UnusedVariable message
self.flakes('''
def f():
x: int
''')
# This should only print one UnusedVariable message
self.flakes('''
def f():
x: int
x = 3
''', m.UnusedVariable)
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_annotated_async_def(self):
self.flakes('''
class c: pass
async def func(c: c) -> None: pass
''')
@skipIf(version_info < (3, 7), 'new in Python 3.7')
def test_postponed_annotations(self):
self.flakes('''
from __future__ import annotations
def f(a: A) -> A: pass
class A:
b: B
class B: pass
''')
self.flakes('''
from __future__ import annotations
def f(a: A) -> A: pass
class A:
b: Undefined
class B: pass
''', m.UndefinedName)
self.flakes('''
from __future__ import annotations
T: object
def f(t: T): pass
def g(t: 'T'): pass
''')
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_type_annotation_clobbers_all(self):
self.flakes('''\
from typing import TYPE_CHECKING, List
from y import z
if not TYPE_CHECKING:
__all__ = ("z",)
else:
__all__: List[str]
''')
def test_typeCommentsMarkImportsAsUsed(self):
self.flakes("""
from mod import A, B, C, D, E, F, G
def f(
a, # type: A
):
# type: (...) -> B
for b in a: # type: C
with b as c: # type: D
d = c.x # type: E
return d
def g(x): # type: (F) -> G
return x.y
""")
def test_typeCommentsFullSignature(self):
self.flakes("""
from mod import A, B, C, D
def f(a, b):
# type: (A, B[C]) -> D
return a + b
""")
def test_typeCommentsStarArgs(self):
self.flakes("""
from mod import A, B, C, D
def f(a, *b, **c):
# type: (A, *B, **C) -> D
return a + b
""")
def test_typeCommentsFullSignatureWithDocstring(self):
self.flakes('''
from mod import A, B, C, D
def f(a, b):
# type: (A, B[C]) -> D
"""do the thing!"""
return a + b
''')
def test_typeCommentsAdditionalComment(self):
self.flakes("""
from mod import F
x = 1 # type: F # noqa
""")
def test_typeCommentsNoWhitespaceAnnotation(self):
self.flakes("""
from mod import F
x = 1 #type:F
""")
def test_typeCommentsInvalidDoesNotMarkAsUsed(self):
self.flakes("""
from mod import F
# type: F
""", m.UnusedImport)
def test_typeCommentsSyntaxError(self):
self.flakes("""
def f(x): # type: (F[) -> None
pass
""", m.CommentAnnotationSyntaxError)
def test_typeCommentsSyntaxErrorCorrectLine(self):
checker = self.flakes("""\
x = 1
# type: definitely not a PEP 484 comment
""", m.CommentAnnotationSyntaxError)
self.assertEqual(checker.messages[0].lineno, 2)
def test_typeCommentsAssignedToPreviousNode(self):
# This test demonstrates an issue in the implementation which
# associates the type comment with a node above it, however the type
# comment isn't valid according to mypy. If an improved approach
# which can detect these "invalid" type comments is implemented, this
# test should be removed / improved to assert that new check.
self.flakes("""
from mod import F
x = 1
# type: F
""")
def test_typeIgnore(self):
self.flakes("""
a = 0 # type: ignore
b = 0 # type: ignore[excuse]
c = 0 # type: ignore=excuse
d = 0 # type: ignore [excuse]
e = 0 # type: ignore whatever
""")
def test_typeIgnoreBogus(self):
self.flakes("""
x = 1 # type: ignored
""", m.UndefinedName)
def test_typeIgnoreBogusUnicode(self):
error = (m.CommentAnnotationSyntaxError if version_info < (3,)
else m.UndefinedName)
self.flakes("""
x = 2 # type: ignore\xc3
""", error)
@skipIf(version_info < (3,), 'new in Python 3')
def test_return_annotation_is_class_scope_variable(self):
self.flakes("""
from typing import TypeVar
class Test:
Y = TypeVar('Y')
def t(self, x: Y) -> Y:
return x
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_return_annotation_is_function_body_variable(self):
self.flakes("""
class Test:
def t(self) -> Y:
Y = 2
return Y
""", m.UndefinedName)
@skipIf(version_info < (3, 8), 'new in Python 3.8')
def test_positional_only_argument_annotations(self):
self.flakes("""
from x import C
def f(c: C, /): ...
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_partially_quoted_type_annotation(self):
self.flakes("""
from queue import Queue
from typing import Optional
def f() -> Optional['Queue[str]']:
return None
""")
def test_partially_quoted_type_assignment(self):
self.flakes("""
from queue import Queue
from typing import Optional
MaybeQueue = Optional['Queue[str]']
""")
def test_nested_partially_quoted_type_assignment(self):
self.flakes("""
from queue import Queue
from typing import Callable
Func = Callable[['Queue[str]'], None]
""")
def test_quoted_type_cast(self):
self.flakes("""
from typing import cast, Optional
maybe_int = cast('Optional[int]', 42)
""")
def test_type_cast_literal_str_to_str(self):
# Checks that our handling of quoted type annotations in the first
# argument to `cast` doesn't cause issues when (only) the _second_
# argument is a literal str which looks a bit like a type annotation.
self.flakes("""
from typing import cast
a_string = cast(str, 'Optional[int]')
""")
def test_quoted_type_cast_renamed_import(self):
self.flakes("""
from typing import cast as tsac, Optional as Maybe
maybe_int = tsac('Maybe[int]', 42)
""")
def test_quoted_TypeVar_constraints(self):
self.flakes("""
from typing import TypeVar, Optional
T = TypeVar('T', 'str', 'Optional[int]', bytes)
""")
def test_quoted_TypeVar_bound(self):
self.flakes("""
from typing import TypeVar, Optional, List
T = TypeVar('T', bound='Optional[int]')
S = TypeVar('S', int, bound='List[int]')
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_literal_type_typing(self):
self.flakes("""
from typing import Literal
def f(x: Literal['some string']) -> None:
return None
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_literal_type_typing_extensions(self):
self.flakes("""
from typing_extensions import Literal
def f(x: Literal['some string']) -> None:
return None
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_annotated_type_typing_missing_forward_type(self):
self.flakes("""
from typing import Annotated
def f(x: Annotated['integer']) -> None:
return None
""", m.UndefinedName)
@skipIf(version_info < (3,), 'new in Python 3')
def test_annotated_type_typing_missing_forward_type_multiple_args(self):
self.flakes("""
from typing import Annotated
def f(x: Annotated['integer', 1]) -> None:
return None
""", m.UndefinedName)
@skipIf(version_info < (3,), 'new in Python 3')
def test_annotated_type_typing_with_string_args(self):
self.flakes("""
from typing import Annotated
def f(x: Annotated[int, '> 0']) -> None:
return None
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_annotated_type_typing_with_string_args_in_union(self):
self.flakes("""
from typing import Annotated, Union
def f(x: Union[Annotated['int', '>0'], 'integer']) -> None:
return None
""", m.UndefinedName)
@skipIf(version_info < (3,), 'new in Python 3')
def test_literal_type_some_other_module(self):
"""err on the side of false-negatives for types named Literal"""
self.flakes("""
from my_module import compat
from my_module.compat import Literal
def f(x: compat.Literal['some string']) -> None:
return None
def g(x: Literal['some string']) -> None:
return None
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_literal_union_type_typing(self):
self.flakes("""
from typing import Literal
def f(x: Literal['some string', 'foo bar']) -> None:
return None
""")
@skipIf(version_info < (3,), 'new in Python 3')
def test_deferred_twice_annotation(self):
self.flakes("""
from queue import Queue
from typing import Optional
def f() -> "Optional['Queue[str]']":
return None
""")
@skipIf(version_info < (3, 7), 'new in Python 3.7')
def test_partial_string_annotations_with_future_annotations(self):
self.flakes("""
from __future__ import annotations
from queue import Queue
from typing import Optional
def f() -> Optional['Queue[str]']:
return None
""")
def test_idomiatic_typing_guards(self):
# typing.TYPE_CHECKING: python3.5.3+
self.flakes("""
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from t import T
def f(): # type: () -> T
pass
""")
# False: the old, more-compatible approach
self.flakes("""
if False:
from t import T
def f(): # type: () -> T
pass
""")
# some choose to assign a constant and do it that way
self.flakes("""
MYPY = False
if MYPY:
from t import T
def f(): # type: () -> T
pass
""")
def test_typing_guard_for_protocol(self):
self.flakes("""
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from typing import Protocol
else:
Protocol = object
class C(Protocol):
def f(): # type: () -> int
pass
""")
def test_typednames_correct_forward_ref(self):
self.flakes("""
from typing import TypedDict, List, NamedTuple
List[TypedDict("x", {})]
List[TypedDict("x", x=int)]
List[NamedTuple("a", a=int)]
List[NamedTuple("a", [("a", int)])]
""")
self.flakes("""
from typing import TypedDict, List, NamedTuple, TypeVar
List[TypedDict("x", {"x": "Y"})]
List[TypedDict("x", x="Y")]
List[NamedTuple("a", [("a", "Y")])]
List[NamedTuple("a", a="Y")]
List[TypedDict("x", {"x": List["a"]})]
List[TypeVar("A", bound="C")]
List[TypeVar("A", List["C"])]
""", *[m.UndefinedName]*7)
self.flakes("""
from typing import NamedTuple, TypeVar, cast
from t import A, B, C, D, E
NamedTuple("A", [("a", A["C"])])
TypeVar("A", bound=A["B"])
TypeVar("A", A["D"])
cast(A["E"], [])
""")
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_namedtypes_classes(self):
self.flakes("""
from typing import TypedDict, NamedTuple
class X(TypedDict):
y: TypedDict("z", {"zz":int})
class Y(NamedTuple):
y: NamedTuple("v", [("vv", int)])
""")
| 20,098 | Python | .py | 637 | 21.687598 | 83 | 0.503646 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,311 | harness.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/harness.py | import ast
import textwrap
import unittest
from pyflakes import checker
__all__ = ['TestCase', 'skip', 'skipIf']
skip = unittest.skip
skipIf = unittest.skipIf
class TestCase(unittest.TestCase):
    # When True, Checker runs with doctest scanning enabled.
    withDoctest = False

    def flakes(self, input, *expectedOutputs, **kw):
        """Run the checker over *input* and assert the message types emitted.

        Returns the Checker so callers can assert further on its messages.
        """
        tree = ast.parse(textwrap.dedent(input))
        file_tokens = checker.make_tokens(textwrap.dedent(input))
        if kw.get('is_segment'):
            # Check a single statement instead of the whole module.
            tree = tree.body[0]
            kw.pop('is_segment')
        w = checker.Checker(
            tree, file_tokens=file_tokens, withDoctest=self.withDoctest, **kw
        )
        outputs = [type(o) for o in w.messages]
        expectedOutputs = list(expectedOutputs)
        # Order-insensitive comparison: sort both by message class name.
        outputs.sort(key=lambda t: t.__name__)
        expectedOutputs.sort(key=lambda t: t.__name__)
        self.assertEqual(outputs, expectedOutputs, '''\
for input:
%s
expected outputs:
%r
but got:
%s''' % (input, expectedOutputs, '\n'.join([str(o) for o in w.messages])))
        return w

    # Backports of assertions missing from very old unittest versions;
    # each is only defined when the running unittest lacks it.
    if not hasattr(unittest.TestCase, 'assertIs'):
        def assertIs(self, expr1, expr2, msg=None):
            if expr1 is not expr2:
                self.fail(msg or '%r is not %r' % (expr1, expr2))

    if not hasattr(unittest.TestCase, 'assertIsInstance'):
        def assertIsInstance(self, obj, cls, msg=None):
            """Same as self.assertTrue(isinstance(obj, cls))."""
            if not isinstance(obj, cls):
                self.fail(msg or '%r is not an instance of %r' % (obj, cls))

    if not hasattr(unittest.TestCase, 'assertNotIsInstance'):
        def assertNotIsInstance(self, obj, cls, msg=None):
            """Same as self.assertFalse(isinstance(obj, cls))."""
            if isinstance(obj, cls):
                self.fail(msg or '%r is an instance of %r' % (obj, cls))

    if not hasattr(unittest.TestCase, 'assertIn'):
        def assertIn(self, member, container, msg=None):
            """Just like self.assertTrue(a in b)."""
            if member not in container:
                self.fail(msg or '%r not found in %r' % (member, container))

    if not hasattr(unittest.TestCase, 'assertNotIn'):
        def assertNotIn(self, member, container, msg=None):
            """Just like self.assertTrue(a not in b)."""
            if member in container:
                self.fail(msg or
                          '%r unexpectedly found in %r' % (member, container))
28,312 | test_api.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_api.py | """
Tests for L{pyflakes.scripts.pyflakes}.
"""
import contextlib
import os
import sys
import shutil
import subprocess
import tempfile
from pyflakes.messages import UnusedImport
from pyflakes.reporter import Reporter
from pyflakes.api import (
main,
checkPath,
checkRecursive,
iterSourceCode,
)
from pyflakes.test.harness import TestCase, skipIf
# Python 2/3 compatibility: pick a StringIO implementation; on Python 3
# alias unichr to chr (which already covers the full code-point range).
if sys.version_info < (3,):
    from cStringIO import StringIO
else:
    from io import StringIO
    unichr = chr
# Detect PyPy: only PyPy defines sys.pypy_version_info.
try:
    sys.pypy_version_info
    PYPY = True
except AttributeError:
    PYPY = False
# Detect Windows: only Windows builds define the WindowsError builtin.
try:
    WindowsError
    WIN = True
except NameError:
    WIN = False
# Whether SyntaxError reports include a column number / last source line
# on this interpreter (CPython >= 3.2, or any PyPy).
ERROR_HAS_COL_NUM = ERROR_HAS_LAST_LINE = sys.version_info >= (3, 2) or PYPY
def withStderrTo(stderr, f, *args, **kwargs):
    """
    Call C{f} with C{sys.stderr} temporarily redirected to C{stderr},
    restoring the original stream afterwards.
    """
    saved, sys.stderr = sys.stderr, stderr
    try:
        return f(*args, **kwargs)
    finally:
        sys.stderr = saved
class Node(object):
    """
    Mock an AST node: carries only the location attributes that the
    reporting code reads.
    """

    def __init__(self, lineno, col_offset=0):
        self.col_offset = col_offset
        self.lineno = lineno
class SysStreamCapturing(object):
    """
    Context manager capturing sys.stdin, sys.stdout and sys.stderr.

    The file handles are replaced with a StringIO object.
    On environments that support it, the StringIO object uses newlines
    set to os.linesep.  Otherwise newlines are converted from \\n to
    os.linesep during __exit__.
    """

    def _create_StringIO(self, buffer=None):
        # Python 3 has a newline argument
        try:
            return StringIO(buffer, newline=os.linesep)
        except TypeError:
            # Remember that newlines must be translated manually in __exit__.
            self._newline = True
            # Python 2 creates an input only stream when buffer is not None
            if buffer is None:
                return StringIO()
            else:
                return StringIO(buffer)

    def __init__(self, stdin):
        self._newline = False
        self._stdin = self._create_StringIO(stdin or '')

    def __enter__(self):
        # Save the real streams so __exit__ can restore them.
        self._orig_stdin = sys.stdin
        self._orig_stdout = sys.stdout
        self._orig_stderr = sys.stderr
        sys.stdin = self._stdin
        sys.stdout = self._stdout_stringio = self._create_StringIO()
        sys.stderr = self._stderr_stringio = self._create_StringIO()
        return self

    def __exit__(self, *args):
        # Snapshot captured text onto public attributes before restoring.
        self.output = self._stdout_stringio.getvalue()
        self.error = self._stderr_stringio.getvalue()
        if self._newline and os.linesep != '\n':
            self.output = self.output.replace('\n', os.linesep)
            self.error = self.error.replace('\n', os.linesep)
        sys.stdin = self._orig_stdin
        sys.stdout = self._orig_stdout
        sys.stderr = self._orig_stderr
class LoggingReporter(object):
    """
    Reporter test double: every callback is appended to a list as a tuple
    whose first element names the event.
    """

    def __init__(self, log):
        """
        Construct a C{LoggingReporter}.

        @param log: A list to append log messages to.
        """
        self.log = log

    def _record(self, *entry):
        # Single choke point for appending an event tuple.
        self.log.append(entry)

    def flake(self, message):
        self._record('flake', str(message))

    def unexpectedError(self, filename, message):
        self._record('unexpectedError', filename, message)

    def syntaxError(self, filename, msg, lineno, offset, line):
        self._record('syntaxError', filename, msg, lineno, offset, line)
class TestIterSourceCode(TestCase):
    """
    Tests for L{iterSourceCode}.
    """

    def setUp(self):
        # Fresh scratch directory per test; removed again in tearDown.
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def makeEmptyFile(self, *parts):
        # Create (touch) an empty file under tempdir and return its path.
        assert parts
        fpath = os.path.join(self.tempdir, *parts)
        open(fpath, 'a').close()
        return fpath

    def test_emptyDirectory(self):
        """
        There are no Python files in an empty directory.
        """
        self.assertEqual(list(iterSourceCode([self.tempdir])), [])

    def test_singleFile(self):
        """
        If the directory contains one Python file, C{iterSourceCode} will find
        it.
        """
        childpath = self.makeEmptyFile('foo.py')
        self.assertEqual(list(iterSourceCode([self.tempdir])), [childpath])

    def test_onlyPythonSource(self):
        """
        Files that are not Python source files are not included.
        """
        self.makeEmptyFile('foo.pyc')
        self.assertEqual(list(iterSourceCode([self.tempdir])), [])

    def test_recurses(self):
        """
        If the Python files are hidden deep down in child directories, we will
        find them.
        """
        os.mkdir(os.path.join(self.tempdir, 'foo'))
        apath = self.makeEmptyFile('foo', 'a.py')
        # Editor backup file next to a real source file: must be skipped.
        self.makeEmptyFile('foo', 'a.py~')
        os.mkdir(os.path.join(self.tempdir, 'bar'))
        bpath = self.makeEmptyFile('bar', 'b.py')
        cpath = self.makeEmptyFile('c.py')
        self.assertEqual(
            sorted(iterSourceCode([self.tempdir])),
            sorted([apath, bpath, cpath]))

    def test_shebang(self):
        """
        Find Python files that don't end with `.py`, but contain a Python
        shebang.
        """
        python = os.path.join(self.tempdir, 'a')
        with open(python, 'w') as fd:
            fd.write('#!/usr/bin/env python\n')
        # No shebang, no extension: not Python.
        self.makeEmptyFile('b')
        with open(os.path.join(self.tempdir, 'c'), 'w') as fd:
            fd.write('hello\nworld\n')
        python2 = os.path.join(self.tempdir, 'd')
        with open(python2, 'w') as fd:
            fd.write('#!/usr/bin/env python2\n')
        python3 = os.path.join(self.tempdir, 'e')
        with open(python3, 'w') as fd:
            fd.write('#!/usr/bin/env python3\n')
        pythonw = os.path.join(self.tempdir, 'f')
        with open(pythonw, 'w') as fd:
            fd.write('#!/usr/bin/env pythonw\n')
        python3args = os.path.join(self.tempdir, 'g')
        with open(python3args, 'w') as fd:
            fd.write('#!/usr/bin/python3 -u\n')
        python2u = os.path.join(self.tempdir, 'h')
        with open(python2u, 'w') as fd:
            fd.write('#!/usr/bin/python2u\n')
        python3d = os.path.join(self.tempdir, 'i')
        with open(python3d, 'w') as fd:
            fd.write('#!/usr/local/bin/python3d\n')
        python38m = os.path.join(self.tempdir, 'j')
        with open(python38m, 'w') as fd:
            fd.write('#! /usr/bin/env python3.8m\n')
        python27 = os.path.join(self.tempdir, 'k')
        with open(python27, 'w') as fd:
            fd.write('#!/usr/bin/python2.7 \n')
        # Should NOT be treated as Python source
        notfirst = os.path.join(self.tempdir, 'l')
        with open(notfirst, 'w') as fd:
            fd.write('#!/bin/sh\n#!/usr/bin/python\n')
        self.assertEqual(
            sorted(iterSourceCode([self.tempdir])),
            sorted([python, python2, python3, pythonw, python3args, python2u,
                    python3d, python38m, python27]))

    def test_multipleDirectories(self):
        """
        L{iterSourceCode} can be given multiple directories.  It will recurse
        into each of them.
        """
        foopath = os.path.join(self.tempdir, 'foo')
        barpath = os.path.join(self.tempdir, 'bar')
        os.mkdir(foopath)
        apath = self.makeEmptyFile('foo', 'a.py')
        os.mkdir(barpath)
        bpath = self.makeEmptyFile('bar', 'b.py')
        self.assertEqual(
            sorted(iterSourceCode([foopath, barpath])),
            sorted([apath, bpath]))

    def test_explicitFiles(self):
        """
        If one of the paths given to L{iterSourceCode} is not a directory but
        a file, it will include that in its output.
        """
        epath = self.makeEmptyFile('e.py')
        self.assertEqual(list(iterSourceCode([epath])),
                         [epath])
class TestReporter(TestCase):
    """
    Tests for L{Reporter}.
    """

    def test_syntaxError(self):
        """
        C{syntaxError} reports that there was a syntax error in the source
        file.  It reports to the error stream and includes the filename, line
        number, error message, actual line of source and a caret pointing to
        where the error is.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        # 3.8+ reports 1-based columns, older versions rebase; feed the
        # matching raw offset so the rendered column is 8 either way.
        reporter.syntaxError('foo.py', 'a problem', 3,
                             8 if sys.version_info >= (3, 8) else 7,
                             'bad line of source')
        self.assertEqual(
            ("foo.py:3:8: a problem\n"
             "bad line of source\n"
             "       ^\n"),
            err.getvalue())

    def test_syntaxErrorNoOffset(self):
        """
        C{syntaxError} doesn't include a caret pointing to the error if
        C{offset} is passed as C{None}.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        reporter.syntaxError('foo.py', 'a problem', 3, None,
                             'bad line of source')
        self.assertEqual(
            ("foo.py:3: a problem\n"
             "bad line of source\n"),
            err.getvalue())

    def test_multiLineSyntaxError(self):
        """
        If there's a multi-line syntax error, then we only report the last
        line.  The offset is adjusted so that it is relative to the start of
        the last line.
        """
        err = StringIO()
        lines = [
            'bad line of source',
            'more bad lines of source',
        ]
        reporter = Reporter(None, err)
        reporter.syntaxError('foo.py', 'a problem', 3, len(lines[0]) + 7,
                             '\n'.join(lines))
        # Expected column differs by interpreter version (whole-text vs
        # per-line offsets).
        column = 25 if sys.version_info >= (3, 8) else 7
        self.assertEqual(
            ("foo.py:3:%d: a problem\n" % column +
             lines[-1] + "\n" +
             " " * (column - 1) + "^\n"),
            err.getvalue())

    def test_unexpectedError(self):
        """
        C{unexpectedError} reports an error processing a source file.
        """
        err = StringIO()
        reporter = Reporter(None, err)
        reporter.unexpectedError('source.py', 'error message')
        self.assertEqual('source.py: error message\n', err.getvalue())

    def test_flake(self):
        """
        C{flake} reports a code warning from Pyflakes.  It is exactly the
        str() of a L{pyflakes.messages.Message}.
        """
        out = StringIO()
        reporter = Reporter(out, None)
        message = UnusedImport('foo.py', Node(42), 'bar')
        reporter.flake(message)
        self.assertEqual(out.getvalue(), "%s\n" % (message,))
class CheckTests(TestCase):
    """
    Tests for L{check} and L{checkPath} which check a file for flakes.
    """
    @contextlib.contextmanager
    def makeTempFile(self, content):
        """
        Make a temporary file containing C{content} and return a path to it.
        """
        fd, name = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'wb') as f:
                # bytes objects have .decode, text does not: text content is
                # encoded to ASCII bytes before writing.
                if not hasattr(content, 'decode'):
                    content = content.encode('ascii')
                f.write(content)
            yield name
        finally:
            # The file is removed when the with-block using this manager ends.
            os.remove(name)
    def assertHasErrors(self, path, errorList):
        """
        Assert that C{path} causes errors.
        @param path: A path to a file to check.
        @param errorList: A list of errors expected to be printed to stderr.
        """
        err = StringIO()
        count = withStderrTo(err, checkPath, path)
        self.assertEqual(
            (count, err.getvalue()), (len(errorList), ''.join(errorList)))
    def getErrors(self, path):
        """
        Get any warnings or errors reported by pyflakes for the file at C{path}.
        @param path: The path to a Python file on disk that pyflakes will check.
        @return: C{(count, log)}, where C{count} is the number of warnings or
            errors generated, and log is a list of those warnings, presented
            as structured data. See L{LoggingReporter} for more details.
        """
        log = []
        reporter = LoggingReporter(log)
        count = checkPath(path, reporter)
        return count, log
    def test_legacyScript(self):
        # The legacy scripts module must re-export the very same checkPath
        # object (identity, not just equality).
        from pyflakes.scripts import pyflakes as script_pyflakes
        self.assertIs(script_pyflakes.checkPath, checkPath)
    def test_missingTrailingNewline(self):
        """
        Source which doesn't end with a newline shouldn't cause any
        exception to be raised nor an error indicator to be returned by
        L{check}.
        """
        with self.makeTempFile("def foo():\n\tpass\n\t") as fName:
            self.assertHasErrors(fName, [])
    def test_checkPathNonExisting(self):
        """
        L{checkPath} handles non-existing files.
        """
        count, errors = self.getErrors('extremo')
        self.assertEqual(count, 1)
        self.assertEqual(
            errors,
            [('unexpectedError', 'extremo', 'No such file or directory')])
    def test_multilineSyntaxError(self):
        """
        Source which includes a syntax error which results in the raised
        L{SyntaxError.text} containing multiple lines of source are reported
        with only the last line of that source.
        """
        source = """\
def foo():
    '''
def bar():
    pass
def baz():
    '''quux'''
"""
        # Sanity check - SyntaxError.text should be multiple lines, if it
        # isn't, something this test was unprepared for has happened.
        def evaluate(source):
            exec(source)
        try:
            evaluate(source)
        except SyntaxError:
            e = sys.exc_info()[1]
            if not PYPY and sys.version_info < (3, 10):
                self.assertTrue(e.text.count('\n') > 1)
        else:
            self.fail()
        with self.makeTempFile(source) as sourcePath:
            # The syntax-error message and column vary by interpreter
            # implementation and version.
            if PYPY:
                message = 'end of file (EOF) while scanning triple-quoted string literal'
            elif sys.version_info >= (3, 10):
                message = 'unterminated triple-quoted string literal (detected at line 8)' # noqa: E501
            else:
                message = 'invalid syntax'
            if sys.version_info >= (3, 10):
                column = 12
            elif sys.version_info >= (3, 8):
                column = 8
            else:
                column = 11
            self.assertHasErrors(
                sourcePath,
                ["""\
%s:8:%d: %s
    '''quux'''
%s^
""" % (sourcePath, column, message, ' ' * (column - 1))])
    def test_eofSyntaxError(self):
        """
        The error reported for source files which end prematurely causing a
        syntax error reflects the cause for the syntax error.
        """
        with self.makeTempFile("def foo(") as sourcePath:
            if PYPY:
                msg = 'parenthesis is never closed'
            elif sys.version_info >= (3, 10):
                msg = "'(' was never closed"
            else:
                msg = 'unexpected EOF while parsing'
            if PYPY:
                column = 7
            elif sys.version_info >= (3, 10):
                column = 8
            else:
                column = 9
            spaces = ' ' * (column - 1)
            expected = '{}:1:{}: {}\ndef foo(\n{}^\n'.format(
                sourcePath, column, msg, spaces
            )
            self.assertHasErrors(sourcePath, [expected])
    def test_eofSyntaxErrorWithTab(self):
        """
        The error reported for source files which end prematurely causing a
        syntax error reflects the cause for the syntax error.
        """
        with self.makeTempFile("if True:\n\tfoo =") as sourcePath:
            column = 6 if PYPY else 7
            last_line = '\t ^' if PYPY else '\t ^'
            self.assertHasErrors(
                sourcePath,
                ["""\
%s:2:%s: invalid syntax
\tfoo =
%s
""" % (sourcePath, column, last_line)])
    def test_nonDefaultFollowsDefaultSyntaxError(self):
        """
        Source which has a non-default argument following a default argument
        should include the line number of the syntax error. However these
        exceptions do not include an offset.
        """
        source = """\
def foo(bar=baz, bax):
    pass
"""
        with self.makeTempFile(source) as sourcePath:
            if ERROR_HAS_LAST_LINE:
                # The column of this error moved repeatedly across CPython
                # releases, hence the version ladder below.
                if PYPY:
                    column = 7
                elif sys.version_info >= (3, 10):
                    column = 18
                elif sys.version_info >= (3, 9):
                    column = 21
                elif sys.version_info >= (3, 8):
                    column = 9
                else:
                    column = 8
                last_line = ' ' * (column - 1) + '^\n'
                columnstr = '%d:' % column
            else:
                last_line = columnstr = ''
            self.assertHasErrors(
                sourcePath,
                ["""\
%s:1:%s non-default argument follows default argument
def foo(bar=baz, bax):
%s""" % (sourcePath, columnstr, last_line)])
    def test_nonKeywordAfterKeywordSyntaxError(self):
        """
        Source which has a non-keyword argument after a keyword argument should
        include the line number of the syntax error. However these exceptions
        do not include an offset.
        """
        source = """\
foo(bar=baz, bax)
"""
        with self.makeTempFile(source) as sourcePath:
            if ERROR_HAS_LAST_LINE:
                if PYPY:
                    column = 12
                elif sys.version_info >= (3, 9):
                    column = 17
                elif sys.version_info >= (3, 8):
                    column = 14
                else:
                    column = 13
                last_line = ' ' * (column - 1) + '^\n'
                columnstr = '%d:' % column
            else:
                last_line = columnstr = ''
            if sys.version_info >= (3, 5):
                message = 'positional argument follows keyword argument'
            else:
                message = 'non-keyword arg after keyword arg'
            self.assertHasErrors(
                sourcePath,
                ["""\
%s:1:%s %s
foo(bar=baz, bax)
%s""" % (sourcePath, columnstr, message, last_line)])
    def test_invalidEscape(self):
        """
        The invalid escape syntax raises ValueError in Python 2
        """
        ver = sys.version_info
        # ValueError: invalid \x escape
        with self.makeTempFile(r"foo = '\xyz'") as sourcePath:
            if ver < (3,):
                decoding_error = "%s: problem decoding source\n" % (sourcePath,)
            else:
                position_end = 1
                if PYPY:
                    column = 5
                elif ver >= (3, 9):
                    column = 13
                else:
                    column = 7
                # Column has been "fixed" since 3.2.4 and 3.3.1
                if ver < (3, 2, 4) or ver[:3] == (3, 3, 0):
                    position_end = 2
                if ERROR_HAS_LAST_LINE:
                    last_line = '%s^\n' % (' ' * (column - 1))
                else:
                    last_line = ''
                decoding_error = """\
%s:1:%d: (unicode error) 'unicodeescape' codec can't decode bytes \
in position 0-%d: truncated \\xXX escape
foo = '\\xyz'
%s""" % (sourcePath, column, position_end, last_line)
            self.assertHasErrors(
                sourcePath, [decoding_error])
    @skipIf(sys.platform == 'win32', 'unsupported on Windows')
    def test_permissionDenied(self):
        """
        If the source file is not readable, this is reported on standard
        error.
        """
        if os.getuid() == 0:
            self.skipTest('root user can access all files regardless of '
                          'permissions')
        with self.makeTempFile('') as sourcePath:
            # Remove all permission bits so reading the file fails.
            os.chmod(sourcePath, 0)
            count, errors = self.getErrors(sourcePath)
            self.assertEqual(count, 1)
            self.assertEqual(
                errors,
                [('unexpectedError', sourcePath, "Permission denied")])
    def test_pyflakesWarning(self):
        """
        If the source file has a pyflakes warning, this is reported as a
        'flake'.
        """
        with self.makeTempFile("import foo") as sourcePath:
            count, errors = self.getErrors(sourcePath)
            self.assertEqual(count, 1)
            self.assertEqual(
                errors, [('flake', str(UnusedImport(sourcePath, Node(1), 'foo')))])
    def test_encodedFileUTF8(self):
        """
        If source file declares the correct encoding, no error is reported.
        """
        SNOWMAN = unichr(0x2603)
        source = ("""\
# coding: utf-8
x = "%s"
""" % SNOWMAN).encode('utf-8')
        with self.makeTempFile(source) as sourcePath:
            self.assertHasErrors(sourcePath, [])
    def test_CRLFLineEndings(self):
        """
        Source files with Windows CR LF line endings are parsed successfully.
        """
        with self.makeTempFile("x = 42\r\n") as sourcePath:
            self.assertHasErrors(sourcePath, [])
    def test_misencodedFileUTF8(self):
        """
        If a source file contains bytes which cannot be decoded, this is
        reported on stderr.
        """
        SNOWMAN = unichr(0x2603)
        source = ("""\
# coding: ascii
x = "%s"
""" % SNOWMAN).encode('utf-8')
        with self.makeTempFile(source) as sourcePath:
            if PYPY and sys.version_info < (3, ):
                message = ('\'ascii\' codec can\'t decode byte 0xe2 '
                           'in position 21: ordinal not in range(128)')
                result = """\
%s:0:0: %s
x = "\xe2\x98\x83"
        ^\n""" % (sourcePath, message)
            else:
                # NOTE(review): ``message`` is assigned but unused in this
                # branch; the literal text is repeated in ``result``.
                message = 'problem decoding source'
                result = "%s: problem decoding source\n" % (sourcePath,)
            self.assertHasErrors(
                sourcePath, [result])
    def test_misencodedFileUTF16(self):
        """
        If a source file contains bytes which cannot be decoded, this is
        reported on stderr.
        """
        SNOWMAN = unichr(0x2603)
        source = ("""\
# coding: ascii
x = "%s"
""" % SNOWMAN).encode('utf-16')
        with self.makeTempFile(source) as sourcePath:
            self.assertHasErrors(
                sourcePath, ["%s: problem decoding source\n" % (sourcePath,)])
    def test_checkRecursive(self):
        """
        L{checkRecursive} descends into each directory, finding Python files
        and reporting problems.
        """
        tempdir = tempfile.mkdtemp()
        try:
            os.mkdir(os.path.join(tempdir, 'foo'))
            file1 = os.path.join(tempdir, 'foo', 'bar.py')
            with open(file1, 'wb') as fd:
                fd.write("import baz\n".encode('ascii'))
            file2 = os.path.join(tempdir, 'baz.py')
            with open(file2, 'wb') as fd:
                fd.write("import contraband".encode('ascii'))
            log = []
            reporter = LoggingReporter(log)
            warnings = checkRecursive([tempdir], reporter)
            self.assertEqual(warnings, 2)
            # Sort both sides: directory traversal order is not guaranteed.
            self.assertEqual(
                sorted(log),
                sorted([('flake', str(UnusedImport(file1, Node(1), 'baz'))),
                        ('flake',
                         str(UnusedImport(file2, Node(1), 'contraband')))]))
        finally:
            shutil.rmtree(tempdir)
class IntegrationTests(TestCase):
    """
    Tests of the pyflakes script that actually spawn the script.
    """
    def setUp(self):
        # Fresh scratch directory per test; ``tempfilepath`` is the file the
        # individual tests write their source under test into.
        self.tempdir = tempfile.mkdtemp()
        self.tempfilepath = os.path.join(self.tempdir, 'temp')
    def tearDown(self):
        shutil.rmtree(self.tempdir)
    def getPyflakesBinary(self):
        """
        Return the path to the pyflakes binary.
        """
        import pyflakes
        package_dir = os.path.dirname(pyflakes.__file__)
        return os.path.join(package_dir, '..', 'bin', 'pyflakes')
    def runPyflakes(self, paths, stdin=None):
        """
        Launch a subprocess running C{pyflakes}.
        @param paths: Command-line arguments to pass to pyflakes.
        @param stdin: Text to use as stdin.
        @return: C{(stdout, stderr, returncode)} of the completed pyflakes
            process.
        """
        env = dict(os.environ)
        # Propagate our sys.path to the child so it imports the same modules.
        env['PYTHONPATH'] = os.pathsep.join(sys.path)
        command = [sys.executable, self.getPyflakesBinary()]
        command.extend(paths)
        if stdin:
            p = subprocess.Popen(command, env=env, stdin=subprocess.PIPE,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (stdout, stderr) = p.communicate(stdin.encode('ascii'))
        else:
            p = subprocess.Popen(command, env=env,
                                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (stdout, stderr) = p.communicate()
        rv = p.wait()
        if sys.version_info >= (3,):
            stdout = stdout.decode('utf-8')
            stderr = stderr.decode('utf-8')
        return (stdout, stderr, rv)
    def test_goodFile(self):
        """
        When a Python source file is all good, the return code is zero and no
        messages are printed to either stdout or stderr.
        """
        # Create an empty (and therefore flake-free) source file.
        open(self.tempfilepath, 'a').close()
        d = self.runPyflakes([self.tempfilepath])
        self.assertEqual(d, ('', '', 0))
    def test_fileWithFlakes(self):
        """
        When a Python source file has warnings, the return code is non-zero
        and the warnings are printed to stdout.
        """
        with open(self.tempfilepath, 'wb') as fd:
            fd.write("import contraband\n".encode('ascii'))
        d = self.runPyflakes([self.tempfilepath])
        expected = UnusedImport(self.tempfilepath, Node(1), 'contraband')
        self.assertEqual(d, ("%s%s" % (expected, os.linesep), '', 1))
    def test_errors_io(self):
        """
        When pyflakes finds errors with the files it's given, (if they don't
        exist, say), then the return code is non-zero and the errors are
        printed to stderr.
        """
        d = self.runPyflakes([self.tempfilepath])
        error_msg = '%s: No such file or directory%s' % (self.tempfilepath,
                                                         os.linesep)
        self.assertEqual(d, ('', error_msg, 1))
    def test_errors_syntax(self):
        """
        When pyflakes finds errors with the files it's given, (if they don't
        exist, say), then the return code is non-zero and the errors are
        printed to stderr.
        """
        with open(self.tempfilepath, 'wb') as fd:
            fd.write("import".encode('ascii'))
        d = self.runPyflakes([self.tempfilepath])
        # PyPy reports a different column (and caret padding) than CPython.
        error_msg = '{0}:1:{2}: invalid syntax{1}import{1} {3}^{1}'.format(
            self.tempfilepath, os.linesep, 6 if PYPY else 7, '' if PYPY else ' ')
        self.assertEqual(d, ('', error_msg, 1))
    def test_readFromStdin(self):
        """
        If no arguments are passed to C{pyflakes} then it reads from stdin.
        """
        d = self.runPyflakes([], stdin='import contraband')
        expected = UnusedImport('<stdin>', Node(1), 'contraband')
        self.assertEqual(d, ("%s%s" % (expected, os.linesep), '', 1))
class TestMain(IntegrationTests):
    """
    Run the integration suite against the in-process C{main} function
    instead of a spawned pyflakes subprocess.
    """
    def runPyflakes(self, paths, stdin=None):
        """
        Invoke C{main} directly, capturing its streams and exit status.
        @param paths: Command-line arguments to pass to C{main}.
        @param stdin: Text to use as stdin.
        @return: C{(stdout, stderr, returncode)} observed from the call.
        """
        try:
            with SysStreamCapturing(stdin) as streams:
                main(args=paths)
        except SystemExit as exc:
            # main() exits with a bool; normalize it to 0/1 for the callers.
            self.assertIsInstance(exc.code, bool)
            return (streams.output, streams.error, int(exc.code))
        else:
            raise RuntimeError('SystemExit not raised')
| 27,928 | Python | .py | 733 | 28.174625 | 104 | 0.564598 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,313 | test_is_literal.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_is_literal.py | from pyflakes.messages import IsLiteral
from pyflakes.test.harness import TestCase
class Test(TestCase):
def test_is_str(self):
self.flakes("""
x = 'foo'
if x is 'foo':
pass
""", IsLiteral)
def test_is_bytes(self):
self.flakes("""
x = b'foo'
if x is b'foo':
pass
""", IsLiteral)
def test_is_unicode(self):
self.flakes("""
x = u'foo'
if x is u'foo':
pass
""", IsLiteral)
def test_is_int(self):
self.flakes("""
x = 10
if x is 10:
pass
""", IsLiteral)
def test_is_true(self):
self.flakes("""
x = True
if x is True:
pass
""")
def test_is_false(self):
self.flakes("""
x = False
if x is False:
pass
""")
def test_is_not_str(self):
self.flakes("""
x = 'foo'
if x is not 'foo':
pass
""", IsLiteral)
def test_is_not_bytes(self):
self.flakes("""
x = b'foo'
if x is not b'foo':
pass
""", IsLiteral)
def test_is_not_unicode(self):
self.flakes("""
x = u'foo'
if x is not u'foo':
pass
""", IsLiteral)
def test_is_not_int(self):
self.flakes("""
x = 10
if x is not 10:
pass
""", IsLiteral)
def test_is_not_true(self):
self.flakes("""
x = True
if x is not True:
pass
""")
def test_is_not_false(self):
self.flakes("""
x = False
if x is not False:
pass
""")
def test_left_is_str(self):
self.flakes("""
x = 'foo'
if 'foo' is x:
pass
""", IsLiteral)
def test_left_is_bytes(self):
self.flakes("""
x = b'foo'
if b'foo' is x:
pass
""", IsLiteral)
def test_left_is_unicode(self):
self.flakes("""
x = u'foo'
if u'foo' is x:
pass
""", IsLiteral)
def test_left_is_int(self):
self.flakes("""
x = 10
if 10 is x:
pass
""", IsLiteral)
def test_left_is_true(self):
self.flakes("""
x = True
if True is x:
pass
""")
def test_left_is_false(self):
self.flakes("""
x = False
if False is x:
pass
""")
def test_left_is_not_str(self):
self.flakes("""
x = 'foo'
if 'foo' is not x:
pass
""", IsLiteral)
def test_left_is_not_bytes(self):
self.flakes("""
x = b'foo'
if b'foo' is not x:
pass
""", IsLiteral)
def test_left_is_not_unicode(self):
self.flakes("""
x = u'foo'
if u'foo' is not x:
pass
""", IsLiteral)
def test_left_is_not_int(self):
self.flakes("""
x = 10
if 10 is not x:
pass
""", IsLiteral)
def test_left_is_not_true(self):
self.flakes("""
x = True
if True is not x:
pass
""")
def test_left_is_not_false(self):
self.flakes("""
x = False
if False is not x:
pass
""")
def test_chained_operators_is_true(self):
self.flakes("""
x = 5
if x is True < 4:
pass
""")
def test_chained_operators_is_str(self):
self.flakes("""
x = 5
if x is 'foo' < 4:
pass
""", IsLiteral)
def test_chained_operators_is_true_end(self):
self.flakes("""
x = 5
if 4 < x is True:
pass
""")
def test_chained_operators_is_str_end(self):
self.flakes("""
x = 5
if 4 < x is 'foo':
pass
""", IsLiteral)
def test_is_tuple_constant(self):
self.flakes('''\
x = 5
if x is ():
pass
''', IsLiteral)
def test_is_tuple_constant_containing_constants(self):
self.flakes('''\
x = 5
if x is (1, '2', True, (1.5, ())):
pass
''', IsLiteral)
def test_is_tuple_containing_variables_ok(self):
# a bit nonsensical, but does not trigger a SyntaxWarning
self.flakes('''\
x = 5
if x is (x,):
pass
''')
| 4,573 | Python | .py | 190 | 14.836842 | 65 | 0.443806 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,314 | test_builtin.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_builtin.py | """
Tests for detecting redefinition of builtins.
"""
from sys import version_info
from pyflakes import messages as m
from pyflakes.test.harness import TestCase, skipIf
class TestBuiltins(TestCase):
def test_builtin_unbound_local(self):
self.flakes('''
def foo():
a = range(1, 10)
range = a
return range
foo()
print(range)
''', m.UndefinedLocal)
def test_global_shadowing_builtin(self):
self.flakes('''
def f():
global range
range = None
print(range)
f()
''')
@skipIf(version_info >= (3,), 'not an UnboundLocalError in Python 3')
def test_builtin_in_comprehension(self):
self.flakes('''
def f():
[range for range in range(1, 10)]
f()
''', m.UndefinedLocal)
| 871 | Python | .py | 31 | 20.193548 | 73 | 0.56988 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,315 | test_imports.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_imports.py | from sys import version_info
from pyflakes import messages as m
from pyflakes.checker import (
FutureImportation,
Importation,
ImportationFrom,
StarImportation,
SubmoduleImportation,
)
from pyflakes.test.harness import TestCase, skip, skipIf
class TestImportationObject(TestCase):
def test_import_basic(self):
binding = Importation('a', None, 'a')
assert binding.source_statement == 'import a'
assert str(binding) == 'a'
def test_import_as(self):
binding = Importation('c', None, 'a')
assert binding.source_statement == 'import a as c'
assert str(binding) == 'a as c'
def test_import_submodule(self):
binding = SubmoduleImportation('a.b', None)
assert binding.source_statement == 'import a.b'
assert str(binding) == 'a.b'
def test_import_submodule_as(self):
# A submodule import with an as clause is not a SubmoduleImportation
binding = Importation('c', None, 'a.b')
assert binding.source_statement == 'import a.b as c'
assert str(binding) == 'a.b as c'
def test_import_submodule_as_source_name(self):
binding = Importation('a', None, 'a.b')
assert binding.source_statement == 'import a.b as a'
assert str(binding) == 'a.b as a'
def test_importfrom_relative(self):
binding = ImportationFrom('a', None, '.', 'a')
assert binding.source_statement == 'from . import a'
assert str(binding) == '.a'
def test_importfrom_relative_parent(self):
binding = ImportationFrom('a', None, '..', 'a')
assert binding.source_statement == 'from .. import a'
assert str(binding) == '..a'
def test_importfrom_relative_with_module(self):
binding = ImportationFrom('b', None, '..a', 'b')
assert binding.source_statement == 'from ..a import b'
assert str(binding) == '..a.b'
def test_importfrom_relative_with_module_as(self):
binding = ImportationFrom('c', None, '..a', 'b')
assert binding.source_statement == 'from ..a import b as c'
assert str(binding) == '..a.b as c'
def test_importfrom_member(self):
binding = ImportationFrom('b', None, 'a', 'b')
assert binding.source_statement == 'from a import b'
assert str(binding) == 'a.b'
def test_importfrom_submodule_member(self):
binding = ImportationFrom('c', None, 'a.b', 'c')
assert binding.source_statement == 'from a.b import c'
assert str(binding) == 'a.b.c'
def test_importfrom_member_as(self):
binding = ImportationFrom('c', None, 'a', 'b')
assert binding.source_statement == 'from a import b as c'
assert str(binding) == 'a.b as c'
def test_importfrom_submodule_member_as(self):
binding = ImportationFrom('d', None, 'a.b', 'c')
assert binding.source_statement == 'from a.b import c as d'
assert str(binding) == 'a.b.c as d'
def test_importfrom_star(self):
binding = StarImportation('a.b', None)
assert binding.source_statement == 'from a.b import *'
assert str(binding) == 'a.b.*'
def test_importfrom_star_relative(self):
binding = StarImportation('.b', None)
assert binding.source_statement == 'from .b import *'
assert str(binding) == '.b.*'
def test_importfrom_future(self):
binding = FutureImportation('print_function', None, None)
assert binding.source_statement == 'from __future__ import print_function'
assert str(binding) == '__future__.print_function'
def test_unusedImport_underscore(self):
"""
The magic underscore var should be reported as unused when used as an
import alias.
"""
self.flakes('import fu as _', m.UnusedImport)
class Test(TestCase):
def test_unusedImport(self):
self.flakes('import fu, bar', m.UnusedImport, m.UnusedImport)
self.flakes('from baz import fu, bar', m.UnusedImport, m.UnusedImport)
def test_unusedImport_relative(self):
self.flakes('from . import fu', m.UnusedImport)
self.flakes('from . import fu as baz', m.UnusedImport)
self.flakes('from .. import fu', m.UnusedImport)
self.flakes('from ... import fu', m.UnusedImport)
self.flakes('from .. import fu as baz', m.UnusedImport)
self.flakes('from .bar import fu', m.UnusedImport)
self.flakes('from ..bar import fu', m.UnusedImport)
self.flakes('from ...bar import fu', m.UnusedImport)
self.flakes('from ...bar import fu as baz', m.UnusedImport)
checker = self.flakes('from . import fu', m.UnusedImport)
error = checker.messages[0]
assert error.message == '%r imported but unused'
assert error.message_args == ('.fu', )
checker = self.flakes('from . import fu as baz', m.UnusedImport)
error = checker.messages[0]
assert error.message == '%r imported but unused'
assert error.message_args == ('.fu as baz', )
def test_aliasedImport(self):
self.flakes('import fu as FU, bar as FU',
m.RedefinedWhileUnused, m.UnusedImport)
self.flakes('from moo import fu as FU, bar as FU',
m.RedefinedWhileUnused, m.UnusedImport)
def test_aliasedImportShadowModule(self):
"""Imported aliases can shadow the source of the import."""
self.flakes('from moo import fu as moo; moo')
self.flakes('import fu as fu; fu')
self.flakes('import fu.bar as fu; fu')
def test_usedImport(self):
self.flakes('import fu; print(fu)')
self.flakes('from baz import fu; print(fu)')
self.flakes('import fu; del fu')
def test_usedImport_relative(self):
self.flakes('from . import fu; assert fu')
self.flakes('from .bar import fu; assert fu')
self.flakes('from .. import fu; assert fu')
self.flakes('from ..bar import fu as baz; assert baz')
def test_redefinedWhileUnused(self):
self.flakes('import fu; fu = 3', m.RedefinedWhileUnused)
self.flakes('import fu; fu, bar = 3', m.RedefinedWhileUnused)
self.flakes('import fu; [fu, bar] = 3', m.RedefinedWhileUnused)
    # Re-importing in the same control-flow branch warns; importing in
    # mutually exclusive branches (if/else, try/except) does not.
    def test_redefinedIf(self):
        """
        Test that importing a module twice within an if
        block does raise a warning.
        """
        self.flakes('''
        i = 2
        if i==1:
            import os
        import os
        os.path''', m.RedefinedWhileUnused)
    def test_redefinedIfElse(self):
        """
        Test that importing a module twice in if
        and else blocks does not raise a warning.
        """
        self.flakes('''
        i = 2
        if i==1:
            import os
        else:
            import os
        os.path''')
    def test_redefinedTry(self):
        """
        Test that importing a module twice in a try block
        does raise a warning.
        """
        self.flakes('''
        try:
            import os
            import os
        except:
            pass
        os.path''', m.RedefinedWhileUnused)
    def test_redefinedTryExcept(self):
        """
        Test that importing a module twice in a try
        and except block does not raise a warning.
        """
        self.flakes('''
        try:
            import os
        except:
            import os
        os.path''')
    def test_redefinedTryNested(self):
        """
        Test that importing a module twice using a nested
        try/except and if blocks does not issue a warning.
        """
        self.flakes('''
        try:
            if True:
                if True:
                    import os
        except:
            import os
        os.path''')
    def test_redefinedTryExceptMulti(self):
        self.flakes("""
        try:
            from aa import mixer
        except AttributeError:
            from bb import mixer
        except RuntimeError:
            from cc import mixer
        except:
            from dd import mixer
        mixer(123)
        """)
    def test_redefinedTryElse(self):
        # The else clause only runs when the try succeeded, so the second
        # import does redefine the first.
        self.flakes("""
        try:
            from aa import mixer
        except ImportError:
            pass
        else:
            from bb import mixer
        mixer(123)
        """, m.RedefinedWhileUnused)
    def test_redefinedTryExceptElse(self):
        self.flakes("""
        try:
            import funca
        except ImportError:
            from bb import funca
            from bb import funcb
        else:
            from bbb import funcb
        print(funca, funcb)
        """)
    def test_redefinedTryExceptFinally(self):
        self.flakes("""
        try:
            from aa import a
        except ImportError:
            from bb import a
        finally:
            a = 42
        print(a)
        """)
    def test_redefinedTryExceptElseFinally(self):
        self.flakes("""
        try:
            import b
        except ImportError:
            b = Ellipsis
            from bb import a
        else:
            from aa import a
        finally:
            a = 42
        print(a, b)
        """)
    # Shadowing an import with function/class definitions or parameters.
    def test_redefinedByFunction(self):
        self.flakes('''
        import fu
        def fu():
            pass
        ''', m.RedefinedWhileUnused)
    def test_redefinedInNestedFunction(self):
        """
        Test that shadowing a global name with a nested function definition
        generates a warning.
        """
        self.flakes('''
        import fu
        def bar():
            def baz():
                def fu():
                    pass
        ''', m.RedefinedWhileUnused, m.UnusedImport)
    def test_redefinedInNestedFunctionTwice(self):
        """
        Test that shadowing a global name with a nested function definition
        generates a warning.
        """
        self.flakes('''
        import fu
        def bar():
            import fu
            def baz():
                def fu():
                    pass
        ''',
                    m.RedefinedWhileUnused, m.RedefinedWhileUnused,
                    m.UnusedImport, m.UnusedImport)
    def test_redefinedButUsedLater(self):
        """
        Test that a global import which is redefined locally,
        but used later in another scope does not generate a warning.
        """
        self.flakes('''
        import unittest, transport
        class GetTransportTestCase(unittest.TestCase):
            def test_get_transport(self):
                transport = 'transport'
                self.assertIsNotNone(transport)
        class TestTransportMethodArgs(unittest.TestCase):
            def test_send_defaults(self):
                transport.Transport()
        ''')
    def test_redefinedByClass(self):
        self.flakes('''
        import fu
        class fu:
            pass
        ''', m.RedefinedWhileUnused)
    def test_redefinedBySubclass(self):
        """
        If an imported name is redefined by a class statement which also uses
        that name in the bases list, no warning is emitted.
        """
        self.flakes('''
        from fu import bar
        class bar(bar):
            pass
        ''')
    def test_redefinedInClass(self):
        """
        Test that shadowing a global with a class attribute does not produce a
        warning.
        """
        self.flakes('''
        import fu
        class bar:
            fu = 1
        print(fu)
        ''')
    def test_importInClass(self):
        """
        Test that import within class is a locally scoped attribute.
        """
        self.flakes('''
        class bar:
            import fu
        ''')
        self.flakes('''
        class bar:
            import fu
        fu
        ''', m.UndefinedName)
    def test_usedInFunction(self):
        self.flakes('''
        import fu
        def fun():
            print(fu)
        ''')
    def test_shadowedByParameter(self):
        self.flakes('''
        import fu
        def fun(fu):
            print(fu)
        ''', m.UnusedImport, m.RedefinedWhileUnused)
        self.flakes('''
        import fu
        def fun(fu):
            print(fu)
        print(fu)
        ''')
    # Uses of an import in assorted expression and statement contexts.
    def test_newAssignment(self):
        self.flakes('fu = None')
    def test_usedInGetattr(self):
        self.flakes('import fu; fu.bar.baz')
        self.flakes('import fu; "bar".fu.baz', m.UnusedImport)
    def test_usedInSlice(self):
        self.flakes('import fu; print(fu.bar[1:])')
    def test_usedInIfBody(self):
        self.flakes('''
        import fu
        if True: print(fu)
        ''')
    def test_usedInIfConditional(self):
        self.flakes('''
        import fu
        if fu: pass
        ''')
    def test_usedInElifConditional(self):
        self.flakes('''
        import fu
        if False: pass
        elif fu: pass
        ''')
    def test_usedInElse(self):
        self.flakes('''
        import fu
        if False: pass
        else: print(fu)
        ''')
    def test_usedInCall(self):
        self.flakes('import fu; fu.bar()')
    def test_usedInClass(self):
        self.flakes('''
        import fu
        class bar:
            bar = fu
        ''')
    def test_usedInClassBase(self):
        self.flakes('''
        import fu
        class bar(object, fu.baz):
            pass
        ''')
    def test_notUsedInNestedScope(self):
        self.flakes('''
        import fu
        def bleh():
            pass
        print(fu)
        ''')
    def test_usedInFor(self):
        self.flakes('''
        import fu
        for bar in range(9):
            print(fu)
        ''')
    def test_usedInForElse(self):
        self.flakes('''
        import fu
        for bar in range(10):
            pass
        else:
            print(fu)
        ''')
    def test_redefinedByFor(self):
        self.flakes('''
        import fu
        for fu in range(2):
            pass
        ''', m.ImportShadowedByLoopVar)
    def test_shadowedByFor(self):
        """
        Test that shadowing a global name with a for loop variable generates a
        warning.
        """
        self.flakes('''
        import fu
        fu.bar()
        for fu in ():
            pass
        ''', m.ImportShadowedByLoopVar)
    def test_shadowedByForDeep(self):
        """
        Test that shadowing a global name with a for loop variable nested in a
        tuple unpack generates a warning.
        """
        self.flakes('''
        import fu
        fu.bar()
        for (x, y, z, (a, b, c, (fu,))) in ():
            pass
        ''', m.ImportShadowedByLoopVar)
        # Same with a list instead of a tuple
        self.flakes('''
        import fu
        fu.bar()
        for [x, y, z, (a, b, c, (fu,))] in ():
            pass
        ''', m.ImportShadowedByLoopVar)
    def test_usedInReturn(self):
        self.flakes('''
        import fu
        def fun():
            return fu
        ''')
    def test_usedInOperators(self):
        self.flakes('import fu; 3 + fu.bar')
        self.flakes('import fu; 3 % fu.bar')
        self.flakes('import fu; 3 - fu.bar')
        self.flakes('import fu; 3 * fu.bar')
        self.flakes('import fu; 3 ** fu.bar')
        self.flakes('import fu; 3 / fu.bar')
        self.flakes('import fu; 3 // fu.bar')
        self.flakes('import fu; -fu.bar')
        self.flakes('import fu; ~fu.bar')
        self.flakes('import fu; 1 == fu.bar')
        self.flakes('import fu; 1 | fu.bar')
        self.flakes('import fu; 1 & fu.bar')
        self.flakes('import fu; 1 ^ fu.bar')
        self.flakes('import fu; 1 >> fu.bar')
        self.flakes('import fu; 1 << fu.bar')
    def test_usedInAssert(self):
        self.flakes('import fu; assert fu.bar')
    def test_usedInSubscript(self):
        self.flakes('import fu; fu.bar[1]')
    def test_usedInLogic(self):
        self.flakes('import fu; fu and False')
        self.flakes('import fu; fu or False')
        self.flakes('import fu; not fu.bar')
    def test_usedInList(self):
        self.flakes('import fu; [fu]')
    def test_usedInTuple(self):
        self.flakes('import fu; (fu,)')
    def test_usedInTry(self):
        self.flakes('''
        import fu
        try: fu
        except: pass
        ''')
    def test_usedInExcept(self):
        # NOTE(review): body is identical to test_usedInTry; the
        # except-specific variant appears to have been lost.
        self.flakes('''
        import fu
        try: fu
        except: pass
        ''')
    def test_redefinedByExcept(self):
        expected = [m.RedefinedWhileUnused]
        if version_info >= (3,):
            # The exc variable is unused inside the exception handler.
            expected.append(m.UnusedVariable)
        self.flakes('''
        import fu
        try: pass
        except Exception as fu: pass
        ''', *expected)
    def test_usedInRaise(self):
        self.flakes('''
        import fu
        raise fu.bar
        ''')
    def test_usedInYield(self):
        self.flakes('''
        import fu
        def gen():
            yield fu
        ''')
    def test_usedInDict(self):
        self.flakes('import fu; {fu:None}')
        self.flakes('import fu; {1:fu}')
    def test_usedInParameterDefault(self):
        self.flakes('''
        import fu
        def f(bar=fu):
            pass
        ''')
    def test_usedInAttributeAssign(self):
        self.flakes('import fu; fu.bar = 1')
    def test_usedInKeywordArg(self):
        self.flakes('import fu; fu.bar(stuff=fu)')
    def test_usedInAssignment(self):
        self.flakes('import fu; bar=fu')
        self.flakes('import fu; n=0; n+=fu')
    def test_usedInListComp(self):
        self.flakes('import fu; [fu for _ in range(1)]')
        self.flakes('import fu; [1 for _ in range(1) if fu]')
    @skipIf(version_info >= (3,),
            'in Python 3 list comprehensions execute in a separate scope')
    def test_redefinedByListComp(self):
        self.flakes('import fu; [1 for fu in range(1)]',
                    m.RedefinedInListComp)
    def test_usedInTryFinally(self):
        self.flakes('''
        import fu
        try: pass
        finally: fu
        ''')
        self.flakes('''
        import fu
        try: fu
        finally: pass
        ''')
    def test_usedInWhile(self):
        self.flakes('''
        import fu
        while 0:
            fu
        ''')
        self.flakes('''
        import fu
        while fu: pass
        ''')
    def test_usedInGlobal(self):
        """
        A 'global' statement shadowing an unused import should not prevent it
        from being reported.
        """
        self.flakes('''
        import fu
        def f(): global fu
        ''', m.UnusedImport)
    def test_usedAndGlobal(self):
        """
        A 'global' statement shadowing a used import should not cause it to be
        reported as unused.
        """
        self.flakes('''
        import foo
        def f(): global foo
        def g(): foo.is_used()
        ''')
    def test_assignedToGlobal(self):
        """
        Binding an import to a declared global should not cause it to be
        reported as unused.
        """
        self.flakes('''
        def f(): global foo; import foo
        def g(): foo.is_used()
        ''')
    @skipIf(version_info >= (3,), 'deprecated syntax')
    def test_usedInBackquote(self):
        self.flakes('import fu; `fu`')
def test_usedInExec(self):
if version_info < (3,):
exec_stmt = 'exec "print 1" in fu.bar'
else:
exec_stmt = 'exec("print(1)", fu.bar)'
self.flakes('import fu; %s' % exec_stmt)
def test_usedInLambda(self):
self.flakes('import fu; lambda: fu')
def test_shadowedByLambda(self):
self.flakes('import fu; lambda fu: fu',
m.UnusedImport, m.RedefinedWhileUnused)
self.flakes('import fu; lambda fu: fu\nfu()')
def test_usedInSliceObj(self):
self.flakes('import fu; "meow"[::fu]')
def test_unusedInNestedScope(self):
self.flakes('''
def bar():
import fu
fu
''', m.UnusedImport, m.UndefinedName)
def test_methodsDontUseClassScope(self):
self.flakes('''
class bar:
import fu
def fun(self):
fu
''', m.UndefinedName)
def test_nestedFunctionsNestScope(self):
self.flakes('''
def a():
def b():
fu
import fu
''')
def test_nestedClassAndFunctionScope(self):
self.flakes('''
def a():
import fu
class b:
def c(self):
print(fu)
''')
    def test_importStar(self):
        """Use of import * at module level is reported."""
        self.flakes('from fu import *', m.ImportStarUsed, m.UnusedImport)
        # Wrapping the star import in try/except does not suppress the report.
        self.flakes('''
        try:
            from fu import *
        except:
            pass
        ''', m.ImportStarUsed, m.UnusedImport)
        # Verify the exact message templates and their arguments:
        # message[0] is the ImportStarUsed warning, message[1] the
        # UnusedImport for the synthetic 'fu.*' binding.
        checker = self.flakes('from fu import *',
                              m.ImportStarUsed, m.UnusedImport)
        error = checker.messages[0]
        assert error.message.startswith("'from %s import *' used; unable ")
        assert error.message_args == ('fu', )
        error = checker.messages[1]
        assert error.message == '%r imported but unused'
        assert error.message_args == ('fu.*', )
def test_importStar_relative(self):
"""Use of import * from a relative import is reported."""
self.flakes('from .fu import *', m.ImportStarUsed, m.UnusedImport)
self.flakes('''
try:
from .fu import *
except:
pass
''', m.ImportStarUsed, m.UnusedImport)
checker = self.flakes('from .fu import *',
m.ImportStarUsed, m.UnusedImport)
error = checker.messages[0]
assert error.message.startswith("'from %s import *' used; unable ")
assert error.message_args == ('.fu', )
error = checker.messages[1]
assert error.message == '%r imported but unused'
assert error.message_args == ('.fu.*', )
checker = self.flakes('from .. import *',
m.ImportStarUsed, m.UnusedImport)
error = checker.messages[0]
assert error.message.startswith("'from %s import *' used; unable ")
assert error.message_args == ('..', )
error = checker.messages[1]
assert error.message == '%r imported but unused'
assert error.message_args == ('from .. import *', )
@skipIf(version_info < (3,),
'import * below module level is a warning on Python 2')
def test_localImportStar(self):
"""import * is only allowed at module level."""
self.flakes('''
def a():
from fu import *
''', m.ImportStarNotPermitted)
self.flakes('''
class a:
from fu import *
''', m.ImportStarNotPermitted)
checker = self.flakes('''
class a:
from .. import *
''', m.ImportStarNotPermitted)
error = checker.messages[0]
assert error.message == "'from %s import *' only allowed at module level"
assert error.message_args == ('..', )
@skipIf(version_info > (3,),
'import * below module level is an error on Python 3')
def test_importStarNested(self):
"""All star imports are marked as used by an undefined variable."""
self.flakes('''
from fu import *
def f():
from bar import *
x
''', m.ImportStarUsed, m.ImportStarUsed, m.ImportStarUsage)
def test_packageImport(self):
"""
If a dotted name is imported and used, no warning is reported.
"""
self.flakes('''
import fu.bar
fu.bar
''')
def test_unusedPackageImport(self):
"""
If a dotted name is imported and not used, an unused import warning is
reported.
"""
self.flakes('import fu.bar', m.UnusedImport)
def test_duplicateSubmoduleImport(self):
"""
If a submodule of a package is imported twice, an unused import warning
and a redefined while unused warning are reported.
"""
self.flakes('''
import fu.bar, fu.bar
fu.bar
''', m.RedefinedWhileUnused)
self.flakes('''
import fu.bar
import fu.bar
fu.bar
''', m.RedefinedWhileUnused)
def test_differentSubmoduleImport(self):
"""
If two different submodules of a package are imported, no duplicate
import warning is reported for the package.
"""
self.flakes('''
import fu.bar, fu.baz
fu.bar, fu.baz
''')
self.flakes('''
import fu.bar
import fu.baz
fu.bar, fu.baz
''')
def test_used_package_with_submodule_import(self):
"""
Usage of package marks submodule imports as used.
"""
self.flakes('''
import fu
import fu.bar
fu.x
''')
self.flakes('''
import fu.bar
import fu
fu.x
''')
def test_used_package_with_submodule_import_of_alias(self):
"""
Usage of package by alias marks submodule imports as used.
"""
self.flakes('''
import foo as f
import foo.bar
f.bar.do_something()
''')
self.flakes('''
import foo as f
import foo.bar.blah
f.bar.blah.do_something()
''')
def test_unused_package_with_submodule_import(self):
"""
When a package and its submodule are imported, only report once.
"""
checker = self.flakes('''
import fu
import fu.bar
''', m.UnusedImport)
error = checker.messages[0]
assert error.message == '%r imported but unused'
assert error.message_args == ('fu.bar', )
assert error.lineno == 5 if self.withDoctest else 3
def test_assignRHSFirst(self):
self.flakes('import fu; fu = fu')
self.flakes('import fu; fu, bar = fu')
self.flakes('import fu; [fu, bar] = fu')
self.flakes('import fu; fu += fu')
def test_tryingMultipleImports(self):
self.flakes('''
try:
import fu
except ImportError:
import bar as fu
fu
''')
def test_nonGlobalDoesNotRedefine(self):
self.flakes('''
import fu
def a():
fu = 3
return fu
fu
''')
def test_functionsRunLater(self):
self.flakes('''
def a():
fu
import fu
''')
def test_functionNamesAreBoundNow(self):
self.flakes('''
import fu
def fu():
fu
fu
''', m.RedefinedWhileUnused)
def test_ignoreNonImportRedefinitions(self):
self.flakes('a = 1; a = 2')
@skip("todo")
def test_importingForImportError(self):
self.flakes('''
try:
import fu
except ImportError:
pass
''')
def test_importedInClass(self):
"""Imports in class scope can be used through self."""
self.flakes('''
class c:
import i
def __init__(self):
self.i
''')
def test_importUsedInMethodDefinition(self):
"""
Method named 'foo' with default args referring to module named 'foo'.
"""
self.flakes('''
import foo
class Thing(object):
def foo(self, parser=foo.parse_foo):
pass
''')
def test_futureImport(self):
"""__future__ is special."""
self.flakes('from __future__ import division')
self.flakes('''
"docstring is allowed before future import"
from __future__ import division
''')
def test_futureImportFirst(self):
"""
__future__ imports must come before anything else.
"""
self.flakes('''
x = 5
from __future__ import division
''', m.LateFutureImport)
self.flakes('''
from foo import bar
from __future__ import division
bar
''', m.LateFutureImport)
def test_futureImportUsed(self):
"""__future__ is special, but names are injected in the namespace."""
self.flakes('''
from __future__ import division
from __future__ import print_function
assert print_function is not division
''')
def test_futureImportUndefined(self):
"""Importing undefined names from __future__ fails."""
self.flakes('''
from __future__ import print_statement
''', m.FutureFeatureNotDefined)
def test_futureImportStar(self):
"""Importing '*' from __future__ fails."""
self.flakes('''
from __future__ import *
''', m.FutureFeatureNotDefined)
class TestSpecialAll(TestCase):
"""
Tests for suppression of unused import warnings by C{__all__}.
"""
def test_ignoredInFunction(self):
"""
An C{__all__} definition does not suppress unused import warnings in a
function scope.
"""
self.flakes('''
def foo():
import bar
__all__ = ["bar"]
''', m.UnusedImport, m.UnusedVariable)
def test_ignoredInClass(self):
"""
An C{__all__} definition in a class does not suppress unused import warnings.
"""
self.flakes('''
import bar
class foo:
__all__ = ["bar"]
''', m.UnusedImport)
def test_warningSuppressed(self):
"""
If a name is imported and unused but is named in C{__all__}, no warning
is reported.
"""
self.flakes('''
import foo
__all__ = ["foo"]
''')
self.flakes('''
import foo
__all__ = ("foo",)
''')
def test_augmentedAssignment(self):
"""
The C{__all__} variable is defined incrementally.
"""
self.flakes('''
import a
import c
__all__ = ['a']
__all__ += ['b']
if 1 < 3:
__all__ += ['c', 'd']
''', m.UndefinedExport, m.UndefinedExport)
def test_list_concatenation_assignment(self):
"""
The C{__all__} variable is defined through list concatenation.
"""
self.flakes('''
import sys
__all__ = ['a'] + ['b'] + ['c']
''', m.UndefinedExport, m.UndefinedExport, m.UndefinedExport, m.UnusedImport)
def test_tuple_concatenation_assignment(self):
"""
The C{__all__} variable is defined through tuple concatenation.
"""
self.flakes('''
import sys
__all__ = ('a',) + ('b',) + ('c',)
''', m.UndefinedExport, m.UndefinedExport, m.UndefinedExport, m.UnusedImport)
def test_all_with_attributes(self):
self.flakes('''
from foo import bar
__all__ = [bar.__name__]
''')
def test_all_with_names(self):
# not actually valid, but shouldn't produce a crash
self.flakes('''
from foo import bar
__all__ = [bar]
''')
def test_all_with_attributes_added(self):
self.flakes('''
from foo import bar
from bar import baz
__all__ = [bar.__name__] + [baz.__name__]
''')
def test_all_mixed_attributes_and_strings(self):
self.flakes('''
from foo import bar
from foo import baz
__all__ = ['bar', baz.__name__]
''')
def test_unboundExported(self):
"""
If C{__all__} includes a name which is not bound, a warning is emitted.
"""
self.flakes('''
__all__ = ["foo"]
''', m.UndefinedExport)
# Skip this in __init__.py though, since the rules there are a little
# different.
for filename in ["foo/__init__.py", "__init__.py"]:
self.flakes('''
__all__ = ["foo"]
''', filename=filename)
def test_importStarExported(self):
"""
Report undefined if import * is used
"""
self.flakes('''
from math import *
__all__ = ['sin', 'cos']
csc(1)
''', m.ImportStarUsed, m.ImportStarUsage, m.ImportStarUsage, m.ImportStarUsage)
def test_importStarNotExported(self):
"""Report unused import when not needed to satisfy __all__."""
self.flakes('''
from foolib import *
a = 1
__all__ = ['a']
''', m.ImportStarUsed, m.UnusedImport)
def test_usedInGenExp(self):
"""
Using a global in a generator expression results in no warnings.
"""
self.flakes('import fu; (fu for _ in range(1))')
self.flakes('import fu; (1 for _ in range(1) if fu)')
def test_redefinedByGenExp(self):
"""
Re-using a global name as the loop variable for a generator
expression results in a redefinition warning.
"""
self.flakes('import fu; (1 for fu in range(1))',
m.RedefinedWhileUnused, m.UnusedImport)
def test_usedAsDecorator(self):
"""
Using a global name in a decorator statement results in no warnings,
but using an undefined name in a decorator statement results in an
undefined name warning.
"""
self.flakes('''
from interior import decorate
@decorate
def f():
return "hello"
''')
self.flakes('''
from interior import decorate
@decorate('value')
def f():
return "hello"
''')
self.flakes('''
@decorate
def f():
return "hello"
''', m.UndefinedName)
def test_usedAsClassDecorator(self):
"""
Using an imported name as a class decorator results in no warnings,
but using an undefined name as a class decorator results in an
undefined name warning.
"""
self.flakes('''
from interior import decorate
@decorate
class foo:
pass
''')
self.flakes('''
from interior import decorate
@decorate("foo")
class bar:
pass
''')
self.flakes('''
@decorate
class foo:
pass
''', m.UndefinedName)
| 34,599 | Python | .py | 1,059 | 23.443815 | 87 | 0.552009 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,316 | test_match.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_match.py | from sys import version_info
from pyflakes.test.harness import TestCase, skipIf
@skipIf(version_info < (3, 10), "Python >= 3.10 only")
class TestMatch(TestCase):
    """Tests for structural pattern matching statements (``match``/``case``).

    Each test feeds a source snippet to the checker and expects no warnings:
    names bound by patterns must be treated as defined, and every pattern
    form must be traversed without crashing.
    """

    def test_match_bindings(self):
        """Names bound by as-, sequence-, and mapping-patterns are defined."""
        self.flakes('''
            def f():
                x = 1
                match x:
                    case 1 as y:
                        print(f'matched as {y}')
        ''')
        self.flakes('''
            def f():
                x = [1, 2, 3]
                match x:
                    case [1, y, 3]:
                        print(f'matched {y}')
        ''')
        self.flakes('''
            def f():
                x = {'foo': 1}
                match x:
                    case {'foo': y}:
                        print(f'matched {y}')
        ''')

    def test_match_pattern_matched_class(self):
        """Class patterns use the imported class and bind positional and
        keyword sub-patterns."""
        self.flakes('''
            from a import B
            match 1:
                case B(x=1) as y:
                    print(f'matched {y}')
        ''')
        self.flakes('''
            from a import B
            match 1:
                case B(a, x=z) as y:
                    print(f'matched {y} {a} {z}')
        ''')

    def test_match_placeholder(self):
        """The wildcard pattern ``_`` binds nothing and raises no warning."""
        self.flakes('''
            def f():
                match 1:
                    case _:
                        print('catchall!')
        ''')

    def test_match_singleton(self):
        """Singleton patterns (``True``/``False``/``None``) are handled."""
        self.flakes('''
            match 1:
                case True:
                    print('true')
        ''')

    def test_match_or_pattern(self):
        """Alternative patterns joined with ``|`` are handled."""
        self.flakes('''
            match 1:
                case 1 | 2:
                    print('one or two')
        ''')

    def test_match_star(self):
        """A starred name in a sequence pattern is bound."""
        self.flakes('''
            x = [1, 2, 3]
            match x:
                case [1, *y]:
                    print(f'captured: {y}')
        ''')

    def test_match_double_star(self):
        """A ``**rest`` capture in a mapping pattern is bound."""
        self.flakes('''
            x = {'foo': 'bar', 'baz': 'womp'}
            match x:
                case {'foo': k1, **rest}:
                    print(f'{k1=} {rest=}')
        ''')
| 2,097 | Python | .py | 72 | 16.027778 | 54 | 0.372393 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,317 | test_other.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_other.py | """
Tests for various Pyflakes behavior.
"""
from sys import version_info
from pyflakes import messages as m
from pyflakes.test.harness import TestCase, skip, skipIf
class Test(TestCase):
    def test_duplicateArgs(self):
        """Two parameters sharing a name are reported as DuplicateArgument."""
        self.flakes('def fu(bar, bar): pass', m.DuplicateArgument)
def test_localReferencedBeforeAssignment(self):
self.flakes('''
a = 1
def f():
a; a=1
f()
''', m.UndefinedLocal, m.UnusedVariable)
@skipIf(version_info >= (3,),
'in Python 3 list comprehensions execute in a separate scope')
def test_redefinedInListComp(self):
"""
Test that shadowing a variable in a list comprehension raises
a warning.
"""
self.flakes('''
a = 1
[1 for a, b in [(1, 2)]]
''', m.RedefinedInListComp)
self.flakes('''
class A:
a = 1
[1 for a, b in [(1, 2)]]
''', m.RedefinedInListComp)
self.flakes('''
def f():
a = 1
[1 for a, b in [(1, 2)]]
''', m.RedefinedInListComp)
self.flakes('''
[1 for a, b in [(1, 2)]]
[1 for a, b in [(1, 2)]]
''')
self.flakes('''
for a, b in [(1, 2)]:
pass
[1 for a, b in [(1, 2)]]
''')
def test_redefinedInGenerator(self):
"""
Test that reusing a variable in a generator does not raise
a warning.
"""
self.flakes('''
a = 1
(1 for a, b in [(1, 2)])
''')
self.flakes('''
class A:
a = 1
list(1 for a, b in [(1, 2)])
''')
self.flakes('''
def f():
a = 1
(1 for a, b in [(1, 2)])
''', m.UnusedVariable)
self.flakes('''
(1 for a, b in [(1, 2)])
(1 for a, b in [(1, 2)])
''')
self.flakes('''
for a, b in [(1, 2)]:
pass
(1 for a, b in [(1, 2)])
''')
def test_redefinedInSetComprehension(self):
"""
Test that reusing a variable in a set comprehension does not raise
a warning.
"""
self.flakes('''
a = 1
{1 for a, b in [(1, 2)]}
''')
self.flakes('''
class A:
a = 1
{1 for a, b in [(1, 2)]}
''')
self.flakes('''
def f():
a = 1
{1 for a, b in [(1, 2)]}
''', m.UnusedVariable)
self.flakes('''
{1 for a, b in [(1, 2)]}
{1 for a, b in [(1, 2)]}
''')
self.flakes('''
for a, b in [(1, 2)]:
pass
{1 for a, b in [(1, 2)]}
''')
def test_redefinedInDictComprehension(self):
"""
Test that reusing a variable in a dict comprehension does not raise
a warning.
"""
self.flakes('''
a = 1
{1: 42 for a, b in [(1, 2)]}
''')
self.flakes('''
class A:
a = 1
{1: 42 for a, b in [(1, 2)]}
''')
self.flakes('''
def f():
a = 1
{1: 42 for a, b in [(1, 2)]}
''', m.UnusedVariable)
self.flakes('''
{1: 42 for a, b in [(1, 2)]}
{1: 42 for a, b in [(1, 2)]}
''')
self.flakes('''
for a, b in [(1, 2)]:
pass
{1: 42 for a, b in [(1, 2)]}
''')
def test_redefinedFunction(self):
"""
Test that shadowing a function definition with another one raises a
warning.
"""
self.flakes('''
def a(): pass
def a(): pass
''', m.RedefinedWhileUnused)
def test_redefinedUnderscoreFunction(self):
"""
Test that shadowing a function definition named with underscore doesn't
raise anything.
"""
self.flakes('''
def _(): pass
def _(): pass
''')
def test_redefinedUnderscoreImportation(self):
"""
Test that shadowing an underscore importation raises a warning.
"""
self.flakes('''
from .i18n import _
def _(): pass
''', m.RedefinedWhileUnused)
def test_redefinedClassFunction(self):
"""
Test that shadowing a function definition in a class suite with another
one raises a warning.
"""
self.flakes('''
class A:
def a(): pass
def a(): pass
''', m.RedefinedWhileUnused)
def test_redefinedIfElseFunction(self):
"""
Test that shadowing a function definition twice in an if
and else block does not raise a warning.
"""
self.flakes('''
if True:
def a(): pass
else:
def a(): pass
''')
def test_redefinedIfFunction(self):
"""
Test that shadowing a function definition within an if block
raises a warning.
"""
self.flakes('''
if True:
def a(): pass
def a(): pass
''', m.RedefinedWhileUnused)
def test_redefinedTryExceptFunction(self):
"""
Test that shadowing a function definition twice in try
and except block does not raise a warning.
"""
self.flakes('''
try:
def a(): pass
except:
def a(): pass
''')
def test_redefinedTryFunction(self):
"""
Test that shadowing a function definition within a try block
raises a warning.
"""
self.flakes('''
try:
def a(): pass
def a(): pass
except:
pass
''', m.RedefinedWhileUnused)
def test_redefinedIfElseInListComp(self):
"""
Test that shadowing a variable in a list comprehension in
an if and else block does not raise a warning.
"""
self.flakes('''
if False:
a = 1
else:
[a for a in '12']
''')
@skipIf(version_info >= (3,),
'in Python 3 list comprehensions execute in a separate scope')
def test_redefinedElseInListComp(self):
"""
Test that shadowing a variable in a list comprehension in
an else (or if) block raises a warning.
"""
self.flakes('''
if False:
pass
else:
a = 1
[a for a in '12']
''', m.RedefinedInListComp)
def test_functionDecorator(self):
"""
Test that shadowing a function definition with a decorated version of
that function does not raise a warning.
"""
self.flakes('''
from somewhere import somedecorator
def a(): pass
a = somedecorator(a)
''')
def test_classFunctionDecorator(self):
"""
Test that shadowing a function definition in a class suite with a
decorated version of that function does not raise a warning.
"""
self.flakes('''
class A:
def a(): pass
a = classmethod(a)
''')
def test_modernProperty(self):
self.flakes("""
class A:
@property
def t(self):
pass
@t.setter
def t(self, value):
pass
@t.deleter
def t(self):
pass
""")
def test_unaryPlus(self):
"""Don't die on unary +."""
self.flakes('+1')
def test_undefinedBaseClass(self):
"""
If a name in the base list of a class definition is undefined, a
warning is emitted.
"""
self.flakes('''
class foo(foo):
pass
''', m.UndefinedName)
def test_classNameUndefinedInClassBody(self):
"""
If a class name is used in the body of that class's definition and
the name is not already defined, a warning is emitted.
"""
self.flakes('''
class foo:
foo
''', m.UndefinedName)
def test_classNameDefinedPreviously(self):
"""
If a class name is used in the body of that class's definition and
the name was previously defined in some other way, no warning is
emitted.
"""
self.flakes('''
foo = None
class foo:
foo
''')
def test_classRedefinition(self):
"""
If a class is defined twice in the same module, a warning is emitted.
"""
self.flakes('''
class Foo:
pass
class Foo:
pass
''', m.RedefinedWhileUnused)
def test_functionRedefinedAsClass(self):
"""
If a function is redefined as a class, a warning is emitted.
"""
self.flakes('''
def Foo():
pass
class Foo:
pass
''', m.RedefinedWhileUnused)
def test_classRedefinedAsFunction(self):
"""
If a class is redefined as a function, a warning is emitted.
"""
self.flakes('''
class Foo:
pass
def Foo():
pass
''', m.RedefinedWhileUnused)
def test_classWithReturn(self):
"""
If a return is used inside a class, a warning is emitted.
"""
self.flakes('''
class Foo(object):
return
''', m.ReturnOutsideFunction)
def test_moduleWithReturn(self):
"""
If a return is used at the module level, a warning is emitted.
"""
self.flakes('''
return
''', m.ReturnOutsideFunction)
def test_classWithYield(self):
"""
If a yield is used inside a class, a warning is emitted.
"""
self.flakes('''
class Foo(object):
yield
''', m.YieldOutsideFunction)
def test_moduleWithYield(self):
"""
If a yield is used at the module level, a warning is emitted.
"""
self.flakes('''
yield
''', m.YieldOutsideFunction)
@skipIf(version_info < (3, 3), "Python >= 3.3 only")
def test_classWithYieldFrom(self):
"""
If a yield from is used inside a class, a warning is emitted.
"""
self.flakes('''
class Foo(object):
yield from range(10)
''', m.YieldOutsideFunction)
@skipIf(version_info < (3, 3), "Python >= 3.3 only")
def test_moduleWithYieldFrom(self):
"""
If a yield from is used at the module level, a warning is emitted.
"""
self.flakes('''
yield from range(10)
''', m.YieldOutsideFunction)
    def test_continueOutsideLoop(self):
        """``continue`` outside a loop body is reported.

        A loop's ``else`` suite, and function or class bodies nested inside a
        loop, do not count as being "inside" the loop.
        """
        self.flakes('''
        continue
        ''', m.ContinueOutsideLoop)
        self.flakes('''
        def f():
            continue
        ''', m.ContinueOutsideLoop)
        # The else suite runs after the loop finishes, so continue is illegal
        # there, even nested inside further conditionals.
        self.flakes('''
        while True:
            pass
        else:
            continue
        ''', m.ContinueOutsideLoop)
        self.flakes('''
        while True:
            pass
        else:
            if 1:
                if 2:
                    continue
        ''', m.ContinueOutsideLoop)
        # A nested function or class body is a new scope; the enclosing loop
        # does not make continue legal inside it.
        self.flakes('''
        while True:
            def f():
                continue
        ''', m.ContinueOutsideLoop)
        self.flakes('''
        while True:
            class A:
                continue
        ''', m.ContinueOutsideLoop)
def test_continueInsideLoop(self):
self.flakes('''
while True:
continue
''')
self.flakes('''
for i in range(10):
continue
''')
self.flakes('''
while True:
if 1:
continue
''')
self.flakes('''
for i in range(10):
if 1:
continue
''')
self.flakes('''
while True:
while True:
pass
else:
continue
else:
pass
''')
self.flakes('''
while True:
try:
pass
finally:
while True:
continue
''')
@skipIf(version_info > (3, 8), "Python <= 3.8 only")
def test_continueInFinally(self):
# 'continue' inside 'finally' is a special syntax error
# that is removed in 3.8
self.flakes('''
while True:
try:
pass
finally:
continue
''', m.ContinueInFinally)
self.flakes('''
while True:
try:
pass
finally:
if 1:
if 2:
continue
''', m.ContinueInFinally)
# Even when not in a loop, this is the error Python gives
self.flakes('''
try:
pass
finally:
continue
''', m.ContinueInFinally)
def test_breakOutsideLoop(self):
self.flakes('''
break
''', m.BreakOutsideLoop)
self.flakes('''
def f():
break
''', m.BreakOutsideLoop)
self.flakes('''
while True:
pass
else:
break
''', m.BreakOutsideLoop)
self.flakes('''
while True:
pass
else:
if 1:
if 2:
break
''', m.BreakOutsideLoop)
self.flakes('''
while True:
def f():
break
''', m.BreakOutsideLoop)
self.flakes('''
while True:
class A:
break
''', m.BreakOutsideLoop)
self.flakes('''
try:
pass
finally:
break
''', m.BreakOutsideLoop)
def test_breakInsideLoop(self):
self.flakes('''
while True:
break
''')
self.flakes('''
for i in range(10):
break
''')
self.flakes('''
while True:
if 1:
break
''')
self.flakes('''
for i in range(10):
if 1:
break
''')
self.flakes('''
while True:
while True:
pass
else:
break
else:
pass
''')
self.flakes('''
while True:
try:
pass
finally:
while True:
break
''')
self.flakes('''
while True:
try:
pass
finally:
break
''')
self.flakes('''
while True:
try:
pass
finally:
if 1:
if 2:
break
''')
def test_defaultExceptLast(self):
"""
A default except block should be last.
YES:
try:
...
except Exception:
...
except:
...
NO:
try:
...
except:
...
except Exception:
...
"""
self.flakes('''
try:
pass
except ValueError:
pass
''')
self.flakes('''
try:
pass
except ValueError:
pass
except:
pass
''')
self.flakes('''
try:
pass
except:
pass
''')
self.flakes('''
try:
pass
except ValueError:
pass
else:
pass
''')
self.flakes('''
try:
pass
except:
pass
else:
pass
''')
self.flakes('''
try:
pass
except ValueError:
pass
except:
pass
else:
pass
''')
def test_defaultExceptNotLast(self):
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
except ValueError:
pass
''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
else:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except:
pass
else:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
else:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
except ValueError:
pass
else:
pass
''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
except ValueError:
pass
finally:
pass
''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
else:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except:
pass
else:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
else:
pass
finally:
pass
''', m.DefaultExceptNotLast)
self.flakes('''
try:
pass
except:
pass
except ValueError:
pass
except:
pass
except ValueError:
pass
else:
pass
finally:
pass
''', m.DefaultExceptNotLast, m.DefaultExceptNotLast)
@skipIf(version_info < (3,), "Python 3 only")
def test_starredAssignmentNoError(self):
"""
Python 3 extended iterable unpacking
"""
self.flakes('''
a, *b = range(10)
''')
self.flakes('''
*a, b = range(10)
''')
self.flakes('''
a, *b, c = range(10)
''')
self.flakes('''
(a, *b) = range(10)
''')
self.flakes('''
(*a, b) = range(10)
''')
self.flakes('''
(a, *b, c) = range(10)
''')
self.flakes('''
[a, *b] = range(10)
''')
self.flakes('''
[*a, b] = range(10)
''')
self.flakes('''
[a, *b, c] = range(10)
''')
# Taken from test_unpack_ex.py in the cPython source
s = ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
", *rest = range(1<<8)"
self.flakes(s)
s = "(" + ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
", *rest) = range(1<<8)"
self.flakes(s)
s = "[" + ", ".join("a%d" % i for i in range(1 << 8 - 1)) + \
", *rest] = range(1<<8)"
self.flakes(s)
@skipIf(version_info < (3, ), "Python 3 only")
def test_starredAssignmentErrors(self):
"""
SyntaxErrors (not encoded in the ast) surrounding Python 3 extended
iterable unpacking
"""
# Taken from test_unpack_ex.py in the cPython source
s = ", ".join("a%d" % i for i in range(1 << 8)) + \
", *rest = range(1<<8 + 1)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
s = "(" + ", ".join("a%d" % i for i in range(1 << 8)) + \
", *rest) = range(1<<8 + 1)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
s = "[" + ", ".join("a%d" % i for i in range(1 << 8)) + \
", *rest] = range(1<<8 + 1)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
s = ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
", *rest = range(1<<8 + 2)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
s = "(" + ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
", *rest) = range(1<<8 + 2)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
s = "[" + ", ".join("a%d" % i for i in range(1 << 8 + 1)) + \
", *rest] = range(1<<8 + 2)"
self.flakes(s, m.TooManyExpressionsInStarredAssignment)
# No way we can actually test this!
# s = "*rest, " + ", ".join("a%d" % i for i in range(1<<24)) + \
# ", *rest = range(1<<24 + 1)"
# self.flakes(s, m.TooManyExpressionsInStarredAssignment)
self.flakes('''
a, *b, *c = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
a, *b, c, *d = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
*a, *b, *c = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
(a, *b, *c) = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
(a, *b, c, *d) = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
(*a, *b, *c) = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
[a, *b, *c] = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
[a, *b, c, *d] = range(10)
''', m.TwoStarredExpressions)
self.flakes('''
[*a, *b, *c] = range(10)
''', m.TwoStarredExpressions)
@skip("todo: Too hard to make this warn but other cases stay silent")
def test_doubleAssignment(self):
"""
If a variable is re-assigned to without being used, no warning is
emitted.
"""
self.flakes('''
x = 10
x = 20
''', m.RedefinedWhileUnused)
def test_doubleAssignmentConditionally(self):
"""
If a variable is re-assigned within a conditional, no warning is
emitted.
"""
self.flakes('''
x = 10
if True:
x = 20
''')
def test_doubleAssignmentWithUse(self):
"""
If a variable is re-assigned to after being used, no warning is
emitted.
"""
self.flakes('''
x = 10
y = x * 2
x = 20
''')
def test_comparison(self):
"""
If a defined name is used on either side of any of the six comparison
operators, no warning is emitted.
"""
self.flakes('''
x = 10
y = 20
x < y
x <= y
x == y
x != y
x >= y
x > y
''')
def test_identity(self):
"""
If a defined name is used on either side of an identity test, no
warning is emitted.
"""
self.flakes('''
x = 10
y = 20
x is y
x is not y
''')
def test_containment(self):
"""
If a defined name is used on either side of a containment test, no
warning is emitted.
"""
self.flakes('''
x = 10
y = 20
x in y
x not in y
''')
def test_loopControl(self):
"""
break and continue statements are supported.
"""
self.flakes('''
for x in [1, 2]:
break
''')
self.flakes('''
for x in [1, 2]:
continue
''')
def test_ellipsis(self):
"""
Ellipsis in a slice is supported.
"""
self.flakes('''
[1, 2][...]
''')
def test_extendedSlice(self):
"""
Extended slices are supported.
"""
self.flakes('''
x = 3
[1, 2][x,:]
''')
def test_varAugmentedAssignment(self):
"""
Augmented assignment of a variable is supported.
We don't care about var refs.
"""
self.flakes('''
foo = 0
foo += 1
''')
def test_attrAugmentedAssignment(self):
"""
Augmented assignment of attributes is supported.
We don't care about attr refs.
"""
self.flakes('''
foo = None
foo.bar += foo.baz
''')
def test_globalDeclaredInDifferentScope(self):
"""
A 'global' can be declared in one scope and reused in another.
"""
self.flakes('''
def f(): global foo
def g(): foo = 'anything'; foo.is_used()
''')
def test_function_arguments(self):
"""
Test to traverse ARG and ARGUMENT handler
"""
self.flakes('''
def foo(a, b):
pass
''')
self.flakes('''
def foo(a, b, c=0):
pass
''')
self.flakes('''
def foo(a, b, c=0, *args):
pass
''')
self.flakes('''
def foo(a, b, c=0, *args, **kwargs):
pass
''')
@skipIf(version_info < (3, 3), "Python >= 3.3 only")
def test_function_arguments_python3(self):
    """
    Keyword-only arguments appearing after C{*args} (PEP 3102) are
    traversed without warnings.
    """
    self.flakes('''
    def foo(a, b, c=0, *args, d=0, **kwargs):
        pass
    ''')
class TestUnusedAssignment(TestCase):
"""
Tests for warning about unused assignments.
"""
def test_unusedVariable(self):
"""
Warn when a variable in a function is assigned a value that's never
used.
"""
self.flakes('''
def a():
b = 1
''', m.UnusedVariable)
def test_unusedUnderscoreVariable(self):
"""
Don't warn when the magic "_" (underscore) variable is unused.
See issue #202.
"""
self.flakes('''
def a(unused_param):
_ = unused_param
''')
def test_unusedVariableAsLocals(self):
"""
Using locals() it is perfectly valid to have unused variables
"""
self.flakes('''
def a():
b = 1
return locals()
''')
def test_unusedVariableNoLocals(self):
"""
Using locals() in wrong scope should not matter
"""
self.flakes('''
def a():
locals()
def a():
b = 1
return
''', m.UnusedVariable)
@skip("todo: Difficult because it doesn't apply in the context of a loop")
def test_unusedReassignedVariable(self):
"""
Shadowing a used variable can still raise an UnusedVariable warning.
"""
self.flakes('''
def a():
b = 1
b.foo()
b = 2
''', m.UnusedVariable)
def test_variableUsedInLoop(self):
"""
Shadowing a used variable cannot raise an UnusedVariable warning in the
context of a loop.
"""
self.flakes('''
def a():
b = True
while b:
b = False
''')
def test_assignToGlobal(self):
"""
Assigning to a global and then not using that global is perfectly
acceptable. Do not mistake it for an unused local variable.
"""
self.flakes('''
b = 0
def a():
global b
b = 1
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_assignToNonlocal(self):
"""
Assigning to a nonlocal and then not using that binding is perfectly
acceptable. Do not mistake it for an unused local variable.
"""
self.flakes('''
b = b'0'
def a():
nonlocal b
b = b'1'
''')
def test_assignToMember(self):
"""
Assigning to a member of another object and then not using that member
variable is perfectly acceptable. Do not mistake it for an unused
local variable.
"""
# XXX: Adding this test didn't generate a failure. Maybe not
# necessary?
self.flakes('''
class b:
pass
def a():
b.foo = 1
''')
def test_assignInForLoop(self):
"""
Don't warn when a variable in a for loop is assigned to but not used.
"""
self.flakes('''
def f():
for i in range(10):
pass
''')
def test_assignInListComprehension(self):
"""
Don't warn when a variable in a list comprehension is
assigned to but not used.
"""
self.flakes('''
def f():
[None for i in range(10)]
''')
def test_generatorExpression(self):
"""
Don't warn when a variable in a generator expression is
assigned to but not used.
"""
self.flakes('''
def f():
(None for i in range(10))
''')
def test_assignmentInsideLoop(self):
"""
Don't warn when a variable assignment occurs lexically after its use.
"""
self.flakes('''
def f():
x = None
for i in range(10):
if i > 2:
return x
x = i * 2
''')
def test_tupleUnpacking(self):
"""
Don't warn when a variable included in tuple unpacking is unused. It's
very common for variables in a tuple unpacking assignment to be unused
in good Python code, so warning will only create false positives.
"""
self.flakes('''
def f(tup):
(x, y) = tup
''')
self.flakes('''
def f():
(x, y) = 1, 2
''', m.UnusedVariable, m.UnusedVariable)
self.flakes('''
def f():
(x, y) = coords = 1, 2
if x > 1:
print(coords)
''')
self.flakes('''
def f():
(x, y) = coords = 1, 2
''', m.UnusedVariable)
self.flakes('''
def f():
coords = (x, y) = 1, 2
''', m.UnusedVariable)
def test_listUnpacking(self):
"""
Don't warn when a variable included in list unpacking is unused.
"""
self.flakes('''
def f(tup):
[x, y] = tup
''')
self.flakes('''
def f():
[x, y] = [1, 2]
''', m.UnusedVariable, m.UnusedVariable)
def test_closedOver(self):
"""
Don't warn when the assignment is used in an inner function.
"""
self.flakes('''
def barMaker():
foo = 5
def bar():
return foo
return bar
''')
def test_doubleClosedOver(self):
"""
Don't warn when the assignment is used in an inner function, even if
that inner function itself is in an inner function.
"""
self.flakes('''
def barMaker():
foo = 5
def bar():
def baz():
return foo
return bar
''')
def test_tracebackhideSpecialVariable(self):
"""
Do not warn about unused local variable __tracebackhide__, which is
a special variable for py.test.
"""
self.flakes("""
def helper():
__tracebackhide__ = True
""")
def test_ifexp(self):
"""
Test C{foo if bar else baz} statements.
"""
self.flakes("a = 'moo' if True else 'oink'")
self.flakes("a = foo if True else 'oink'", m.UndefinedName)
self.flakes("a = 'moo' if True else bar", m.UndefinedName)
def test_if_tuple(self):
"""
Test C{if (foo,)} conditions.
"""
self.flakes("""if (): pass""")
self.flakes("""
if (
True
):
pass
""")
self.flakes("""
if (
True,
):
pass
""", m.IfTuple)
self.flakes("""
x = 1 if (
True,
) else 2
""", m.IfTuple)
def test_withStatementNoNames(self):
"""
No warnings are emitted for using inside or after a nameless C{with}
statement a name defined beforehand.
"""
self.flakes('''
bar = None
with open("foo"):
bar
bar
''')
def test_withStatementSingleName(self):
"""
No warnings are emitted for using a name defined by a C{with} statement
within the suite or afterwards.
"""
self.flakes('''
with open('foo') as bar:
bar
bar
''')
def test_withStatementAttributeName(self):
"""
No warnings are emitted for using an attribute as the target of a
C{with} statement.
"""
self.flakes('''
import foo
with open('foo') as foo.bar:
pass
''')
def test_withStatementSubscript(self):
"""
No warnings are emitted for using a subscript as the target of a
C{with} statement.
"""
self.flakes('''
import foo
with open('foo') as foo[0]:
pass
''')
def test_withStatementSubscriptUndefined(self):
"""
An undefined name warning is emitted if the subscript used as the
target of a C{with} statement is not defined.
"""
self.flakes('''
import foo
with open('foo') as foo[bar]:
pass
''', m.UndefinedName)
def test_withStatementTupleNames(self):
"""
No warnings are emitted for using any of the tuple of names defined by
a C{with} statement within the suite or afterwards.
"""
self.flakes('''
with open('foo') as (bar, baz):
bar, baz
bar, baz
''')
def test_withStatementListNames(self):
"""
No warnings are emitted for using any of the list of names defined by a
C{with} statement within the suite or afterwards.
"""
self.flakes('''
with open('foo') as [bar, baz]:
bar, baz
bar, baz
''')
def test_withStatementComplicatedTarget(self):
"""
If the target of a C{with} statement uses any or all of the valid forms
for that part of the grammar (See
U{http://docs.python.org/reference/compound_stmts.html#the-with-statement}),
the names involved are checked both for definedness and any bindings
created are respected in the suite of the statement and afterwards.
"""
self.flakes('''
c = d = e = g = h = i = None
with open('foo') as [(a, b), c[d], e.f, g[h:i]]:
a, b, c, d, e, g, h, i
a, b, c, d, e, g, h, i
''')
def test_withStatementSingleNameUndefined(self):
"""
An undefined name warning is emitted if the name first defined by a
C{with} statement is used before the C{with} statement.
"""
self.flakes('''
bar
with open('foo') as bar:
pass
''', m.UndefinedName)
def test_withStatementTupleNamesUndefined(self):
"""
An undefined name warning is emitted if a name first defined by the
tuple-unpacking form of the C{with} statement is used before the
C{with} statement.
"""
self.flakes('''
baz
with open('foo') as (bar, baz):
pass
''', m.UndefinedName)
def test_withStatementSingleNameRedefined(self):
"""
A redefined name warning is emitted if a name bound by an import is
rebound by the name defined by a C{with} statement.
"""
self.flakes('''
import bar
with open('foo') as bar:
pass
''', m.RedefinedWhileUnused)
def test_withStatementTupleNamesRedefined(self):
"""
A redefined name warning is emitted if a name bound by an import is
rebound by one of the names defined by the tuple-unpacking form of a
C{with} statement.
"""
self.flakes('''
import bar
with open('foo') as (bar, baz):
pass
''', m.RedefinedWhileUnused)
def test_withStatementUndefinedInside(self):
"""
An undefined name warning is emitted if a name is used inside the
body of a C{with} statement without first being bound.
"""
self.flakes('''
with open('foo') as bar:
baz
''', m.UndefinedName)
def test_withStatementNameDefinedInBody(self):
"""
A name defined in the body of a C{with} statement can be used after
the body ends without warning.
"""
self.flakes('''
with open('foo') as bar:
baz = 10
baz
''')
def test_withStatementUndefinedInExpression(self):
"""
An undefined name warning is emitted if a name in the I{test}
expression of a C{with} statement is undefined.
"""
self.flakes('''
with bar as baz:
pass
''', m.UndefinedName)
self.flakes('''
with bar as bar:
pass
''', m.UndefinedName)
def test_dictComprehension(self):
"""
Dict comprehensions are properly handled.
"""
self.flakes('''
a = {1: x for x in range(10)}
''')
def test_setComprehensionAndLiteral(self):
"""
Set comprehensions are properly handled.
"""
self.flakes('''
a = {1, 2, 3}
b = {x for x in range(10)}
''')
def test_exceptionUsedInExcept(self):
    """
    No warning is emitted when the name bound by C{except ... as}
    is used inside the handler, both at module scope and inside a
    function.
    """
    self.flakes('''
    try: pass
    except Exception as e: e
    ''')
    self.flakes('''
    def download_review():
        try: pass
        except Exception as e: e
    ''')
@skipIf(version_info < (3,),
"In Python 2 exception names stay bound after the exception handler")
def test_exceptionUnusedInExcept(self):
self.flakes('''
try: pass
except Exception as e: pass
''', m.UnusedVariable)
def test_exceptionUnusedInExceptInFunction(self):
self.flakes('''
def download_review():
try: pass
except Exception as e: pass
''', m.UnusedVariable)
def test_exceptWithoutNameInFunction(self):
"""
Don't issue false warning when an unnamed exception is used.
Previously, there would be a false warning, but only when the
try..except was in a function
"""
self.flakes('''
import tokenize
def foo():
try: pass
except tokenize.TokenError: pass
''')
def test_exceptWithoutNameInFunctionTuple(self):
"""
Don't issue false warning when an unnamed exception is used.
This example catches a tuple of exception types.
"""
self.flakes('''
import tokenize
def foo():
try: pass
except (tokenize.TokenError, IndentationError): pass
''')
def test_augmentedAssignmentImportedFunctionCall(self):
"""
Consider a function that is called on the right part of an
augassign operation to be used.
"""
self.flakes('''
from foo import bar
baz = 0
baz += bar()
''')
def test_assert_without_message(self):
"""An assert without a message is not an error."""
self.flakes('''
a = 1
assert a
''')
def test_assert_with_message(self):
"""An assert with a message is not an error."""
self.flakes('''
a = 1
assert a, 'x'
''')
def test_assert_tuple(self):
"""An assert of a non-empty tuple is always True."""
self.flakes('''
assert (False, 'x')
assert (False, )
''', m.AssertTuple, m.AssertTuple)
def test_assert_tuple_empty(self):
"""An assert of an empty tuple is always False."""
self.flakes('''
assert ()
''')
def test_assert_static(self):
"""An assert of a static value is not an error."""
self.flakes('''
assert True
assert 1
''')
@skipIf(version_info < (3, 3), 'new in Python 3.3')
def test_yieldFromUndefined(self):
"""
Test C{yield from} statement
"""
self.flakes('''
def bar():
yield from foo()
''', m.UndefinedName)
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_f_string(self):
"""Test PEP 498 f-strings are treated as a usage."""
self.flakes('''
baz = 0
print(f'\x7b4*baz\N{RIGHT CURLY BRACKET}')
''')
@skipIf(version_info < (3, 8), 'new in Python 3.8')
def test_assign_expr(self):
"""Test PEP 572 assignment expressions are treated as usage / write."""
self.flakes('''
from foo import y
print(x := y)
print(x)
''')
class TestStringFormatting(TestCase):
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_f_string_without_placeholders(self):
self.flakes("f'foo'", m.FStringMissingPlaceholders)
self.flakes('''
f"""foo
bar
"""
''', m.FStringMissingPlaceholders)
self.flakes('''
print(
f'foo'
f'bar'
)
''', m.FStringMissingPlaceholders)
# this is an "escaped placeholder" but not a placeholder
self.flakes("f'{{}}'", m.FStringMissingPlaceholders)
# ok: f-string with placeholders
self.flakes('''
x = 5
print(f'{x}')
''')
# ok: f-string with format specifiers
self.flakes('''
x = 'a' * 90
print(f'{x:.8}')
''')
# ok: f-string with multiple format specifiers
self.flakes('''
x = y = 5
print(f'{x:>2} {y:>2}')
''')
def test_invalid_dot_format_calls(self):
self.flakes('''
'{'.format(1)
''', m.StringDotFormatInvalidFormat)
self.flakes('''
'{} {1}'.format(1, 2)
''', m.StringDotFormatMixingAutomatic)
self.flakes('''
'{0} {}'.format(1, 2)
''', m.StringDotFormatMixingAutomatic)
self.flakes('''
'{}'.format(1, 2)
''', m.StringDotFormatExtraPositionalArguments)
self.flakes('''
'{}'.format(1, bar=2)
''', m.StringDotFormatExtraNamedArguments)
self.flakes('''
'{} {}'.format(1)
''', m.StringDotFormatMissingArgument)
self.flakes('''
'{2}'.format()
''', m.StringDotFormatMissingArgument)
self.flakes('''
'{bar}'.format()
''', m.StringDotFormatMissingArgument)
# too much string recursion (placeholder-in-placeholder)
self.flakes('''
'{:{:{}}}'.format(1, 2, 3)
''', m.StringDotFormatInvalidFormat)
# ok: dotted / bracketed names need to handle the param differently
self.flakes("'{.__class__}'.format('')")
self.flakes("'{foo[bar]}'.format(foo={'bar': 'barv'})")
# ok: placeholder-placeholders
self.flakes('''
print('{:{}} {}'.format(1, 15, 2))
''')
# ok: not a placeholder-placeholder
self.flakes('''
print('{:2}'.format(1))
''')
# ok: not mixed automatic
self.flakes('''
'{foo}-{}'.format(1, foo=2)
''')
# ok: we can't determine statically the format args
self.flakes('''
a = ()
"{}".format(*a)
''')
self.flakes('''
k = {}
"{foo}".format(**k)
''')
def test_invalid_percent_format_calls(self):
self.flakes('''
'%(foo)' % {'foo': 'bar'}
''', m.PercentFormatInvalidFormat)
self.flakes('''
'%s %(foo)s' % {'foo': 'bar'}
''', m.PercentFormatMixedPositionalAndNamed)
self.flakes('''
'%(foo)s %s' % {'foo': 'bar'}
''', m.PercentFormatMixedPositionalAndNamed)
self.flakes('''
'%j' % (1,)
''', m.PercentFormatUnsupportedFormatCharacter)
self.flakes('''
'%s %s' % (1,)
''', m.PercentFormatPositionalCountMismatch)
self.flakes('''
'%s %s' % (1, 2, 3)
''', m.PercentFormatPositionalCountMismatch)
self.flakes('''
'%(bar)s' % {}
''', m.PercentFormatMissingArgument,)
self.flakes('''
'%(bar)s' % {'bar': 1, 'baz': 2}
''', m.PercentFormatExtraNamedArguments)
self.flakes('''
'%(bar)s' % (1, 2, 3)
''', m.PercentFormatExpectedMapping)
self.flakes('''
'%s %s' % {'k': 'v'}
''', m.PercentFormatExpectedSequence)
self.flakes('''
'%(bar)*s' % {'bar': 'baz'}
''', m.PercentFormatStarRequiresSequence)
# ok: single %s with mapping
self.flakes('''
'%s' % {'foo': 'bar', 'baz': 'womp'}
''')
# ok: does not cause a MemoryError (the strings aren't evaluated)
self.flakes('''
"%1000000000000f" % 1
''')
# ok: %% should not count towards placeholder count
self.flakes('''
'%% %s %% %s' % (1, 2)
''')
# ok: * consumes one positional argument
self.flakes('''
'%.*f' % (2, 1.1234)
'%*.*f' % (5, 2, 3.1234)
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_ok_percent_format_cannot_determine_element_count(self):
    """
    No %-format count warnings are emitted when the right-hand side
    uses iterable unpacking, because the element count cannot be
    determined statically.
    """
    self.flakes('''
    a = []
    '%s %s' % [*a]
    '%s %s' % (*a,)
    ''')
self.flakes('''
k = {}
'%(k)s' % {**k}
''')
class TestAsyncStatements(TestCase):
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncDef(self):
self.flakes('''
async def bar():
return 42
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncDefAwait(self):
self.flakes('''
async def read_data(db):
await db.fetch('SELECT ...')
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncDefUndefined(self):
self.flakes('''
async def bar():
return foo()
''', m.UndefinedName)
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncFor(self):
self.flakes('''
async def read_data(db):
output = []
async for row in db.cursor():
output.append(row)
return output
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncForUnderscoreLoopVar(self):
self.flakes('''
async def coro(it):
async for _ in it:
pass
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_loopControlInAsyncFor(self):
self.flakes('''
async def read_data(db):
output = []
async for row in db.cursor():
if row[0] == 'skip':
continue
output.append(row)
return output
''')
self.flakes('''
async def read_data(db):
output = []
async for row in db.cursor():
if row[0] == 'stop':
break
output.append(row)
return output
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_loopControlInAsyncForElse(self):
self.flakes('''
async def read_data(db):
output = []
async for row in db.cursor():
output.append(row)
else:
continue
return output
''', m.ContinueOutsideLoop)
self.flakes('''
async def read_data(db):
output = []
async for row in db.cursor():
output.append(row)
else:
break
return output
''', m.BreakOutsideLoop)
@skipIf(version_info < (3, 5), 'new in Python 3.5')
@skipIf(version_info > (3, 8), "Python <= 3.8 only")
# NOTE(review): a released interpreter has version_info like
# (3, 8, x, ...), which compares greater than (3, 8), so this test is
# also skipped on 3.8 itself — confirm whether the message should
# read "Python < 3.8 only".
def test_continueInAsyncForFinally(self):
    """
    C{continue} inside a C{finally} block of an C{async for} loop is
    reported as C{ContinueInFinally}.
    """
    self.flakes('''
    async def read_data(db):
        output = []
        async for row in db.cursor():
            try:
                output.append(row)
            finally:
                continue
        return output
    ''', m.ContinueInFinally)
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncWith(self):
self.flakes('''
async def commit(session, data):
async with session.transaction():
await session.update(data)
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_asyncWithItem(self):
self.flakes('''
async def commit(session, data):
async with session.transaction() as trans:
await trans.begin()
...
await trans.end()
''')
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_matmul(self):
self.flakes('''
def foo(a, b):
return a @ b
''')
@skipIf(version_info < (3, 6), 'new in Python 3.6')
def test_formatstring(self):
self.flakes('''
hi = 'hi'
mom = 'mom'
f'{hi} {mom}'
''')
def test_raise_notimplemented(self):
self.flakes('''
raise NotImplementedError("This is fine")
''')
self.flakes('''
raise NotImplementedError
''')
self.flakes('''
raise NotImplemented("This isn't gonna work")
''', m.RaiseNotImplemented)
self.flakes('''
raise NotImplemented
''', m.RaiseNotImplemented)
class TestIncompatiblePrintOperator(TestCase):
    """
    Tests for warning about invalid use of print function.
    """
    def test_valid_print(self):
        """A plain call of the print function emits no warning."""
        self.flakes('''
        print("Hello")
        ''')
    def test_invalid_print_when_imported_from_future(self):
        """
        The Python 2 ``print >>`` statement syntax used after
        ``from __future__ import print_function`` is reported as
        C{InvalidPrintSyntax}, positioned at the offending line.
        """
        exc = self.flakes('''
        from __future__ import print_function
        import sys
        print >>sys.stderr, "Hello"
        ''', m.InvalidPrintSyntax).messages[0]
        # ``print >>`` is on the 4th line of the snippet (line 1 is
        # the blank line after the opening triple-quote).
        self.assertEqual(exc.lineno, 4)
        self.assertEqual(exc.col, 0)
    def test_print_function_assignment(self):
        """
        A valid assignment, tested for catching false positives.
        """
        self.flakes('''
        from __future__ import print_function
        log = print
        log("Hello")
        ''')
    def test_print_in_lambda(self):
        """Referencing print inside a lambda body is not a warning."""
        self.flakes('''
        from __future__ import print_function
        a = lambda: print
        ''')
    def test_print_returned_in_function(self):
        """Returning the print function from a function is not a warning."""
        self.flakes('''
        from __future__ import print_function
        def a():
            return print
        ''')
    def test_print_as_condition_test(self):
        """Using print as the test of an ``if`` is not a warning."""
        self.flakes('''
        from __future__ import print_function
        if print: pass
        ''')
| 53,478 | Python | .py | 1,898 | 18.351949 | 84 | 0.484713 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,318 | test_code_segment.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_code_segment.py | from sys import version_info
from pyflakes import messages as m
from pyflakes.checker import (FunctionScope, ClassScope, ModuleScope,
Argument, FunctionDefinition, Assignment)
from pyflakes.test.harness import TestCase, skipIf
class TestCodeSegments(TestCase):
"""
Tests for segments of a module
"""
def test_function_segment(self):
    """
    Nested function segments are checked: a harmless inner function
    yields no warnings, while an unused local inside the inner
    function is reported as C{UnusedVariable}.
    """
    self.flakes('''
    def foo():
        def bar():
            pass
    ''', is_segment=True)
    self.flakes('''
    def foo():
        def bar():
            x = 0
    ''', m.UnusedVariable, is_segment=True)
def test_class_segment(self):
self.flakes('''
class Foo:
class Bar:
pass
''', is_segment=True)
self.flakes('''
class Foo:
def bar():
x = 0
''', m.UnusedVariable, is_segment=True)
def test_scope_class(self):
checker = self.flakes('''
class Foo:
x = 0
def bar(a, b=1, *d, **e):
pass
''', is_segment=True)
scopes = checker.deadScopes
module_scopes = [
scope for scope in scopes if scope.__class__ is ModuleScope]
class_scopes = [
scope for scope in scopes if scope.__class__ is ClassScope]
function_scopes = [
scope for scope in scopes if scope.__class__ is FunctionScope]
# Ensure module scope is not present because we are analysing
# the inner contents of Foo
self.assertEqual(len(module_scopes), 0)
self.assertEqual(len(class_scopes), 1)
self.assertEqual(len(function_scopes), 1)
class_scope = class_scopes[0]
function_scope = function_scopes[0]
self.assertIsInstance(class_scope, ClassScope)
self.assertIsInstance(function_scope, FunctionScope)
self.assertIn('x', class_scope)
self.assertIn('bar', class_scope)
self.assertIn('a', function_scope)
self.assertIn('b', function_scope)
self.assertIn('d', function_scope)
self.assertIn('e', function_scope)
self.assertIsInstance(class_scope['bar'], FunctionDefinition)
self.assertIsInstance(class_scope['x'], Assignment)
self.assertIsInstance(function_scope['a'], Argument)
self.assertIsInstance(function_scope['b'], Argument)
self.assertIsInstance(function_scope['d'], Argument)
self.assertIsInstance(function_scope['e'], Argument)
def test_scope_function(self):
checker = self.flakes('''
def foo(a, b=1, *d, **e):
def bar(f, g=1, *h, **i):
pass
''', is_segment=True)
scopes = checker.deadScopes
module_scopes = [
scope for scope in scopes if scope.__class__ is ModuleScope]
function_scopes = [
scope for scope in scopes if scope.__class__ is FunctionScope]
# Ensure module scope is not present because we are analysing
# the inner contents of foo
self.assertEqual(len(module_scopes), 0)
self.assertEqual(len(function_scopes), 2)
function_scope_foo = function_scopes[1]
function_scope_bar = function_scopes[0]
self.assertIsInstance(function_scope_foo, FunctionScope)
self.assertIsInstance(function_scope_bar, FunctionScope)
self.assertIn('a', function_scope_foo)
self.assertIn('b', function_scope_foo)
self.assertIn('d', function_scope_foo)
self.assertIn('e', function_scope_foo)
self.assertIn('bar', function_scope_foo)
self.assertIn('f', function_scope_bar)
self.assertIn('g', function_scope_bar)
self.assertIn('h', function_scope_bar)
self.assertIn('i', function_scope_bar)
self.assertIsInstance(function_scope_foo['bar'], FunctionDefinition)
self.assertIsInstance(function_scope_foo['a'], Argument)
self.assertIsInstance(function_scope_foo['b'], Argument)
self.assertIsInstance(function_scope_foo['d'], Argument)
self.assertIsInstance(function_scope_foo['e'], Argument)
self.assertIsInstance(function_scope_bar['f'], Argument)
self.assertIsInstance(function_scope_bar['g'], Argument)
self.assertIsInstance(function_scope_bar['h'], Argument)
self.assertIsInstance(function_scope_bar['i'], Argument)
@skipIf(version_info < (3, 5), 'new in Python 3.5')
def test_scope_async_function(self):
    """An ``async def`` segment is analysed without warnings."""
    self.flakes('async def foo(): pass', is_segment=True)
| 4,590 | Python | .py | 106 | 33.660377 | 76 | 0.620682 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,319 | test_undefined_names.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_undefined_names.py | import ast
from sys import version_info
from pyflakes import messages as m, checker
from pyflakes.test.harness import TestCase, skipIf, skip
class Test(TestCase):
def test_undefined(self):
    """A bare reference to an unbound name is an C{UndefinedName}."""
    self.flakes('bar', m.UndefinedName)
def test_definedInListComp(self):
self.flakes('[a for a in range(10) if a]')
@skipIf(version_info < (3,),
'in Python 2 list comprehensions execute in the same scope')
def test_undefinedInListComp(self):
self.flakes('''
[a for a in range(10)]
a
''',
m.UndefinedName)
@skipIf(version_info < (3,),
'in Python 2 exception names stay bound after the except: block')
def test_undefinedExceptionName(self):
"""Exception names can't be used after the except: block.
The exc variable is unused inside the exception handler."""
self.flakes('''
try:
raise ValueError('ve')
except ValueError as exc:
pass
exc
''', m.UndefinedName, m.UnusedVariable)
def test_namesDeclaredInExceptBlocks(self):
"""Locals declared in except: blocks can be used after the block.
This shows the example in test_undefinedExceptionName is
different."""
self.flakes('''
try:
raise ValueError('ve')
except ValueError as exc:
e = exc
e
''')
@skip('error reporting disabled due to false positives below')
def test_undefinedExceptionNameObscuringLocalVariable(self):
"""Exception names obscure locals, can't be used after.
Last line will raise UnboundLocalError on Python 3 after exiting
the except: block. Note next two examples for false positives to
watch out for."""
self.flakes('''
exc = 'Original value'
try:
raise ValueError('ve')
except ValueError as exc:
pass
exc
''',
m.UndefinedName)
@skipIf(version_info < (3,),
'in Python 2 exception names stay bound after the except: block')
def test_undefinedExceptionNameObscuringLocalVariable2(self):
"""Exception names are unbound after the `except:` block.
Last line will raise UnboundLocalError on Python 3 but would print out
've' on Python 2. The exc variable is unused inside the exception
handler."""
self.flakes('''
try:
raise ValueError('ve')
except ValueError as exc:
pass
print(exc)
exc = 'Original value'
''', m.UndefinedName, m.UnusedVariable)
def test_undefinedExceptionNameObscuringLocalVariableFalsePositive1(self):
"""Exception names obscure locals, can't be used after. Unless.
Last line will never raise UnboundLocalError because it's only
entered if no exception was raised."""
# The exc variable is unused inside the exception handler.
expected = [] if version_info < (3,) else [m.UnusedVariable]
self.flakes('''
exc = 'Original value'
try:
raise ValueError('ve')
except ValueError as exc:
print('exception logged')
raise
exc
''', *expected)
def test_delExceptionInExcept(self):
"""The exception name can be deleted in the except: block."""
self.flakes('''
try:
pass
except Exception as exc:
del exc
''')
def test_undefinedExceptionNameObscuringLocalVariableFalsePositive2(self):
"""Exception names obscure locals, can't be used after. Unless.
Last line will never raise UnboundLocalError because `error` is
only falsy if the `except:` block has not been entered."""
# The exc variable is unused inside the exception handler.
expected = [] if version_info < (3,) else [m.UnusedVariable]
self.flakes('''
exc = 'Original value'
error = None
try:
raise ValueError('ve')
except ValueError as exc:
error = 'exception logged'
if error:
print(error)
else:
exc
''', *expected)
@skip('error reporting disabled due to false positives below')
def test_undefinedExceptionNameObscuringGlobalVariable(self):
"""Exception names obscure globals, can't be used after.
Last line will raise UnboundLocalError on both Python 2 and
Python 3 because the existence of that exception name creates
a local scope placeholder for it, obscuring any globals, etc."""
self.flakes('''
exc = 'Original value'
def func():
try:
pass # nothing is raised
except ValueError as exc:
pass # block never entered, exc stays unbound
exc
''',
m.UndefinedLocal)
@skip('error reporting disabled due to false positives below')
def test_undefinedExceptionNameObscuringGlobalVariable2(self):
"""Exception names obscure globals, can't be used after.
Last line will raise NameError on Python 3 because the name is
locally unbound after the `except:` block, even if it's
nonlocal. We should issue an error in this case because code
only working correctly if an exception isn't raised, is invalid.
Unless it's explicitly silenced, see false positives below."""
self.flakes('''
exc = 'Original value'
def func():
global exc
try:
raise ValueError('ve')
except ValueError as exc:
pass # block never entered, exc stays unbound
exc
''',
m.UndefinedLocal)
def test_undefinedExceptionNameObscuringGlobalVariableFalsePositive1(self):
"""Exception names obscure globals, can't be used after. Unless.
Last line will never raise NameError because it's only entered
if no exception was raised."""
# The exc variable is unused inside the exception handler.
expected = [] if version_info < (3,) else [m.UnusedVariable]
self.flakes('''
exc = 'Original value'
def func():
global exc
try:
raise ValueError('ve')
except ValueError as exc:
print('exception logged')
raise
exc
''', *expected)
def test_undefinedExceptionNameObscuringGlobalVariableFalsePositive2(self):
"""Exception names obscure globals, can't be used after. Unless.
Last line will never raise NameError because `error` is only
falsy if the `except:` block has not been entered."""
# The exc variable is unused inside the exception handler.
expected = [] if version_info < (3,) else [m.UnusedVariable]
self.flakes('''
exc = 'Original value'
def func():
global exc
error = None
try:
raise ValueError('ve')
except ValueError as exc:
error = 'exception logged'
if error:
print(error)
else:
exc
''', *expected)
def test_functionsNeedGlobalScope(self):
self.flakes('''
class a:
def b():
fu
fu = 1
''')
def test_builtins(self):
    """Builtin names such as C{range} need no definition."""
    self.flakes('range(10)')
def test_builtinWindowsError(self):
"""
C{WindowsError} is sometimes a builtin name, so no warning is emitted
for using it.
"""
self.flakes('WindowsError')
@skipIf(version_info < (3, 6), 'new feature in 3.6')
def test_moduleAnnotations(self):
"""
Use of the C{__annotations__} in module scope should not emit
an undefined name warning when version is greater than or equal to 3.6.
"""
self.flakes('__annotations__')
def test_magicGlobalsFile(self):
"""
Use of the C{__file__} magic global should not emit an undefined name
warning.
"""
self.flakes('__file__')
def test_magicGlobalsBuiltins(self):
"""
Use of the C{__builtins__} magic global should not emit an undefined
name warning.
"""
self.flakes('__builtins__')
def test_magicGlobalsName(self):
"""
Use of the C{__name__} magic global should not emit an undefined name
warning.
"""
self.flakes('__name__')
def test_magicGlobalsPath(self):
"""
Use of the C{__path__} magic global should not emit an undefined name
warning, if you refer to it from a file called __init__.py.
"""
self.flakes('__path__', m.UndefinedName)
self.flakes('__path__', filename='package/__init__.py')
def test_magicModuleInClassScope(self):
"""
Use of the C{__module__} magic builtin should not emit an undefined
name warning if used in class scope.
"""
self.flakes('__module__', m.UndefinedName)
self.flakes('''
class Foo:
__module__
''')
self.flakes('''
class Foo:
def bar(self):
__module__
''', m.UndefinedName)
@skipIf(version_info < (3, 3), "Python >= 3.3 only")
def test_magicQualnameInClassScope(self):
"""
Use of the C{__qualname__} magic builtin should not emit an undefined
name warning if used in class scope.
"""
self.flakes('__qualname__', m.UndefinedName)
self.flakes('''
class Foo:
__qualname__
''')
self.flakes('''
class Foo:
def bar(self):
__qualname__
''', m.UndefinedName)
def test_globalImportStar(self):
"""Can't find undefined names with import *."""
self.flakes('from fu import *; bar',
m.ImportStarUsed, m.ImportStarUsage)
@skipIf(version_info >= (3,), 'obsolete syntax')
def test_localImportStar(self):
"""
A local import * still allows undefined names to be found
in upper scopes.
"""
self.flakes('''
def a():
from fu import *
bar
''', m.ImportStarUsed, m.UndefinedName, m.UnusedImport)
@skipIf(version_info >= (3,), 'obsolete syntax')
def test_unpackedParameter(self):
"""Unpacked function parameters create bindings."""
self.flakes('''
def a((bar, baz)):
bar; baz
''')
def test_definedByGlobal(self):
"""
"global" can make an otherwise undefined name in another function
defined.
"""
self.flakes('''
def a(): global fu; fu = 1
def b(): fu
''')
self.flakes('''
def c(): bar
def b(): global bar; bar = 1
''')
def test_definedByGlobalMultipleNames(self):
"""
"global" can accept multiple names.
"""
self.flakes('''
def a(): global fu, bar; fu = 1; bar = 2
def b(): fu; bar
''')
def test_globalInGlobalScope(self):
"""
A global statement in the global scope is ignored.
"""
self.flakes('''
global x
def foo():
print(x)
''', m.UndefinedName)
def test_global_reset_name_only(self):
"""A global statement does not prevent other names being undefined."""
# Only different undefined names are reported.
# See following test that fails where the same name is used.
self.flakes('''
def f1():
s
def f2():
global m
''', m.UndefinedName)
@skip("todo")
def test_unused_global(self):
"""An unused global statement does not define the name."""
self.flakes('''
def f1():
m
def f2():
global m
''', m.UndefinedName)
def test_del(self):
"""Del deletes bindings."""
self.flakes('a = 1; del a; a', m.UndefinedName)
def test_delGlobal(self):
"""Del a global binding from a function."""
self.flakes('''
a = 1
def f():
global a
del a
a
''')
def test_delUndefined(self):
"""Del an undefined name."""
self.flakes('del a', m.UndefinedName)
def test_delConditional(self):
"""
Ignores conditional bindings deletion.
"""
self.flakes('''
context = None
test = True
if False:
del(test)
assert(test)
''')
def test_delConditionalNested(self):
"""
Ignored conditional bindings deletion even if they are nested in other
blocks.
"""
self.flakes('''
context = None
test = True
if False:
with context():
del(test)
assert(test)
''')
def test_delWhile(self):
"""
Ignore bindings deletion if called inside the body of a while
statement.
"""
self.flakes('''
def test():
foo = 'bar'
while False:
del foo
assert(foo)
''')
def test_delWhileTestUsage(self):
"""
Ignore bindings deletion if called inside the body of a while
statement and name is used inside while's test part.
"""
self.flakes('''
def _worker():
o = True
while o is not True:
del o
o = False
''')
def test_delWhileNested(self):
"""
Ignore bindings deletions if node is part of while's test, even when
del is in a nested block.
"""
self.flakes('''
context = None
def _worker():
o = True
while o is not True:
while True:
with context():
del o
o = False
''')
def test_globalFromNestedScope(self):
"""Global names are available from nested scopes."""
self.flakes('''
a = 1
def b():
def c():
a
''')
def test_laterRedefinedGlobalFromNestedScope(self):
"""
Test that referencing a local name that shadows a global, before it is
defined, generates a warning.
"""
self.flakes('''
a = 1
def fun():
a
a = 2
return a
''', m.UndefinedLocal)
def test_laterRedefinedGlobalFromNestedScope2(self):
"""
Test that referencing a local name in a nested scope that shadows a
global declared in an enclosing scope, before it is defined, generates
a warning.
"""
self.flakes('''
a = 1
def fun():
global a
def fun2():
a
a = 2
return a
''', m.UndefinedLocal)
def test_intermediateClassScopeIgnored(self):
"""
If a name defined in an enclosing scope is shadowed by a local variable
and the name is used locally before it is bound, an unbound local
warning is emitted, even if there is a class scope between the enclosing
scope and the local scope.
"""
self.flakes('''
def f():
x = 1
class g:
def h(self):
a = x
x = None
print(x, a)
print(x)
''', m.UndefinedLocal)
def test_doubleNestingReportsClosestName(self):
"""
Test that referencing a local name in a nested scope that shadows a
variable declared in two different outer scopes before it is defined
in the innermost scope generates an UnboundLocal warning which
refers to the nearest shadowed name.
"""
exc = self.flakes('''
def a():
x = 1
def b():
x = 2 # line 5
def c():
x
x = 3
return x
return x
return x
''', m.UndefinedLocal).messages[0]
# _DoctestMixin.flakes adds two lines preceding the code above.
expected_line_num = 7 if self.withDoctest else 5
self.assertEqual(exc.message_args, ('x', expected_line_num))
def test_laterRedefinedGlobalFromNestedScope3(self):
"""
Test that referencing a local name in a nested scope that shadows a
global, before it is defined, generates a warning.
"""
self.flakes('''
def fun():
a = 1
def fun2():
a
a = 1
return a
return a
''', m.UndefinedLocal)
    def test_undefinedAugmentedAssignment(self):
        """
        Augmented assignments (``/=``, ``*=``, ``-=``, ``+=``) and subscript
        targets that reference undefined names are each reported; an
        augmented assignment to an undefined name (``d += 4``) additionally
        leaves that name unused.
        """
        self.flakes(
            '''
            def f(seq):
                a = 0
                seq[a] += 1
                seq[b] /= 2
                c[0] *= 2
                a -= 3
                d += 4
                e[any] = 5
            ''',
            m.UndefinedName, # b
            m.UndefinedName, # c
            m.UndefinedName, m.UnusedVariable, # d
            m.UndefinedName, # e
        )
def test_nestedClass(self):
"""Nested classes can access enclosing scope."""
self.flakes('''
def f(foo):
class C:
bar = foo
def f(self):
return foo
return C()
f(123).f()
''')
def test_badNestedClass(self):
"""Free variables in nested classes must bind at class creation."""
self.flakes('''
def f():
class C:
bar = foo
foo = 456
return foo
f()
''', m.UndefinedName)
def test_definedAsStarArgs(self):
"""Star and double-star arg names are defined."""
self.flakes('''
def f(a, *b, **c):
print(a, b, c)
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_definedAsStarUnpack(self):
"""Star names in unpack are defined."""
self.flakes('''
a, *b = range(10)
print(a, b)
''')
self.flakes('''
*a, b = range(10)
print(a, b)
''')
self.flakes('''
a, *b, c = range(10)
print(a, b, c)
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_usedAsStarUnpack(self):
"""
Star names in unpack are used if RHS is not a tuple/list literal.
"""
self.flakes('''
def f():
a, *b = range(10)
''')
self.flakes('''
def f():
(*a, b) = range(10)
''')
self.flakes('''
def f():
[a, *b, c] = range(10)
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_unusedAsStarUnpack(self):
"""
Star names in unpack are unused if RHS is a tuple/list literal.
"""
self.flakes('''
def f():
a, *b = any, all, 4, 2, 'un'
''', m.UnusedVariable, m.UnusedVariable)
self.flakes('''
def f():
(*a, b) = [bool, int, float, complex]
''', m.UnusedVariable, m.UnusedVariable)
self.flakes('''
def f():
[a, *b, c] = 9, 8, 7, 6, 5, 4
''', m.UnusedVariable, m.UnusedVariable, m.UnusedVariable)
@skipIf(version_info < (3,), 'new in Python 3')
def test_keywordOnlyArgs(self):
"""Keyword-only arg names are defined."""
self.flakes('''
def f(*, a, b=None):
print(a, b)
''')
self.flakes('''
import default_b
def f(*, a, b=default_b):
print(a, b)
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_keywordOnlyArgsUndefined(self):
"""Typo in kwonly name."""
self.flakes('''
def f(*, a, b=default_c):
print(a, b)
''', m.UndefinedName)
@skipIf(version_info < (3,), 'new in Python 3')
def test_annotationUndefined(self):
"""Undefined annotations."""
self.flakes('''
from abc import note1, note2, note3, note4, note5
def func(a: note1, *args: note2,
b: note3=12, **kw: note4) -> note5: pass
''')
self.flakes('''
def func():
d = e = 42
def func(a: {1, d}) -> (lambda c: e): pass
''')
@skipIf(version_info < (3,), 'new in Python 3')
def test_metaClassUndefined(self):
self.flakes('''
from abc import ABCMeta
class A(metaclass=ABCMeta): pass
''')
def test_definedInGenExp(self):
"""
Using the loop variable of a generator expression results in no
warnings.
"""
self.flakes('(a for a in [1, 2, 3] if a)')
self.flakes('(b for b in (a for a in [1, 2, 3] if a) if b)')
def test_undefinedInGenExpNested(self):
"""
The loop variables of generator expressions nested together are
not defined in the other generator.
"""
self.flakes('(b for b in (a for a in [1, 2, 3] if b) if b)',
m.UndefinedName)
self.flakes('(b for b in (a for a in [1, 2, 3] if a) if a)',
m.UndefinedName)
def test_undefinedWithErrorHandler(self):
"""
Some compatibility code checks explicitly for NameError.
It should not trigger warnings.
"""
self.flakes('''
try:
socket_map
except NameError:
socket_map = {}
''')
self.flakes('''
try:
_memoryview.contiguous
except (NameError, AttributeError):
raise RuntimeError("Python >= 3.3 is required")
''')
# If NameError is not explicitly handled, generate a warning
self.flakes('''
try:
socket_map
except:
socket_map = {}
''', m.UndefinedName)
self.flakes('''
try:
socket_map
except Exception:
socket_map = {}
''', m.UndefinedName)
def test_definedInClass(self):
"""
Defined name for generator expressions and dict/set comprehension.
"""
self.flakes('''
class A:
T = range(10)
Z = (x for x in T)
L = [x for x in T]
B = dict((i, str(i)) for i in T)
''')
self.flakes('''
class A:
T = range(10)
X = {x for x in T}
Y = {x:x for x in T}
''')
def test_definedInClassNested(self):
"""Defined name for nested generator expressions in a class."""
self.flakes('''
class A:
T = range(10)
Z = (x for x in (a for a in T))
''')
def test_undefinedInLoop(self):
"""
The loop variable is defined after the expression is computed.
"""
self.flakes('''
for i in range(i):
print(i)
''', m.UndefinedName)
self.flakes('''
[42 for i in range(i)]
''', m.UndefinedName)
self.flakes('''
(42 for i in range(i))
''', m.UndefinedName)
def test_definedFromLambdaInDictionaryComprehension(self):
"""
Defined name referenced from a lambda function within a dict/set
comprehension.
"""
self.flakes('''
{lambda: id(x) for x in range(10)}
''')
def test_definedFromLambdaInGenerator(self):
"""
Defined name referenced from a lambda function within a generator
expression.
"""
self.flakes('''
any(lambda: id(x) for x in range(10))
''')
def test_undefinedFromLambdaInDictionaryComprehension(self):
"""
Undefined name referenced from a lambda function within a dict/set
comprehension.
"""
self.flakes('''
{lambda: id(y) for x in range(10)}
''', m.UndefinedName)
def test_undefinedFromLambdaInComprehension(self):
"""
Undefined name referenced from a lambda function within a generator
expression.
"""
self.flakes('''
any(lambda: id(y) for x in range(10))
''', m.UndefinedName)
def test_dunderClass(self):
"""
`__class__` is defined in class scope under Python 3, but is not
in Python 2.
"""
code = '''
class Test(object):
def __init__(self):
print(__class__.__name__)
self.x = 1
t = Test()
'''
if version_info < (3,):
self.flakes(code, m.UndefinedName)
else:
self.flakes(code)
class NameTests(TestCase):
    """
    Tests for some extra cases of name handling.
    """
    def test_impossibleContext(self):
        """
        A Name node with an unrecognized context results in a RuntimeError being
        raised.
        """
        tree = ast.parse("x = 10")
        file_tokens = checker.make_tokens("x = 10")
        # Make it into something unrecognizable.
        # (a plain object() is not one of the ast expression contexts)
        tree.body[0].targets[0].ctx = object()
        self.assertRaises(RuntimeError, checker.Checker, tree, file_tokens=file_tokens)
| 25,805 | Python | .py | 775 | 23.247742 | 87 | 0.536617 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,320 | test_dict.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_dict.py | """
Tests for dict duplicate keys Pyflakes behavior.
"""
from sys import version_info
from pyflakes import messages as m
from pyflakes.test.harness import TestCase, skipIf
class Test(TestCase):
    """
    Tests for the duplicate-dict-key checks.
    A repeated key is reported once per occurrence, either as
    MultiValueRepeatedKeyLiteral (constant keys) or as
    MultiValueRepeatedKeyVariable (variable keys).
    """
    def test_duplicate_keys(self):
        self.flakes(
            "{'yes': 1, 'yes': 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    @skipIf(version_info < (3,),
            "bytes and strings with same 'value' are not equal in python3")
    def test_duplicate_keys_bytes_vs_unicode_py3(self):
        self.flakes("{b'a': 1, u'a': 2}")
    @skipIf(version_info < (3,),
            "bytes and strings with same 'value' are not equal in python3")
    def test_duplicate_values_bytes_vs_unicode_py3(self):
        self.flakes(
            "{1: b'a', 1: u'a'}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    @skipIf(version_info >= (3,),
            "bytes and strings with same 'value' are equal in python2")
    def test_duplicate_keys_bytes_vs_unicode_py2(self):
        self.flakes(
            "{b'a': 1, u'a': 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    @skipIf(version_info >= (3,),
            "bytes and strings with same 'value' are equal in python2")
    def test_duplicate_values_bytes_vs_unicode_py2(self):
        self.flakes("{1: b'a', 1: u'a'}")
    def test_multiple_duplicate_keys(self):
        self.flakes(
            "{'yes': 1, 'yes': 2, 'no': 2, 'no': 3}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_in_function(self):
        self.flakes(
            '''
            def f(thing):
                pass
            f({'yes': 1, 'yes': 2})
            ''',
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_in_lambda(self):
        self.flakes(
            "lambda x: {(0,1): 1, (0,1): 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_tuples(self):
        self.flakes(
            "{(0,1): 1, (0,1): 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    # (0, 1) and (0, 1.0) compare equal when used as keys, so they collide.
    def test_duplicate_keys_tuples_int_and_float(self):
        self.flakes(
            "{(0,1): 1, (0,1.0): 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_ints(self):
        self.flakes(
            "{1: 1, 1: 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_bools(self):
        self.flakes(
            "{True: 1, True: 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_bools_false(self):
        # Needed to ensure 2.x correctly coerces these from variables
        self.flakes(
            "{False: 1, False: 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_keys_none(self):
        self.flakes(
            "{None: 1, None: 2}",
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_variable_keys(self):
        self.flakes(
            '''
            a = 1
            {a: 1, a: 2}
            ''',
            m.MultiValueRepeatedKeyVariable,
            m.MultiValueRepeatedKeyVariable,
        )
    def test_duplicate_variable_values(self):
        self.flakes(
            '''
            a = 1
            b = 2
            {1: a, 1: b}
            ''',
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_variable_values_same_value(self):
        # Current behaviour is not to look up variable values. This is to
        # confirm that.
        self.flakes(
            '''
            a = 1
            b = 1
            {1: a, 1: b}
            ''',
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    def test_duplicate_key_float_and_int(self):
        """
        These do look like different values, but when it comes to their use as
        keys, they compare as equal and so are actually duplicates.
        The literal dict {1: 1, 1.0: 1} actually becomes {1.0: 1}.
        """
        self.flakes(
            '''
            {1: 1, 1.0: 2}
            ''',
            m.MultiValueRepeatedKeyLiteral,
            m.MultiValueRepeatedKeyLiteral,
        )
    # Repeating a key with an identical value is not reported.
    def test_no_duplicate_key_error_same_value(self):
        self.flakes('''
        {'yes': 1, 'yes': 1}
        ''')
    def test_no_duplicate_key_errors(self):
        self.flakes('''
        {'yes': 1, 'no': 2}
        ''')
    def test_no_duplicate_keys_tuples_same_first_element(self):
        self.flakes("{(0,1): 1, (0,2): 1}")
    def test_no_duplicate_key_errors_func_call(self):
        self.flakes('''
        def test(thing):
            pass
        test({True: 1, None: 2, False: 1})
        ''')
    def test_no_duplicate_key_errors_bool_or_none(self):
        self.flakes("{True: 1, None: 2, False: 1}")
    def test_no_duplicate_key_errors_ints(self):
        self.flakes('''
        {1: 1, 2: 1}
        ''')
    def test_no_duplicate_key_errors_vars(self):
        self.flakes('''
        test = 'yes'
        rest = 'yes'
        {test: 1, rest: 2}
        ''')
    def test_no_duplicate_key_errors_tuples(self):
        self.flakes('''
        {(0,1): 1, (0,2): 1}
        ''')
    def test_no_duplicate_key_errors_instance_attributes(self):
        self.flakes('''
        class Test():
            pass
        f = Test()
        f.a = 1
        {f.a: 1, f.a: 1}
        ''')
| 6,050 | Python | .py | 182 | 23.434066 | 78 | 0.546685 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,321 | test_return_with_arguments_inside_generator.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_return_with_arguments_inside_generator.py |
from sys import version_info
from pyflakes import messages as m
from pyflakes.test.harness import TestCase, skipIf
class Test(TestCase):
    """
    Tests for the ReturnWithArgsInsideGenerator check.
    Every case is skipped on Python >= 3.3, where returning a value from a
    generator became legal ('new in Python 3.3' per the skip condition).
    """
    @skipIf(version_info >= (3, 3), 'new in Python 3.3')
    def test_return(self):
        # 'yield' makes b() a generator; 'return a' then returns a value.
        self.flakes('''
        class a:
            def b():
                for x in a.c:
                    if x:
                        yield x
                return a
        ''', m.ReturnWithArgsInsideGenerator)
    @skipIf(version_info >= (3, 3), 'new in Python 3.3')
    def test_returnNone(self):
        # An explicit 'return None' also counts as returning a value.
        self.flakes('''
        def a():
            yield 12
            return None
        ''', m.ReturnWithArgsInsideGenerator)
    @skipIf(version_info >= (3, 3), 'new in Python 3.3')
    def test_returnYieldExpression(self):
        # Returning the result of a yield expression is flagged as well.
        self.flakes('''
        def a():
            b = yield a
            return b
        ''', m.ReturnWithArgsInsideGenerator)
28,322 | test_doctests.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_doctests.py | import sys
import textwrap
from pyflakes import messages as m
from pyflakes.checker import (
DoctestScope,
FunctionScope,
ModuleScope,
)
from pyflakes.test.test_other import Test as TestOther
from pyflakes.test.test_imports import Test as TestImports
from pyflakes.test.test_undefined_names import Test as TestUndefinedNames
from pyflakes.test.harness import TestCase, skip
try:
sys.pypy_version_info
PYPY = True
except AttributeError:
PYPY = False
class _DoctestMixin(object):
withDoctest = True
def doctestify(self, input):
lines = []
for line in textwrap.dedent(input).splitlines():
if line.strip() == '':
pass
elif (line.startswith(' ') or
line.startswith('except:') or
line.startswith('except ') or
line.startswith('finally:') or
line.startswith('else:') or
line.startswith('elif ') or
(lines and lines[-1].startswith(('>>> @', '... @')))):
line = "... %s" % line
else:
line = ">>> %s" % line
lines.append(line)
doctestificator = textwrap.dedent('''\
def doctest_something():
"""
%s
"""
''')
return doctestificator % "\n ".join(lines)
def flakes(self, input, *args, **kw):
return super(_DoctestMixin, self).flakes(self.doctestify(input), *args, **kw)
class Test(TestCase):
withDoctest = True
def test_scope_class(self):
"""Check that a doctest is given a DoctestScope."""
checker = self.flakes("""
m = None
def doctest_stuff():
'''
>>> d = doctest_stuff()
'''
f = m
return f
""")
scopes = checker.deadScopes
module_scopes = [
scope for scope in scopes if scope.__class__ is ModuleScope]
doctest_scopes = [
scope for scope in scopes if scope.__class__ is DoctestScope]
function_scopes = [
scope for scope in scopes if scope.__class__ is FunctionScope]
self.assertEqual(len(module_scopes), 1)
self.assertEqual(len(doctest_scopes), 1)
module_scope = module_scopes[0]
doctest_scope = doctest_scopes[0]
self.assertIsInstance(doctest_scope, DoctestScope)
self.assertIsInstance(doctest_scope, ModuleScope)
self.assertNotIsInstance(doctest_scope, FunctionScope)
self.assertNotIsInstance(module_scope, DoctestScope)
self.assertIn('m', module_scope)
self.assertIn('doctest_stuff', module_scope)
self.assertIn('d', doctest_scope)
self.assertEqual(len(function_scopes), 1)
self.assertIn('f', function_scopes[0])
def test_nested_doctest_ignored(self):
"""Check that nested doctests are ignored."""
checker = self.flakes("""
m = None
def doctest_stuff():
'''
>>> def function_in_doctest():
... \"\"\"
... >>> ignored_undefined_name
... \"\"\"
... df = m
... return df
...
>>> function_in_doctest()
'''
f = m
return f
""")
scopes = checker.deadScopes
module_scopes = [
scope for scope in scopes if scope.__class__ is ModuleScope]
doctest_scopes = [
scope for scope in scopes if scope.__class__ is DoctestScope]
function_scopes = [
scope for scope in scopes if scope.__class__ is FunctionScope]
self.assertEqual(len(module_scopes), 1)
self.assertEqual(len(doctest_scopes), 1)
module_scope = module_scopes[0]
doctest_scope = doctest_scopes[0]
self.assertIn('m', module_scope)
self.assertIn('doctest_stuff', module_scope)
self.assertIn('function_in_doctest', doctest_scope)
self.assertEqual(len(function_scopes), 2)
self.assertIn('f', function_scopes[0])
self.assertIn('df', function_scopes[1])
def test_global_module_scope_pollution(self):
"""Check that global in doctest does not pollute module scope."""
checker = self.flakes("""
def doctest_stuff():
'''
>>> def function_in_doctest():
... global m
... m = 50
... df = 10
... m = df
...
>>> function_in_doctest()
'''
f = 10
return f
""")
scopes = checker.deadScopes
module_scopes = [
scope for scope in scopes if scope.__class__ is ModuleScope]
doctest_scopes = [
scope for scope in scopes if scope.__class__ is DoctestScope]
function_scopes = [
scope for scope in scopes if scope.__class__ is FunctionScope]
self.assertEqual(len(module_scopes), 1)
self.assertEqual(len(doctest_scopes), 1)
module_scope = module_scopes[0]
doctest_scope = doctest_scopes[0]
self.assertIn('doctest_stuff', module_scope)
self.assertIn('function_in_doctest', doctest_scope)
self.assertEqual(len(function_scopes), 2)
self.assertIn('f', function_scopes[0])
self.assertIn('df', function_scopes[1])
self.assertIn('m', function_scopes[1])
self.assertNotIn('m', module_scope)
def test_global_undefined(self):
self.flakes("""
global m
def doctest_stuff():
'''
>>> m
'''
""", m.UndefinedName)
def test_nested_class(self):
"""Doctest within nested class are processed."""
self.flakes("""
class C:
class D:
'''
>>> m
'''
def doctest_stuff(self):
'''
>>> m
'''
return 1
""", m.UndefinedName, m.UndefinedName)
def test_ignore_nested_function(self):
"""Doctest module does not process doctest in nested functions."""
# 'syntax error' would cause a SyntaxError if the doctest was processed.
# However doctest does not find doctest in nested functions
# (https://bugs.python.org/issue1650090). If nested functions were
# processed, this use of m should cause UndefinedName, and the
# name inner_function should probably exist in the doctest scope.
self.flakes("""
def doctest_stuff():
def inner_function():
'''
>>> syntax error
>>> inner_function()
1
>>> m
'''
return 1
m = inner_function()
return m
""")
def test_inaccessible_scope_class(self):
"""Doctest may not access class scope."""
self.flakes("""
class C:
def doctest_stuff(self):
'''
>>> m
'''
return 1
m = 1
""", m.UndefinedName)
def test_importBeforeDoctest(self):
self.flakes("""
import foo
def doctest_stuff():
'''
>>> foo
'''
""")
@skip("todo")
def test_importBeforeAndInDoctest(self):
self.flakes('''
import foo
def doctest_stuff():
"""
>>> import foo
>>> foo
"""
foo
''', m.RedefinedWhileUnused)
def test_importInDoctestAndAfter(self):
self.flakes('''
def doctest_stuff():
"""
>>> import foo
>>> foo
"""
import foo
foo()
''')
def test_offsetInDoctests(self):
exc = self.flakes('''
def doctest_stuff():
"""
>>> x # line 5
"""
''', m.UndefinedName).messages[0]
self.assertEqual(exc.lineno, 5)
self.assertEqual(exc.col, 12)
def test_offsetInLambdasInDoctests(self):
exc = self.flakes('''
def doctest_stuff():
"""
>>> lambda: x # line 5
"""
''', m.UndefinedName).messages[0]
self.assertEqual(exc.lineno, 5)
self.assertEqual(exc.col, 20)
    def test_offsetAfterDoctests(self):
        """
        A warning for code that follows a doctest is reported at the code's
        real position in the source (here line 8, column 0), i.e. it is not
        shifted by the doctest handling.
        """
        exc = self.flakes('''
        def doctest_stuff():
            """
            >>> x = 5
            """
        x
        ''', m.UndefinedName).messages[0]
        self.assertEqual(exc.lineno, 8)
        self.assertEqual(exc.col, 0)
def test_syntaxErrorInDoctest(self):
exceptions = self.flakes(
'''
def doctest_stuff():
"""
>>> from # line 4
>>> fortytwo = 42
>>> except Exception:
"""
''',
m.DoctestSyntaxError,
m.DoctestSyntaxError,
m.DoctestSyntaxError).messages
exc = exceptions[0]
self.assertEqual(exc.lineno, 4)
if PYPY:
self.assertEqual(exc.col, 27)
elif sys.version_info >= (3, 8):
self.assertEqual(exc.col, 18)
else:
self.assertEqual(exc.col, 26)
# PyPy error column offset is 0,
# for the second and third line of the doctest
# i.e. at the beginning of the line
exc = exceptions[1]
self.assertEqual(exc.lineno, 5)
if PYPY:
self.assertEqual(exc.col, 14)
else:
self.assertEqual(exc.col, 16)
exc = exceptions[2]
self.assertEqual(exc.lineno, 6)
if PYPY:
self.assertEqual(exc.col, 14)
elif sys.version_info >= (3, 8):
self.assertEqual(exc.col, 13)
else:
self.assertEqual(exc.col, 18)
def test_indentationErrorInDoctest(self):
exc = self.flakes('''
def doctest_stuff():
"""
>>> if True:
... pass
"""
''', m.DoctestSyntaxError).messages[0]
self.assertEqual(exc.lineno, 5)
if PYPY:
self.assertEqual(exc.col, 14)
elif sys.version_info >= (3, 8):
self.assertEqual(exc.col, 13)
else:
self.assertEqual(exc.col, 16)
def test_offsetWithMultiLineArgs(self):
(exc1, exc2) = self.flakes(
'''
def doctest_stuff(arg1,
arg2,
arg3):
"""
>>> assert
>>> this
"""
''',
m.DoctestSyntaxError,
m.UndefinedName).messages
self.assertEqual(exc1.lineno, 6)
if PYPY:
self.assertEqual(exc1.col, 20)
else:
self.assertEqual(exc1.col, 19)
self.assertEqual(exc2.lineno, 7)
self.assertEqual(exc2.col, 12)
def test_doctestCanReferToFunction(self):
self.flakes("""
def foo():
'''
>>> foo
'''
""")
def test_doctestCanReferToClass(self):
self.flakes("""
class Foo():
'''
>>> Foo
'''
def bar(self):
'''
>>> Foo
'''
""")
def test_noOffsetSyntaxErrorInDoctest(self):
exceptions = self.flakes(
'''
def buildurl(base, *args, **kwargs):
"""
>>> buildurl('/blah.php', ('a', '&'), ('b', '=')
'/blah.php?a=%26&b=%3D'
>>> buildurl('/blah.php', a='&', 'b'='=')
'/blah.php?b=%3D&a=%26'
"""
pass
''',
m.DoctestSyntaxError,
m.DoctestSyntaxError).messages
exc = exceptions[0]
self.assertEqual(exc.lineno, 4)
exc = exceptions[1]
self.assertEqual(exc.lineno, 6)
def test_singleUnderscoreInDoctest(self):
self.flakes('''
def func():
"""A docstring
>>> func()
1
>>> _
1
"""
return 1
''')
def test_globalUnderscoreInDoctest(self):
self.flakes("""
from gettext import ugettext as _
def doctest_stuff():
'''
>>> pass
'''
""", m.UnusedImport)
# Deliberately shadows the imported TestOther: the mixin's flakes() wraps
# every fixture via doctestify() before running the inherited assertions.
class TestOther(_DoctestMixin, TestOther):
    """Run TestOther with each test wrapped in a doctest."""
# Deliberately shadows the imported TestImports; see _DoctestMixin.flakes().
class TestImports(_DoctestMixin, TestImports):
    """Run TestImports with each test wrapped in a doctest."""
# Deliberately shadows the imported TestUndefinedNames; see _DoctestMixin.flakes().
class TestUndefinedNames(_DoctestMixin, TestUndefinedNames):
    """Run TestUndefinedNames with each test wrapped in a doctest."""
| 13,193 | Python | .py | 390 | 22.412821 | 85 | 0.5044 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,323 | test_checker.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/test/test_checker.py | import ast
import sys
from pyflakes import checker
from pyflakes.test.harness import TestCase, skipIf
class TypeableVisitorTests(TestCase):
    """
    Tests of L{_TypeableVisitor}

    The visitor records, keyed by 1-based source line, the node that may
    carry a type comment; the last typeable node on a line wins
    (see test_last_node_wins).
    """
    @staticmethod
    def _run_visitor(s):
        """
        Run L{_TypeableVisitor} on the parsed source and return the visitor.
        """
        tree = ast.parse(s)
        visitor = checker._TypeableVisitor()
        visitor.visit(tree)
        return visitor
    def test_node_types(self):
        """
        Test that the typeable node types are collected
        """
        visitor = self._run_visitor(
            """\
x = 1 # assignment
for x in range(1): pass # for loop
def f(): pass # function definition
with a as b: pass # with statement
"""
        )
        self.assertEqual(visitor.typeable_lines, [1, 2, 3, 4])
        self.assertIsInstance(visitor.typeable_nodes[1], ast.Assign)
        self.assertIsInstance(visitor.typeable_nodes[2], ast.For)
        self.assertIsInstance(visitor.typeable_nodes[3], ast.FunctionDef)
        self.assertIsInstance(visitor.typeable_nodes[4], ast.With)
    def test_visitor_recurses(self):
        """
        Test the common pitfall of missing `generic_visit` in visitors by
        ensuring that nested nodes are reported
        """
        visitor = self._run_visitor(
            """\
def f():
    x = 1
"""
        )
        self.assertEqual(visitor.typeable_lines, [1, 2])
        self.assertIsInstance(visitor.typeable_nodes[1], ast.FunctionDef)
        self.assertIsInstance(visitor.typeable_nodes[2], ast.Assign)
    @skipIf(sys.version_info < (3, 5), 'async syntax introduced in py35')
    def test_py35_node_types(self):
        """
        Test that the PEP 492 node types are collected
        """
        visitor = self._run_visitor(
            """\
async def f(): # async def
    async for x in y: pass # async for
    async with a as b: pass # async with
"""
        )
        self.assertEqual(visitor.typeable_lines, [1, 2, 3])
        self.assertIsInstance(visitor.typeable_nodes[1], ast.AsyncFunctionDef)
        self.assertIsInstance(visitor.typeable_nodes[2], ast.AsyncFor)
        self.assertIsInstance(visitor.typeable_nodes[3], ast.AsyncWith)
    def test_last_node_wins(self):
        """
        Test that when two typeable nodes are present on a line, the last
        typeable one wins.
        """
        visitor = self._run_visitor('x = 1; y = 1')
        # detected both assignable nodes
        self.assertEqual(visitor.typeable_lines, [1, 1])
        # but the assignment to `y` wins
        self.assertEqual(visitor.typeable_nodes[1].targets[0].id, 'y')
class CollectTypeCommentsTests(TestCase):
    """
    Tests of L{_collect_type_comments}
    """
    @staticmethod
    def _collect(s):
        """
        Run L{_collect_type_comments} on the parsed source and return the
        mapping from nodes to comments. The return value is converted to
        a set: {(node_type, tuple of comments), ...}
        """
        tree = ast.parse(s)
        tokens = checker.make_tokens(s)
        ret = checker._collect_type_comments(tree, tokens)
        # Keep only the comment text (drop token positions) for easy comparison.
        return {(type(k), tuple(s for _, s in v)) for k, v in ret.items()}
    def test_bytes(self):
        """
        Test that the function works for binary source
        """
        ret = self._collect(b'x = 1 # type: int')
        self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
    def test_text(self):
        """
        Test that the function works for text source
        """
        ret = self._collect(u'x = 1 # type: int')
        self.assertEqual(ret, {(ast.Assign, ('# type: int',))})
    def test_non_type_comment_ignored(self):
        """
        Test that a non-type comment is ignored
        """
        ret = self._collect('x = 1 # noqa')
        self.assertSetEqual(ret, set())
    def test_type_comment_before_typeable(self):
        """
        Test that a type comment before something typeable is ignored.
        """
        ret = self._collect('# type: int\nx = 1')
        self.assertSetEqual(ret, set())
    def test_type_ignore_comment_ignored(self):
        """
        Test that `# type: ignore` comments are not collected.
        """
        ret = self._collect('x = 1 # type: ignore')
        self.assertSetEqual(ret, set())
    def test_type_ignore_with_other_things_ignored(self):
        """
        Test that `# type: ignore` comments with more content are also not
        collected.
        """
        ret = self._collect('x = 1 # type: ignore # noqa')
        self.assertSetEqual(ret, set())
        ret = self._collect('x = 1 #type:ignore#noqa')
        self.assertSetEqual(ret, set())
    def test_type_comment_with_extra_still_collected(self):
        """
        Test that trailing text after a real type comment is kept with it.
        """
        ret = self._collect('x = 1 # type: int # noqa')
        self.assertSetEqual(ret, {(ast.Assign, ('# type: int # noqa',))})
    def test_type_comment_without_whitespace(self):
        """
        Test that `#type:int` (no spaces) is still collected.
        """
        ret = self._collect('x = 1 #type:int')
        self.assertSetEqual(ret, {(ast.Assign, ('#type:int',))})
    def test_type_comment_starts_with_word_ignore(self):
        """
        Test that `# type: ignore[...]` is treated as an ignore, not a type.
        """
        ret = self._collect('x = 1 # type: ignore[T]')
        self.assertSetEqual(ret, set())
    def test_last_node_wins(self):
        """
        Test that when two typeable nodes are present on a line, the last
        typeable one wins.
        """
        ret = self._collect('def f(): x = 1 # type: int')
        self.assertSetEqual(ret, {(ast.Assign, ('# type: int',))})
    def test_function_def_assigned_comments(self):
        """
        Test that type comments for function arguments are all attributed to
        the function definition.
        """
        ret = self._collect(
            """\
def f(
    a, # type: int
    b, # type: str
):
    # type: (...) -> None
    pass
"""
        )
        expected = {(
            ast.FunctionDef,
            ('# type: int', '# type: str', '# type: (...) -> None'),
        )}
        self.assertSetEqual(ret, expected)
| 6,014 | Python | .py | 164 | 29.02439 | 78 | 0.59523 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,324 | pyflakes.py | DamnWidget_anaconda/anaconda_lib/linting/pyflakes/scripts/pyflakes.py | """
Implementation of the command-line I{pyflakes} tool.
"""
from __future__ import absolute_import
# For backward compatibility
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
from pyflakes.api import check, checkPath, checkRecursive, iterSourceCode, main
| 287 | Python | .py | 7 | 39.857143 | 79 | 0.763441 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,325 | __main__.py | DamnWidget_anaconda/anaconda_lib/jedi/__main__.py | import sys
from os.path import join, dirname, abspath, isdir
def _start_linter():
"""
This is a pre-alpha API. You're not supposed to use it at all, except for
testing. It will very likely change.
"""
import jedi
if '--debug' in sys.argv:
jedi.set_debug_function()
for path in sys.argv[2:]:
if path.startswith('--'):
continue
if isdir(path):
import fnmatch
import os
paths = []
for root, dirnames, filenames in os.walk(path):
for filename in fnmatch.filter(filenames, '*.py'):
paths.append(os.path.join(root, filename))
else:
paths = [path]
try:
for p in paths:
for error in jedi.Script(path=p)._analysis():
print(error)
except Exception:
if '--pdb' in sys.argv:
import traceback
traceback.print_exc()
import pdb
pdb.post_mortem()
else:
raise
def _complete():
    """Complete the source given as ``sys.argv[2]`` and print the result.

    With ``-d`` as an extra argument, jedi's debug output is enabled.
    On failure the exception is printed and a post-mortem debugger starts.
    """
    import jedi
    import pdb
    if '-d' in sys.argv:
        sys.argv.remove('-d')
        jedi.set_debug_function()
    try:
        completions = jedi.Script(sys.argv[2]).complete()
        for c in completions:
            # Touch docstring/type so any lazy computation errors surface.
            c.docstring()
            c.type
    except Exception as e:
        print(repr(e))
        pdb.post_mortem()
    else:
        print(completions)
if len(sys.argv) == 2 and sys.argv[1] == 'repl':
    # don't want to use __main__ only for repl yet, maybe we want to use it for
    # something else. So just use the keyword ``repl`` for now.
    print(join(dirname(abspath(__file__)), 'api', 'replstartup.py'))
elif len(sys.argv) > 1 and sys.argv[1] == '_linter':
    _start_linter()
elif len(sys.argv) > 1 and sys.argv[1] == '_complete':
    _complete()
elif len(sys.argv) > 1:
    print('Command not implemented: %s' % sys.argv[1])
else:
    # Previously this branch read sys.argv[1] unconditionally, which raised
    # an IndexError when the module was run with no arguments at all.
    print('Command not implemented.')
| 1,950 | Python | .py | 60 | 23.566667 | 79 | 0.551651 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,326 | parser_utils.py | DamnWidget_anaconda/anaconda_lib/jedi/parser_utils.py | import re
import textwrap
from ast import literal_eval
from inspect import cleandoc
from weakref import WeakKeyDictionary
from parso.python import tree
from parso.cache import parser_cache
from parso import split_lines
# Node types that may directly contain executable code; consulted by
# get_executable_nodes() while walking the parso tree.
_EXECUTE_NODES = {'funcdef', 'classdef', 'import_from', 'import_name', 'test',
                  'or_test', 'and_test', 'not_test', 'comparison', 'expr',
                  'xor_expr', 'and_expr', 'shift_expr', 'arith_expr',
                  'atom_expr', 'term', 'factor', 'power', 'atom'}
# Keyword leaves that introduce a branch of a flow statement; used by
# get_flow_branch_keyword().
_FLOW_KEYWORDS = (
    'try', 'except', 'finally', 'else', 'if', 'elif', 'with', 'for', 'while'
)
def get_executable_nodes(node, last_added=False):
    """
    For static analysis.

    Recursively collect the nodes below ``node`` that represent executable
    code (name reads, expression statements, decorator arguments, ...).

    :param last_added: Internal flag; True when an enclosing node was
        already added to the result, so children are not re-added.
    """
    result = []
    typ = node.type
    if typ == 'name':
        next_leaf = node.get_next_leaf()
        # Only plain name reads count; skip params and assignment targets.
        if last_added is False and node.parent.type != 'param' and next_leaf != '=':
            result.append(node)
    elif typ == 'expr_stmt':
        # I think inferring the statement (and possibly returned arrays),
        # should be enough for static analysis.
        result.append(node)
        for child in node.children:
            result += get_executable_nodes(child, last_added=True)
    elif typ == 'decorator':
        # decorator
        if node.children[-2] == ')':
            # Decorator call: analyze the argument node between the parens.
            node = node.children[-3]
            if node != '(':
                result += get_executable_nodes(node)
    else:
        try:
            children = node.children
        except AttributeError:
            # Leaf without children; nothing executable here.
            pass
        else:
            if node.type in _EXECUTE_NODES and not last_added:
                result.append(node)
            for child in children:
                result += get_executable_nodes(child, last_added)
    return result
def get_sync_comp_fors(comp_for):
    """Yield *comp_for* and every (sync) comprehension "for" nested in it.

    Async ``comp_for`` nodes contribute their inner ``sync_comp_for`` child.
    The walk follows the chain of trailing children and stops at the first
    node that is neither a comprehension "for" nor a comprehension "if".
    """
    yield comp_for
    current = comp_for.children[-1]
    while True:
        node_type = current.type
        if node_type == 'comp_for':
            # An async comprehension; skip the leading ``async`` keyword.
            yield current.children[1]
        elif node_type == 'sync_comp_for':
            yield current
        elif node_type != 'comp_if':
            break
        current = current.children[-1]
def for_stmt_defines_one_name(for_stmt):
    """Check whether a ``for`` statement binds exactly one plain name.

    ``for x in y`` -> True; ``for x, z in y`` -> False.

    :returns: bool
    """
    target = for_stmt.children[1]
    return target.type == 'name'
def get_flow_branch_keyword(flow_node, node):
    """Return the last flow keyword leaf (``if``/``else``/``except``, ...)
    of *flow_node* that starts before *node*, i.e. the branch the node
    belongs to, or None when no keyword precedes it.

    :raises ValueError: if *node* is not located within *flow_node*.
    """
    start_pos = node.start_pos
    if not (flow_node.start_pos < start_pos <= flow_node.end_pos):
        raise ValueError('The node is not part of the flow.')
    keyword = None
    for i, child in enumerate(flow_node.children):
        if start_pos < child.start_pos:
            # We walked past the node; the last keyword seen owns the branch.
            return keyword
        first_leaf = child.get_first_leaf()
        if first_leaf in _FLOW_KEYWORDS:
            keyword = first_leaf
    return None
def clean_scope_docstring(scope_node):
    """Return the cleaned docstring of *scope_node*, or '' if it has none."""
    node = scope_node.get_doc_node()
    if node is not None:
        # TODO We have to check next leaves until there are no new
        # leaves anymore that might be part of the docstring. A
        # docstring can also look like this: ``'foo' 'bar'``.
        # Returns a literal cleaned version of the ``Token``.
        return cleandoc(safe_literal_eval(node.value))
    return ''
def find_statement_documentation(tree_node):
    """Return the documentation string following an expression statement.

    A bare string literal in the statement right after ``x = ...`` is
    treated as documentation for that assignment (the module-attribute
    docstring convention). Returns '' when there is none.
    """
    if tree_node.type == 'expr_stmt':
        tree_node = tree_node.parent  # simple_stmt
        maybe_string = tree_node.get_next_sibling()
        if maybe_string is not None:
            if maybe_string.type == 'simple_stmt':
                maybe_string = maybe_string.children[0]
            if maybe_string.type == 'string':
                return cleandoc(safe_literal_eval(maybe_string.value))
    return ''
def safe_literal_eval(value):
    """Evaluate a literal token's string, returning '' for f-literals.

    ``ast.literal_eval`` is not able to resolve f-strings (they would need
    real expression evaluation), so any prefix containing ``f`` — ``f``,
    ``fr``, ``rf``, in any case — yields the empty string instead.
    """
    prefix = value[:2].lower()
    if prefix.startswith('f') or prefix in ('fr', 'rf'):
        return ''
    return literal_eval(value)
def get_signature(funcdef, width=72, call_string=None,
                  omit_first_param=False, omit_return_annotation=False):
    """
    Generate a string signature of a function.

    :param width: Fold lines if a line is longer than this value.
    :type width: int
    :param call_string: Override the displayed name when given; defaults to
        the funcdef's own name (``<lambda>`` for lambdas).
    :param omit_first_param: Drop the first parameter (e.g. ``self``).
    :param omit_return_annotation: Leave out the ``->`` return annotation.
    :rtype: str
    """
    # Lambdas have no name.
    if call_string is None:
        if funcdef.type == 'lambdef':
            call_string = '<lambda>'
        else:
            call_string = funcdef.name.value
    params = funcdef.get_params()
    if omit_first_param:
        params = params[1:]
    p = '(' + ''.join(param.get_code() for param in params).strip() + ')'
    # TODO this is pretty bad, we should probably just normalize.
    p = re.sub(r'\s+', ' ', p)
    if funcdef.annotation and not omit_return_annotation:
        rtype = " ->" + funcdef.annotation.get_code()
    else:
        rtype = ""
    code = call_string + p + rtype
    return '\n'.join(textwrap.wrap(code, width))
def move(node, line_offset):
    """Shift the line positions of *node* and all of its leaves in place."""
    children = getattr(node, 'children', None)
    if children is None:
        # A leaf: only leaves store an explicit line number.
        node.line += line_offset
    else:
        for child in children:
            move(child, line_offset)
def get_following_comment_same_line(node):
    """
    returns (as string) any comment that appears on the same line,
    after the node, including the #
    """
    try:
        if node.type == 'for_stmt':
            # The trailing comment lives in the prefix of the first leaf
            # of the statement's body.
            whitespace = node.children[5].get_first_leaf().prefix
        elif node.type == 'with_stmt':
            whitespace = node.children[3].get_first_leaf().prefix
        elif node.type == 'funcdef':
            # actually on the next line
            whitespace = node.children[4].get_first_leaf().get_next_leaf().prefix
        else:
            whitespace = node.get_last_leaf().get_next_leaf().prefix
    except AttributeError:
        return None
    except ValueError:
        # TODO in some particular cases, the tree doesn't seem to be linked
        # correctly
        return None
    if "#" not in whitespace:
        return None
    comment = whitespace[whitespace.index("#"):]
    # Cut the comment off at the end of the line (either line-ending style).
    if "\r" in comment:
        comment = comment[:comment.index("\r")]
    if "\n" in comment:
        comment = comment[:comment.index("\n")]
    return comment
def is_scope(node):
    """Return True if *node* opens its own scope: a module, class,
    function, lambda or comprehension."""
    node_type = node.type
    if node_type == 'comp_for':
        # Starting with Python 3.8, async is outside of the statement.
        return node.children[1].type != 'sync_comp_for'
    return node_type in (
        'file_input', 'classdef', 'funcdef', 'lambdef', 'sync_comp_for'
    )
def _get_parent_scope_cache(func):
cache = WeakKeyDictionary()
def wrapper(parso_cache_node, node, include_flows=False):
if parso_cache_node is None:
return func(node, include_flows)
try:
for_module = cache[parso_cache_node]
except KeyError:
for_module = cache[parso_cache_node] = {}
try:
return for_module[node]
except KeyError:
result = for_module[node] = func(node, include_flows)
return result
return wrapper
def get_parent_scope(node, include_flows=False):
    """
    Returns the underlying scope.

    Walks up the parents of *node* until a scope node (module, class,
    function, lambda, comprehension) is found; with ``include_flows`` a
    flow node (``if``/``for``/...) may be returned instead. Returns None
    when *node* is the module itself.
    """
    scope = node.parent
    if scope is None:
        return None  # It's a module already.
    while True:
        if is_scope(scope):
            if scope.type in ('classdef', 'funcdef', 'lambdef'):
                index = scope.children.index(':')
                if scope.children[index].start_pos >= node.start_pos:
                    # The node lies in the header (params/bases), which
                    # belongs to the surrounding scope -- except for the
                    # parameter names themselves.
                    if node.parent.type == 'param' and node.parent.name == node:
                        pass
                    elif node.parent.type == 'tfpdef' and node.parent.children[0] == node:
                        pass
                    else:
                        scope = scope.parent
                        continue
            return scope
        elif include_flows and isinstance(scope, tree.Flow):
            # The cursor might be on `if foo`, so the parent scope will not be
            # the if, but the parent of the if.
            if not (scope.type == 'if_stmt'
                    and any(n.start_pos <= node.start_pos < n.end_pos
                            for n in scope.get_test_nodes())):
                return scope
        scope = scope.parent
# Memoized variant of get_parent_scope, keyed by the module's parso cache
# node so results are invalidated together with the parsed module.
get_cached_parent_scope = _get_parent_scope_cache(get_parent_scope)
def get_cached_code_lines(grammar, path):
    """
    Basically access the cached code lines in parso. This is not the nicest way
    to do this, but we avoid splitting all the lines again.

    :param grammar: The parso grammar the module was parsed with.
    :param path: The path under which the module is stored in parso's cache.
    """
    return get_parso_cache_node(grammar, path).lines
def get_parso_cache_node(grammar, path):
    """
    This is of course not public. But as long as I control parso, this
    shouldn't be a problem. ~ Dave

    The reason for this is mostly caching. This is obviously also a sign of a
    broken caching architecture.
    """
    # parser_cache is parso's internal dict: grammar hash -> path -> node.
    return parser_cache[grammar._hashed][path]
def cut_value_at_position(leaf, position):
    """Cut off the leaf's value at the given (line, column) position.

    Returns the part of ``leaf.value`` that lies before *position*.
    """
    lines = split_lines(leaf.value, keepends=True)[:position[0] - leaf.line + 1]
    column = position[1]
    if leaf.line == position[0]:
        # Position is on the leaf's first line: make the column relative
        # to where the leaf starts.
        column -= leaf.column
    if not lines:
        return ''
    lines[-1] = lines[-1][:column]
    return ''.join(lines)
def expr_is_dotted(node):
    """
    Checks if a path looks like `name` or `name.foo.bar` and not `name()`.
    """
    node_type = node.type
    if node_type == 'name':
        return True
    if node_type == 'atom':
        # Only a parenthesized dotted expression like ``(a.b)`` qualifies.
        children = node.children
        if len(children) == 3 and children[0] == '(':
            return expr_is_dotted(children[1])
        return False
    if node_type == 'atom_expr':
        children = node.children
        if children[0] == 'await':
            return False
        if not expr_is_dotted(children[0]):
            return False
        # Every trailer must be an attribute access (``.name``), not a call.
        return all(trailer.children[0] == '.' for trailer in children[1:])
    return False
def _function_is_x_method(*method_names):
    """Build a predicate that checks whether a funcdef is decorated with
    one of *method_names*."""
    def wrapper(function_node):
        """
        This is a heuristic. It will not hold ALL the times, but it will be
        correct pretty much for anyone that doesn't try to beat it.
        staticmethod/classmethod are builtins and unless overwritten, this will
        be correct.
        """
        for decorator in function_node.get_decorators():
            dotted_name = decorator.children[1]
            if dotted_name.get_code() in method_names:
                return True
        return False
    return wrapper
# Predicates for the common method decorators.
function_is_staticmethod = _function_is_x_method('staticmethod')
function_is_classmethod = _function_is_x_method('classmethod')
function_is_property = _function_is_x_method('property', 'cached_property')
| 10,900 | Python | .py | 283 | 30.385159 | 90 | 0.606497 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,327 | settings.py | DamnWidget_anaconda/anaconda_lib/jedi/settings.py | """
This module contains variables with global |jedi| settings. To change the
behavior of |jedi|, change the variables defined in :mod:`jedi.settings`.
Plugins should expose an interface so that the user can adjust the
configuration.
Example usage::
from jedi import settings
settings.case_insensitive_completion = True
Completion output
~~~~~~~~~~~~~~~~~
.. autodata:: case_insensitive_completion
.. autodata:: add_bracket_after_function
Filesystem cache
~~~~~~~~~~~~~~~~
.. autodata:: cache_directory
Parser
~~~~~~
.. autodata:: fast_parser
Dynamic stuff
~~~~~~~~~~~~~
.. autodata:: dynamic_array_additions
.. autodata:: dynamic_params
.. autodata:: dynamic_params_for_other_modules
.. autodata:: auto_import_modules
Caching
~~~~~~~
.. autodata:: call_signatures_validity
"""
import os
import platform
# ----------------
# Completion Output Settings
# ----------------
case_insensitive_completion = True
"""
Completions are by default case insensitive.
"""
add_bracket_after_function = False
"""
Adds an opening bracket after a function for completions.
"""
# ----------------
# Filesystem Cache
# ----------------
if platform.system().lower() == 'windows':
    # Windows: %LOCALAPPDATA%\Jedi\Jedi (home directory as a fallback).
    _cache_directory = os.path.join(
        os.getenv('LOCALAPPDATA') or os.path.expanduser('~'),
        'Jedi',
        'Jedi',
    )
elif platform.system().lower() == 'darwin':
    _cache_directory = os.path.join('~', 'Library', 'Caches', 'Jedi')
else:
    # Linux/other: honor the XDG base directory specification.
    _cache_directory = os.path.join(os.getenv('XDG_CACHE_HOME') or '~/.cache',
                                    'jedi')
cache_directory = os.path.expanduser(_cache_directory)
"""
The path where the cache is stored.

On Linux, this defaults to ``~/.cache/jedi/``, on OS X to
``~/Library/Caches/Jedi/`` and on Windows to ``%LOCALAPPDATA%\\Jedi\\Jedi\\``.
On Linux, if the environment variable ``$XDG_CACHE_HOME`` is set,
``$XDG_CACHE_HOME/jedi`` is used instead of the default one.
"""
# ----------------
# Parser
# ----------------
fast_parser = True
"""
Uses Parso's diff parser. If it is enabled, this might cause issues, please
read the warning on :class:`.Script`. This feature makes it possible to only
parse the parts again that have changed, while reusing the rest of the syntax
tree.
"""
_cropped_file_size = int(10e6) # 1 Megabyte
"""
Jedi gets extremely slow if the file size exceed a few thousand lines.
To avoid getting stuck completely Jedi crops the file at some point.
One megabyte of typical Python code equals about 20'000 lines of code.
"""
# ----------------
# Dynamic Stuff
# ----------------
dynamic_array_additions = True
"""
check for `append`, etc. on arrays: [], {}, () as well as list/set calls.
"""
dynamic_params = True
"""
A dynamic param completion, finds the callees of the function, which define
the params of a function.
"""
dynamic_params_for_other_modules = True
"""
Do the same for other modules.
"""
dynamic_flow_information = True
"""
Check for `isinstance` and other information to infer a type.
"""
auto_import_modules = [
'gi', # This third-party repository (GTK stuff) doesn't really work with jedi
]
"""
Modules that will not be analyzed but imported, if they contain Python code.
This improves autocompletion for libraries that use ``setattr`` or
``globals()`` modifications a lot.
"""
# ----------------
# Caching Validity
# ----------------
call_signatures_validity = 3.0
"""
Finding function calls might be slow (0.1-0.5s). This is not acceptible for
normal writing. Therefore cache it for a short time.
"""
| 3,526 | Python | .py | 115 | 28.556522 | 82 | 0.68535 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,328 | file_io.py | DamnWidget_anaconda/anaconda_lib/jedi/file_io.py | import os
from parso import file_io
class AbstractFolderIO:
    """Interface for folder access; subclasses define the actual storage."""
    def __init__(self, path):
        self.path = path
    def get_base_name(self):
        """Return the folder's base name (last path component)."""
        raise NotImplementedError
    def list(self):
        """Return the names of the entries contained in the folder."""
        raise NotImplementedError
    def get_file_io(self, name):
        """Return a file-IO object for *name* inside this folder."""
        raise NotImplementedError
    def get_parent_folder(self):
        """Return the folder-IO of the parent directory."""
        raise NotImplementedError
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.path)
class FolderIO(AbstractFolderIO):
    """Folder access backed by the local file system."""
    def get_base_name(self):
        return os.path.basename(self.path)
    def list(self):
        return os.listdir(self.path)
    def get_file_io(self, name):
        return FileIO(os.path.join(self.path, name))
    def get_parent_folder(self):
        return FolderIO(os.path.dirname(self.path))
    def walk(self):
        """Like os.walk, but yields FolderIO/FileIO objects.

        The yielded folder list may be mutated by the caller to prune the
        walk, mirroring the ``dirs`` contract of os.walk.
        """
        for root, dirs, files in os.walk(self.path):
            root_folder_io = FolderIO(root)
            original_folder_ios = [FolderIO(os.path.join(root, d)) for d in dirs]
            modified_folder_ios = list(original_folder_ios)
            yield (
                root_folder_io,
                modified_folder_ios,
                [FileIO(os.path.join(root, f)) for f in files],
            )
            # Propagate any pruning the caller did on modified_folder_ios
            # back to os.walk by deleting the corresponding names in dirs.
            modified_iterator = iter(reversed(modified_folder_ios))
            current = next(modified_iterator, None)
            i = len(original_folder_ios)
            for folder_io in reversed(original_folder_ios):
                i -= 1  # Basically enumerate but reversed
                if current is folder_io:
                    current = next(modified_iterator, None)
                else:
                    del dirs[i]
class FileIOFolderMixin:
    """Adds parent-folder navigation to file-IO classes."""
    def get_parent_folder(self):
        return FolderIO(os.path.dirname(self.path))
class ZipFileIO(file_io.KnownContentFileIO, FileIOFolderMixin):
    """For .zip and .egg archives"""
    def __init__(self, path, code, zip_path):
        super().__init__(path, code)
        # Keep the archive's own path: mtime checks must look at the
        # archive file, not the virtual path inside it.
        self._zip_path = zip_path
    def get_last_modified(self):
        """Return the archive's mtime, or None if it cannot be read."""
        try:
            return os.path.getmtime(self._zip_path)
        except (FileNotFoundError, PermissionError, NotADirectoryError):
            return None
class FileIO(file_io.FileIO, FileIOFolderMixin):
    """parso's on-disk FileIO extended with parent-folder navigation."""
    pass
class KnownContentFileIO(file_io.KnownContentFileIO, FileIOFolderMixin):
    """In-memory file content plus parent-folder navigation."""
    pass
| 2,337 | Python | .py | 60 | 29.833333 | 81 | 0.622005 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,329 | utils.py | DamnWidget_anaconda/anaconda_lib/jedi/utils.py | """
Utilities for end-users.
"""
import __main__ # type: ignore[import]
from collections import namedtuple
import logging
import traceback
import re
import os
import sys
from jedi import Interpreter
READLINE_DEBUG = False
def setup_readline(namespace_module=__main__, fuzzy=False):
    """
    This function sets up :mod:`readline` to use Jedi in a Python interactive
    shell.

    If you want to use a custom ``PYTHONSTARTUP`` file (typically
    ``$HOME/.pythonrc.py``), you can add this piece of code::

        try:
            from jedi.utils import setup_readline
        except ImportError:
            # Fallback to the stdlib readline completer if it is installed.
            # Taken from http://docs.python.org/2/library/rlcompleter.html
            print("Jedi is not installed, falling back to readline")
            try:
                import readline
                import rlcompleter
                readline.parse_and_bind("tab: complete")
            except ImportError:
                print("Readline is not installed either. No tab completion is enabled.")
        else:
            setup_readline()

    This will fallback to the readline completer if Jedi is not installed.
    The readline completer will only complete names in the global namespace,
    so for example::

        ran<TAB>

    will complete to ``range``.

    With Jedi the following code::

        range(10).cou<TAB>

    will complete to ``range(10).count``, this does not work with the default
    cPython :mod:`readline` completer.

    You will also need to add ``export PYTHONSTARTUP=$HOME/.pythonrc.py`` to
    your shell profile (usually ``.bash_profile`` or ``.profile`` if you use
    bash).
    """
    if READLINE_DEBUG:
        logging.basicConfig(
            filename='/tmp/jedi.log',
            filemode='a',
            level=logging.DEBUG
        )
    class JediRL:
        def complete(self, text, state):
            """
            This complete stuff is pretty weird, a generator would make
            a lot more sense, but probably due to backwards compatibility
            this is still the way how it works.

            The only important part is stuff in the ``state == 0`` flow,
            everything else has been copied from the ``rlcompleter`` std.
            library module.
            """
            if state == 0:
                sys.path.insert(0, os.getcwd())
                # Calling python doesn't have a path, so add to sys.path.
                try:
                    logging.debug("Start REPL completion: " + repr(text))
                    interpreter = Interpreter(text, [namespace_module.__dict__])
                    completions = interpreter.complete(fuzzy=fuzzy)
                    logging.debug("REPL completions: %s", completions)
                    self.matches = [
                        text[:len(text) - c._like_name_length] + c.name_with_symbols
                        for c in completions
                    ]
                except:
                    # NOTE(review): bare except is deliberate here -- any
                    # failure is logged and then re-raised unchanged.
                    logging.error("REPL Completion error:\n" + traceback.format_exc())
                    raise
                finally:
                    sys.path.pop(0)
            try:
                return self.matches[state]
            except IndexError:
                return None
    try:
        # Need to import this one as well to make sure it's executed before
        # this code. This didn't use to be an issue until 3.3. Starting with
        # 3.4 this is different, it always overwrites the completer if it's not
        # already imported here.
        import rlcompleter  # noqa: F401
        import readline
    except ImportError:
        print("Jedi: Module readline not available.")
    else:
        readline.set_completer(JediRL().complete)
        readline.parse_and_bind("tab: complete")
        # jedi itself does the case matching
        readline.parse_and_bind("set completion-ignore-case on")
        # because it's easier to hit the tab just once
        readline.parse_and_bind("set show-all-if-unmodified")
        readline.parse_and_bind("set show-all-if-ambiguous on")
        # don't repeat all the things written in the readline all the time
        readline.parse_and_bind("set completion-prefix-display-length 2")
        # No delimiters, Jedi handles that.
        readline.set_completer_delims('')
def version_info():
    """
    Returns a namedtuple of Jedi's version, similar to Python's
    ``sys.version_info``.
    """
    Version = namedtuple('Version', 'major, minor, micro')
    from jedi import __version__
    tupl = re.findall(r'[a-z]+|\d+', __version__)
    # Numeric components are converted to int; a fourth component would be
    # kept as a string. NOTE(review): Version has only three fields, so a
    # four-part version (e.g. '1.2.3rc1') would raise TypeError here -- the
    # ``i == 3`` special case looks vestigial.
    return Version(*[x if i == 3 else int(x) for i, x in enumerate(tupl)])
| 4,704 | Python | .py | 112 | 32.053571 | 88 | 0.608315 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,330 | cache.py | DamnWidget_anaconda/anaconda_lib/jedi/cache.py | """
This caching is very important for speed and memory optimizations. There's
nothing really spectacular, just some decorators. The following cache types are
available:
- ``time_cache`` can be used to cache something for just a limited time span,
which can be useful if there's user interaction and the user cannot react
faster than a certain time.
This module is one of the reasons why |jedi| is not thread-safe. As you can see
there are global variables, which are holding the cache information. Some of
these variables are being cleaned after every API usage.
"""
import time
from functools import wraps
from typing import Any, Dict, Tuple
from jedi import settings
from parso.cache import parser_cache
_time_caches: Dict[str, Dict[Any, Tuple[float, Any]]] = {}
def clear_time_caches(delete_all: bool = False) -> None:
    """Clear jedi's time-based caches (run after a completion finishes).

    :param delete_all: Also clears the caches that are normally kept,
        like parser cache, which is important for faster parsing.
    """
    global _time_caches
    if delete_all:
        for cache in _time_caches.values():
            cache.clear()
        parser_cache.clear()
    else:
        # normally just kill the expired entries, not all
        for tc in _time_caches.values():
            # check time_cache for expired entries
            for key, (t, value) in list(tc.items()):
                if t < time.time():
                    # delete expired entries
                    del tc[key]
def signature_time_cache(time_add_setting):
    """
    This decorator works as follows: Call it with a setting and after that
    use the function with a callable that returns the key.
    But: This function is only called if the key is not available. After a
    certain amount of time (`time_add_setting`) the cache is invalid.

    If the given key is None, the function will not be cached.
    """
    def _temp(key_func):
        dct = {}
        _time_caches[time_add_setting] = dct
        def wrapper(*args, **kwargs):
            # key_func is a generator: its first yield produces the cache
            # key, its second yield produces the value, which is only
            # computed on a cache miss.
            generator = key_func(*args, **kwargs)
            key = next(generator)
            try:
                expiry, value = dct[key]
                if expiry > time.time():
                    return value
            except KeyError:
                pass
            value = next(generator)
            # The validity duration is read from jedi.settings at call time.
            time_add = getattr(settings, time_add_setting)
            if key is not None:
                dct[key] = time.time() + time_add, value
            return value
        return wrapper
    return _temp
def time_cache(seconds):
    """Decorator factory: memoize results for *seconds* per argument set.

    Expired or missing entries trigger a fresh call. The decorated function
    gains a ``clear_cache()`` helper that empties the whole cache.
    """
    def decorator(func):
        memo = {}
        @wraps(func)
        def wrapper(*args, **kwargs):
            key = (args, frozenset(kwargs.items()))
            entry = memo.get(key)
            if entry is not None:
                created, result = entry
                if time.time() < created + seconds:
                    return result
            result = func(*args, **kwargs)
            memo[key] = time.time(), result
            return result
        wrapper.clear_cache = memo.clear
        return wrapper
    return decorator
def memoize_method(method):
    """Memoize a method's results on its instance.

    The cache lives in the instance ``__dict__`` (so it dies with the
    instance) and is keyed by the method and the call arguments.
    """
    @wraps(method)
    def wrapper(self, *args, **kwargs):
        per_instance = self.__dict__.setdefault('_memoize_method_dct', {})
        per_method = per_instance.setdefault(method, {})
        key = (args, frozenset(kwargs.items()))
        if key not in per_method:
            per_method[key] = method(self, *args, **kwargs)
        return per_method[key]
    return wrapper
| 3,674 | Python | .py | 95 | 30.052632 | 79 | 0.616465 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,331 | __init__.py | DamnWidget_anaconda/anaconda_lib/jedi/__init__.py | """
Jedi is a static analysis tool for Python that is typically used in
IDEs/editors plugins. Jedi has a focus on autocompletion and goto
functionality. Other features include refactoring, code search and finding
references.
Jedi has a simple API to work with. There is a reference implementation as a
`VIM-Plugin <https://github.com/davidhalter/jedi-vim>`_. Autocompletion in your
REPL is also possible, IPython uses it natively and for the CPython REPL you
can install it. Jedi is well tested and bugs should be rare.
Here's a simple example of the autocompletion feature:
>>> import jedi
>>> source = '''
... import json
... json.lo'''
>>> script = jedi.Script(source, path='example.py')
>>> script
<Script: 'example.py' ...>
>>> completions = script.complete(3, len('json.lo'))
>>> completions
[<Completion: load>, <Completion: loads>]
>>> print(completions[0].complete)
ad
>>> print(completions[0].name)
load
"""
__version__ = '0.18.1'
from jedi.api import Script, Interpreter, set_debug_function, preload_module
from jedi import settings
from jedi.api.environment import find_virtualenvs, find_system_environments, \
get_default_environment, InvalidPythonEnvironment, create_environment, \
get_system_environment, InterpreterEnvironment
from jedi.api.project import Project, get_default_project
from jedi.api.exceptions import InternalError, RefactoringError
# Finally load the internal plugins. This is only internal.
from jedi.plugins import registry
del registry
| 1,486 | Python | .py | 36 | 39.888889 | 79 | 0.782548 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,332 | debug.py | DamnWidget_anaconda/anaconda_lib/jedi/debug.py | import os
import time
from contextlib import contextmanager
from typing import Callable, Optional
_inited = False
def _lazy_colorama_init():
    """
    Lazily init colorama if necessary, not to screw up stdout if debugging is
    not enabled.

    This version of the function does nothing.
    """
    # Intentionally a no-op: replaced by a real initializer below when
    # colorama can be imported.
try:
    if os.name == 'nt':
        # Does not work on Windows, as pyreadline and colorama interfere
        raise ImportError
    else:
        # Use colorama for nicer console output.
        from colorama import Fore, init  # type: ignore[import]
        from colorama import initialise
        def _lazy_colorama_init():  # noqa: F811
            """
            Lazily init colorama if necessary, not to screw up stdout if
            debugging is not enabled.

            This version of the function does init colorama.
            """
            global _inited
            if not _inited:
                # pytest resets the stream at the end - causes troubles. Since
                # after every output the stream is reset automatically we don't
                # need this.
                initialise.atexit_done = True
                try:
                    init(strip=False)
                except Exception:
                    # Colorama fails with initializing under vim and is buggy in
                    # version 0.3.6.
                    pass
            _inited = True
except ImportError:
    class Fore:  # type: ignore[no-redef]
        # No-op fallback color codes when colorama is unavailable.
        RED = ''
        GREEN = ''
        YELLOW = ''
        MAGENTA = ''
        RESET = ''
        BLUE = ''
# Sentinel message levels used by the logging helpers below.
NOTICE = object()
WARNING = object()
SPEED = object()
# Per-level switches; all output is off by default.
enable_speed = False
enable_warning = False
enable_notice = False
# callback, interface: level, str
debug_function: Optional[Callable[[str, str], None]] = None
# Current nesting depth, used to indent debug output.
_debug_indent = 0
_start_time = time.time()
def reset_time():
    """Reset the debug timer and the indentation level."""
    global _start_time, _debug_indent
    _start_time = time.time()
    _debug_indent = 0
def increase_indent(func):
    """Decorator for making debug output inside *func* one level deeper."""
    def wrapper(*args, **kwargs):
        with increase_indent_cm():
            return func(*args, **kwargs)
    return wrapper
@contextmanager
def increase_indent_cm(title=None, color='MAGENTA'):
    """Context manager that indents debug output one level, optionally
    logging ``Start:``/``End:`` markers with *title*."""
    global _debug_indent
    if title:
        dbg('Start: ' + title, color=color)
    _debug_indent += 1
    try:
        yield
    finally:
        _debug_indent -= 1
        if title:
            dbg('End: ' + title, color=color)
def dbg(message, *args, color='GREEN'):
    """Emit a debug message via ``debug_function`` when notices are enabled.

    *args* are %-formatted into *message* using ``repr()``.
    """
    assert color
    if debug_function and enable_notice:
        i = ' ' * _debug_indent
        _lazy_colorama_init()
        debug_function(color, i + 'dbg: ' + message % tuple(repr(a) for a in args))
def warning(message, *args, format=True):
    """Emit a warning via ``debug_function`` when warnings are enabled.

    With ``format=True``, *args* are %-formatted into *message* via repr().
    """
    if debug_function and enable_warning:
        i = ' ' * _debug_indent
        if format:
            message = message % tuple(repr(a) for a in args)
        debug_function('RED', i + 'warning: ' + message)
def speed(name):
    """Emit a timing message for *name*: seconds since start/reset_time()."""
    if debug_function and enable_speed:
        now = time.time()
        i = ' ' * _debug_indent
        debug_function('YELLOW', i + 'speed: ' + '%s %s' % (name, now - _start_time))
def print_to_stdout(color, str_out):
    """
    The default debug function that prints to standard out.

    :param str color: A string that is an attribute of ``colorama.Fore``.
    :param str str_out: The already-formatted message to print.
    """
    col = getattr(Fore, color)
    _lazy_colorama_init()
    print(col + str_out + Fore.RESET)
| 3,504 | Python | .py | 104 | 26.269231 | 85 | 0.602017 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,333 | _compatibility.py | DamnWidget_anaconda/anaconda_lib/jedi/_compatibility.py | """
This module is here to ensure compatibility of Windows/Linux/MacOS and
different Python versions.
"""
import errno
import sys
import pickle
def pickle_load(file):
    """Unpickle from *file*, normalizing Windows pipe errors to EOFError.

    Python on Windows doesn't throw EOF errors for pipes; the OSError that
    shows up instead is re-raised as EOFError so it can be caught upwards
    like on other platforms.
    """
    try:
        return pickle.load(file)
    except OSError:
        if sys.platform != 'win32':
            raise
        raise EOFError()
def pickle_dump(data, file, protocol):
    """Pickle *data* to *file* and flush, normalizing Windows pipe errors
    to ``IOError(EPIPE)`` so they can be caught uniformly upstream.
    """
    try:
        pickle.dump(data, file, protocol)
        # On Python 3.3 flush throws sometimes an error even though the
        # writing operation should be completed.
        file.flush()
    except OSError:
        # Python on Windows doesn't throw EPIPE errors for pipes, so
        # re-raise with the correct type and error number.
        if sys.platform != 'win32':
            raise
        raise IOError(errno.EPIPE, "Broken pipe")
| 918 | Python | .py | 28 | 27.071429 | 80 | 0.67833 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,334 | common.py | DamnWidget_anaconda/anaconda_lib/jedi/common.py | from contextlib import contextmanager
@contextmanager
def monkeypatch(obj, attribute_name, new_value):
"""
Like pytest's monkeypatch, but as a value manager.
"""
old_value = getattr(obj, attribute_name)
try:
setattr(obj, attribute_name, new_value)
yield
finally:
setattr(obj, attribute_name, old_value)
def indent_block(text, indention='    '):
    """Indent every line of *text* (four spaces by default), preserving
    any trailing newlines unindented."""
    stripped = text.rstrip('\n')
    trailing = text[len(stripped):]
    indented = '\n'.join(indention + line for line in stripped.split('\n'))
    return indented + trailing
28,335 | ipaddress.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/ipaddress.pyi | from typing import Any, Container, Generic, Iterable, Iterator, Optional, SupportsInt, Text, Tuple, TypeVar, overload
# Undocumented length constants
IPV4LENGTH: int
IPV6LENGTH: int
# Constrained TypeVars tying address/network types to one IP version.
_A = TypeVar("_A", IPv4Address, IPv6Address)
_N = TypeVar("_N", IPv4Network, IPv6Network)
_T = TypeVar("_T")
def ip_address(address: object) -> Any: ...  # morally Union[IPv4Address, IPv6Address]
def ip_network(address: object, strict: bool = ...) -> Any: ...  # morally Union[IPv4Network, IPv6Network]
def ip_interface(address: object) -> Any: ...  # morally Union[IPv4Interface, IPv6Interface]
class _IPAddressBase:
    # Comparison and formatting interface shared by addresses and networks.
    def __eq__(self, other: Any) -> bool: ...
    def __ge__(self: _T, other: _T) -> bool: ...
    def __gt__(self: _T, other: _T) -> bool: ...
    def __le__(self: _T, other: _T) -> bool: ...
    def __lt__(self: _T, other: _T) -> bool: ...
    def __ne__(self, other: Any) -> bool: ...
    @property
    def compressed(self) -> Text: ...
    @property
    def exploded(self) -> Text: ...
    @property
    def reverse_pointer(self) -> Text: ...
    @property
    def version(self) -> int: ...
class _BaseAddress(_IPAddressBase, SupportsInt):
    # Single-address interface: integer conversion, offset arithmetic and
    # the is_* classification properties.
    def __init__(self, address: object) -> None: ...
    def __add__(self: _T, other: int) -> _T: ...
    def __hash__(self) -> int: ...
    def __int__(self) -> int: ...
    def __sub__(self: _T, other: int) -> _T: ...
    @property
    def is_global(self) -> bool: ...
    @property
    def is_link_local(self) -> bool: ...
    @property
    def is_loopback(self) -> bool: ...
    @property
    def is_multicast(self) -> bool: ...
    @property
    def is_private(self) -> bool: ...
    @property
    def is_reserved(self) -> bool: ...
    @property
    def is_unspecified(self) -> bool: ...
    @property
    def max_prefixlen(self) -> int: ...
    @property
    def packed(self) -> bytes: ...
class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]):
    # Network interface, generic over the contained address type _A.
    network_address: _A
    netmask: _A
    def __init__(self, address: object, strict: bool = ...) -> None: ...
    def __contains__(self, other: Any) -> bool: ...
    def __getitem__(self, n: int) -> _A: ...
    def __iter__(self) -> Iterator[_A]: ...
    def address_exclude(self: _T, other: _T) -> Iterator[_T]: ...
    @property
    def broadcast_address(self) -> _A: ...
    def compare_networks(self: _T, other: _T) -> int: ...
    def hosts(self) -> Iterator[_A]: ...
    @property
    def is_global(self) -> bool: ...
    @property
    def is_link_local(self) -> bool: ...
    @property
    def is_loopback(self) -> bool: ...
    @property
    def is_multicast(self) -> bool: ...
    @property
    def is_private(self) -> bool: ...
    @property
    def is_reserved(self) -> bool: ...
    @property
    def is_unspecified(self) -> bool: ...
    @property
    def max_prefixlen(self) -> int: ...
    @property
    def num_addresses(self) -> int: ...
    def overlaps(self, other: _BaseNetwork[_A]) -> bool: ...
    @property
    def prefixlen(self) -> int: ...
    def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> Iterator[_T]: ...
    def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: Optional[int] = ...) -> _T: ...
    @property
    def with_hostmask(self) -> Text: ...
    @property
    def with_netmask(self) -> Text: ...
    @property
    def with_prefixlen(self) -> Text: ...
    @property
    def hostmask(self) -> _A: ...
class _BaseInterface(_BaseAddress, Generic[_A, _N]):
hostmask: _A
netmask: _A
network: _N
@property
def ip(self) -> _A: ...
@property
def with_hostmask(self) -> Text: ...
@property
def with_netmask(self) -> Text: ...
@property
def with_prefixlen(self) -> Text: ...
# Concrete classes; all behavior is declared on the generic bases above.
class IPv4Address(_BaseAddress): ...
class IPv4Network(_BaseNetwork[IPv4Address]): ...
class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ...
# IPv6 adds v6-only introspection properties (mapped/6to4/Teredo forms).
class IPv6Address(_BaseAddress):
    @property
    def ipv4_mapped(self) -> Optional[IPv4Address]: ...
    @property
    def is_site_local(self) -> bool: ...
    @property
    def sixtofour(self) -> Optional[IPv4Address]: ...
    @property
    def teredo(self) -> Optional[Tuple[IPv4Address, IPv4Address]]: ...
class IPv6Network(_BaseNetwork[IPv6Address]):
    @property
    def is_site_local(self) -> bool: ...
class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ...
# Module-level helpers operating on raw ints / address pairs.
def v4_int_to_packed(address: int) -> bytes: ...
def v6_int_to_packed(address: int) -> bytes: ...
@overload
def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ...
@overload
def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ...
def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ...
@overload
def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ...
@overload
def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ...
@overload
def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ...
# Parse-failure exceptions raised by the constructors above.
class AddressValueError(ValueError): ...
class NetmaskValueError(ValueError): ...
| 5,107 | Python | .py | 135 | 33.77037 | 117 | 0.6219 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,336 | pymssql.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/pymssql.pyi | from datetime import date, datetime, time
from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple, Union
# DB-API-style value aliases: one scalar cell value, and one result row
# (a tuple, or a name->value dict when as_dict is used).
Scalar = Union[int, float, str, datetime, date, time]
Result = Union[Tuple[Scalar, ...], Dict[str, Scalar]]
# Open database connection; constructor parameters are untyped in this stub.
class Connection(object):
    def __init__(self, user, password, host, database, timeout, login_timeout, charset, as_dict) -> None: ...
    def autocommit(self, status: bool) -> None: ...
    def close(self) -> None: ...
    def commit(self) -> None: ...
    def cursor(self) -> Cursor: ...
    def rollback(self) -> None: ...
# Cursor for executing statements and fetching rows; iterable over results.
class Cursor(object):
    def __init__(self) -> None: ...
    def __iter__(self): ...
    def __next__(self) -> Any: ...
    def callproc(self, procname: str, **kwargs) -> None: ...
    def close(self) -> None: ...
    def execute(self, stmt: str, params: Optional[Union[Scalar, Tuple[Scalar, ...], Dict[str, Scalar]]]) -> None: ...
    def executemany(self, stmt: str, params: Optional[Sequence[Tuple[Scalar, ...]]]) -> None: ...
    def fetchall(self) -> List[Result]: ...
    def fetchmany(self, size: Optional[int]) -> List[Result]: ...
    def fetchone(self) -> Result: ...
# Module-level connection factory and connection-pool size accessors.
def connect(
    server: Optional[str],
    user: Optional[str],
    password: Optional[str],
    database: Optional[str],
    timeout: Optional[int],
    login_timeout: Optional[int],
    charset: Optional[str],
    as_dict: Optional[bool],
    host: Optional[str],
    appname: Optional[str],
    port: Optional[str],
    conn_properties: Optional[Union[str, Sequence[str]]],
    autocommit: Optional[bool],
    tds_version: Optional[str],
) -> Connection: ...
def get_max_connections() -> int: ...
def set_max_connections(n: int) -> None: ...
| 1,685 | Python | .py | 40 | 38.025 | 117 | 0.636197 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,337 | enum.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/enum.pyi | import sys
from abc import ABCMeta
from typing import Any, Dict, Iterator, List, Mapping, Type, TypeVar, Union
_T = TypeVar("_T")
# _S is constrained to Enum subclasses themselves (used by the @unique decorator).
_S = TypeVar("_S", bound=Type[Enum])
# Note: EnumMeta actually subclasses type directly, not ABCMeta.
# This is a temporary workaround to allow multiple creation of enums with builtins
# such as str as mixins, which due to the handling of ABCs of builtin types, cause
# spurious inconsistent metaclass structure. See #1595.
# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself
class EnumMeta(ABCMeta):
    def __iter__(self: Type[_T]) -> Iterator[_T]: ...
    def __reversed__(self: Type[_T]) -> Iterator[_T]: ...
    def __contains__(self: Type[_T], member: object) -> bool: ...
    def __getitem__(self: Type[_T], name: str) -> _T: ...
    @property
    def __members__(self: Type[_T]) -> Mapping[str, _T]: ...
    def __len__(self) -> int: ...
# Base Enum: public name/value plus the private per-class bookkeeping maps.
class Enum(metaclass=EnumMeta):
    name: str
    value: Any
    _name_: str
    _value_: Any
    _member_names_: List[str]  # undocumented
    _member_map_: Dict[str, Enum]  # undocumented
    _value2member_map_: Dict[int, Enum]  # undocumented
    if sys.version_info >= (3, 7):
        _ignore_: Union[str, List[str]]
    _order_: str
    __order__: str
    @classmethod
    def _missing_(cls, value: object) -> Any: ...
    @staticmethod
    def _generate_next_value_(name: str, start: int, count: int, last_values: List[Any]) -> Any: ...
    def __new__(cls: Type[_T], value: object) -> _T: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    def __dir__(self) -> List[str]: ...
    def __format__(self, format_spec: str) -> str: ...
    def __hash__(self) -> Any: ...
    def __reduce_ex__(self, proto: object) -> Any: ...
class IntEnum(int, Enum):
    value: int
def unique(enumeration: _S) -> _S: ...
_auto_null: Any
# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto()
class auto(IntFlag):
    value: Any
# Flag members combine set-like via the bitwise operators below.
class Flag(Enum):
    def __contains__(self: _T, other: _T) -> bool: ...
    def __repr__(self) -> str: ...
    def __str__(self) -> str: ...
    def __bool__(self) -> bool: ...
    def __or__(self: _T, other: _T) -> _T: ...
    def __and__(self: _T, other: _T) -> _T: ...
    def __xor__(self: _T, other: _T) -> _T: ...
    def __invert__(self: _T) -> _T: ...
# IntFlag additionally accepts plain ints in bitwise operations.
class IntFlag(int, Flag):
    def __or__(self: _T, other: Union[int, _T]) -> _T: ...
    def __and__(self: _T, other: Union[int, _T]) -> _T: ...
    def __xor__(self: _T, other: Union[int, _T]) -> _T: ...
    __ror__ = __or__
    __rand__ = __and__
    __rxor__ = __xor__
| 2,643 | Python | .py | 64 | 37.28125 | 106 | 0.58249 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,338 | pathlib2.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/pathlib2.pyi | import os
import sys
from _typeshed import OpenBinaryMode, OpenBinaryModeReading, OpenBinaryModeUpdating, OpenBinaryModeWriting, OpenTextMode
from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper
from types import TracebackType
from typing import IO, Any, BinaryIO, Generator, List, Optional, Sequence, Text, TextIO, Tuple, Type, TypeVar, Union, overload
from typing_extensions import Literal
_P = TypeVar("_P", bound=PurePath)
# Compatibility aliases used by the class definitions below.
_PurePathBase = object
_PathLike = PurePath
# Pure path: lexical path operations only, no filesystem access.
class PurePath(_PurePathBase):
    parts: Tuple[str, ...]
    drive: str
    root: str
    anchor: str
    name: str
    suffix: str
    suffixes: List[str]
    stem: str
    def __new__(cls: Type[_P], *args: Union[str, _PathLike]) -> _P: ...
    def __hash__(self) -> int: ...
    def __lt__(self, other: PurePath) -> bool: ...
    def __le__(self, other: PurePath) -> bool: ...
    def __gt__(self, other: PurePath) -> bool: ...
    def __ge__(self, other: PurePath) -> bool: ...
    def __truediv__(self: _P, key: Union[str, _PathLike]) -> _P: ...
    def __rtruediv__(self: _P, key: Union[str, _PathLike]) -> _P: ...
    def __div__(self: _P, key: Union[str, PurePath]) -> _P: ...  # Python 2 `/` operator
    def __bytes__(self) -> bytes: ...
    def as_posix(self) -> str: ...
    def as_uri(self) -> str: ...
    def is_absolute(self) -> bool: ...
    def is_reserved(self) -> bool: ...
    def match(self, path_pattern: str) -> bool: ...
    def relative_to(self: _P, *other: Union[str, _PathLike]) -> _P: ...
    def with_name(self: _P, name: str) -> _P: ...
    def with_suffix(self: _P, suffix: str) -> _P: ...
    def joinpath(self: _P, *other: Union[str, _PathLike]) -> _P: ...
    @property
    def parents(self: _P) -> Sequence[_P]: ...
    @property
    def parent(self: _P) -> _P: ...
class PurePosixPath(PurePath): ...
class PureWindowsPath(PurePath): ...
# Concrete path: adds filesystem-accessing methods on top of PurePath.
class Path(PurePath):
    def __new__(cls: Type[_P], *args: Union[str, _PathLike], **kwargs: Any) -> _P: ...
    def __enter__(self) -> Path: ...
    def __exit__(
        self, exc_type: Optional[Type[BaseException]], exc_value: Optional[BaseException], traceback: Optional[TracebackType]
    ) -> Optional[bool]: ...
    @classmethod
    def cwd(cls: Type[_P]) -> _P: ...
    def stat(self) -> os.stat_result: ...
    def chmod(self, mode: int) -> None: ...
    def exists(self) -> bool: ...
    def glob(self, pattern: str) -> Generator[Path, None, None]: ...
    def group(self) -> str: ...
    def is_dir(self) -> bool: ...
    def is_file(self) -> bool: ...
    def is_symlink(self) -> bool: ...
    def is_socket(self) -> bool: ...
    def is_fifo(self) -> bool: ...
    def is_block_device(self) -> bool: ...
    def is_char_device(self) -> bool: ...
    def iterdir(self) -> Generator[Path, None, None]: ...
    def lchmod(self, mode: int) -> None: ...
    def lstat(self) -> os.stat_result: ...
    def mkdir(self, mode: int = ..., parents: bool = ...) -> None: ...
    # Adapted from _io.open
    def open(
        self,
        mode: Text = ...,
        buffering: int = ...,
        encoding: Optional[Text] = ...,
        errors: Optional[Text] = ...,
        newline: Optional[Text] = ...,
    ) -> IO[Any]: ...
    def owner(self) -> str: ...
    def rename(self, target: Union[str, PurePath]) -> None: ...
    def replace(self, target: Union[str, PurePath]) -> None: ...
    def resolve(self: _P) -> _P: ...
    def rglob(self, pattern: str) -> Generator[Path, None, None]: ...
    def rmdir(self) -> None: ...
    def symlink_to(self, target: Union[str, Path], target_is_directory: bool = ...) -> None: ...
    def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ...
    def unlink(self) -> None: ...
    @classmethod
    def home(cls: Type[_P]) -> _P: ...
    def absolute(self: _P) -> _P: ...
    def expanduser(self: _P) -> _P: ...
    def read_bytes(self) -> bytes: ...
    def read_text(self, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> str: ...
    def samefile(self, other_path: Union[str, bytes, int, Path]) -> bool: ...
    def write_bytes(self, data: bytes) -> int: ...
    def write_text(self, data: str, encoding: Optional[str] = ..., errors: Optional[str] = ...) -> int: ...
class PosixPath(Path, PurePosixPath): ...
class WindowsPath(Path, PureWindowsPath): ...
| 4,283 | Python | .py | 97 | 39.463918 | 126 | 0.585885 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,339 | util.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/util.pyi | from typing import Any, Dict
xrange: Any
# dict subclass whose items are also reachable as attributes.
class ObjectDict(Dict[Any, Any]):
    def __getattr__(self, name): ...
    def __setattr__(self, name, value): ...
# Streaming gzip decompressor (wraps a zlib decompressobj instance).
class GzipDecompressor:
    decompressobj: Any
    def __init__(self) -> None: ...
    def decompress(self, value, max_length=...): ...
    @property
    def unconsumed_tail(self): ...
    def flush(self): ...
# Python 2/3 compatibility aliases.
unicode_type: Any
basestring_type: Any
def import_object(name): ...
bytes_type: Any
def errno_from_exception(e): ...
# Base class for classes whose concrete implementation is selected at runtime
# via configure()/configurable_default().
class Configurable:
    def __new__(cls, *args, **kwargs): ...
    @classmethod
    def configurable_base(cls): ...
    @classmethod
    def configurable_default(cls): ...
    def initialize(self): ...
    @classmethod
    def configure(cls, impl, **kwargs): ...
    @classmethod
    def configured_class(cls): ...
# Finds/replaces one named argument inside an (args, kwargs) call.
class ArgReplacer:
    name: Any
    arg_pos: Any
    def __init__(self, func, name) -> None: ...
    def get_old_value(self, args, kwargs, default=...): ...
    def replace(self, new_value, args, kwargs): ...
def timedelta_to_seconds(td): ...
def doctests(): ...
| 1,072 | Python | .py | 36 | 25.944444 | 59 | 0.635478 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,340 | process.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/process.pyi | from typing import Any, Optional
long = int  # Python 2 compatibility alias
CalledProcessError: Any
def cpu_count() -> int: ...
def fork_processes(num_processes, max_restarts: int = ...) -> Optional[int]: ...
def task_id() -> int: ...
# Child-process wrapper exposing stdin/stdout/stderr and exit handling,
# with an io_loop attribute for event-loop integration.
class Subprocess:
    STREAM: Any = ...
    io_loop: Any = ...
    stdin: Any = ...
    stdout: Any = ...
    stderr: Any = ...
    proc: Any = ...
    returncode: Any = ...
    def __init__(self, *args, **kwargs) -> None: ...
    def set_exit_callback(self, callback): ...
    def wait_for_exit(self, raise_error: bool = ...): ...
    @classmethod
    def initialize(cls, io_loop: Optional[Any] = ...): ...
    @classmethod
    def uninitialize(cls): ...
| 662 | Python | .py | 21 | 27.714286 | 80 | 0.584639 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,341 | locks.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/locks.pyi | from typing import Any, Optional
# Shared base for primitives that track timed-out waiters (see name; internals not visible here).
class _TimeoutGarbageCollector:
    def __init__(self): ...
# Coroutine-friendly condition variable.
class Condition(_TimeoutGarbageCollector):
    io_loop: Any
    def __init__(self): ...
    def wait(self, timeout: Optional[Any] = ...): ...
    def notify(self, n: int = ...): ...
    def notify_all(self): ...
# Settable/clearable flag that waiters can block on.
class Event:
    def __init__(self): ...
    def is_set(self): ...
    def set(self): ...
    def clear(self): ...
    def wait(self, timeout: Optional[Any] = ...): ...
# Internal context manager that releases its wrapped object on exit.
class _ReleasingContextManager:
    def __init__(self, obj): ...
    def __enter__(self): ...
    def __exit__(self, exc_type, exc_val, exc_tb): ...
# Counting semaphore; usable as both a sync and an async context manager.
class Semaphore(_TimeoutGarbageCollector):
    def __init__(self, value: int = ...): ...
    def release(self): ...
    def acquire(self, timeout: Optional[Any] = ...): ...
    def __enter__(self): ...
    __exit__: Any
    def __aenter__(self): ...
    def __aexit__(self, typ, value, tb): ...
class BoundedSemaphore(Semaphore):
    def __init__(self, value: int = ...): ...
    def release(self): ...
# Non-reentrant lock with the same dual sync/async context-manager protocol.
class Lock:
    def __init__(self): ...
    def acquire(self, timeout: Optional[Any] = ...): ...
    def release(self): ...
    def __enter__(self): ...
    __exit__: Any
    def __aenter__(self): ...
    def __aexit__(self, typ, value, tb): ...
| 1,279 | Python | .py | 38 | 29.315789 | 56 | 0.559968 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,342 | httputil.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/httputil.pyi | from typing import Any, Dict, List, NamedTuple, Optional
from tornado.util import ObjectDict
class SSLError(Exception): ...
# Size-bounded cache dict (see `size`/`queue`) for header-name normalization.
class _NormalizedHeaderCache(Dict[Any, Any]):
    size: Any
    queue: Any
    def __init__(self, size) -> None: ...
    def __missing__(self, key): ...
# Dict of HTTP headers that also supports repeated values per name
# (add/get_list alongside the normal mapping interface).
class HTTPHeaders(Dict[Any, Any]):
    def __init__(self, *args, **kwargs) -> None: ...
    def add(self, name, value): ...
    def get_list(self, name): ...
    def get_all(self): ...
    def parse_line(self, line): ...
    @classmethod
    def parse(cls, headers): ...
    def __setitem__(self, name, value): ...
    def __getitem__(self, name): ...
    def __delitem__(self, name): ...
    def __contains__(self, name): ...
    def get(self, name, default=...): ...
    def update(self, *args, **kwargs): ...
    def copy(self): ...
    __copy__: Any
    def __deepcopy__(self, memo_dict): ...
# One parsed HTTP request as seen by the server side.
class HTTPServerRequest:
    path: str
    query: str
    method: Optional[str]
    uri: Optional[str]
    version: str
    headers: HTTPHeaders
    body: bytes
    remote_ip: Any
    protocol: Any
    host: str
    files: Dict[str, List[HTTPFile]]
    connection: Optional[HTTPConnection]
    arguments: Dict[str, List[bytes]]
    query_arguments: Dict[str, List[bytes]]
    body_arguments: Dict[str, List[bytes]]
    def __init__(
        self, method=..., uri=..., version=..., headers=..., body=..., host=..., files=..., connection=..., start_line=...
    ) -> None: ...
    def supports_http_1_1(self): ...
    @property
    def cookies(self): ...
    def write(self, chunk, callback=...): ...
    def finish(self): ...
    def full_url(self): ...
    def request_time(self): ...
    def get_ssl_certificate(self, binary_form=...): ...
class HTTPInputError(Exception): ...
class HTTPOutputError(Exception): ...
# Delegate interfaces implemented by servers and request handlers.
class HTTPServerConnectionDelegate:
    def start_request(self, server_conn, request_conn): ...
    def on_close(self, server_conn): ...
class HTTPMessageDelegate:
    def headers_received(self, start_line, headers): ...
    def data_received(self, chunk): ...
    def finish(self): ...
    def on_connection_close(self): ...
class HTTPConnection:
    def write_headers(self, start_line, headers, chunk=..., callback=...): ...
    def write(self, chunk, callback=...): ...
    def finish(self): ...
def url_concat(url, args): ...
class HTTPFile(ObjectDict): ...
# Body/form parsing helpers.
def parse_body_arguments(content_type, body, arguments, files, headers=...): ...
def parse_multipart_form_data(boundary, data, arguments, files): ...
def format_timestamp(ts): ...
# Structured first lines of HTTP requests/responses plus their parsers.
class RequestStartLine(NamedTuple):
    method: str
    path: str
    version: str
def parse_request_start_line(line): ...
class ResponseStartLine(NamedTuple):
    version: str
    code: str
    reason: str
def parse_response_start_line(line): ...
def doctests(): ...
def split_host_and_port(netloc): ...
28,343 | ioloop.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/ioloop.pyi | from typing import Any
from tornado.util import Configurable
signal: Any
class TimeoutError(Exception): ...
# Event-loop interface; concrete implementations are selected via Configurable.
class IOLoop(Configurable):
    # Event masks for add_handler/update_handler.
    NONE: Any
    READ: Any
    WRITE: Any
    ERROR: Any
    @staticmethod
    def instance(): ...
    @staticmethod
    def initialized(): ...
    def install(self): ...
    @staticmethod
    def clear_instance(): ...
    @staticmethod
    def current(instance=...): ...
    def make_current(self): ...
    @staticmethod
    def clear_current(): ...
    @classmethod
    def configurable_base(cls): ...
    @classmethod
    def configurable_default(cls): ...
    def initialize(self, make_current=...): ...
    def close(self, all_fds=...): ...
    def add_handler(self, fd, handler, events): ...
    def update_handler(self, fd, events): ...
    def remove_handler(self, fd): ...
    def set_blocking_signal_threshold(self, seconds, action): ...
    def set_blocking_log_threshold(self, seconds): ...
    def log_stack(self, signal, frame): ...
    def start(self): ...
    def stop(self): ...
    def run_sync(self, func, timeout=...): ...
    def time(self): ...
    # Timer scheduling API.
    def add_timeout(self, deadline, callback, *args, **kwargs): ...
    def call_later(self, delay, callback, *args, **kwargs): ...
    def call_at(self, when, callback, *args, **kwargs): ...
    def remove_timeout(self, timeout): ...
    def add_callback(self, callback, *args, **kwargs): ...
    def add_callback_from_signal(self, callback, *args, **kwargs): ...
    def spawn_callback(self, callback, *args, **kwargs): ...
    def add_future(self, future, callback): ...
    def handle_callback_exception(self, callback): ...
    def split_fd(self, fd): ...
    def close_fd(self, fd): ...
# IOLoop implementation parameterized by a poll-style `impl` object.
class PollIOLoop(IOLoop):
    time_func: Any
    def initialize(self, impl, time_func=..., **kwargs): ...
    def close(self, all_fds=...): ...
    def add_handler(self, fd, handler, events): ...
    def update_handler(self, fd, events): ...
    def remove_handler(self, fd): ...
    def set_blocking_signal_threshold(self, seconds, action): ...
    def start(self): ...
    def stop(self): ...
    def time(self): ...
    def call_at(self, deadline, callback, *args, **kwargs): ...
    def remove_timeout(self, timeout): ...
    def add_callback(self, callback, *args, **kwargs): ...
    def add_callback_from_signal(self, callback, *args, **kwargs): ...
# Internal scheduled-callback entry; ordered by deadline (note __lt__/__le__).
class _Timeout:
    deadline: Any
    callback: Any
    tiebreaker: Any
    def __init__(self, deadline, callback, io_loop) -> None: ...
    def __lt__(self, other): ...
    def __le__(self, other): ...
# Invokes `callback` repeatedly every `callback_time` (milliseconds per tornado docs).
class PeriodicCallback:
    callback: Any
    callback_time: Any
    io_loop: Any
    def __init__(self, callback, callback_time, io_loop=...) -> None: ...
    def start(self): ...
    def stop(self): ...
    def is_running(self): ...
28,344 | gen.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/gen.pyi | from typing import Any, Dict, NamedTuple, Tuple
singledispatch: Any
# Error conditions raised by the coroutine machinery below.
class KeyReuseError(Exception): ...
class UnknownKeyError(Exception): ...
class LeakedCallbackError(Exception): ...
class BadYieldError(Exception): ...
class ReturnValueIgnoredError(Exception): ...
class TimeoutError(Exception): ...
# Decorators turning generator functions into coroutines.
def engine(func): ...
def coroutine(func, replace_callback=...): ...
# Raised to return `value` from a generator-based coroutine.
class Return(Exception):
    value: Any
    def __init__(self, value=...) -> None: ...
# Yields results from multiple inputs as each one completes.
class WaitIterator:
    current_index: Any
    def __init__(self, *args, **kwargs) -> None: ...
    def done(self): ...
    def next(self): ...
# Base interface for legacy yieldable objects driven by a Runner.
class YieldPoint:
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
# Registers a callback under `key` with the runner.
class Callback(YieldPoint):
    key: Any
    def __init__(self, key) -> None: ...
    runner: Any
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
# Waits for the result registered under a single `key`.
class Wait(YieldPoint):
    key: Any
    def __init__(self, key) -> None: ...
    runner: Any
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
# Waits for the results of several `keys` at once.
class WaitAll(YieldPoint):
    keys: Any
    def __init__(self, keys) -> None: ...
    runner: Any
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
def Task(func, *args, **kwargs): ...
# Adapts a Future into the YieldPoint protocol.
class YieldFuture(YieldPoint):
    future: Any
    io_loop: Any
    def __init__(self, future, io_loop=...) -> None: ...
    runner: Any
    key: Any
    result_fn: Any
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
# Waits for several yieldable children simultaneously.
class Multi(YieldPoint):
    keys: Any
    children: Any
    unfinished_children: Any
    quiet_exceptions: Any
    def __init__(self, children, quiet_exceptions=...) -> None: ...
    def start(self, runner): ...
    def is_ready(self): ...
    def get_result(self): ...
def multi_future(children, quiet_exceptions=...): ...
def maybe_future(x): ...
def with_timeout(timeout, future, io_loop=..., quiet_exceptions=...): ...
def sleep(duration): ...
moment: Any
# Internal driver that steps a generator-based coroutine to completion.
class Runner:
    gen: Any
    result_future: Any
    future: Any
    yield_point: Any
    pending_callbacks: Any
    results: Any
    running: Any
    finished: Any
    had_exception: Any
    io_loop: Any
    stack_context_deactivate: Any
    def __init__(self, gen, result_future, first_yielded) -> None: ...
    def register_callback(self, key): ...
    def is_ready(self, key): ...
    def set_result(self, key, result): ...
    def pop_result(self, key): ...
    def run(self): ...
    def handle_yield(self, yielded): ...
    def result_callback(self, key): ...
    def handle_exception(self, typ, value, tb): ...
# Captured positional/keyword arguments of a callback invocation.
class Arguments(NamedTuple):
    args: Tuple[str, ...]
    kwargs: Dict[str, Any]
def convert_yielded(yielded): ...
| 2,785 | Python | .py | 93 | 25.924731 | 73 | 0.628037 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,345 | httpclient.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/httpclient.pyi | from typing import Any
from tornado.util import Configurable
# Blocking client; delegates to an async client class under the hood.
class HTTPClient:
    def __init__(self, async_client_class=..., **kwargs) -> None: ...
    def __del__(self): ...
    def close(self): ...
    def fetch(self, request, **kwargs): ...
# Non-blocking HTTP client; implementation selected via Configurable.
class AsyncHTTPClient(Configurable):
    @classmethod
    def configurable_base(cls): ...
    @classmethod
    def configurable_default(cls): ...
    def __new__(cls, io_loop=..., force_instance=..., **kwargs): ...
    io_loop: Any
    defaults: Any
    def initialize(self, io_loop, defaults=...): ...
    def close(self): ...
    def fetch(self, request, callback=..., raise_error=..., **kwargs): ...
    def fetch_impl(self, request, callback): ...
    @classmethod
    def configure(cls, impl, **kwargs): ...
# Container for all per-request options (URL, auth, proxy, TLS, timeouts, callbacks).
class HTTPRequest:
    proxy_host: Any
    proxy_port: Any
    proxy_username: Any
    proxy_password: Any
    url: Any
    method: Any
    body_producer: Any
    auth_username: Any
    auth_password: Any
    auth_mode: Any
    connect_timeout: Any
    request_timeout: Any
    follow_redirects: Any
    max_redirects: Any
    user_agent: Any
    decompress_response: Any
    network_interface: Any
    streaming_callback: Any
    header_callback: Any
    prepare_curl_callback: Any
    allow_nonstandard_methods: Any
    validate_cert: Any
    ca_certs: Any
    allow_ipv6: Any
    client_key: Any
    client_cert: Any
    ssl_options: Any
    expect_100_continue: Any
    start_time: Any
    def __init__(
        self,
        url,
        method=...,
        headers=...,
        body=...,
        auth_username=...,
        auth_password=...,
        auth_mode=...,
        connect_timeout=...,
        request_timeout=...,
        if_modified_since=...,
        follow_redirects=...,
        max_redirects=...,
        user_agent=...,
        use_gzip=...,
        network_interface=...,
        streaming_callback=...,
        header_callback=...,
        prepare_curl_callback=...,
        proxy_host=...,
        proxy_port=...,
        proxy_username=...,
        proxy_password=...,
        allow_nonstandard_methods=...,
        validate_cert=...,
        ca_certs=...,
        allow_ipv6=...,
        client_key=...,
        client_cert=...,
        body_producer=...,
        expect_100_continue=...,
        decompress_response=...,
        ssl_options=...,
    ) -> None: ...
    # headers/body are exposed through property pairs (validated on set upstream).
    @property
    def headers(self): ...
    @headers.setter
    def headers(self, value): ...
    @property
    def body(self): ...
    @body.setter
    def body(self, value): ...
# Result of a fetch: status, headers, body, timing, and the originating request.
class HTTPResponse:
    request: Any
    code: Any
    reason: Any
    headers: Any
    buffer: Any
    effective_url: Any
    error: Any
    request_time: Any
    time_info: Any
    def __init__(
        self, request, code, headers=..., buffer=..., effective_url=..., error=..., request_time=..., time_info=..., reason=...
    ) -> None: ...
    body: bytes
    def rethrow(self): ...
# Raised for non-success responses (see HTTPResponse.rethrow).
class HTTPError(Exception):
    code: Any
    response: Any
    def __init__(self, code, message=..., response=...) -> None: ...
# Internal: request wrapper that falls back to `defaults` for missing attributes.
class _RequestProxy:
    request: Any
    defaults: Any
    def __init__(self, request, defaults) -> None: ...
    def __getattr__(self, name): ...
def main(): ...
| 3,219 | Python | .py | 119 | 21.142857 | 127 | 0.576973 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,346 | tcpserver.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/tcpserver.pyi | from typing import Any
ssl: Any
# Non-blocking TCP server base class; subclasses override handle_stream().
class TCPServer:
    io_loop: Any
    ssl_options: Any
    max_buffer_size: Any
    read_chunk_size: Any
    def __init__(self, io_loop=..., ssl_options=..., max_buffer_size=..., read_chunk_size=...) -> None: ...
    def listen(self, port, address=...): ...
    def add_sockets(self, sockets): ...
    def add_socket(self, socket): ...
    def bind(self, port, address=..., family=..., backlog=...): ...
    def start(self, num_processes=...): ...
    def stop(self): ...
    def handle_stream(self, stream, address): ...
28,347 | httpserver.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/httpserver.pyi | from typing import Any
from tornado import httputil
from tornado.tcpserver import TCPServer
from tornado.util import Configurable
# HTTP server: a TCPServer that parses requests and hands them to request_callback.
class HTTPServer(TCPServer, Configurable, httputil.HTTPServerConnectionDelegate):
    def __init__(self, *args, **kwargs) -> None: ...
    request_callback: Any
    no_keep_alive: Any
    xheaders: Any
    protocol: Any
    conn_params: Any
    def initialize(
        self,
        request_callback,
        no_keep_alive=...,
        io_loop=...,
        xheaders=...,
        ssl_options=...,
        protocol=...,
        decompress_request=...,
        chunk_size=...,
        max_header_size=...,
        idle_connection_timeout=...,
        body_timeout=...,
        max_body_size=...,
        max_buffer_size=...,
    ): ...
    @classmethod
    def configurable_base(cls): ...
    @classmethod
    def configurable_default(cls): ...
    def close_all_connections(self): ...
    def handle_stream(self, stream, address): ...
    def start_request(self, server_conn, request_conn): ...
    def on_close(self, server_conn): ...
# Internal: per-connection metadata (peer address, protocol scheme).
class _HTTPRequestContext:
    address: Any
    protocol: Any
    address_family: Any
    remote_ip: Any
    def __init__(self, stream, address, protocol) -> None: ...
# Internal: adapts HTTPMessageDelegate events onto the server's request callback.
class _ServerRequestAdapter(httputil.HTTPMessageDelegate):
    server: Any
    connection: Any
    request: Any
    delegate: Any
    def __init__(self, server, server_conn, request_conn) -> None: ...
    def headers_received(self, start_line, headers): ...
    def data_received(self, chunk): ...
    def finish(self): ...
    def on_connection_close(self): ...
# Module-level re-exported name; typed only as Any in this stub.
HTTPRequest: Any
| 1,617 | Python | .py | 52 | 25.538462 | 81 | 0.637179 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,348 | web.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/web.pyi | import sys
from typing import Any, Callable, Dict, List, Optional
from tornado import httputil
# Signed-cookie format version bounds/defaults.
MIN_SUPPORTED_SIGNED_VALUE_VERSION: Any
MAX_SUPPORTED_SIGNED_VALUE_VERSION: Any
DEFAULT_SIGNED_VALUE_VERSION: Any
DEFAULT_SIGNED_VALUE_MIN_VERSION: Any
# Signature for the HTTP verb methods: on 3.5+ they may return an awaitable.
if sys.version_info >= (3, 5):
    from typing import Awaitable
    _MethodType = Callable[..., Optional[Awaitable[None]]]
else:
    _MethodType = Callable[..., Any]
# Base class for HTTP request handlers; applications subclass this and
# implement the verb methods (get/post/...).
class RequestHandler:
    SUPPORTED_METHODS: Any
    application: Application
    request: httputil.HTTPServerRequest
    path_args: List[str]
    path_kwargs: Dict[str, str]
    ui: Any
    def __init__(self, application, request, **kwargs) -> None: ...
    initialize: Callable[..., None] = ...
    @property
    def settings(self): ...
    # HTTP verb entry points, overridden by subclasses.
    head: _MethodType
    get: _MethodType
    post: _MethodType
    delete: _MethodType
    patch: _MethodType
    put: _MethodType
    options: _MethodType
    # Request lifecycle hooks.
    def prepare(self): ...
    def on_finish(self): ...
    def on_connection_close(self): ...
    def clear(self): ...
    # Response status and header manipulation.
    def set_default_headers(self): ...
    def set_status(self, status_code, reason=...): ...
    def get_status(self): ...
    def set_header(self, name, value): ...
    def add_header(self, name, value): ...
    def clear_header(self, name): ...
    # Argument access (combined, body-only, and query-only variants).
    def get_argument(self, name, default=..., strip=...): ...
    def get_arguments(self, name, strip=...): ...
    def get_body_argument(self, name, default=..., strip=...): ...
    def get_body_arguments(self, name, strip=...): ...
    def get_query_argument(self, name, default=..., strip=...): ...
    def get_query_arguments(self, name, strip=...): ...
    def decode_argument(self, value, name=...): ...
    # Plain and signed cookies.
    @property
    def cookies(self): ...
    def get_cookie(self, name, default=...): ...
    def set_cookie(self, name, value, domain=..., expires=..., path=..., expires_days=..., **kwargs): ...
    def clear_cookie(self, name, path=..., domain=...): ...
    def clear_all_cookies(self, path=..., domain=...): ...
    def set_secure_cookie(self, name, value, expires_days=..., version=..., **kwargs): ...
    def create_signed_value(self, name, value, version=...): ...
    def get_secure_cookie(self, name, value=..., max_age_days=..., min_version=...): ...
    def get_secure_cookie_key_version(self, name, value=...): ...
    # Output generation.
    def redirect(self, url, permanent=..., status=...): ...
    def write(self, chunk): ...
    def render(self, template_name, **kwargs): ...
    def render_string(self, template_name, **kwargs): ...
    def get_template_namespace(self): ...
    def create_template_loader(self, template_path): ...
    def flush(self, include_footers=..., callback=...): ...
    def finish(self, chunk=...): ...
    def send_error(self, status_code=..., **kwargs): ...
    def write_error(self, status_code, **kwargs): ...
    # Localization.
    @property
    def locale(self): ...
    @locale.setter
    def locale(self, value): ...
    def get_user_locale(self): ...
    def get_browser_locale(self, default=...): ...
    # Authentication hooks.
    @property
    def current_user(self): ...
    @current_user.setter
    def current_user(self, value): ...
    def get_current_user(self): ...
    def get_login_url(self): ...
    def get_template_path(self): ...
    # XSRF protection and static assets.
    @property
    def xsrf_token(self): ...
    def check_xsrf_cookie(self): ...
    def xsrf_form_html(self): ...
    def static_url(self, path, include_host=..., **kwargs): ...
    def require_setting(self, name, feature=...): ...
    def reverse_url(self, name, *args): ...
    # ETag support and misc hooks.
    def compute_etag(self): ...
    def set_etag_header(self): ...
    def check_etag_header(self): ...
    def data_received(self, chunk): ...
    def log_exception(self, typ, value, tb): ...
# Decorators applied to handler methods/classes.
def asynchronous(method): ...
def stream_request_body(cls): ...
def removeslash(method): ...
def addslash(method): ...
# Routing table + settings container; also the HTTPServer connection delegate.
class Application(httputil.HTTPServerConnectionDelegate):
    transforms: Any
    handlers: Any
    named_handlers: Any
    default_host: Any
    settings: Any
    ui_modules: Any
    ui_methods: Any
    def __init__(self, handlers=..., default_host=..., transforms=..., **settings) -> None: ...
    def listen(self, port, address=..., **kwargs): ...
    def add_handlers(self, host_pattern, host_handlers): ...
    def add_transform(self, transform_class): ...
    def start_request(self, server_conn, request_conn): ...
    def __call__(self, request): ...
    def reverse_url(self, name, *args): ...
    def log_request(self, handler): ...
# Internal: routes one incoming request to its matching handler class.
class _RequestDispatcher(httputil.HTTPMessageDelegate):
    application: Any
    connection: Any
    request: Any
    chunks: Any
    handler_class: Any
    handler_kwargs: Any
    path_args: Any
    path_kwargs: Any
    def __init__(self, application, connection) -> None: ...
    def headers_received(self, start_line, headers): ...
    stream_request_body: Any
    def set_request(self, request): ...
    def data_received(self, data): ...
    def finish(self): ...
    def on_connection_close(self): ...
    handler: Any
    def execute(self): ...
# Raised by handlers to produce an HTTP error response with `status_code`.
class HTTPError(Exception):
    status_code: Any
    log_message: Any
    args: Any
    reason: Any
    def __init__(self, status_code, log_message=..., *args, **kwargs) -> None: ...
# NOTE(review): per tornado docs, raising Finish ends the request without an
# error page — confirm against the vendored tornado version.
class Finish(Exception): ...
# Specialization of HTTPError for a missing required argument.
class MissingArgumentError(HTTPError):
    arg_name: Any
    def __init__(self, arg_name) -> None: ...
# Handler that always responds with the configured status code.
class ErrorHandler(RequestHandler):
    def initialize(self, status_code): ...
    def prepare(self): ...
    def check_xsrf_cookie(self): ...
# Handler that redirects GET requests to the configured URL.
class RedirectHandler(RequestHandler):
    def initialize(self, url, permanent=...): ...
    def get(self): ...
class StaticFileHandler(RequestHandler):
CACHE_MAX_AGE: Any
root: Any
default_filename: Any
def initialize(self, path, default_filename=...): ...
@classmethod
def reset(cls): ...
def head(self, path): ...
path: Any
absolute_path: Any
modified: Any
def get(self, path, include_body=...): ...
def compute_etag(self): ...
def set_headers(self): ...
def should_return_304(self): ...
@classmethod
def get_absolute_path(cls, root, path): ...
def validate_absolute_path(self, root, absolute_path): ...
@classmethod
def get_content(cls, abspath, start=..., end=...): ...
@classmethod
def get_content_version(cls, abspath): ...
def get_content_size(self): ...
def get_modified_time(self): ...
def get_content_type(self): ...
def set_extra_headers(self, path): ...
def get_cache_time(self, path, modified, mime_type): ...
@classmethod
def make_static_url(cls, settings, path, include_version=...): ...
def parse_url_path(self, url_path): ...
@classmethod
def get_version(cls, settings, path): ...
class FallbackHandler(RequestHandler):
fallback: Any
def initialize(self, fallback): ...
def prepare(self): ...
class OutputTransform:
def __init__(self, request) -> None: ...
def transform_first_chunk(self, status_code, headers, chunk, finishing): ...
def transform_chunk(self, chunk, finishing): ...
class GZipContentEncoding(OutputTransform):
CONTENT_TYPES: Any
MIN_LENGTH: Any
def __init__(self, request) -> None: ...
def transform_first_chunk(self, status_code, headers, chunk, finishing): ...
def transform_chunk(self, chunk, finishing): ...
def authenticated(method): ...
class UIModule:
handler: Any
request: Any
ui: Any
locale: Any
def __init__(self, handler) -> None: ...
@property
def current_user(self): ...
def render(self, *args, **kwargs): ...
def embedded_javascript(self): ...
def javascript_files(self): ...
def embedded_css(self): ...
def css_files(self): ...
def html_head(self): ...
def html_body(self): ...
def render_string(self, path, **kwargs): ...
class _linkify(UIModule):
def render(self, text, **kwargs): ...
class _xsrf_form_html(UIModule):
def render(self): ...
class TemplateModule(UIModule):
def __init__(self, handler) -> None: ...
def render(self, path, **kwargs): ...
def embedded_javascript(self): ...
def javascript_files(self): ...
def embedded_css(self): ...
def css_files(self): ...
def html_head(self): ...
def html_body(self): ...
class _UIModuleNamespace:
handler: Any
ui_modules: Any
def __init__(self, handler, ui_modules) -> None: ...
def __getitem__(self, key): ...
def __getattr__(self, key): ...
class URLSpec:
regex: Any
handler_class: Any
kwargs: Any
name: Any
def __init__(self, pattern, handler, kwargs=..., name=...) -> None: ...
def reverse(self, *args): ...
url: Any
def create_signed_value(secret, name, value, version=..., clock=..., key_version=...): ...
def decode_signed_value(secret, name, value, max_age_days=..., clock=..., min_version=...): ...
def get_signature_key_version(value): ...
| 8,848 | Python | .py | 240 | 32.358333 | 105 | 0.631438 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,349 | testing.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/testing.pyi | import logging
import unittest
from typing import Any, Callable, Generator, Optional, overload
AsyncHTTPClient: Any
gen: Any
HTTPServer: Any
IOLoop: Any
netutil: Any
SimpleAsyncHTTPClient: Any
def get_unused_port(): ...
def bind_unused_port(): ...
class AsyncTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs): ...
io_loop: Any
def setUp(self): ...
def tearDown(self): ...
def get_new_ioloop(self): ...
def run(self, result: Optional[Any] = ...): ...
def stop(self, _arg: Optional[Any] = ..., **kwargs): ...
def wait(self, condition: Optional[Any] = ..., timeout: float = ...): ...
class AsyncHTTPTestCase(AsyncTestCase):
http_client: Any
http_server: Any
def setUp(self): ...
def get_http_client(self): ...
def get_http_server(self): ...
def get_app(self): ...
def fetch(self, path, **kwargs): ...
def get_httpserver_options(self): ...
def get_http_port(self): ...
def get_protocol(self): ...
def get_url(self, path): ...
def tearDown(self): ...
class AsyncHTTPSTestCase(AsyncHTTPTestCase):
def get_http_client(self): ...
def get_httpserver_options(self): ...
def get_ssl_options(self): ...
def get_protocol(self): ...
@overload
def gen_test(*, timeout: Optional[float] = ...) -> Callable[[Callable[..., Generator[Any, Any, Any]]], Callable[..., None]]: ...
@overload
def gen_test(func: Callable[..., Generator[Any, Any, Any]]) -> Callable[..., None]: ...
class LogTrapTestCase(unittest.TestCase):
def run(self, result: Optional[Any] = ...): ...
class ExpectLog(logging.Filter):
logger: Any
regex: Any
required: Any
matched: Any
def __init__(self, logger, regex, required: bool = ...): ...
def filter(self, record): ...
def __enter__(self): ...
def __exit__(self, typ, value, tb): ...
def main(**kwargs): ...
| 1,865 | Python | .py | 54 | 30.925926 | 128 | 0.63374 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,350 | concurrent.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/concurrent.pyi | from typing import Any
futures: Any
class ReturnValueIgnoredError(Exception): ...
class _TracebackLogger:
exc_info: Any
formatted_tb: Any
def __init__(self, exc_info) -> None: ...
def activate(self): ...
def clear(self): ...
def __del__(self): ...
class Future:
def __init__(self) -> None: ...
def cancel(self): ...
def cancelled(self): ...
def running(self): ...
def done(self): ...
def result(self, timeout=...): ...
def exception(self, timeout=...): ...
def add_done_callback(self, fn): ...
def set_result(self, result): ...
def set_exception(self, exception): ...
def exc_info(self): ...
def set_exc_info(self, exc_info): ...
def __del__(self): ...
TracebackFuture: Any
FUTURES: Any
def is_future(x): ...
class DummyExecutor:
def submit(self, fn, *args, **kwargs): ...
def shutdown(self, wait=...): ...
dummy_executor: Any
def run_on_executor(*args, **kwargs): ...
def return_future(f): ...
def chain_future(a, b): ...
| 1,016 | Python | .py | 34 | 26.147059 | 46 | 0.606372 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,351 | netutil.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/tornado/netutil.pyi | from typing import Any
from tornado.util import Configurable
ssl: Any
certifi: Any
xrange: Any
ssl_match_hostname: Any
SSLCertificateError: Any
def bind_sockets(port, address=..., family=..., backlog=..., flags=...): ...
def bind_unix_socket(file, mode=..., backlog=...): ...
def add_accept_handler(sock, callback, io_loop=...): ...
def is_valid_ip(ip): ...
class Resolver(Configurable):
@classmethod
def configurable_base(cls): ...
@classmethod
def configurable_default(cls): ...
def resolve(self, host, port, family=..., callback=...): ...
def close(self): ...
class ExecutorResolver(Resolver):
io_loop: Any
executor: Any
close_executor: Any
def initialize(self, io_loop=..., executor=..., close_executor=...): ...
def close(self): ...
def resolve(self, host, port, family=...): ...
class BlockingResolver(ExecutorResolver):
def initialize(self, io_loop=...): ...
class ThreadedResolver(ExecutorResolver):
def initialize(self, io_loop=..., num_threads=...): ...
class OverrideResolver(Resolver):
resolver: Any
mapping: Any
def initialize(self, resolver, mapping): ...
def close(self): ...
def resolve(self, host, port, *args, **kwargs): ...
def ssl_options_to_context(ssl_options): ...
def ssl_wrap_socket(socket, ssl_options, server_hostname=..., **kwargs): ...
| 1,350 | Python | .py | 37 | 33.189189 | 76 | 0.671012 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,352 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/__init__.pyi | from __future__ import print_function
import types
import typing
import unittest
from __builtin__ import unichr as unichr
from functools import wraps as wraps
from StringIO import StringIO as StringIO
from typing import (
Any,
AnyStr,
Callable,
Dict,
ItemsView,
Iterable,
KeysView,
Mapping,
NoReturn,
Optional,
Pattern,
Text,
Tuple,
Type,
TypeVar,
Union,
ValuesView,
overload,
)
from . import moves
BytesIO = StringIO
_T = TypeVar("_T")
_K = TypeVar("_K")
_V = TypeVar("_V")
__version__: str
# TODO make constant, then move this stub to 2and3
# https://github.com/python/typeshed/issues/17
PY2 = True
PY3 = False
PY34 = False
string_types = (str, unicode)
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
MAXSIZE: int
def advance_iterator(it: typing.Iterator[_T]) -> _T: ...
next = advance_iterator
def callable(obj: object) -> bool: ...
def get_unbound_function(unbound: types.MethodType) -> types.FunctionType: ...
def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ...
def create_unbound_method(func: types.FunctionType, cls: Union[type, types.ClassType]) -> types.MethodType: ...
class Iterator:
def next(self) -> Any: ...
def get_method_function(meth: types.MethodType) -> types.FunctionType: ...
def get_method_self(meth: types.MethodType) -> Optional[object]: ...
def get_function_closure(fun: types.FunctionType) -> Optional[Tuple[types._Cell, ...]]: ...
def get_function_code(fun: types.FunctionType) -> types.CodeType: ...
def get_function_defaults(fun: types.FunctionType) -> Optional[Tuple[Any, ...]]: ...
def get_function_globals(fun: types.FunctionType) -> Dict[str, Any]: ...
def iterkeys(d: Mapping[_K, _V]) -> typing.Iterator[_K]: ...
def itervalues(d: Mapping[_K, _V]) -> typing.Iterator[_V]: ...
def iteritems(d: Mapping[_K, _V]) -> typing.Iterator[Tuple[_K, _V]]: ...
# def iterlists
def viewkeys(d: Mapping[_K, _V]) -> KeysView[_K]: ...
def viewvalues(d: Mapping[_K, _V]) -> ValuesView[_V]: ...
def viewitems(d: Mapping[_K, _V]) -> ItemsView[_K, _V]: ...
def b(s: str) -> binary_type: ...
def u(s: str) -> text_type: ...
int2byte = chr
def byte2int(bs: binary_type) -> int: ...
def indexbytes(buf: binary_type, i: int) -> int: ...
def iterbytes(buf: binary_type) -> typing.Iterator[int]: ...
def assertCountEqual(self: unittest.TestCase, first: Iterable[_T], second: Iterable[_T], msg: str = ...) -> None: ...
@overload
def assertRaisesRegex(self: unittest.TestCase, msg: str = ...) -> Any: ...
@overload
def assertRaisesRegex(self: unittest.TestCase, callable_obj: Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ...
def assertRegex(
self: unittest.TestCase, text: AnyStr, expected_regex: Union[AnyStr, Pattern[AnyStr]], msg: str = ...
) -> None: ...
def reraise(
tp: Optional[Type[BaseException]], value: Optional[BaseException], tb: Optional[types.TracebackType] = ...
) -> NoReturn: ...
def exec_(_code_: Union[unicode, types.CodeType], _globs_: Dict[str, Any] = ..., _locs_: Dict[str, Any] = ...): ...
def raise_from(value: Union[BaseException, Type[BaseException]], from_value: Optional[BaseException]) -> NoReturn: ...
print_ = print
def with_metaclass(meta: type, *bases: type) -> type: ...
def add_metaclass(metaclass: type) -> Callable[[_T], _T]: ...
def ensure_binary(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> bytes: ...
def ensure_str(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> str: ...
def ensure_text(s: Union[bytes, Text], encoding: str = ..., errors: str = ...) -> Text: ...
def python_2_unicode_compatible(klass: _T) -> _T: ...
class _LazyDescriptor:
name: str
def __init__(self, name: str) -> None: ...
def __get__(self, obj: Optional[object], type: Optional[type] = ...) -> Any: ...
class MovedModule(_LazyDescriptor):
mod: str
def __init__(self, name: str, old: str, new: Optional[str] = ...) -> None: ...
def __getattr__(self, attr: str) -> Any: ...
class MovedAttribute(_LazyDescriptor):
mod: str
attr: str
def __init__(
self, name: str, old_mod: str, new_mod: str, old_attr: Optional[str] = ..., new_attr: Optional[str] = ...
) -> None: ...
def add_move(move: Union[MovedModule, MovedAttribute]) -> None: ...
def remove_move(name: str) -> None: ...
| 4,390 | Python | .py | 107 | 38.579439 | 119 | 0.662207 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,353 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/__init__.pyi | # Stubs for six.moves
#
# Note: Commented out items means they weren't implemented at the time.
# Uncomment them when the modules have been added to the typeshed.
import __builtin__
import itertools
import os
import pipes
from __builtin__ import intern as intern, reduce as reduce, xrange as xrange
from cStringIO import StringIO as _cStringIO
from StringIO import StringIO as StringIO
from UserDict import UserDict as UserDict
from UserList import UserList as UserList
from UserString import UserString as UserString
# import Tkinter as tkinter
# import Dialog as tkinter_dialog
# import FileDialog as tkinter_filedialog
# import ScrolledText as tkinter_scrolledtext
# import SimpleDialog as tkinter_simpledialog
# import Tix as tkinter_tix
# import ttk as tkinter_ttk
# import Tkconstants as tkinter_constants
# import Tkdnd as tkinter_dnd
# import tkColorChooser as tkinter_colorchooser
# import tkCommonDialog as tkinter_commondialog
# import tkFileDialog as tkinter_tkfiledialog
# import tkFont as tkinter_font
# import tkMessageBox as tkinter_messagebox
# import tkSimpleDialog as tkinter_tksimpledialog
# import email.MIMEBase as email_mime_base
# import email.MIMEMultipart as email_mime_multipart
# import email.MIMENonMultipart as email_mime_nonmultipart
# import copy_reg as copyreg
# import gdbm as dbm_gnu
from . import (
BaseHTTPServer,
CGIHTTPServer,
SimpleHTTPServer,
_dummy_thread,
_thread,
configparser,
cPickle,
email_mime_text,
html_entities,
html_parser,
http_client,
http_cookiejar,
http_cookies,
queue,
reprlib,
socketserver,
urllib,
urllib_error,
urllib_parse,
urllib_robotparser,
xmlrpc_client,
)
# import SimpleXMLRPCServer as xmlrpc_server
builtins = __builtin__
input = __builtin__.raw_input
reload_module = __builtin__.reload
range = __builtin__.xrange
cStringIO = _cStringIO
filter = itertools.ifilter
filterfalse = itertools.ifilterfalse
map = itertools.imap
zip = itertools.izip
zip_longest = itertools.izip_longest
getcwdb = os.getcwd
getcwd = os.getcwdu
shlex_quote = pipes.quote
| 2,105 | Python | .py | 71 | 27.366197 | 76 | 0.796251 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,354 | response.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/urllib/response.pyi | from urllib import addbase as addbase, addclosehook as addclosehook, addinfo as addinfo, addinfourl as addinfourl
| 114 | Python | .py | 1 | 113 | 113 | 0.849558 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,355 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/urllib/__init__.pyi | import six.moves.urllib.error as error
import six.moves.urllib.parse as parse
import six.moves.urllib.request as request
import six.moves.urllib.response as response
import six.moves.urllib.robotparser as robotparser
| 217 | Python | .py | 5 | 42.4 | 50 | 0.858491 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,356 | error.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/urllib/error.pyi | from urllib import ContentTooShortError as ContentTooShortError
from urllib2 import HTTPError as HTTPError, URLError as URLError
| 129 | Python | .py | 2 | 63.5 | 64 | 0.889764 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,357 | request.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/urllib/request.pyi | from urllib import (
FancyURLopener as FancyURLopener,
URLopener as URLopener,
getproxies as getproxies,
pathname2url as pathname2url,
proxy_bypass as proxy_bypass,
url2pathname as url2pathname,
urlcleanup as urlcleanup,
urlretrieve as urlretrieve,
)
from urllib2 import (
AbstractBasicAuthHandler as AbstractBasicAuthHandler,
AbstractDigestAuthHandler as AbstractDigestAuthHandler,
BaseHandler as BaseHandler,
CacheFTPHandler as CacheFTPHandler,
FileHandler as FileHandler,
FTPHandler as FTPHandler,
HTTPBasicAuthHandler as HTTPBasicAuthHandler,
HTTPCookieProcessor as HTTPCookieProcessor,
HTTPDefaultErrorHandler as HTTPDefaultErrorHandler,
HTTPDigestAuthHandler as HTTPDigestAuthHandler,
HTTPErrorProcessor as HTTPErrorProcessor,
HTTPHandler as HTTPHandler,
HTTPPasswordMgr as HTTPPasswordMgr,
HTTPPasswordMgrWithDefaultRealm as HTTPPasswordMgrWithDefaultRealm,
HTTPRedirectHandler as HTTPRedirectHandler,
HTTPSHandler as HTTPSHandler,
OpenerDirector as OpenerDirector,
ProxyBasicAuthHandler as ProxyBasicAuthHandler,
ProxyDigestAuthHandler as ProxyDigestAuthHandler,
ProxyHandler as ProxyHandler,
Request as Request,
UnknownHandler as UnknownHandler,
build_opener as build_opener,
install_opener as install_opener,
parse_http_list as parse_http_list,
parse_keqv_list as parse_keqv_list,
urlopen as urlopen,
)
| 1,453 | Python | .py | 39 | 32.666667 | 71 | 0.80976 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,358 | parse.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/six/moves/urllib/parse.pyi | from urllib import (
quote as quote,
quote_plus as quote_plus,
splitquery as splitquery,
splittag as splittag,
splituser as splituser,
unquote as unquote,
unquote_plus as unquote_plus,
urlencode as urlencode,
)
from urlparse import (
ParseResult as ParseResult,
SplitResult as SplitResult,
parse_qs as parse_qs,
parse_qsl as parse_qsl,
urldefrag as urldefrag,
urljoin as urljoin,
urlparse as urlparse,
urlsplit as urlsplit,
urlunparse as urlunparse,
urlunsplit as urlunsplit,
uses_fragment as uses_fragment,
uses_netloc as uses_netloc,
uses_params as uses_params,
uses_query as uses_query,
uses_relative as uses_relative,
)
unquote_to_bytes = unquote
| 744 | Python | .py | 28 | 22.25 | 35 | 0.728671 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,359 | util.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/routes/util.pyi | from typing import Any
class RoutesException(Exception): ...
class MatchException(RoutesException): ...
class GenerationException(RoutesException): ...
def url_for(*args, **kargs): ...
class URLGenerator:
mapper: Any
environ: Any
def __init__(self, mapper, environ) -> None: ...
def __call__(self, *args, **kargs): ...
def current(self, *args, **kwargs): ...
def redirect_to(*args, **kargs): ...
def cache_hostinfo(environ): ...
def controller_scan(directory=...): ...
def as_unicode(value, encoding, errors=...): ...
def ascii_characters(string): ...
| 576 | Python | .py | 16 | 33.5 | 52 | 0.670863 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,360 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/routes/__init__.pyi | from . import mapper, util
class _RequestConfig:
def __getattr__(self, name): ...
def __setattr__(self, name, value): ...
def __delattr__(self, name): ...
def load_wsgi_environ(self, environ): ...
def request_config(original=...): ...
Mapper = mapper.Mapper
redirect_to = util.redirect_to
url_for = util.url_for
URLGenerator = util.URLGenerator
| 364 | Python | .py | 11 | 30.363636 | 45 | 0.674286 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,361 | mapper.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/routes/mapper.pyi | from typing import Any
COLLECTION_ACTIONS: Any
MEMBER_ACTIONS: Any
def strip_slashes(name): ...
class SubMapperParent:
def submapper(self, **kargs): ...
def collection(
self,
collection_name,
resource_name,
path_prefix=...,
member_prefix=...,
controller=...,
collection_actions=...,
member_actions=...,
member_options=...,
**kwargs,
): ...
class SubMapper(SubMapperParent):
kwargs: Any
obj: Any
collection_name: Any
member: Any
resource_name: Any
formatted: Any
def __init__(self, obj, resource_name=..., collection_name=..., actions=..., formatted=..., **kwargs) -> None: ...
def connect(self, *args, **kwargs): ...
def link(self, rel=..., name=..., action=..., method=..., formatted=..., **kwargs): ...
def new(self, **kwargs): ...
def edit(self, **kwargs): ...
def action(self, name=..., action=..., method=..., formatted=..., **kwargs): ...
def index(self, name=..., **kwargs): ...
def show(self, name=..., **kwargs): ...
def create(self, **kwargs): ...
def update(self, **kwargs): ...
def delete(self, **kwargs): ...
def add_actions(self, actions): ...
def __enter__(self): ...
def __exit__(self, type, value, tb): ...
class Mapper(SubMapperParent):
matchlist: Any
maxkeys: Any
minkeys: Any
urlcache: Any
prefix: Any
req_data: Any
directory: Any
always_scan: Any
controller_scan: Any
debug: Any
append_slash: Any
sub_domains: Any
sub_domains_ignore: Any
domain_match: Any
explicit: Any
encoding: Any
decode_errors: Any
hardcode_names: Any
minimization: Any
create_regs_lock: Any
def __init__(self, controller_scan=..., directory=..., always_scan=..., register=..., explicit=...) -> None: ...
environ: Any
def extend(self, routes, path_prefix=...): ...
def make_route(self, *args, **kargs): ...
def connect(self, *args, **kargs): ...
def create_regs(self, *args, **kwargs): ...
def match(self, url=..., environ=...): ...
def routematch(self, url=..., environ=...): ...
obj: Any
def generate(self, *args, **kargs): ...
def resource(self, member_name, collection_name, **kwargs): ...
def redirect(self, match_path, destination_path, *args, **kwargs): ...
| 2,362 | Python | .py | 72 | 27.569444 | 118 | 0.586433 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,362 | scribe.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/scribe/scribe.pyi | from typing import Any
import fb303.FacebookService
from thrift.Thrift import TProcessor # type: ignore # We don't have thrift stubs in typeshed
from .ttypes import * # noqa: F403
class Iface(fb303.FacebookService.Iface):
def Log(self, messages): ...
class Client(fb303.FacebookService.Client, Iface):
def __init__(self, iprot, oprot=...) -> None: ...
def Log(self, messages): ...
def send_Log(self, messages): ...
def recv_Log(self): ...
class Processor(fb303.FacebookService.Processor, Iface, TProcessor): # type: ignore
def __init__(self, handler) -> None: ...
def process(self, iprot, oprot): ...
def process_Log(self, seqid, iprot, oprot): ...
class Log_args:
thrift_spec: Any
messages: Any
def __init__(self, messages=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class Log_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
| 1,216 | Python | .py | 33 | 32.727273 | 94 | 0.622449 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,363 | ttypes.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/scribe/ttypes.pyi | from typing import Any
fastbinary: Any
class ResultCode:
OK: Any
TRY_LATER: Any
class LogEntry:
thrift_spec: Any
category: Any
message: Any
def __init__(self, category=..., message=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
| 383 | Python | .py | 15 | 21.4 | 62 | 0.591781 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,364 | crypto.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/OpenSSL/crypto.pyi | from datetime import datetime
from typing import Any, Callable, Iterable, List, Optional, Set, Text, Tuple, Union
from cryptography.hazmat.primitives.asymmetric import dsa, rsa
FILETYPE_PEM: int
FILETYPE_ASN1: int
FILETYPE_TEXT: int
TYPE_RSA: int
TYPE_DSA: int
class Error(Exception): ...
_Key = Union[rsa.RSAPublicKey, rsa.RSAPrivateKey, dsa.DSAPublicKey, dsa.DSAPrivateKey]
class PKey:
def __init__(self) -> None: ...
def to_cryptography_key(self) -> _Key: ...
@classmethod
def from_cryptography_key(cls, crypto_key: _Key): ...
def generate_key(self, type: int, bits: int) -> None: ...
def check(self) -> bool: ...
def type(self) -> int: ...
def bits(self) -> int: ...
class _EllipticCurve:
name: Text
def get_elliptic_curves() -> Set[_EllipticCurve]: ...
def get_elliptic_curve(name: str) -> _EllipticCurve: ...
class X509Name:
def __init__(self, name: X509Name) -> None: ...
countryName: Union[str, unicode]
stateOrProvinceName: Union[str, unicode]
localityName: Union[str, unicode]
organizationName: Union[str, unicode]
organizationalUnitName: Union[str, unicode]
commonName: Union[str, unicode]
emailAddress: Union[str, unicode]
C: Union[str, unicode]
ST: Union[str, unicode]
L: Union[str, unicode]
O: Union[str, unicode]
OU: Union[str, unicode]
CN: Union[str, unicode]
def hash(self) -> int: ...
def der(self) -> bytes: ...
def get_components(self) -> List[Tuple[str, str]]: ...
class X509Extension:
def __init__(
self, type_name: bytes, critical: bool, value: bytes, subject: Optional[X509] = ..., issuer: Optional[X509] = ...
) -> None: ...
def get_critical(self) -> bool: ...
def get_short_name(self) -> str: ...
def get_data(self) -> str: ...
class X509Req:
def __init__(self) -> None: ...
def set_pubkey(self, pkey: PKey) -> None: ...
def get_pubkey(self) -> PKey: ...
def set_version(self, version: int) -> None: ...
def get_version(self) -> int: ...
def get_subject(self) -> X509Name: ...
def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
def get_extensions(self) -> List[X509Extension]: ...
def sign(self, pkey: PKey, digest: str) -> None: ...
def verify(self, pkey: PKey) -> bool: ...
class X509:
def __init__(self) -> None: ...
def set_version(self, version: int) -> None: ...
def get_version(self) -> int: ...
def get_pubkey(self) -> PKey: ...
def set_pubkey(self, pkey: PKey) -> None: ...
def sign(self, pkey: PKey, digest: str) -> None: ...
def get_signature_algorithm(self) -> str: ...
def digest(self, digest_name: str) -> str: ...
def subject_name_hash(self) -> str: ...
def set_serial_number(self, serial: int) -> None: ...
def get_serial_number(self) -> int: ...
def gmtime_adj_notAfter(self, amount: int) -> None: ...
def gmtime_adj_notBefore(self, amount: int) -> None: ...
def has_expired(self) -> bool: ...
def get_notBefore(self) -> str: ...
def set_notBefore(self, when: str) -> None: ...
def get_notAfter(self) -> str: ...
def set_notAfter(self, when: str) -> None: ...
def get_issuer(self) -> X509Name: ...
def set_issuer(self, issuer: X509Name) -> None: ...
def get_subject(self) -> X509Name: ...
def set_subject(self, subject: X509Name) -> None: ...
def get_extension_count(self) -> int: ...
def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ...
def get_extension(self, index: int) -> X509Extension: ...
class X509StoreFlags:
CRL_CHECK: int
CRL_CHECK_ALL: int
IGNORE_CRITICAL: int
X509_STRICT: int
ALLOW_PROXY_CERTS: int
POLICY_CHECK: int
EXPLICIT_POLICY: int
INHIBIT_MAP: int
NOTIFY_POLICY: int
CHECK_SS_SIGNATURE: int
CB_ISSUER_CHECK: int
class X509Store:
def __init__(self) -> None: ...
def add_cert(self, cert: X509) -> None: ...
def add_crl(self, crl: CRL) -> None: ...
def set_flags(self, flags: int) -> None: ...
def set_time(self, vfy_time: datetime) -> None: ...
class X509StoreContextError(Exception):
certificate: X509
def __init__(self, message: str, certificate: X509) -> None: ...
class X509StoreContext:
def __init__(self, store: X509Store, certificate: X509) -> None: ...
def set_store(self, store: X509Store) -> None: ...
def verify_certificate(self) -> None: ...
def load_certificate(type: int, buffer: Union[str, unicode]) -> X509: ...
def dump_certificate(type: int, cert: X509) -> bytes: ...
def dump_publickey(type: int, pkey: PKey) -> bytes: ...
def dump_privatekey(
type: int, pkey: PKey, cipher: Optional[str] = ..., passphrase: Optional[Union[str, Callable[[int], int]]] = ...
) -> bytes: ...
class Revoked:
def __init__(self) -> None: ...
def set_serial(self, hex_str: str) -> None: ...
def get_serial(self) -> str: ...
def set_reason(self, reason: str) -> None: ...
def get_reason(self) -> str: ...
def all_reasons(self) -> List[str]: ...
def set_rev_date(self, when: str) -> None: ...
def get_rev_date(self) -> str: ...
class CRL:
def __init__(self) -> None: ...
def get_revoked(self) -> Tuple[Revoked, ...]: ...
def add_revoked(self, revoked: Revoked) -> None: ...
def get_issuer(self) -> X509Name: ...
def set_version(self, version: int) -> None: ...
def set_lastUpdate(self, when: str) -> None: ...
def set_nextUpdate(self, when: str) -> None: ...
def sign(self, issuer_cert: X509, issuer_key: PKey, digest: str) -> None: ...
def export(self, cert: X509, key: PKey, type: int = ..., days: int = ..., digest: str = ...) -> bytes: ...
class PKCS7:
def type_is_signed(self) -> bool: ...
def type_is_enveloped(self) -> bool: ...
def type_is_signedAndEnveloped(self) -> bool: ...
def type_is_data(self) -> bool: ...
def get_type_name(self) -> str: ...
class PKCS12:
def __init__(self) -> None: ...
def get_certificate(self) -> X509: ...
def set_certificate(self, cert: X509) -> None: ...
def get_privatekey(self) -> PKey: ...
def set_privatekey(self, pkey: PKey) -> None: ...
def get_ca_certificates(self) -> Tuple[X509, ...]: ...
def set_ca_certificates(self, cacerts: Iterable[X509]) -> None: ...
def set_friendlyname(self, name: bytes) -> None: ...
def get_friendlyname(self) -> bytes: ...
def export(self, passphrase: Optional[str] = ..., iter: int = ..., maciter: int = ...): ...
class NetscapeSPKI:
def __init__(self) -> None: ...
def sign(self, pkey: PKey, digest: str) -> None: ...
def verify(self, key: PKey) -> bool: ...
def b64_encode(self) -> str: ...
def get_pubkey(self) -> PKey: ...
def set_pubkey(self, pkey: PKey) -> None: ...
def load_publickey(type: int, buffer: Union[str, unicode]) -> PKey: ...
def load_privatekey(type: int, buffer: bytes, passphrase: Optional[Union[str, Callable[[int], int]]] = ...): ...
def dump_certificate_request(type: int, req: X509Req): ...
def load_certificate_request(type, buffer: Union[str, unicode]) -> X509Req: ...
def sign(pkey: PKey, data: Union[str, unicode], digest: str) -> bytes: ...
def verify(cert: X509, signature: bytes, data: Union[str, unicode], digest: str) -> None: ...
def dump_crl(type: int, crl: CRL) -> bytes: ...
def load_crl(type: int, buffer: Union[str, unicode]) -> CRL: ...
def load_pkcs7_data(type: int, buffer: Union[str, unicode]) -> PKCS7: ...
def load_pkcs12(buffer: Union[str, unicode], passphrase: Optional[Union[str, Callable[[int], int]]] = ...) -> PKCS12: ...
| 7,588 | Python | .py | 169 | 40.739645 | 121 | 0.623361 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,365 | process.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/concurrent/futures/process.pyi | from typing import Any, Optional
from ._base import Executor
EXTRA_QUEUED_CALLS: Any
class ProcessPoolExecutor(Executor):
def __init__(self, max_workers: Optional[int] = ...) -> None: ...
| 195 | Python | .py | 5 | 36.6 | 69 | 0.727273 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,366 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/concurrent/futures/__init__.pyi | from ._base import (
ALL_COMPLETED as ALL_COMPLETED,
FIRST_COMPLETED as FIRST_COMPLETED,
FIRST_EXCEPTION as FIRST_EXCEPTION,
CancelledError as CancelledError,
Executor as Executor,
Future as Future,
TimeoutError as TimeoutError,
as_completed as as_completed,
wait as wait,
)
from .process import ProcessPoolExecutor as ProcessPoolExecutor
from .thread import ThreadPoolExecutor as ThreadPoolExecutor
| 436 | Python | .py | 13 | 29.769231 | 63 | 0.787234 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,367 | thread.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/concurrent/futures/thread.pyi | from typing import Any, Callable, Generic, Iterable, Mapping, Optional, Tuple, TypeVar
from ._base import Executor, Future
_S = TypeVar("_S")
class ThreadPoolExecutor(Executor):
def __init__(self, max_workers: Optional[int] = ..., thread_name_prefix: str = ...) -> None: ...
class _WorkItem(Generic[_S]):
future: Future[_S]
fn: Callable[..., _S]
args: Iterable[Any]
kwargs: Mapping[str, Any]
def __init__(self, future: Future[_S], fn: Callable[..., _S], args: Iterable[Any], kwargs: Mapping[str, Any]) -> None: ...
def run(self) -> None: ...
| 574 | Python | .py | 12 | 44.166667 | 126 | 0.641577 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,368 | _base.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/concurrent/futures/_base.pyi | import threading
from abc import abstractmethod
from logging import Logger
from types import TracebackType
from typing import Any, Callable, Container, Generic, Iterable, Iterator, List, Optional, Protocol, Set, Tuple, TypeVar
FIRST_COMPLETED: str
FIRST_EXCEPTION: str
ALL_COMPLETED: str
PENDING: str
RUNNING: str
CANCELLED: str
CANCELLED_AND_NOTIFIED: str
FINISHED: str
LOGGER: Logger
class Error(Exception): ...
class CancelledError(Error): ...
class TimeoutError(Error): ...
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
# Copied over Collection implementation as it does not exist in Python 2 and <3.6.
# Also to solve pytype issues with _Collection.
class _Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]):
# Implement Sized (but don't have it as a base class).
@abstractmethod
def __len__(self) -> int: ...
class Future(Generic[_T]):
def __init__(self) -> None: ...
def cancel(self) -> bool: ...
def cancelled(self) -> bool: ...
def running(self) -> bool: ...
def done(self) -> bool: ...
def add_done_callback(self, fn: Callable[[Future[_T]], Any]) -> None: ...
def result(self, timeout: Optional[float] = ...) -> _T: ...
def set_running_or_notify_cancel(self) -> bool: ...
def set_result(self, result: _T) -> None: ...
def exception(self, timeout: Optional[float] = ...) -> Any: ...
def exception_info(self, timeout: Optional[float] = ...) -> Tuple[Any, Optional[TracebackType]]: ...
def set_exception(self, exception: Any) -> None: ...
def set_exception_info(self, exception: Any, traceback: Optional[TracebackType]) -> None: ...
class Executor:
def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ...
def map(self, func: Callable[..., _T], *iterables: Iterable[Any], timeout: Optional[float] = ...) -> Iterator[_T]: ...
def shutdown(self, wait: bool = ...) -> None: ...
def __enter__(self: _T) -> _T: ...
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> Optional[bool]: ...
def as_completed(fs: Iterable[Future[_T]], timeout: Optional[float] = ...) -> Iterator[Future[_T]]: ...
def wait(
fs: _Collection[Future[_T]], timeout: Optional[float] = ..., return_when: str = ...
) -> Tuple[Set[Future[_T]], Set[Future[_T]]]: ...
class _Waiter:
event: threading.Event
finished_futures: List[Future[Any]]
def __init__(self) -> None: ...
def add_result(self, future: Future[Any]) -> None: ...
def add_exception(self, future: Future[Any]) -> None: ...
def add_cancelled(self, future: Future[Any]) -> None: ...
class _AsCompletedWaiter(_Waiter):
lock: threading.Lock
def __init__(self) -> None: ...
def add_result(self, future: Future[Any]) -> None: ...
def add_exception(self, future: Future[Any]) -> None: ...
def add_cancelled(self, future: Future[Any]) -> None: ...
class _FirstCompletedWaiter(_Waiter):
def add_result(self, future: Future[Any]) -> None: ...
def add_exception(self, future: Future[Any]) -> None: ...
def add_cancelled(self, future: Future[Any]) -> None: ...
class _AllCompletedWaiter(_Waiter):
num_pending_calls: int
stop_on_exception: bool
lock: threading.Lock
def __init__(self, num_pending_calls: int, stop_on_exception: bool) -> None: ...
def add_result(self, future: Future[Any]) -> None: ...
def add_exception(self, future: Future[Any]) -> None: ...
def add_cancelled(self, future: Future[Any]) -> None: ...
class _AcquireFutures:
futures: Iterable[Future[Any]]
def __init__(self, futures: Iterable[Future[Any]]) -> None: ...
def __enter__(self) -> None: ...
def __exit__(self, *args: Any) -> None: ...
| 3,701 | Python | .py | 79 | 43.303797 | 122 | 0.643946 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,369 | FacebookService.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/fb303/FacebookService.pyi | from typing import Any
from thrift.Thrift import TProcessor # type: ignore
fastbinary: Any
class Iface:
def getName(self): ...
def getVersion(self): ...
def getStatus(self): ...
def getStatusDetails(self): ...
def getCounters(self): ...
def getCounter(self, key): ...
def setOption(self, key, value): ...
def getOption(self, key): ...
def getOptions(self): ...
def getCpuProfile(self, profileDurationInSec): ...
def aliveSince(self): ...
def reinitialize(self): ...
def shutdown(self): ...
class Client(Iface):
def __init__(self, iprot, oprot=...) -> None: ...
def getName(self): ...
def send_getName(self): ...
def recv_getName(self): ...
def getVersion(self): ...
def send_getVersion(self): ...
def recv_getVersion(self): ...
def getStatus(self): ...
def send_getStatus(self): ...
def recv_getStatus(self): ...
def getStatusDetails(self): ...
def send_getStatusDetails(self): ...
def recv_getStatusDetails(self): ...
def getCounters(self): ...
def send_getCounters(self): ...
def recv_getCounters(self): ...
def getCounter(self, key): ...
def send_getCounter(self, key): ...
def recv_getCounter(self): ...
def setOption(self, key, value): ...
def send_setOption(self, key, value): ...
def recv_setOption(self): ...
def getOption(self, key): ...
def send_getOption(self, key): ...
def recv_getOption(self): ...
def getOptions(self): ...
def send_getOptions(self): ...
def recv_getOptions(self): ...
def getCpuProfile(self, profileDurationInSec): ...
def send_getCpuProfile(self, profileDurationInSec): ...
def recv_getCpuProfile(self): ...
def aliveSince(self): ...
def send_aliveSince(self): ...
def recv_aliveSince(self): ...
def reinitialize(self): ...
def send_reinitialize(self): ...
def shutdown(self): ...
def send_shutdown(self): ...
class Processor(Iface, TProcessor): # type: ignore
def __init__(self, handler) -> None: ...
def process(self, iprot, oprot): ...
def process_getName(self, seqid, iprot, oprot): ...
def process_getVersion(self, seqid, iprot, oprot): ...
def process_getStatus(self, seqid, iprot, oprot): ...
def process_getStatusDetails(self, seqid, iprot, oprot): ...
def process_getCounters(self, seqid, iprot, oprot): ...
def process_getCounter(self, seqid, iprot, oprot): ...
def process_setOption(self, seqid, iprot, oprot): ...
def process_getOption(self, seqid, iprot, oprot): ...
def process_getOptions(self, seqid, iprot, oprot): ...
def process_getCpuProfile(self, seqid, iprot, oprot): ...
def process_aliveSince(self, seqid, iprot, oprot): ...
def process_reinitialize(self, seqid, iprot, oprot): ...
def process_shutdown(self, seqid, iprot, oprot): ...
class getName_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getName_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getVersion_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getVersion_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getStatus_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getStatus_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getStatusDetails_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getStatusDetails_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCounters_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCounters_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCounter_args:
thrift_spec: Any
key: Any
def __init__(self, key=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCounter_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class setOption_args:
thrift_spec: Any
key: Any
value: Any
def __init__(self, key=..., value=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class setOption_result:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getOption_args:
thrift_spec: Any
key: Any
def __init__(self, key=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getOption_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getOptions_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getOptions_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCpuProfile_args:
thrift_spec: Any
profileDurationInSec: Any
def __init__(self, profileDurationInSec=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class getCpuProfile_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class aliveSince_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class aliveSince_result:
thrift_spec: Any
success: Any
def __init__(self, success=...) -> None: ...
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class reinitialize_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
class shutdown_args:
thrift_spec: Any
def read(self, iprot): ...
def write(self, oprot): ...
def validate(self): ...
def __eq__(self, other): ...
def __ne__(self, other): ...
| 8,692 | Python | .py | 269 | 27.650558 | 64 | 0.580176 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,370 | client.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/kazoo/client.pyi | from typing import Any
string_types: Any
bytes_types: Any
LOST_STATES: Any
ENVI_VERSION: Any
ENVI_VERSION_KEY: Any
log: Any
class KazooClient:
logger: Any
handler: Any
auth_data: Any
default_acl: Any
randomize_hosts: Any
hosts: Any
chroot: Any
state: Any
state_listeners: Any
read_only: Any
retry: Any
Barrier: Any
Counter: Any
DoubleBarrier: Any
ChildrenWatch: Any
DataWatch: Any
Election: Any
NonBlockingLease: Any
MultiNonBlockingLease: Any
Lock: Any
Party: Any
Queue: Any
LockingQueue: Any
SetPartitioner: Any
Semaphore: Any
ShallowParty: Any
def __init__(
self,
hosts=...,
timeout=...,
client_id=...,
handler=...,
default_acl=...,
auth_data=...,
read_only=...,
randomize_hosts=...,
connection_retry=...,
command_retry=...,
logger=...,
**kwargs,
) -> None: ...
@property
def client_state(self): ...
@property
def client_id(self): ...
@property
def connected(self): ...
def set_hosts(self, hosts, randomize_hosts=...): ...
def add_listener(self, listener): ...
def remove_listener(self, listener): ...
def start(self, timeout=...): ...
def start_async(self): ...
def stop(self): ...
def restart(self): ...
def close(self): ...
def command(self, cmd=...): ...
def server_version(self, retries=...): ...
def add_auth(self, scheme, credential): ...
def add_auth_async(self, scheme, credential): ...
def unchroot(self, path): ...
def sync_async(self, path): ...
def sync(self, path): ...
def create(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ...
def create_async(self, path, value=..., acl=..., ephemeral=..., sequence=..., makepath=...): ...
def ensure_path(self, path, acl=...): ...
def ensure_path_async(self, path, acl=...): ...
def exists(self, path, watch=...): ...
def exists_async(self, path, watch=...): ...
def get(self, path, watch=...): ...
def get_async(self, path, watch=...): ...
def get_children(self, path, watch=..., include_data=...): ...
def get_children_async(self, path, watch=..., include_data=...): ...
def get_acls(self, path): ...
def get_acls_async(self, path): ...
def set_acls(self, path, acls, version=...): ...
def set_acls_async(self, path, acls, version=...): ...
def set(self, path, value, version=...): ...
def set_async(self, path, value, version=...): ...
def transaction(self): ...
def delete(self, path, version=..., recursive=...): ...
def delete_async(self, path, version=...): ...
def reconfig(self, joining, leaving, new_members, from_config=...): ...
def reconfig_async(self, joining, leaving, new_members, from_config): ...
class TransactionRequest:
client: Any
operations: Any
committed: Any
def __init__(self, client) -> None: ...
def create(self, path, value=..., acl=..., ephemeral=..., sequence=...): ...
def delete(self, path, version=...): ...
def set_data(self, path, value, version=...): ...
def check(self, path, version): ...
def commit_async(self): ...
def commit(self): ...
def __enter__(self): ...
def __exit__(self, exc_type, exc_value, exc_tb): ...
class KazooState: ...
| 3,400 | Python | .py | 105 | 27.228571 | 100 | 0.581282 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,371 | exceptions.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/kazoo/exceptions.pyi | from typing import Any
class KazooException(Exception): ...
class ZookeeperError(KazooException): ...
class CancelledError(KazooException): ...
class ConfigurationError(KazooException): ...
class ZookeeperStoppedError(KazooException): ...
class ConnectionDropped(KazooException): ...
class LockTimeout(KazooException): ...
class WriterNotClosedException(KazooException): ...
EXCEPTIONS: Any
class RolledBackError(ZookeeperError): ...
class SystemZookeeperError(ZookeeperError): ...
class RuntimeInconsistency(ZookeeperError): ...
class DataInconsistency(ZookeeperError): ...
class ConnectionLoss(ZookeeperError): ...
class MarshallingError(ZookeeperError): ...
class UnimplementedError(ZookeeperError): ...
class OperationTimeoutError(ZookeeperError): ...
class BadArgumentsError(ZookeeperError): ...
class NewConfigNoQuorumError(ZookeeperError): ...
class ReconfigInProcessError(ZookeeperError): ...
class APIError(ZookeeperError): ...
class NoNodeError(ZookeeperError): ...
class NoAuthError(ZookeeperError): ...
class BadVersionError(ZookeeperError): ...
class NoChildrenForEphemeralsError(ZookeeperError): ...
class NodeExistsError(ZookeeperError): ...
class NotEmptyError(ZookeeperError): ...
class SessionExpiredError(ZookeeperError): ...
class InvalidCallbackError(ZookeeperError): ...
class InvalidACLError(ZookeeperError): ...
class AuthFailedError(ZookeeperError): ...
class SessionMovedError(ZookeeperError): ...
class NotReadOnlyCallError(ZookeeperError): ...
class ConnectionClosedError(SessionExpiredError): ...
ConnectionLossException: Any
MarshallingErrorException: Any
SystemErrorException: Any
RuntimeInconsistencyException: Any
DataInconsistencyException: Any
UnimplementedException: Any
OperationTimeoutException: Any
BadArgumentsException: Any
ApiErrorException: Any
NoNodeException: Any
NoAuthException: Any
BadVersionException: Any
NoChildrenForEphemeralsException: Any
NodeExistsException: Any
InvalidACLException: Any
AuthFailedException: Any
NotEmptyException: Any
SessionExpiredException: Any
InvalidCallbackException: Any
| 2,054 | Python | .py | 54 | 36.962963 | 55 | 0.846192 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,372 | watchers.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2/kazoo/recipe/watchers.pyi | from typing import Any
log: Any
class DataWatch:
def __init__(self, client, path, func=..., *args, **kwargs) -> None: ...
def __call__(self, func): ...
class ChildrenWatch:
def __init__(self, client, path, func=..., allow_session_lost=..., send_event=...) -> None: ...
def __call__(self, func): ...
class PatientChildrenWatch:
client: Any
path: Any
children: Any
time_boundary: Any
children_changed: Any
def __init__(self, client, path, time_boundary=...) -> None: ...
asy: Any
def start(self): ...
| 551 | Python | .py | 17 | 28.352941 | 99 | 0.603774 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,373 | polib.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/polib.pyi | import textwrap
from typing import IO, Any, Callable, Dict, Generic, List, Optional, Text, Tuple, Type, TypeVar, Union, overload
_TB = TypeVar("_TB", bound="_BaseEntry")
_TP = TypeVar("_TP", bound="POFile")
_TM = TypeVar("_TM", bound="MOFile")
default_encoding: str
# wrapwidth: int
# encoding: str
# check_for_duplicates: bool
@overload
def pofile(pofile: Text, *, klass: Type[_TP], **kwargs: Any) -> _TP: ...
@overload
def pofile(pofile: Text, **kwargs: Any) -> POFile: ...
@overload
def mofile(mofile: Text, *, klass: Type[_TM], **kwargs: Any) -> _TM: ...
@overload
def mofile(mofile: Text, **kwargs: Any) -> MOFile: ...
def detect_encoding(file: Union[bytes, Text], binary_mode: bool = ...) -> str: ...
def escape(st: Text) -> Text: ...
def unescape(st: Text) -> Text: ...
class _BaseFile(List[_TB]):
fpath: Text
wrapwidth: int
encoding: Text
check_for_duplicates: bool
header: Text
metadata: Dict[Text, Text]
metadata_is_fuzzy: bool
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def __unicode__(self) -> Text: ...
def __contains__(self, entry: _TB) -> bool: ... # type: ignore # AttributeError otherwise
def __eq__(self, other: object) -> bool: ...
def append(self, entry: _TB) -> None: ...
def insert(self, index: int, entry: _TB) -> None: ...
def metadata_as_entry(self) -> POEntry: ...
def save(self, fpath: Optional[Text] = ..., repr_method: str = ...) -> None: ...
def find(self, st: Text, by: str = ..., include_obsolete_entries: bool = ..., msgctxt: bool = ...) -> Optional[_TB]: ...
def ordered_metadata(self) -> List[Text]: ...
def to_binary(self) -> bytes: ...
class POFile(_BaseFile):
def __unicode__(self) -> Text: ...
def save_as_mofile(self, fpath: Text) -> None: ...
def percent_translated(self) -> int: ...
def translated_entries(self) -> List[POEntry]: ...
def untranslated_entries(self) -> List[POEntry]: ...
def fuzzy_entries(self) -> List[POEntry]: ...
def obsolete_entries(self) -> List[POEntry]: ...
def merge(self, refpot: POFile) -> None: ...
class MOFile(_BaseFile):
MAGIC: int
MAGIC_SWAPPED: int
magic_number: Optional[int]
version: int
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def save_as_pofile(self, fpath: str) -> None: ...
def save(self, fpath: Optional[Text] = ...) -> None: ... # type: ignore # binary file does not allow argument repr_method
def percent_translated(self) -> int: ...
def translated_entries(self) -> List[MOEntry]: ...
def untranslated_entries(self) -> List[MOEntry]: ...
def fuzzy_entries(self) -> List[MOEntry]: ...
def obsolete_entries(self) -> List[MOEntry]: ...
class _BaseEntry(object):
msgid: Text
msgstr: Text
msgid_plural: List[Text]
msgstr_plural: List[Text]
msgctxt: Text
obsolete: bool
encoding: str
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def __unicode__(self, wrapwidth: int = ...) -> Text: ...
def __eq__(self, other: object) -> bool: ...
class POEntry(_BaseEntry):
comment: Text
tcomment: Text
occurrences: List[Tuple[str, int]]
flags: List[Text]
previous_msgctxt: Optional[Text]
previous_msgid: Optional[Text]
previous_msgid_plural: Optional[Text]
linenum: Optional[int]
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def __unicode__(self, wrapwidth: int = ...) -> Text: ...
def __cmp__(self, other: POEntry) -> int: ...
def __gt__(self, other: POEntry) -> bool: ...
def __lt__(self, other: POEntry) -> bool: ...
def __ge__(self, other: POEntry) -> bool: ...
def __le__(self, other: POEntry) -> bool: ...
def __eq__(self, other: Any) -> bool: ...
def __ne__(self, other: Any) -> bool: ...
def translated(self) -> bool: ...
def merge(self, other: POEntry) -> None: ...
@property
def fuzzy(self) -> bool: ...
@property
def msgid_with_context(self) -> Text: ...
def __hash__(self) -> int: ...
class MOEntry(_BaseEntry):
comment: Text
tcomment: Text
occurrences: List[Tuple[str, int]]
flags: List[Text]
previous_msgctxt: Optional[Text]
previous_msgid: Optional[Text]
previous_msgid_plural: Optional[Text]
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def __hash__(self) -> int: ...
class _POFileParser(Generic[_TP]):
fhandle: IO[Text]
instance: _TP
transitions: Dict[Tuple[str, str], Tuple[Callable[[], bool], str]]
current_line: int
current_entry: POEntry
current_state: str
current_token: Optional[str]
msgstr_index: int
entry_obsolete: int
def __init__(self, pofile: Text, *args: Any, **kwargs: Any) -> None: ...
def parse(self) -> _TP: ...
def add(self, symbol: str, states: List[str], next_state: str) -> None: ...
def process(self, symbol: str) -> None: ...
def handle_he(self) -> bool: ...
def handle_tc(self) -> bool: ...
def handle_gc(self) -> bool: ...
def handle_oc(self) -> bool: ...
def handle_fl(self) -> bool: ...
def handle_pp(self) -> bool: ...
def handle_pm(self) -> bool: ...
def handle_pc(self) -> bool: ...
def handle_ct(self) -> bool: ...
def handle_mi(self) -> bool: ...
def handle_mp(self) -> bool: ...
def handle_ms(self) -> bool: ...
def handle_mx(self) -> bool: ...
def handle_mc(self) -> bool: ...
class _MOFileParser(Generic[_TM]):
fhandle: IO[bytes]
instance: _TM
def __init__(self, mofile: Text, *args: Any, **kwargs: Any) -> None: ...
def __del__(self) -> None: ...
def parse(self) -> _TM: ...
class TextWrapper(textwrap.TextWrapper):
drop_whitespace: bool
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
| 5,764 | Python | .py | 144 | 35.75 | 126 | 0.603602 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,374 | tabulate.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/tabulate.pyi | from typing import Any, Callable, Container, Dict, Iterable, List, Mapping, NamedTuple, Optional, Sequence, Union
PRESERVE_WHITESPACE: bool
WIDE_CHARS_MODE: bool
tabulate_formats: List[str]
class Line(NamedTuple):
begin: str
hline: str
sep: str
end: str
class DataRow(NamedTuple):
begin: str
sep: str
end: str
_TableFormatLine = Union[None, Line, Callable[[List[int], List[str]], str]]
_TableFormatRow = Union[None, DataRow, Callable[[List[Any], List[int], List[str]], str]]
class TableFormat(NamedTuple):
lineabove: _TableFormatLine
linebelowheader: _TableFormatLine
linebetweenrows: _TableFormatLine
linebelow: _TableFormatLine
headerrow: _TableFormatRow
datarow: _TableFormatRow
padding: int
with_header_hide: Optional[Container[str]]
def simple_separated_format(separator: str) -> TableFormat: ...
def tabulate(
tabular_data: Union[Mapping[str, Iterable[Any]], Iterable[Iterable[Any]]],
headers: Union[str, Dict[str, str], Sequence[str]] = ...,
tablefmt: Union[str, TableFormat] = ...,
floatfmt: Union[str, Iterable[str]] = ...,
numalign: Optional[str] = ...,
stralign: Optional[str] = ...,
missingval: Union[str, Iterable[str]] = ...,
showindex: Union[str, bool, Iterable[Any]] = ...,
disable_numparse: Union[bool, Iterable[int]] = ...,
colalign: Optional[Iterable[Optional[str]]] = ...,
) -> str: ...
| 1,413 | Python | .py | 37 | 34.324324 | 113 | 0.69562 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,375 | toml.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/toml.pyi | import sys
from _typeshed import StrPath, SupportsWrite
from typing import IO, Any, List, Mapping, MutableMapping, Text, Type, Union
if sys.version_info >= (3, 6):
_PathLike = StrPath
elif sys.version_info >= (3, 4):
import pathlib
_PathLike = Union[StrPath, pathlib.PurePath]
else:
_PathLike = StrPath
class TomlDecodeError(Exception): ...
def load(f: Union[_PathLike, List[Text], IO[str]], _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ...
def loads(s: Text, _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ...
def dump(o: Mapping[str, Any], f: SupportsWrite[str]) -> str: ...
def dumps(o: Mapping[str, Any]) -> str: ...
| 697 | Python | .py | 15 | 44.133333 | 128 | 0.684366 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,376 | mypy_extensions.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/mypy_extensions.pyi | import abc
import sys
from typing import Any, Callable, Dict, Generic, ItemsView, KeysView, Mapping, Optional, Type, TypeVar, Union, ValuesView
_T = TypeVar("_T")
_U = TypeVar("_U")
# Internal mypy fallback type for all typed dicts (does not exist at runtime)
class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
def copy(self: _T) -> _T: ...
# Using NoReturn so that only calls using mypy plugin hook that specialize the signature
# can go through.
def setdefault(self, k: NoReturn, default: object) -> object: ...
# Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
def pop(self, k: NoReturn, default: _T = ...) -> object: ...
def update(self: _T, __m: _T) -> None: ...
if sys.version_info < (3, 0):
def has_key(self, k: str) -> bool: ...
def viewitems(self) -> ItemsView[str, object]: ...
def viewkeys(self) -> KeysView[str]: ...
def viewvalues(self) -> ValuesView[object]: ...
else:
def items(self) -> ItemsView[str, object]: ...
def keys(self) -> KeysView[str]: ...
def values(self) -> ValuesView[object]: ...
def __delitem__(self, k: NoReturn) -> None: ...
def TypedDict(typename: str, fields: Dict[str, Type[_T]], total: bool = ...) -> Type[Dict[str, Any]]: ...
def Arg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def DefaultArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def NamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def DefaultNamedArg(type: _T = ..., name: Optional[str] = ...) -> _T: ...
def VarArg(type: _T = ...) -> _T: ...
def KwArg(type: _T = ...) -> _T: ...
# Return type that indicates a function does not return.
# This type is equivalent to the None type, but the no-op Union is necessary to
# distinguish the None type from the None value.
NoReturn = Union[None] # Deprecated: Use typing.NoReturn instead.
# This is intended as a class decorator, but mypy rejects abstract classes
# when a Type[_T] is expected, so we can't give it the type we want
def trait(cls: Any) -> Any: ...
def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ...
class FlexibleAlias(Generic[_T, _U]): ...
| 2,198 | Python | .py | 40 | 51.4 | 121 | 0.629182 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,377 | termcolor.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/termcolor.pyi | from typing import Any, Iterable, Optional, Text
def colored(
text: Text, color: Optional[Text] = ..., on_color: Optional[Text] = ..., attrs: Optional[Iterable[Text]] = ...
) -> Text: ...
def cprint(
text: Text, color: Optional[Text] = ..., on_color: Optional[Text] = ..., attrs: Optional[Iterable[Text]] = ..., **kwargs: Any
) -> None: ...
| 350 | Python | .py | 7 | 47.714286 | 129 | 0.622807 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,378 | first.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/first.pyi | from typing import Any, Callable, Iterable, Optional, TypeVar, Union, overload
_T = TypeVar("_T")
_S = TypeVar("_S")
@overload
def first(iterable: Iterable[_T]) -> Optional[_T]: ...
@overload
def first(iterable: Iterable[_T], default: _S) -> Union[_T, _S]: ...
@overload
def first(iterable: Iterable[_T], default: _S, key: Optional[Callable[[_T], Any]]) -> Union[_T, _S]: ...
@overload
def first(iterable: Iterable[_T], *, key: Optional[Callable[[_T], Any]]) -> Optional[_T]: ...
| 481 | Python | .py | 11 | 42.636364 | 104 | 0.654584 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,379 | ujson.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/ujson.pyi | from typing import IO, Any, AnyStr
__version__: str
def encode(
obj: Any,
ensure_ascii: bool = ...,
double_precision: int = ...,
encode_html_chars: bool = ...,
escape_forward_slashes: bool = ...,
sort_keys: bool = ...,
indent: int = ...,
) -> str: ...
def dumps(
obj: Any,
ensure_ascii: bool = ...,
double_precision: int = ...,
encode_html_chars: bool = ...,
escape_forward_slashes: bool = ...,
sort_keys: bool = ...,
indent: int = ...,
) -> str: ...
def dump(
obj: Any,
fp: IO[str],
ensure_ascii: bool = ...,
double_precision: int = ...,
encode_html_chars: bool = ...,
escape_forward_slashes: bool = ...,
sort_keys: bool = ...,
indent: int = ...,
) -> None: ...
def decode(s: AnyStr, precise_float: bool = ...) -> Any: ...
def loads(s: AnyStr, precise_float: bool = ...) -> Any: ...
def load(fp: IO[AnyStr], precise_float: bool = ...) -> Any: ...
| 938 | Python | .py | 33 | 24.69697 | 63 | 0.54485 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,380 | typing_extensions.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/typing_extensions.pyi | import abc
import sys
from typing import (
TYPE_CHECKING as TYPE_CHECKING,
Any,
Callable,
ClassVar as ClassVar,
ContextManager as ContextManager,
Counter as Counter,
DefaultDict as DefaultDict,
Deque as Deque,
Dict,
ItemsView,
KeysView,
Mapping,
NewType as NewType,
NoReturn as NoReturn,
Optional,
Text as Text,
Tuple,
Type as Type,
TypeVar,
ValuesView,
overload as overload,
)
_T = TypeVar("_T")
_F = TypeVar("_F", bound=Callable[..., Any])
_TC = TypeVar("_TC", bound=Type[object])
class _SpecialForm:
def __getitem__(self, typeargs: Any) -> Any: ...
def runtime_checkable(cls: _TC) -> _TC: ...
# This alias for above is kept here for backwards compatibility.
runtime = runtime_checkable
Protocol: _SpecialForm = ...
Final: _SpecialForm = ...
def final(f: _F) -> _F: ...
Literal: _SpecialForm = ...
def IntVar(__name: str) -> Any: ... # returns a new TypeVar
# Internal mypy fallback type for all typed dicts (does not exist at runtime)
class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta):
def copy(self: _T) -> _T: ...
# Using NoReturn so that only calls using mypy plugin hook that specialize the signature
# can go through.
def setdefault(self, k: NoReturn, default: object) -> object: ...
# Mypy plugin hook for 'pop' expects that 'default' has a type variable type.
def pop(self, k: NoReturn, default: _T = ...) -> object: ...
def update(self: _T, __m: _T) -> None: ...
if sys.version_info < (3, 0):
def has_key(self, k: str) -> bool: ...
def viewitems(self) -> ItemsView[str, object]: ...
def viewkeys(self) -> KeysView[str]: ...
def viewvalues(self) -> ValuesView[object]: ...
else:
def items(self) -> ItemsView[str, object]: ...
def keys(self) -> KeysView[str]: ...
def values(self) -> ValuesView[object]: ...
def __delitem__(self, k: NoReturn) -> None: ...
# TypedDict is a (non-subscriptable) special form.
TypedDict: object = ...
if sys.version_info >= (3, 3):
from typing import ChainMap as ChainMap
if sys.version_info >= (3, 5):
from typing import (
AsyncContextManager as AsyncContextManager,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
Coroutine as Coroutine,
)
if sys.version_info >= (3, 6):
from typing import AsyncGenerator as AsyncGenerator
def get_type_hints(
obj: Callable[..., Any],
globalns: Optional[Dict[str, Any]] = ...,
localns: Optional[Dict[str, Any]] = ...,
include_extras: bool = ...,
) -> Dict[str, Any]: ...
if sys.version_info >= (3, 7):
def get_args(tp: Any) -> Tuple[Any, ...]: ...
def get_origin(tp: Any) -> Optional[Any]: ...
Annotated: _SpecialForm = ...
_AnnotatedAlias: Any = ... # undocumented
# TypeAlias is a (non-subscriptable) special form.
class TypeAlias: ...
@runtime_checkable
class SupportsIndex(Protocol, metaclass=abc.ABCMeta):
@abc.abstractmethod
def __index__(self) -> int: ...
# PEP 612 support for Python < 3.9
if sys.version_info >= (3, 10):
from typing import Concatenate as Concatenate, ParamSpec as ParamSpec
else:
class ParamSpec:
__name__: str
def __init__(self, name: str) -> None: ...
Concatenate: _SpecialForm = ...
| 3,358 | Python | .py | 96 | 30.666667 | 92 | 0.646732 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,381 | mock.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/mock.pyi | import sys
from typing import Any, Callable, Generic, List, Mapping, Optional, Sequence, Text, Tuple, Type, TypeVar, Union, overload
_F = TypeVar("_F", bound=Callable[..., Any])
_T = TypeVar("_T")
_TT = TypeVar("_TT", bound=Type[Any])
_R = TypeVar("_R")
__all__ = [
"Mock",
"MagicMock",
"patch",
"sentinel",
"DEFAULT",
"ANY",
"call",
"create_autospec",
"AsyncMock",
"FILTER_DIR",
"NonCallableMock",
"NonCallableMagicMock",
"mock_open",
"PropertyMock",
"seal",
]
__version__: str
FILTER_DIR: Any
class _slotted: ...
class _SentinelObject:
name: Any
def __init__(self, name: Any) -> None: ...
class _Sentinel:
def __init__(self) -> None: ...
def __getattr__(self, name: str) -> Any: ...
sentinel: Any
DEFAULT: Any
class _Call(Tuple[Any, ...]):
def __new__(
cls, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...
) -> Any: ...
name: Any
parent: Any
from_kall: Any
def __init__(
self, value: Any = ..., name: Optional[Any] = ..., parent: Optional[Any] = ..., two: bool = ..., from_kall: bool = ...
) -> None: ...
def __eq__(self, other: Any) -> bool: ...
__ne__: Any
def __call__(self, *args: Any, **kwargs: Any) -> _Call: ...
def __getattr__(self, attr: Any) -> Any: ...
def count(self, *args: Any, **kwargs: Any) -> Any: ...
def index(self, *args: Any, **kwargs: Any) -> Any: ...
def call_list(self) -> Any: ...
call: _Call
class _CallList(List[_Call]):
def __contains__(self, value: Any) -> bool: ...
class _MockIter:
obj: Any
def __init__(self, obj: Any) -> None: ...
def __iter__(self) -> Any: ...
def __next__(self) -> Any: ...
class Base:
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
class NonCallableMock(Base, Any): # type: ignore
def __new__(__cls, *args: Any, **kw: Any) -> NonCallableMock: ...
def __init__(
self,
spec: Union[List[str], object, Type[object], None] = ...,
wraps: Optional[Any] = ...,
name: Optional[str] = ...,
spec_set: Union[List[str], object, Type[object], None] = ...,
parent: Optional[NonCallableMock] = ...,
_spec_state: Optional[Any] = ...,
_new_name: str = ...,
_new_parent: Optional[NonCallableMock] = ...,
_spec_as_instance: bool = ...,
_eat_self: Optional[bool] = ...,
unsafe: bool = ...,
**kwargs: Any,
) -> None: ...
def __getattr__(self, name: str) -> Any: ...
if sys.version_info >= (3, 8):
def _calls_repr(self, prefix: str = ...) -> str: ...
def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ...
def assert_not_called(self) -> None: ...
def assert_called_once_with(self, *args: Any, **kwargs: Any) -> None: ...
def _format_mock_failure_message(self, args: Any, kwargs: Any, action: str = ...) -> str: ...
elif sys.version_info >= (3, 5):
def assert_called_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
def assert_not_called(_mock_self) -> None: ...
def assert_called_once_with(_mock_self, *args: Any, **kwargs: Any) -> None: ...
def _format_mock_failure_message(self, args: Any, kwargs: Any) -> str: ...
if sys.version_info >= (3, 8):
def assert_called(self) -> None: ...
def assert_called_once(self) -> None: ...
elif sys.version_info >= (3, 6):
def assert_called(_mock_self) -> None: ...
def assert_called_once(_mock_self) -> None: ...
if sys.version_info >= (3, 6):
def reset_mock(self, visited: Any = ..., *, return_value: bool = ..., side_effect: bool = ...) -> None: ...
elif sys.version_info >= (3, 5):
def reset_mock(self, visited: Any = ...) -> None: ...
if sys.version_info >= (3, 7):
def _extract_mock_name(self) -> str: ...
def _get_call_signature_from_name(self, name: str) -> Any: ...
def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ...
def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = ...) -> None: ...
def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ...
def _mock_add_spec(self, spec: Any, spec_set: bool, _spec_as_instance: bool = ..., _eat_self: bool = ...) -> None: ...
def attach_mock(self, mock: NonCallableMock, attribute: str) -> None: ...
def configure_mock(self, **kwargs: Any) -> None: ...
return_value: Any
side_effect: Any
called: bool
call_count: int
call_args: Any
call_args_list: _CallList
mock_calls: _CallList
def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ...
def _call_matcher(self, _call: Tuple[_Call, ...]) -> _Call: ...
def _get_child_mock(self, **kw: Any) -> NonCallableMock: ...
class CallableMixin(Base):
side_effect: Any
def __init__(
self,
spec: Optional[Any] = ...,
side_effect: Optional[Any] = ...,
return_value: Any = ...,
wraps: Optional[Any] = ...,
name: Optional[Any] = ...,
spec_set: Optional[Any] = ...,
parent: Optional[Any] = ...,
_spec_state: Optional[Any] = ...,
_new_name: Any = ...,
_new_parent: Optional[Any] = ...,
**kwargs: Any,
) -> None: ...
def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ...
class Mock(CallableMixin, NonCallableMock): ...
class _patch(Generic[_T]):
attribute_name: Any
getter: Callable[[], Any]
attribute: str
new: _T
new_callable: Any
spec: Any
create: bool
has_local: Any
spec_set: Any
autospec: Any
kwargs: Mapping[str, Any]
additional_patchers: Any
if sys.version_info >= (3, 8):
@overload
def __init__(
self: _patch[Union[MagicMock, AsyncMock]],
getter: Callable[[], Any],
attribute: str,
*,
spec: Optional[Any],
create: bool,
spec_set: Optional[Any],
autospec: Optional[Any],
new_callable: Optional[Any],
kwargs: Mapping[str, Any],
) -> None: ...
# This overload also covers the case, where new==DEFAULT. In this case, self is _patch[Any].
# Ideally we'd be able to add an overload for it so that self is _patch[MagicMock],
# but that's impossible with the current type system.
@overload
def __init__(
self: _patch[_T],
getter: Callable[[], Any],
attribute: str,
new: _T,
spec: Optional[Any],
create: bool,
spec_set: Optional[Any],
autospec: Optional[Any],
new_callable: Optional[Any],
kwargs: Mapping[str, Any],
) -> None: ...
else:
@overload
def __init__(
self: _patch[MagicMock],
getter: Callable[[], Any],
attribute: str,
*,
spec: Optional[Any],
create: bool,
spec_set: Optional[Any],
autospec: Optional[Any],
new_callable: Optional[Any],
kwargs: Mapping[str, Any],
) -> None: ...
@overload
def __init__(
self: _patch[_T],
getter: Callable[[], Any],
attribute: str,
new: _T,
spec: Optional[Any],
create: bool,
spec_set: Optional[Any],
autospec: Optional[Any],
new_callable: Optional[Any],
kwargs: Mapping[str, Any],
) -> None: ...
def copy(self) -> _patch[_T]: ...
def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ...
def decorate_class(self, klass: _TT) -> _TT: ...
def decorate_callable(self, func: _F) -> _F: ...
def get_original(self) -> Tuple[Any, bool]: ...
target: Any
temp_original: Any
is_local: bool
def __enter__(self) -> _T: ...
def __exit__(self, *exc_info: Any) -> None: ...
def start(self) -> _T: ...
def stop(self) -> None: ...
class _patch_dict:
in_dict: Any
values: Any
clear: Any
def __init__(self, in_dict: Any, values: Any = ..., clear: Any = ..., **kwargs: Any) -> None: ...
def __call__(self, f: Any) -> Any: ...
def decorate_class(self, klass: Any) -> Any: ...
def __enter__(self) -> Any: ...
def __exit__(self, *args: Any) -> Any: ...
start: Any
stop: Any
class _patcher:
TEST_PREFIX: str
dict: Type[_patch_dict]
if sys.version_info >= (3, 8):
@overload
def __call__( # type: ignore
self,
target: Any,
*,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[Union[MagicMock, AsyncMock]]: ...
# This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any].
# Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock],
# but that's impossible with the current type system.
@overload
def __call__(
self,
target: Any,
new: _T,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[_T]: ...
else:
@overload
def __call__( # type: ignore
self,
target: Any,
*,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[MagicMock]: ...
@overload
def __call__(
self,
target: Any,
new: _T,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[_T]: ...
if sys.version_info >= (3, 8):
@overload
def object( # type: ignore
self,
target: Any,
attribute: Text,
*,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[Union[MagicMock, AsyncMock]]: ...
@overload
def object(
self,
target: Any,
attribute: Text,
new: _T = ...,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[_T]: ...
else:
@overload
def object( # type: ignore
self,
target: Any,
attribute: Text,
*,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[MagicMock]: ...
@overload
def object(
self,
target: Any,
attribute: Text,
new: _T = ...,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: Any,
) -> _patch[_T]: ...
def multiple(
self,
target: Any,
spec: Optional[Any] = ...,
create: bool = ...,
spec_set: Optional[Any] = ...,
autospec: Optional[Any] = ...,
new_callable: Optional[Any] = ...,
**kwargs: _T,
) -> _patch[_T]: ...
def stopall(self) -> None: ...
patch: _patcher
class MagicMixin:
def __init__(self, *args: Any, **kw: Any) -> None: ...
class NonCallableMagicMock(MagicMixin, NonCallableMock):
def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ...
class MagicMock(MagicMixin, Mock):
def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ...
if sys.version_info >= (3, 8):
class AsyncMockMixin(Base):
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ...
def assert_awaited(self) -> None: ...
def assert_awaited_once(self) -> None: ...
def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ...
def assert_awaited_once_with(self, *args: Any, **kwargs: Any) -> None: ...
def assert_any_await(self, *args: Any, **kwargs: Any) -> None: ...
def assert_has_awaits(self, calls: _CallList, any_order: bool = ...) -> None: ...
def assert_not_awaited(self) -> None: ...
def reset_mock(self, *args, **kwargs) -> None: ...
await_count: int
await_args: Optional[_Call]
await_args_list: _CallList
class AsyncMagicMixin(MagicMixin):
def __init__(self, *args: Any, **kw: Any) -> None: ...
class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ...
class MagicProxy:
name: Any
parent: Any
def __init__(self, name: Any, parent: Any) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
def create_mock(self) -> Any: ...
def __get__(self, obj: Any, _type: Optional[Any] = ...) -> Any: ...
class _ANY:
def __eq__(self, other: Any) -> bool: ...
def __ne__(self, other: Any) -> bool: ...
ANY: Any
def create_autospec(
spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Optional[Any] = ..., _name: Optional[Any] = ..., **kwargs: Any
) -> Any: ...
class _SpecState:
spec: Any
ids: Any
spec_set: Any
parent: Any
instance: Any
name: Any
def __init__(
self,
spec: Any,
spec_set: Any = ...,
parent: Optional[Any] = ...,
name: Optional[Any] = ...,
ids: Optional[Any] = ...,
instance: Any = ...,
) -> None: ...
def mock_open(mock: Optional[Any] = ..., read_data: Any = ...) -> Any: ...
PropertyMock = Any
if sys.version_info >= (3, 7):
def seal(mock: Any) -> None: ...
| 14,819 | Python | .py | 410 | 28.073171 | 128 | 0.508833 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,382 | gflags.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/gflags.pyi | from types import ModuleType
from typing import IO, Any, Callable, Dict, Iterable, List, Optional, Sequence, Text, Union
class Error(Exception): ...
FlagsError = Error
class DuplicateFlag(FlagsError): ...
class CantOpenFlagFileError(FlagsError): ...
class DuplicateFlagCannotPropagateNoneToSwig(DuplicateFlag): ...
class DuplicateFlagError(DuplicateFlag):
def __init__(self, flagname: str, flag_values: FlagValues, other_flag_values: FlagValues = ...) -> None: ...
class IllegalFlagValueError(FlagsError): ...
IllegalFlagValue = IllegalFlagValueError
class UnrecognizedFlag(FlagsError): ...
class UnrecognizedFlagError(UnrecognizedFlag):
def __init__(self, flagname: str, flagvalue: str = ...) -> None: ...
def get_help_width() -> int: ...
GetHelpWidth = get_help_width
def text_wrap(text: str, length: int = ..., indent: str = ..., firstline_indent: str = ..., tabs: str = ...) -> str: ...
TextWrap = text_wrap
def doc_to_help(doc: str) -> str: ...
DocToHelp = doc_to_help
class FlagValues:
def __init__(self) -> None: ...
def UseGnuGetOpt(self, use_gnu_getopt: bool = ...) -> None: ...
def is_gnu_getopt(self) -> bool: ...
IsGnuGetOpt = is_gnu_getopt
# TODO dict type
def FlagDict(self) -> Dict[Any, Any]: ...
def flags_by_module_dict(self) -> Dict[str, List[Flag]]: ...
FlagsByModuleDict = flags_by_module_dict
def flags_by_module_id_dict(self) -> Dict[int, List[Flag]]: ...
FlagsByModuleIdDict = flags_by_module_id_dict
def key_flags_by_module_dict(self) -> Dict[str, List[Flag]]: ...
KeyFlagsByModuleDict = key_flags_by_module_dict
def find_module_defining_flag(self, flagname: str, default: str = ...) -> str: ...
FindModuleDefiningFlag = find_module_defining_flag
def find_module_id_defining_flag(self, flagname: str, default: int = ...) -> int: ...
FindModuleIdDefiningFlag = find_module_id_defining_flag
def append_flag_values(self, flag_values: FlagValues) -> None: ...
AppendFlagValues = append_flag_values
def remove_flag_values(self, flag_values: FlagValues) -> None: ...
RemoveFlagValues = remove_flag_values
def __setitem__(self, name: str, flag: Flag) -> None: ...
def __getitem__(self, name: str) -> Flag: ...
def __getattr__(self, name: str) -> Any: ...
def __setattr__(self, name: str, value: Any) -> None: ...
def __delattr__(self, flag_name: str) -> None: ...
def set_default(self, name: str, value: Any) -> None: ...
SetDefault = set_default
def __contains__(self, name: str) -> bool: ...
has_key = __contains__
def __iter__(self) -> Iterable[str]: ...
def __call__(self, argv: List[str], known_only: bool = ...) -> List[str]: ...
def reset(self) -> None: ...
Reset = reset
def RegisteredFlags(self) -> List[str]: ...
def flag_values_dict(self) -> Dict[str, Any]: ...
FlagValuesDict = flag_values_dict
def __str__(self) -> str: ...
def GetHelp(self, prefix: str = ...) -> str: ...
def module_help(self, module: Union[ModuleType, str]) -> str: ...
ModuleHelp = module_help
def main_module_help(self) -> str: ...
MainModuleHelp = main_module_help
def get(self, name: str, default: Any) -> Any: ...
def ShortestUniquePrefixes(self, fl: Dict[str, Flag]) -> Dict[str, str]: ...
def ExtractFilename(self, flagfile_str: str) -> str: ...
def read_flags_from_files(self, argv: List[str], force_gnu: bool = ...) -> List[str]: ...
ReadFlagsFromFiles = read_flags_from_files
def flags_into_string(self) -> str: ...
FlagsIntoString = flags_into_string
def append_flags_into_file(self, filename: str) -> None: ...
AppendFlagsIntoFile = append_flags_into_file
def write_help_in_xml_format(self, outfile: IO[str] = ...) -> None: ...
WriteHelpInXMLFormat = write_help_in_xml_format
# TODO validator: gflags_validators.Validator
def AddValidator(self, validator: Any) -> None: ...
def is_parsed(self) -> bool: ...
IsParsed = is_parsed
FLAGS: FlagValues
class Flag:
name: str
default: Any
default_as_str: str
value: Any
help: str
short_name: str
boolean = False
present = False
parser: ArgumentParser
serializer: ArgumentSerializer
allow_override = False
def __init__(
self,
parser: ArgumentParser,
serializer: ArgumentSerializer,
name: str,
default: Optional[str],
help_string: str,
short_name: str = ...,
boolean: bool = ...,
allow_override: bool = ...,
) -> None: ...
def Parse(self, argument: Any) -> Any: ...
def Unparse(self) -> None: ...
def Serialize(self) -> str: ...
def SetDefault(self, value: Any) -> None: ...
def Type(self) -> str: ...
def WriteInfoInXMLFormat(self, outfile: IO[str], module_name: str, is_key: bool = ..., indent: str = ...) -> None: ...
class ArgumentParser(object):
syntactic_help: str
# TODO what is this
def parse(self, argument: Any) -> Any: ...
Parser = parse
def flag_type(self) -> str: ...
Type = flag_type
def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ...
class ArgumentSerializer:
def Serialize(self, value: Any) -> Text: ...
class ListSerializer(ArgumentSerializer):
def __init__(self, list_sep: str) -> None: ...
def Serialize(self, value: List[Any]) -> str: ...
def register_validator(
flag_name: str, checker: Callable[[Any], bool], message: str = ..., flag_values: FlagValues = ...
) -> None: ...
RegisterValidator = register_validator
def mark_flag_as_required(flag_name: str, flag_values: FlagValues = ...) -> None: ...
MarkFlagAsRequired = mark_flag_as_required
def mark_flags_as_required(flag_names: Iterable[str], flag_values: FlagValues = ...) -> None: ...
MarkFlagsAsRequired = mark_flags_as_required
def mark_flags_as_mutual_exclusive(flag_names: Iterable[str], required: bool = ..., flag_values: FlagValues = ...) -> None: ...
MarkFlagsAsMutualExclusive = mark_flags_as_mutual_exclusive
def DEFINE(
parser: ArgumentParser,
name: str,
default: Any,
help: str,
flag_values: FlagValues = ...,
serializer: ArgumentSerializer = ...,
**args: Any,
) -> None: ...
def DEFINE_flag(flag: Flag, flag_values: FlagValues = ...) -> None: ...
def declare_key_flag(flag_name: str, flag_values: FlagValues = ...) -> None: ...
DECLARE_key_flag = declare_key_flag
def adopt_module_key_flags(module: ModuleType, flag_values: FlagValues = ...) -> None: ...
ADOPT_module_key_flags = adopt_module_key_flags
def DEFINE_string(name: str, default: Optional[str], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
class BooleanParser(ArgumentParser):
def Convert(self, argument: Any) -> bool: ...
def Parse(self, argument: Any) -> bool: ...
class BooleanFlag(Flag):
def __init__(self, name: str, default: Optional[bool], help: str, short_name: str = ..., **args: Any) -> None: ...
def DEFINE_boolean(name: str, default: Optional[bool], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
DEFINE_bool = DEFINE_boolean
class HelpFlag(BooleanFlag):
def __init__(self) -> None: ...
def Parse(self, arg: Any) -> None: ...
class HelpXMLFlag(BooleanFlag):
def __init__(self) -> None: ...
def Parse(self, arg: Any) -> None: ...
class HelpshortFlag(BooleanFlag):
def __init__(self) -> None: ...
def Parse(self, arg: Any) -> None: ...
class NumericParser(ArgumentParser):
def IsOutsideBounds(self, val: float) -> bool: ...
def Parse(self, argument: Any) -> float: ...
def WriteCustomInfoInXMLFormat(self, outfile: IO[str], indent: str) -> None: ...
def Convert(self, argument: Any) -> Any: ...
class FloatParser(NumericParser):
number_article: str
number_name: str
syntactic_help: str
def __init__(self, lower_bound: float = ..., upper_bound: float = ...) -> None: ...
def Convert(self, argument: Any) -> float: ...
def DEFINE_float(
name: str,
default: Optional[float],
help: str,
lower_bound: float = ...,
upper_bound: float = ...,
flag_values: FlagValues = ...,
**args: Any,
) -> None: ...
class IntegerParser(NumericParser):
number_article: str
number_name: str
syntactic_help: str
def __init__(self, lower_bound: int = ..., upper_bound: int = ...) -> None: ...
def Convert(self, argument: Any) -> int: ...
def DEFINE_integer(
name: str,
default: Optional[int],
help: str,
lower_bound: int = ...,
upper_bound: int = ...,
flag_values: FlagValues = ...,
**args: Any,
) -> None: ...
class EnumParser(ArgumentParser):
def __init__(self, enum_values: List[str]) -> None: ...
def Parse(self, argument: Any) -> Any: ...
class EnumFlag(Flag):
def __init__(
self, name: str, default: Optional[str], help: str, enum_values: List[str], short_name: str, **args: Any
) -> None: ...
def DEFINE_enum(
name: str, default: Optional[str], enum_values: Iterable[str], help: str, flag_values: FlagValues = ..., **args: Any
) -> None: ...
class BaseListParser(ArgumentParser):
def __init__(self, token: str = ..., name: str = ...) -> None: ...
def Parse(self, argument: Any) -> List[Any]: ...
class ListParser(BaseListParser):
def __init__(self) -> None: ...
class WhitespaceSeparatedListParser(BaseListParser):
def __init__(self) -> None: ...
def DEFINE_list(name: str, default: Optional[List[str]], help: str, flag_values: FlagValues = ..., **args: Any) -> None: ...
def DEFINE_spaceseplist(
name: str, default: Optional[List[str]], help: str, flag_values: FlagValues = ..., **args: Any
) -> None: ...
class MultiFlag(Flag):
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
def Parse(self, arguments: Any) -> None: ...
def Serialize(self) -> str: ...
def DEFINE_multi_string(
name: str, default: Optional[Union[str, List[str]]], help: str, flag_values: FlagValues = ..., **args: Any
) -> None: ...
DEFINE_multistring = DEFINE_multi_string
def DEFINE_multi_integer(
name: str,
default: Optional[Union[int, List[int]]],
help: str,
lower_bound: int = ...,
upper_bound: int = ...,
flag_values: FlagValues = ...,
**args: Any,
) -> None: ...
DEFINE_multi_int = DEFINE_multi_integer
def DEFINE_multi_float(
name: str,
default: Optional[Union[float, List[float]]],
help: str,
lower_bound: float = ...,
upper_bound: float = ...,
flag_values: FlagValues = ...,
**args: Any,
) -> None: ...
def DEFINE_multi_enum(
name: str,
default: Optional[Union[Sequence[str], str]],
enum_values: Sequence[str],
help: str,
flag_values: FlagValues = ...,
case_sensitive: bool = ...,
**args: Any,
) -> None: ...
| 10,776 | Python | .py | 254 | 38.23622 | 127 | 0.63943 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,383 | itsdangerous.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/itsdangerous.pyi | from datetime import datetime
from typing import IO, Any, Callable, Generator, Mapping, MutableMapping, Optional, Text, Tuple, Union
_serializer = Any # must be an object that has "dumps" and "loads" attributes (e.g. the json module)
def want_bytes(s: Union[Text, bytes], encoding: Text = ..., errors: Text = ...) -> bytes: ...
class BadData(Exception):
message: str
def __init__(self, message: str) -> None: ...
class BadPayload(BadData):
original_error: Optional[Exception]
def __init__(self, message: str, original_error: Optional[Exception] = ...) -> None: ...
class BadSignature(BadData):
payload: Optional[Any]
def __init__(self, message: str, payload: Optional[Any] = ...) -> None: ...
class BadTimeSignature(BadSignature):
date_signed: Optional[int]
def __init__(self, message: str, payload: Optional[Any] = ..., date_signed: Optional[int] = ...) -> None: ...
class BadHeader(BadSignature):
header: Any
original_error: Any
def __init__(
self, message: str, payload: Optional[Any] = ..., header: Optional[Any] = ..., original_error: Optional[Any] = ...
) -> None: ...
class SignatureExpired(BadTimeSignature): ...
def base64_encode(string: Union[Text, bytes]) -> bytes: ...
def base64_decode(string: Union[Text, bytes]) -> bytes: ...
class SigningAlgorithm(object):
def get_signature(self, key: bytes, value: bytes) -> bytes: ...
def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: ...
class NoneAlgorithm(SigningAlgorithm):
def get_signature(self, key: bytes, value: bytes) -> bytes: ...
class HMACAlgorithm(SigningAlgorithm):
default_digest_method: Callable[..., Any]
digest_method: Callable[..., Any]
def __init__(self, digest_method: Optional[Callable[..., Any]] = ...) -> None: ...
def get_signature(self, key: bytes, value: bytes) -> bytes: ...
class Signer(object):
default_digest_method: Callable[..., Any] = ...
default_key_derivation: str = ...
secret_key: bytes
sep: bytes
salt: Union[Text, bytes]
key_derivation: str
digest_method: Callable[..., Any]
algorithm: SigningAlgorithm
def __init__(
self,
secret_key: Union[Text, bytes],
salt: Optional[Union[Text, bytes]] = ...,
sep: Optional[Union[Text, bytes]] = ...,
key_derivation: Optional[str] = ...,
digest_method: Optional[Callable[..., Any]] = ...,
algorithm: Optional[SigningAlgorithm] = ...,
) -> None: ...
def derive_key(self) -> bytes: ...
def get_signature(self, value: Union[Text, bytes]) -> bytes: ...
def sign(self, value: Union[Text, bytes]) -> bytes: ...
def verify_signature(self, value: bytes, sig: Union[Text, bytes]) -> bool: ...
def unsign(self, signed_value: Union[Text, bytes]) -> bytes: ...
def validate(self, signed_value: Union[Text, bytes]) -> bool: ...
class TimestampSigner(Signer):
def get_timestamp(self) -> int: ...
def timestamp_to_datetime(self, ts: float) -> datetime: ...
def sign(self, value: Union[Text, bytes]) -> bytes: ...
def unsign(
self, value: Union[Text, bytes], max_age: Optional[int] = ..., return_timestamp: bool = ...
) -> Any: ... # morally -> Union[bytes, Tuple[bytes, datetime]]
def validate(self, signed_value: Union[Text, bytes], max_age: Optional[int] = ...) -> bool: ...
# Stub for the base Serializer: declares dump*/load* pairs plus the
# *_unsafe variants that return a (success, payload) tuple instead of raising.
class Serializer(object):
    # Class-level defaults (values elided with ``...`` in the stub).
    default_serializer: _serializer = ...
    default_signer: Callable[..., Signer] = ...
    # Instance attributes set by __init__.
    secret_key: bytes
    salt: bytes
    serializer: _serializer
    is_text_serializer: bool
    signer: Callable[..., Signer]
    signer_kwargs: MutableMapping[str, Any]
    def __init__(
        self,
        secret_key: Union[Text, bytes],
        salt: Optional[Union[Text, bytes]] = ...,
        serializer: Optional[_serializer] = ...,
        signer: Optional[Callable[..., Signer]] = ...,
        signer_kwargs: Optional[MutableMapping[str, Any]] = ...,
    ) -> None: ...
    def load_payload(self, payload: bytes, serializer: Optional[_serializer] = ...) -> Any: ...
    def dump_payload(self, obj: Any) -> bytes: ...
    def make_signer(self, salt: Optional[Union[Text, bytes]] = ...) -> Signer: ...
    def iter_unsigners(self, salt: Optional[Union[Text, bytes]] = ...) -> Generator[Any, None, None]: ...
    def dumps(self, obj: Any, salt: Optional[Union[Text, bytes]] = ...) -> Any: ... # morally -> Union[str, bytes]
    def dump(self, obj: Any, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...) -> None: ...
    def loads(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ...) -> Any: ...
    def load(self, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...) -> Any: ...
    def loads_unsafe(self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ...) -> Tuple[bool, Optional[Any]]: ...
    def load_unsafe(self, f: IO[Any], salt: Optional[Union[Text, bytes]] = ...) -> Tuple[bool, Optional[Any]]: ...
# Stub: Serializer subclass whose loads/loads_unsafe signatures add
# max_age / return_timestamp parameters.
class TimedSerializer(Serializer):
    def loads(
        self,
        s: Union[Text, bytes],
        salt: Optional[Union[Text, bytes]] = ...,
        max_age: Optional[int] = ...,
        return_timestamp: bool = ...,
    ) -> Any: ... # morally -> Union[Any, Tuple[Any, datetime]]
    def loads_unsafe(
        self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., max_age: Optional[int] = ...
    ) -> Tuple[bool, Any]: ...
# Stub: Serializer subclass with an algorithm-name parameter and
# return_header variants; ``dump_payload`` deliberately changes the base
# signature, hence the ``# type: ignore`` below.
class JSONWebSignatureSerializer(Serializer):
    jws_algorithms: MutableMapping[Text, SigningAlgorithm] = ...
    default_algorithm: Text = ...
    default_serializer: Any = ...
    algorithm_name: Text
    algorithm: SigningAlgorithm
    def __init__(
        self,
        secret_key: Union[Text, bytes],
        salt: Optional[Union[Text, bytes]] = ...,
        serializer: Optional[_serializer] = ...,
        signer: Optional[Callable[..., Signer]] = ...,
        signer_kwargs: Optional[MutableMapping[str, Any]] = ...,
        algorithm_name: Optional[Text] = ...,
    ) -> None: ...
    def load_payload(
        self, payload: Union[Text, bytes], serializer: Optional[_serializer] = ..., return_header: bool = ...
    ) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]]
    def dump_payload(self, header: Mapping[str, Any], obj: Any) -> bytes: ... # type: ignore
    def make_algorithm(self, algorithm_name: Text) -> SigningAlgorithm: ...
    def make_signer(self, salt: Optional[Union[Text, bytes]] = ..., algorithm: SigningAlgorithm = ...) -> Signer: ...
    def make_header(self, header_fields: Optional[Mapping[str, Any]]) -> MutableMapping[str, Any]: ...
    def dumps(
        self, obj: Any, salt: Optional[Union[Text, bytes]] = ..., header_fields: Optional[Mapping[str, Any]] = ...
    ) -> bytes: ...
    def loads(
        self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...
    ) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]]
    def loads_unsafe(
        self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...
    ) -> Tuple[bool, Any]: ...
# Stub: JWS serializer variant whose __init__ adds an ``expires_in`` parameter
# and which exposes issue-date/now accessors.
class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer):
    DEFAULT_EXPIRES_IN: int = ...
    expires_in: int
    def __init__(
        self,
        secret_key: Union[Text, bytes],
        expires_in: Optional[int] = ...,
        salt: Optional[Union[Text, bytes]] = ...,
        serializer: Optional[_serializer] = ...,
        signer: Optional[Callable[..., Signer]] = ...,
        signer_kwargs: Optional[MutableMapping[str, Any]] = ...,
        algorithm_name: Optional[Text] = ...,
    ) -> None: ...
    def make_header(self, header_fields: Optional[Mapping[str, Any]]) -> MutableMapping[str, Any]: ...
    def loads(
        self, s: Union[Text, bytes], salt: Optional[Union[Text, bytes]] = ..., return_header: bool = ...
    ) -> Any: ... # morally -> Union[Any, Tuple[Any, MutableMapping[str, Any]]]
    def get_issue_date(self, header: Mapping[str, Any]) -> Optional[datetime]: ...
    def now(self) -> int: ...
# Stub mixin: overrides the payload hooks and default serializer for the
# URL-safe serializer classes declared below it.
class _URLSafeSerializerMixin(object):
    default_serializer: _serializer = ...
    def load_payload(self, payload: bytes, serializer: Optional[_serializer] = ...) -> Any: ...
    def dump_payload(self, obj: Any) -> bytes: ...
# Concrete URL-safe serializers: the mixin combined with the plain and timed bases.
class URLSafeSerializer(_URLSafeSerializerMixin, Serializer): ...
class URLSafeTimedSerializer(_URLSafeSerializerMixin, TimedSerializer): ...
| 8,405 | Python | .py | 161 | 46.639752 | 126 | 0.621701 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,384 | dateparser.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/dateparser.pyi | import datetime
from typing import Any, List, Mapping, Optional, Set, Tuple, Union
# Stub for dateparser's public surface: the single ``parse`` entry point plus a
# module-level ``__getattr__`` marking the rest of the module as untyped.
__version__: str
def parse(
    date_string: str,
    date_formats: Optional[Union[List[str], Tuple[str], Set[str]]] = ...,
    languages: Optional[Union[List[str], Tuple[str], Set[str]]] = ...,
    locales: Optional[Union[List[str], Tuple[str], Set[str]]] = ...,
    region: Optional[str] = ...,
    settings: Optional[Mapping[str, Any]] = ...,
) -> Optional[datetime.datetime]: ...
def __getattr__(name: str) -> Any: ... # incomplete
| 522 | Python | .py | 12 | 40.333333 | 73 | 0.639764 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,385 | pycurl.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/pycurl.pyi | # TODO(MichalPokorny): more precise types
from typing import Any, List, Text, Tuple
# pycurl stub: GLOBAL_* flag constants and the module-level init/cleanup
# and version functions.
GLOBAL_ACK_EINTR: int
GLOBAL_ALL: int
GLOBAL_DEFAULT: int
GLOBAL_NOTHING: int
GLOBAL_SSL: int
GLOBAL_WIN32: int
def global_init(option: int) -> None: ...
def global_cleanup() -> None: ...
version: str
def version_info() -> Tuple[int, str, int, str, int, str, int, str, Tuple[str, ...], Any, int, Any]: ...
class error(Exception): ...
# Stub for the easy-handle object; setopt/getinfo are typed loosely (Any)
# because the value type depends on the option constant passed.
class Curl(object):
    def close(self) -> None: ...
    def setopt(self, option: int, value: Any) -> None: ...
    def setopt_string(self, option: int, value: str) -> None: ...
    def perform(self) -> None: ...
    def perform_rb(self) -> bytes: ...
    def perform_rs(self) -> Text: ...
    def getinfo(self, info: Any) -> Any: ...
    def getinfo_raw(self, info: Any) -> Any: ...
    def reset(self) -> None: ...
    def unsetopt(self, option: int) -> Any: ...
    def pause(self, bitmask: Any) -> Any: ...
    def errstr(self) -> str: ...
    # TODO(MichalPokorny): wat?
    USERPWD: int
# Stub for the multi-handle interface (drives several Curl handles at once).
class CurlMulti(object):
    def close(self) -> None: ...
    def add_handle(self, obj: Curl) -> None: ...
    def remove_handle(self, obj: Curl) -> None: ...
    def perform(self) -> Tuple[Any, int]: ...
    def fdset(self) -> Tuple[List[Any], List[Any], List[Any]]: ...
    def select(self, timeout: float = ...) -> int: ...
    def info_read(self, max_objects: int = ...) -> Tuple[int, List[Any], List[Any]]: ...
    def socket_action(self, sockfd: int, ev_bitmask: int) -> Tuple[int, int]: ...
# Stub for the share-handle interface (shared data between Curl handles).
class CurlShare(object):
    def close(self) -> None: ...
    def setopt(self, option: int, value: Any) -> Any: ...
ACCEPTTIMEOUT_MS: int
ACCEPT_ENCODING: int
ADDRESS_SCOPE: int
APPCONNECT_TIME: int
APPEND: int
AUTOREFERER: int
BUFFERSIZE: int
CAINFO: int
CAPATH: int
CLOSESOCKETFUNCTION: int
COMPILE_LIBCURL_VERSION_NUM: int
COMPILE_PY_VERSION_HEX: int
CONDITION_UNMET: int
CONNECTTIMEOUT: int
CONNECTTIMEOUT_MS: int
CONNECT_ONLY: int
CONNECT_TIME: int
CONNECT_TO: int
CONTENT_LENGTH_DOWNLOAD: int
CONTENT_LENGTH_UPLOAD: int
CONTENT_TYPE: int
COOKIE: int
COOKIEFILE: int
COOKIEJAR: int
COOKIELIST: int
COOKIESESSION: int
COPYPOSTFIELDS: int
CRLF: int
CRLFILE: int
CSELECT_ERR: int
CSELECT_IN: int
CSELECT_OUT: int
CURL_HTTP_VERSION_1_0: int
CURL_HTTP_VERSION_1_1: int
CURL_HTTP_VERSION_2: int
CURL_HTTP_VERSION_2_0: int
CURL_HTTP_VERSION_LAST: int
CURL_HTTP_VERSION_NONE: int
CUSTOMREQUEST: int
DEBUGFUNCTION: int
DEFAULT_PROTOCOL: int
DIRLISTONLY: int
DNS_CACHE_TIMEOUT: int
DNS_SERVERS: int
DNS_USE_GLOBAL_CACHE: int
EFFECTIVE_URL: int
EGDSOCKET: int
ENCODING: int
EXPECT_100_TIMEOUT_MS: int
FAILONERROR: int
FILE: int
FOLLOWLOCATION: int
FORBID_REUSE: int
FORM_BUFFER: int
FORM_BUFFERPTR: int
FORM_CONTENTS: int
FORM_CONTENTTYPE: int
FORM_FILE: int
FORM_FILENAME: int
FRESH_CONNECT: int
FTPAPPEND: int
FTPAUTH_DEFAULT: int
FTPAUTH_SSL: int
FTPAUTH_TLS: int
FTPLISTONLY: int
FTPMETHOD_DEFAULT: int
FTPMETHOD_MULTICWD: int
FTPMETHOD_NOCWD: int
FTPMETHOD_SINGLECWD: int
FTPPORT: int
FTPSSLAUTH: int
FTPSSL_ALL: int
FTPSSL_CONTROL: int
FTPSSL_NONE: int
FTPSSL_TRY: int
FTP_ACCOUNT: int
FTP_ALTERNATIVE_TO_USER: int
FTP_CREATE_MISSING_DIRS: int
FTP_ENTRY_PATH: int
FTP_FILEMETHOD: int
FTP_RESPONSE_TIMEOUT: int
FTP_SKIP_PASV_IP: int
FTP_SSL: int
FTP_SSL_CCC: int
FTP_USE_EPRT: int
FTP_USE_EPSV: int
FTP_USE_PRET: int
GSSAPI_DELEGATION: int
GSSAPI_DELEGATION_FLAG: int
GSSAPI_DELEGATION_NONE: int
GSSAPI_DELEGATION_POLICY_FLAG: int
HEADER: int
HEADERFUNCTION: int
HEADEROPT: int
HEADER_SEPARATE: int
HEADER_SIZE: int
HEADER_UNIFIED: int
HTTP200ALIASES: int
HTTPAUTH: int
HTTPAUTH_ANY: int
HTTPAUTH_ANYSAFE: int
HTTPAUTH_AVAIL: int
HTTPAUTH_BASIC: int
HTTPAUTH_DIGEST: int
HTTPAUTH_DIGEST_IE: int
HTTPAUTH_GSSNEGOTIATE: int
HTTPAUTH_NEGOTIATE: int
HTTPAUTH_NONE: int
HTTPAUTH_NTLM: int
HTTPAUTH_NTLM_WB: int
HTTPAUTH_ONLY: int
HTTPGET: int
HTTPHEADER: int
HTTPPOST: int
HTTPPROXYTUNNEL: int
HTTP_CODE: int
HTTP_CONNECTCODE: int
HTTP_CONTENT_DECODING: int
HTTP_TRANSFER_DECODING: int
HTTP_VERSION: int
IGNORE_CONTENT_LENGTH: int
INFILE: int
INFILESIZE: int
INFILESIZE_LARGE: int
INFOTYPE_DATA_IN: int
INFOTYPE_DATA_OUT: int
INFOTYPE_HEADER_IN: int
INFOTYPE_HEADER_OUT: int
INFOTYPE_SSL_DATA_IN: int
INFOTYPE_SSL_DATA_OUT: int
INFOTYPE_TEXT: int
INFO_CERTINFO: int
INFO_COOKIELIST: int
INFO_FILETIME: int
INFO_HTTP_VERSION: int
INFO_RTSP_CLIENT_CSEQ: int
INFO_RTSP_CSEQ_RECV: int
INFO_RTSP_SERVER_CSEQ: int
INFO_RTSP_SESSION_ID: int
INTERFACE: int
IOCMD_NOP: int
IOCMD_RESTARTREAD: int
IOCTLFUNCTION: int
IOE_FAILRESTART: int
IOE_OK: int
IOE_UNKNOWNCMD: int
IPRESOLVE: int
IPRESOLVE_V4: int
IPRESOLVE_V6: int
IPRESOLVE_WHATEVER: int
ISSUERCERT: int
KEYPASSWD: int
KHMATCH_MISMATCH: int
KHMATCH_MISSING: int
KHMATCH_OK: int
KHSTAT_DEFER: int
KHSTAT_FINE: int
KHSTAT_FINE_ADD_TO_FILE: int
KHSTAT_REJECT: int
KHTYPE_DSS: int
KHTYPE_RSA: int
KHTYPE_RSA1: int
KHTYPE_UNKNOWN: int
KRB4LEVEL: int
KRBLEVEL: int
LASTSOCKET: int
LOCALPORT: int
LOCALPORTRANGE: int
LOCAL_IP: int
LOCAL_PORT: int
LOCK_DATA_COOKIE: int
LOCK_DATA_DNS: int
LOCK_DATA_SSL_SESSION: int
LOGIN_OPTIONS: int
LOW_SPEED_LIMIT: int
LOW_SPEED_TIME: int
MAIL_AUTH: int
MAIL_FROM: int
MAIL_RCPT: int
MAXCONNECTS: int
MAXFILESIZE: int
MAXFILESIZE_LARGE: int
MAXREDIRS: int
MAX_RECV_SPEED_LARGE: int
MAX_SEND_SPEED_LARGE: int
M_CHUNK_LENGTH_PENALTY_SIZE: int
M_CONTENT_LENGTH_PENALTY_SIZE: int
M_MAXCONNECTS: int
M_MAX_HOST_CONNECTIONS: int
M_MAX_PIPELINE_LENGTH: int
M_MAX_TOTAL_CONNECTIONS: int
M_PIPELINING: int
M_PIPELINING_SERVER_BL: int
M_PIPELINING_SITE_BL: int
M_SOCKETFUNCTION: int
M_TIMERFUNCTION: int
NAMELOOKUP_TIME: int
NETRC: int
NETRC_FILE: int
NETRC_IGNORED: int
NETRC_OPTIONAL: int
NETRC_REQUIRED: int
NEW_DIRECTORY_PERMS: int
NEW_FILE_PERMS: int
NOBODY: int
NOPROGRESS: int
NOPROXY: int
NOSIGNAL: int
NUM_CONNECTS: int
OPENSOCKETFUNCTION: int
OPT_CERTINFO: int
OPT_COOKIELIST: int
OPT_FILETIME: int
OPT_RTSP_CLIENT_CSEQ: int
OPT_RTSP_REQUEST: int
OPT_RTSP_SERVER_CSEQ: int
OPT_RTSP_SESSION_ID: int
OPT_RTSP_STREAM_URI: int
OPT_RTSP_TRANSPORT: int
OS_ERRNO: int
PASSWORD: int
PATH_AS_IS: int
PAUSE_ALL: int
PAUSE_CONT: int
PAUSE_RECV: int
PAUSE_SEND: int
PINNEDPUBLICKEY: int
PIPEWAIT: int
PIPE_MULTIPLEX: int
PIPE_NOTHING: int
POLL_IN: int
POLL_INOUT: int
POLL_NONE: int
POLL_OUT: int
POLL_REMOVE: int
PORT: int
POST: int
POST301: int
POSTFIELDS: int
POSTFIELDSIZE: int
POSTFIELDSIZE_LARGE: int
POSTQUOTE: int
POSTREDIR: int
PREQUOTE: int
PRETRANSFER_TIME: int
PRE_PROXY: int
PRIMARY_IP: int
PRIMARY_PORT: int
PROGRESSFUNCTION: int
PROTOCOLS: int
PROTO_ALL: int
PROTO_DICT: int
PROTO_FILE: int
PROTO_FTP: int
PROTO_FTPS: int
PROTO_GOPHER: int
PROTO_HTTP: int
PROTO_HTTPS: int
PROTO_IMAP: int
PROTO_IMAPS: int
PROTO_LDAP: int
PROTO_LDAPS: int
PROTO_POP3: int
PROTO_POP3S: int
PROTO_RTMP: int
PROTO_RTMPE: int
PROTO_RTMPS: int
PROTO_RTMPT: int
PROTO_RTMPTE: int
PROTO_RTMPTS: int
PROTO_RTSP: int
PROTO_SCP: int
PROTO_SFTP: int
PROTO_SMB: int
PROTO_SMBS: int
PROTO_SMTP: int
PROTO_SMTPS: int
PROTO_TELNET: int
PROTO_TFTP: int
PROXY: int
PROXYAUTH: int
PROXYAUTH_AVAIL: int
PROXYHEADER: int
PROXYPASSWORD: int
PROXYPORT: int
PROXYTYPE: int
PROXYTYPE_HTTP: int
PROXYTYPE_HTTP_1_0: int
PROXYTYPE_SOCKS4: int
PROXYTYPE_SOCKS4A: int
PROXYTYPE_SOCKS5: int
PROXYTYPE_SOCKS5_HOSTNAME: int
PROXYUSERNAME: int
PROXYUSERPWD: int
PROXY_CAINFO: int
PROXY_CAPATH: int
PROXY_SERVICE_NAME: int
PROXY_SSLCERT: int
PROXY_SSLCERTTYPE: int
PROXY_SSLKEY: int
PROXY_SSLKEYTYPE: int
PROXY_SSL_VERIFYHOST: int
PROXY_SSL_VERIFYPEER: int
PROXY_TRANSFER_MODE: int
PUT: int
QUOTE: int
RANDOM_FILE: int
RANGE: int
READDATA: int
READFUNCTION: int
READFUNC_ABORT: int
READFUNC_PAUSE: int
REDIRECT_COUNT: int
REDIRECT_TIME: int
REDIRECT_URL: int
REDIR_POST_301: int
REDIR_POST_302: int
REDIR_POST_303: int
REDIR_POST_ALL: int
REDIR_PROTOCOLS: int
REFERER: int
REQUEST_SIZE: int
RESOLVE: int
RESPONSE_CODE: int
RESUME_FROM: int
RESUME_FROM_LARGE: int
RTSPREQ_ANNOUNCE: int
RTSPREQ_DESCRIBE: int
RTSPREQ_GET_PARAMETER: int
RTSPREQ_LAST: int
RTSPREQ_NONE: int
RTSPREQ_OPTIONS: int
RTSPREQ_PAUSE: int
RTSPREQ_PLAY: int
RTSPREQ_RECEIVE: int
RTSPREQ_RECORD: int
RTSPREQ_SETUP: int
RTSPREQ_SET_PARAMETER: int
RTSPREQ_TEARDOWN: int
SASL_IR: int
SEEKFUNCTION: int
SEEKFUNC_CANTSEEK: int
SEEKFUNC_FAIL: int
SEEKFUNC_OK: int
SERVICE_NAME: int
SHARE: int
SH_SHARE: int
SH_UNSHARE: int
SIZE_DOWNLOAD: int
SIZE_UPLOAD: int
SOCKET_BAD: int
SOCKET_TIMEOUT: int
SOCKOPTFUNCTION: int
SOCKOPT_ALREADY_CONNECTED: int
SOCKOPT_ERROR: int
SOCKOPT_OK: int
SOCKS5_GSSAPI_NEC: int
SOCKS5_GSSAPI_SERVICE: int
SOCKTYPE_ACCEPT: int
SOCKTYPE_IPCXN: int
SPEED_DOWNLOAD: int
SPEED_UPLOAD: int
SSH_AUTH_AGENT: int
SSH_AUTH_ANY: int
SSH_AUTH_DEFAULT: int
SSH_AUTH_HOST: int
SSH_AUTH_KEYBOARD: int
SSH_AUTH_NONE: int
SSH_AUTH_PASSWORD: int
SSH_AUTH_PUBLICKEY: int
SSH_AUTH_TYPES: int
SSH_HOST_PUBLIC_KEY_MD5: int
SSH_KEYFUNCTION: int
SSH_KNOWNHOSTS: int
SSH_PRIVATE_KEYFILE: int
SSH_PUBLIC_KEYFILE: int
SSLCERT: int
SSLCERTPASSWD: int
SSLCERTTYPE: int
SSLENGINE: int
SSLENGINE_DEFAULT: int
SSLKEY: int
SSLKEYPASSWD: int
SSLKEYTYPE: int
SSLOPT_ALLOW_BEAST: int
SSLOPT_NO_REVOKE: int
SSLVERSION: int
SSLVERSION_DEFAULT: int
SSLVERSION_SSLv2: int
SSLVERSION_SSLv3: int
SSLVERSION_TLSv1: int
SSLVERSION_TLSv1_0: int
SSLVERSION_TLSv1_1: int
SSLVERSION_TLSv1_2: int
SSLVERSION_MAX_DEFAULT: int
SSL_CIPHER_LIST: int
SSL_ENABLE_ALPN: int
SSL_ENABLE_NPN: int
SSL_ENGINES: int
SSL_FALSESTART: int
SSL_OPTIONS: int
SSL_SESSIONID_CACHE: int
SSL_VERIFYHOST: int
SSL_VERIFYPEER: int
SSL_VERIFYRESULT: int
SSL_VERIFYSTATUS: int
STARTTRANSFER_TIME: int
STDERR: int
TCP_FASTOPEN: int
TCP_KEEPALIVE: int
TCP_KEEPIDLE: int
TCP_KEEPINTVL: int
TCP_NODELAY: int
TELNETOPTIONS: int
TFTP_BLKSIZE: int
TIMECONDITION: int
TIMECONDITION_IFMODSINCE: int
TIMECONDITION_IFUNMODSINCE: int
TIMECONDITION_LASTMOD: int
TIMECONDITION_NONE: int
TIMEOUT: int
TIMEOUT_MS: int
TIMEVALUE: int
TLSAUTH_PASSWORD: int
TLSAUTH_TYPE: int
TLSAUTH_USERNAME: int
TOTAL_TIME: int
TRANSFERTEXT: int
TRANSFER_ENCODING: int
UNIX_SOCKET_PATH: int
UNRESTRICTED_AUTH: int
UPLOAD: int
URL: int
USERAGENT: int
USERNAME: int
USERPWD: int
USESSL_ALL: int
USESSL_CONTROL: int
USESSL_NONE: int
USESSL_TRY: int
USE_SSL: int
VERBOSE: int
VERSION_ASYNCHDNS: int
VERSION_CONV: int
VERSION_CURLDEBUG: int
VERSION_DEBUG: int
VERSION_FIRST: int
VERSION_GSSAPI: int
VERSION_GSSNEGOTIATE: int
VERSION_HTTP2: int
VERSION_IDN: int
VERSION_IPV6: int
VERSION_KERBEROS4: int
VERSION_KERBEROS5: int
VERSION_LARGEFILE: int
VERSION_LIBZ: int
VERSION_NTLM: int
VERSION_NTLM_WB: int
VERSION_PSL: int
VERSION_SPNEGO: int
VERSION_SSL: int
VERSION_SSPI: int
VERSION_TLSAUTH_SRP: int
VERSION_UNIX_SOCKETS: int
WILDCARDMATCH: int
WRITEDATA: int
WRITEFUNCTION: int
WRITEFUNC_PAUSE: int
WRITEHEADER: int
XFERINFOFUNCTION: int
XOAUTH2_BEARER: int
E_ABORTED_BY_CALLBACK: int
E_AGAIN: int
E_ALREADY_COMPLETE: int
E_BAD_CALLING_ORDER: int
E_BAD_CONTENT_ENCODING: int
E_BAD_DOWNLOAD_RESUME: int
E_BAD_FUNCTION_ARGUMENT: int
E_BAD_PASSWORD_ENTERED: int
E_CALL_MULTI_PERFORM: int
E_CHUNK_FAILED: int
E_CONV_FAILED: int
E_CONV_REQD: int
E_COULDNT_CONNECT: int
E_COULDNT_RESOLVE_HOST: int
E_COULDNT_RESOLVE_PROXY: int
E_FAILED_INIT: int
E_FILESIZE_EXCEEDED: int
E_FILE_COULDNT_READ_FILE: int
E_FTP_ACCEPT_FAILED: int
E_FTP_ACCEPT_TIMEOUT: int
E_FTP_ACCESS_DENIED: int
E_FTP_BAD_DOWNLOAD_RESUME: int
E_FTP_BAD_FILE_LIST: int
E_FTP_CANT_GET_HOST: int
E_FTP_CANT_RECONNECT: int
E_FTP_COULDNT_GET_SIZE: int
E_FTP_COULDNT_RETR_FILE: int
E_FTP_COULDNT_SET_ASCII: int
E_FTP_COULDNT_SET_BINARY: int
E_FTP_COULDNT_SET_TYPE: int
E_FTP_COULDNT_STOR_FILE: int
E_FTP_COULDNT_USE_REST: int
E_FTP_PARTIAL_FILE: int
E_FTP_PORT_FAILED: int
E_FTP_PRET_FAILED: int
E_FTP_QUOTE_ERROR: int
E_FTP_SSL_FAILED: int
E_FTP_USER_PASSWORD_INCORRECT: int
E_FTP_WEIRD_227_FORMAT: int
E_FTP_WEIRD_PASS_REPLY: int
E_FTP_WEIRD_PASV_REPLY: int
E_FTP_WEIRD_SERVER_REPLY: int
E_FTP_WEIRD_USER_REPLY: int
E_FTP_WRITE_ERROR: int
E_FUNCTION_NOT_FOUND: int
E_GOT_NOTHING: int
E_HTTP2: int
E_HTTP_NOT_FOUND: int
E_HTTP_PORT_FAILED: int
E_HTTP_POST_ERROR: int
E_HTTP_RANGE_ERROR: int
E_HTTP_RETURNED_ERROR: int
E_INTERFACE_FAILED: int
E_LDAP_CANNOT_BIND: int
E_LDAP_INVALID_URL: int
E_LDAP_SEARCH_FAILED: int
E_LIBRARY_NOT_FOUND: int
E_LOGIN_DENIED: int
E_MALFORMAT_USER: int
E_MULTI_ADDED_ALREADY: int
E_MULTI_BAD_EASY_HANDLE: int
E_MULTI_BAD_HANDLE: int
E_MULTI_BAD_SOCKET: int
E_MULTI_CALL_MULTI_PERFORM: int
E_MULTI_CALL_MULTI_SOCKET: int
E_MULTI_INTERNAL_ERROR: int
E_MULTI_OK: int
E_MULTI_OUT_OF_MEMORY: int
E_MULTI_UNKNOWN_OPTION: int
E_NOT_BUILT_IN: int
E_NO_CONNECTION_AVAILABLE: int
E_OK: int
E_OPERATION_TIMEDOUT: int
E_OPERATION_TIMEOUTED: int
E_OUT_OF_MEMORY: int
E_PARTIAL_FILE: int
E_PEER_FAILED_VERIFICATION: int
E_QUOTE_ERROR: int
E_RANGE_ERROR: int
E_READ_ERROR: int
E_RECV_ERROR: int
E_REMOTE_ACCESS_DENIED: int
E_REMOTE_DISK_FULL: int
E_REMOTE_FILE_EXISTS: int
E_REMOTE_FILE_NOT_FOUND: int
E_RTSP_CSEQ_ERROR: int
E_RTSP_SESSION_ERROR: int
E_SEND_ERROR: int
E_SEND_FAIL_REWIND: int
E_SHARE_IN_USE: int
E_SSH: int
E_SSL_CACERT: int
E_SSL_CACERT_BADFILE: int
E_SSL_CERTPROBLEM: int
E_SSL_CIPHER: int
E_SSL_CONNECT_ERROR: int
E_SSL_CRL_BADFILE: int
E_SSL_ENGINE_INITFAILED: int
E_SSL_ENGINE_NOTFOUND: int
E_SSL_ENGINE_SETFAILED: int
E_SSL_INVALIDCERTSTATUS: int
E_SSL_ISSUER_ERROR: int
E_SSL_PEER_CERTIFICATE: int
E_SSL_PINNEDPUBKEYNOTMATCH: int
E_SSL_SHUTDOWN_FAILED: int
E_TELNET_OPTION_SYNTAX: int
E_TFTP_DISKFULL: int
E_TFTP_EXISTS: int
E_TFTP_ILLEGAL: int
E_TFTP_NOSUCHUSER: int
E_TFTP_NOTFOUND: int
E_TFTP_PERM: int
E_TFTP_UNKNOWNID: int
E_TOO_MANY_REDIRECTS: int
E_UNKNOWN_OPTION: int
E_UNKNOWN_TELNET_OPTION: int
E_UNSUPPORTED_PROTOCOL: int
E_UPLOAD_FAILED: int
E_URL_MALFORMAT: int
E_URL_MALFORMAT_USER: int
E_USE_SSL_FAILED: int
E_WRITE_ERROR: int
| 13,755 | Python | .py | 633 | 20.560821 | 104 | 0.797651 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,386 | backports_abc.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/backports_abc.pyi | from typing import Any
# Stub for backports_abc: untyped factory/patch helpers and the ABC objects
# they produce (all Any in this stub).
def mk_gen(): ...
def mk_awaitable(): ...
def mk_coroutine(): ...
Generator: Any
Awaitable: Any
Coroutine: Any
def isawaitable(obj): ...
PATCHED: Any
def patch(patch_inspect: bool = ...): ...
| 220 | Python | .py | 10 | 20.5 | 41 | 0.692683 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,387 | pyre_extensions.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/pyre_extensions.pyi | from typing import Any, List, Optional, Type, TypeVar
_T = TypeVar("_T")
def none_throws(optional: Optional[_T], message: str = ...) -> _T: ...
def safe_cast(new_type: Type[_T], value: Any) -> _T: ...
def ParameterSpecification(__name: str) -> List[Type[Any]]: ...
| 267 | Python | .py | 5 | 52 | 70 | 0.642308 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,388 | singledispatch.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/singledispatch.pyi | from typing import Any, Callable, Generic, Mapping, TypeVar, overload
_T = TypeVar("_T")
class _SingleDispatchCallable(Generic[_T]):
registry: Mapping[Any, Callable[..., _T]]
def dispatch(self, cls: Any) -> Callable[..., _T]: ...
@overload
def register(self, cls: Any) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ...
@overload
def register(self, cls: Any, func: Callable[..., _T]) -> Callable[..., _T]: ...
def _clear_cache(self) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> _T: ...
def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ...
| 624 | Python | .py | 12 | 48.083333 | 89 | 0.599343 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,389 | decorator.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/decorator.pyi | import sys
from typing import Any, Callable, Dict, Iterator, List, NamedTuple, Optional, Pattern, Text, Tuple, TypeVar
_C = TypeVar("_C", bound=Callable[..., Any])
_Func = TypeVar("_Func", bound=Callable[..., Any])
_T = TypeVar("_T")
def get_init(cls): ...
if sys.version_info >= (3,):
from inspect import getfullargspec as getfullargspec, iscoroutinefunction as iscoroutinefunction
else:
class FullArgSpec(NamedTuple):
args: List[str]
varargs: Optional[str]
varkw: Optional[str]
defaults: Tuple[Any, ...]
kwonlyargs: List[str]
kwonlydefaults: Dict[str, Any]
annotations: Dict[str, Any]
def iscoroutinefunction(f: Callable[..., Any]) -> bool: ...
def getfullargspec(func: Any) -> FullArgSpec: ...
if sys.version_info >= (3, 2):
from contextlib import _GeneratorContextManager
else:
from contextlib import GeneratorContextManager as _GeneratorContextManager
DEF: Pattern[str]
# Stub for decorator's FunctionMaker: holds the pieces of a function signature
# and builds callables from source templates (``make``/``create``).
class FunctionMaker(object):
    args: List[Text]
    varargs: Optional[Text]
    varkw: Optional[Text]
    defaults: Tuple[Any, ...]
    kwonlyargs: List[Text]
    kwonlydefaults: Optional[Text]
    shortsignature: Optional[Text]
    name: Text
    doc: Optional[Text]
    module: Optional[Text]
    annotations: Dict[Text, Any]
    signature: Text
    dict: Dict[Text, Any]
    def __init__(
        self,
        func: Optional[Callable[..., Any]] = ...,
        name: Optional[Text] = ...,
        signature: Optional[Text] = ...,
        defaults: Optional[Tuple[Any, ...]] = ...,
        doc: Optional[Text] = ...,
        module: Optional[Text] = ...,
        funcdict: Optional[Dict[Text, Any]] = ...,
    ) -> None: ...
    def update(self, func: Any, **kw: Any) -> None: ...
    def make(
        self, src_templ: Text, evaldict: Optional[Dict[Text, Any]] = ..., addsource: bool = ..., **attrs: Any
    ) -> Callable[..., Any]: ...
    # Alternate constructor: builds a function directly from an object + body.
    @classmethod
    def create(
        cls,
        obj: Any,
        body: Text,
        evaldict: Dict[Text, Any],
        defaults: Optional[Tuple[Any, ...]] = ...,
        doc: Optional[Text] = ...,
        module: Optional[Text] = ...,
        addsource: bool = ...,
        **attrs: Any,
    ) -> Callable[..., Any]: ...
# decorate: wraps ``func`` with ``caller`` while preserving its type (_Func).
# decorator: turns a caller into a signature-preserving decorator factory.
def decorate(func: _Func, caller: Callable[..., Any], extras: Any = ...) -> _Func: ...
def decorator(
    caller: Callable[..., Any], _func: Optional[Callable[..., Any]] = ...
) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ...
# Stub: generator-based context manager that is also usable as a decorator
# (its __call__ preserves the decorated callable's type).
class ContextManager(_GeneratorContextManager[_T]):
    def __call__(self, func: _C) -> _C: ...
# contextmanager: decorator-module variant returning the ContextManager above.
def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ...
def dispatch_on(*dispatch_args: Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ...
| 2,760 | Python | .py | 72 | 32.819444 | 109 | 0.605077 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,390 | croniter.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/croniter.pyi | import datetime
from typing import Any, Dict, Iterator, List, Optional, Text, Tuple, Type, TypeVar, Union
# croniter stub: _RetType selects whether results come back as epoch floats or
# datetimes; below it, the package's exception hierarchy rooted at CroniterError.
_RetType = Union[Type[float], Type[datetime.datetime]]
_SelfT = TypeVar("_SelfT", bound=croniter)
class CroniterError(ValueError): ...
class CroniterBadCronError(CroniterError): ...
class CroniterBadDateError(CroniterError): ...
class CroniterNotAlphaError(CroniterError): ...
# Stub for the croniter iterator over cron-expression occurrences.
class croniter(Iterator[Any]):
    MONTHS_IN_YEAR: int
    RANGES: Tuple[Tuple[int, int], ...]
    DAYS: Tuple[int, ...]
    ALPHACONV: Tuple[Dict[str, Any], ...]
    LOWMAP: Tuple[Dict[int, Any], ...]
    bad_length: str
    tzinfo: Optional[datetime.tzinfo]
    cur: float
    expanded: List[List[str]]
    start_time: float
    dst_start_time: float
    nth_weekday_of_month: Dict[str, Any]
    def __init__(
        self, expr_format: Text, start_time: Optional[Union[float, datetime.datetime]] = ..., ret_type: Optional[_RetType] = ...
    ) -> None: ...
    # Most return values depend on ret_type, which can be passed both as a
    # method argument and as a constructor argument — hence the Any returns.
    def get_next(self, ret_type: Optional[_RetType] = ...) -> Any: ...
    def get_prev(self, ret_type: Optional[_RetType] = ...) -> Any: ...
    def get_current(self, ret_type: Optional[_RetType] = ...) -> Any: ...
    def __iter__(self: _SelfT) -> _SelfT: ...
    def __next__(self, ret_type: Optional[_RetType] = ...) -> Any: ...
    def next(self, ret_type: Optional[_RetType] = ...) -> Any: ...
    def all_next(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
    def all_prev(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
    def iter(self, ret_type: Optional[_RetType] = ...) -> Iterator[Any]: ...
    def is_leap(self, year: int) -> bool: ...
    @classmethod
    def expand(cls, expr_format: Text) -> Tuple[List[List[str]], Dict[str, Any]]: ...
    @classmethod
    def is_valid(cls, expression: Text) -> bool: ...
| 1,934 | Python | .py | 40 | 44.075 | 128 | 0.638287 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,391 | util.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/util.pyi | from collections import namedtuple
from typing import Any, Optional, Pattern
# markdown.util stub: placeholder constants used by the HTML stash/inline
# machinery, plus small helper functions and the AtomicString marker type.
PY37: Any
__deprecated__: Any
BLOCK_LEVEL_ELEMENTS: Any
STX: str
ETX: str
INLINE_PLACEHOLDER_PREFIX: Any
INLINE_PLACEHOLDER: Any
INLINE_PLACEHOLDER_RE: Pattern
AMP_SUBSTITUTE: Any
HTML_PLACEHOLDER: Any
HTML_PLACEHOLDER_RE: Pattern
TAG_PLACEHOLDER: Any
INSTALLED_EXTENSIONS: Any
RTL_BIDI_RANGES: Any
def deprecated(message, stacklevel: int = ...): ...
def isBlockLevel(tag): ...
def parseBoolValue(value, fail_on_errors: bool = ..., preserve_none: bool = ...): ...
def code_escape(text): ...
# str subclass used as a no-further-processing marker.
class AtomicString(str): ...
# Stub: base class for markdown processors; holds a reference to the Markdown
# instance (``md``), with ``markdown`` as a property accessor.
class Processor:
    md: Any
    def __init__(self, md: Optional[Any] = ...) -> None: ...
    @property
    def markdown(self): ...
# Stub: store/placeholder bookkeeping for raw HTML blocks and tags.
class HtmlStash:
    html_counter: int = ...
    rawHtmlBlocks: Any
    tag_counter: int = ...
    tag_data: Any
    def __init__(self) -> None: ...
    def store(self, html): ...
    def reset(self) -> None: ...
    def get_placeholder(self, key): ...
    def store_tag(self, tag, attrs, left_index, right_index): ...
# Stub: priority-ordered registry used for processors/patterns; supports both
# mapping-style access and register/deregister by name.
class Registry:
    def __init__(self) -> None: ...
    def __contains__(self, item): ...
    def __iter__(self) -> Any: ...
    def __getitem__(self, key): ...
    def __len__(self): ...
    def get_index_for_name(self, name): ...
    def register(self, item, name, priority) -> None: ...
    def deregister(self, name, strict: bool = ...) -> None: ...
    def __setitem__(self, key, value) -> None: ...
    def __delitem__(self, key) -> None: ...
    def add(self, key, value, location) -> None: ...
# Module-level __getattr__: remaining attributes are untyped in this stub.
def __getattr__(name): ...
| 1,584 | Python | .py | 49 | 29.22449 | 85 | 0.634817 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,392 | inlinepatterns.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/inlinepatterns.pyi | from typing import Any, Match, Optional, Tuple, Union
from xml.etree.ElementTree import Element
# markdown.inlinepatterns stub: the registry builder, the regex-source string
# constants for each inline construct, and two small helpers.
def build_inlinepatterns(md, **kwargs): ...
NOIMG: str
BACKTICK_RE: str
ESCAPE_RE: str
EMPHASIS_RE: str
STRONG_RE: str
SMART_STRONG_RE: str
SMART_EMPHASIS_RE: str
SMART_STRONG_EM_RE: str
EM_STRONG_RE: str
EM_STRONG2_RE: str
STRONG_EM_RE: str
STRONG_EM2_RE: str
STRONG_EM3_RE: str
LINK_RE: str
IMAGE_LINK_RE: str
REFERENCE_RE: str
IMAGE_REFERENCE_RE: str
NOT_STRONG_RE: str
AUTOLINK_RE: str
AUTOMAIL_RE: str
HTML_RE: str
ENTITY_RE: str
LINE_BREAK_RE: str
def dequote(string): ...
class EmStrongItem: ...
# Stub: legacy inline-pattern base class; handleMatch may return a string,
# an ElementTree Element, or None.
class Pattern:
    ANCESTOR_EXCLUDES: Any
    pattern: Any
    compiled_re: Any
    md: Any
    def __init__(self, pattern, md: Optional[Any] = ...) -> None: ...
    @property
    def markdown(self): ...
    def getCompiledRegExp(self): ...
    def handleMatch(self, m: Match) -> Optional[Union[str, Element]]: ...
    def type(self): ...
    def unescape(self, text): ...
# Stub: newer inline-processor API; handleMatch takes the raw data and returns
# (element, start, end) — its signature differs from Pattern's, hence the ignore.
class InlineProcessor(Pattern):
    safe_mode: bool = ...
    def __init__(self, pattern, md: Optional[Any] = ...) -> None: ...
    def handleMatch(self, m: Match, data) -> Union[Tuple[Element, int, int], Tuple[None, None, None]]: ... # type: ignore
# Stubs for the simple concrete patterns/processors; most only subclass a base,
# a few add a ``tag`` attribute set by __init__.
class SimpleTextPattern(Pattern): ...
class SimpleTextInlineProcessor(InlineProcessor): ...
class EscapeInlineProcessor(InlineProcessor): ...
class SimpleTagPattern(Pattern):
    tag: Any
    def __init__(self, pattern, tag) -> None: ...
class SimpleTagInlineProcessor(InlineProcessor):
    tag: Any
    def __init__(self, pattern, tag) -> None: ...
class SubstituteTagPattern(SimpleTagPattern): ...
class SubstituteTagInlineProcessor(SimpleTagInlineProcessor): ...
class BacktickInlineProcessor(InlineProcessor):
    ESCAPED_BSLASH: Any
    tag: str = ...
    def __init__(self, pattern) -> None: ...
class DoubleTagPattern(SimpleTagPattern): ...
class DoubleTagInlineProcessor(SimpleTagInlineProcessor): ...
class HtmlInlineProcessor(InlineProcessor): ...
# Stub: emphasis/strong processor for ``*`` markers with element-builder hooks.
class AsteriskProcessor(InlineProcessor):
    PATTERNS: Any
    def build_single(self, m, tag, idx): ...
    def build_double(self, m, tags, idx): ...
    def build_double2(self, m, tags, idx): ...
    def parse_sub_patterns(self, data, parent, last, idx) -> None: ...
    def build_element(self, m, builder, tags, index): ...
# Stub: same machinery as AsteriskProcessor but with ``_``-marker patterns.
class UnderscoreProcessor(AsteriskProcessor):
    PATTERNS: Any
# Stub: link processor with helpers that extract the link target and text.
class LinkInlineProcessor(InlineProcessor):
    RE_LINK: Any
    RE_TITLE_CLEAN: Any
    def getLink(self, data, index): ...
    def getText(self, data, index): ...
# Stubs for the remaining concrete processors: images, reference-style links,
# and autolink/automail handling.
class ImageInlineProcessor(LinkInlineProcessor): ...
class ReferenceInlineProcessor(LinkInlineProcessor):
    NEWLINE_CLEANUP_RE: Pattern
    def evalId(self, data, index, text): ...
    def makeTag(self, href, title, text): ...
class ShortReferenceInlineProcessor(ReferenceInlineProcessor): ...
class ImageReferenceInlineProcessor(ReferenceInlineProcessor): ...
class AutolinkInlineProcessor(InlineProcessor): ...
class AutomailInlineProcessor(InlineProcessor): ...
28,393 | preprocessors.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/preprocessors.pyi | from typing import Any, Iterable, List, Pattern
from . import util
# markdown.preprocessors stub: the registry builder, the Preprocessor base
# (run maps lines -> lines), and the concrete preprocessor classes.
def build_preprocessors(md, **kwargs): ...
class Preprocessor(util.Processor):
    def run(self, lines: List[str]) -> List[str]: ...
class NormalizeWhitespace(Preprocessor): ...
class HtmlBlockPreprocessor(Preprocessor):
    right_tag_patterns: Any
    attrs_pattern: str = ...
    left_tag_pattern: Any
    attrs_re: Any
    left_tag_re: Any
    markdown_in_raw: bool = ...
class ReferencePreprocessor(Preprocessor):
    TITLE: str = ...
    RE: Pattern
    TITLE_RE: Pattern
| 550 | Python | .py | 17 | 28.647059 | 53 | 0.70778 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,394 | __init__.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/__init__.pyi | from .core import Markdown as Markdown, markdown as markdown, markdownFromFile as markdownFromFile
from .extensions import Extension as Extension
| 146 | Python | .py | 2 | 72 | 98 | 0.861111 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,395 | core.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/core.pyi | from typing import Any, BinaryIO, Callable, ClassVar, Dict, List, Mapping, Optional, Sequence, Text, TextIO, Union
from typing_extensions import Literal
from xml.etree.ElementTree import Element
from .blockparser import BlockParser
from .extensions import Extension
from .util import HtmlStash, Registry
# Stub for the main Markdown converter class: processor registries, output
# configuration, and the convert/convertFile entry points. Fluent methods
# return Markdown to allow chaining.
class Markdown:
    preprocessors: Registry
    inlinePatterns: Registry
    treeprocessors: Registry
    postprocessors: Registry
    parser: BlockParser
    htmlStash: HtmlStash
    output_formats: ClassVar[Dict[Literal["xhtml", "html"], Callable[[Element], Text]]]
    output_format: Literal["xhtml", "html"]
    serializer: Callable[[Element], Text]
    tab_length: int
    block_level_elements: List[str]
    def __init__(
        self,
        *,
        extensions: Optional[Sequence[Union[str, Extension]]] = ...,
        extension_configs: Optional[Mapping[str, Mapping[str, Any]]] = ...,
        output_format: Optional[Literal["xhtml", "html"]] = ...,
        tab_length: Optional[int] = ...,
    ) -> None: ...
    def build_parser(self) -> Markdown: ...
    def registerExtensions(
        self, extensions: Sequence[Union[Extension, str]], configs: Mapping[str, Mapping[str, Any]]
    ) -> Markdown: ...
    def build_extension(self, ext_name: Text, configs: Mapping[str, str]) -> Extension: ...
    def registerExtension(self, extension: Extension) -> Markdown: ...
    def reset(self: Markdown) -> Markdown: ...
    def set_output_format(self, format: Literal["xhtml", "html"]) -> Markdown: ...
    def is_block_level(self, tag: str) -> bool: ...
    def convert(self, source: Text) -> Text: ...
    def convertFile(
        self,
        input: Optional[Union[str, TextIO, BinaryIO]] = ...,
        output: Optional[Union[str, TextIO, BinaryIO]] = ...,
        encoding: Optional[str] = ...,
    ) -> Markdown: ...
def markdown(
text: Text,
*,
extensions: Optional[Sequence[Union[str, Extension]]] = ...,
extension_configs: Optional[Mapping[str, Mapping[str, Any]]] = ...,
output_format: Optional[Literal["xhtml", "html"]] = ...,
tab_length: Optional[int] = ...,
) -> Text: ...
def markdownFromFile(
*,
input: Optional[Union[str, TextIO, BinaryIO]] = ...,
output: Optional[Union[str, TextIO, BinaryIO]] = ...,
encoding: Optional[str] = ...,
extensions: Optional[Sequence[Union[str, Extension]]] = ...,
extension_configs: Optional[Mapping[str, Mapping[str, Any]]] = ...,
output_format: Optional[Literal["xhtml", "html"]] = ...,
tab_length: Optional[int] = ...,
) -> None: ...
| 2,555 | Python | .py | 60 | 37.533333 | 114 | 0.648475 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,396 | postprocessors.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/postprocessors.pyi | from typing import Any, Pattern
from . import util
def build_postprocessors(md, **kwargs): ...
class Postprocessor(util.Processor):
def run(self, text) -> None: ...
class RawHtmlPostprocessor(Postprocessor):
def isblocklevel(self, html): ...
class AndSubstitutePostprocessor(Postprocessor): ...
class UnescapePostprocessor(Postprocessor):
RE: Pattern
def unescape(self, m): ...
| 400 | Python | .py | 11 | 33.363636 | 52 | 0.751958 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,397 | serializers.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/serializers.pyi | from typing import Any
def to_html_string(element): ...
def to_xhtml_string(element): ...
| 91 | Python | .py | 3 | 29 | 33 | 0.735632 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,398 | treeprocessors.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/treeprocessors.pyi | from typing import Any, Optional
from . import util
def build_treeprocessors(md, **kwargs): ...
def isString(s): ...
class Treeprocessor(util.Processor):
def run(self, root) -> None: ...
class InlineProcessor(Treeprocessor):
inlinePatterns: Any
ancestors: Any
def __init__(self, md) -> None: ...
stashed_nodes: Any
parent_map: Any
def run(self, tree, ancestors: Optional[Any] = ...): ...
class PrettifyTreeprocessor(Treeprocessor): ...
| 469 | Python | .py | 14 | 30.142857 | 60 | 0.691111 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |
28,399 | blockparser.pyi | DamnWidget_anaconda/anaconda_lib/jedi/third_party/typeshed/third_party/2and3/markdown/blockparser.pyi | from typing import Any
class State(list):
def set(self, state) -> None: ...
def reset(self) -> None: ...
def isstate(self, state): ...
class BlockParser:
blockprocessors: Any
state: Any
md: Any
def __init__(self, md) -> None: ...
@property
def markdown(self): ...
root: Any
def parseDocument(self, lines): ...
def parseChunk(self, parent, text) -> None: ...
def parseBlocks(self, parent, blocks) -> None: ...
| 463 | Python | .py | 16 | 24.5625 | 54 | 0.608989 | DamnWidget/anaconda | 2,213 | 260 | 184 | GPL-3.0 | 9/5/2024, 5:14:06 PM (Europe/Amsterdam) |