desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def clear_cache(self):
    """Empty both the dist-info and the egg-info caches."""
    # Reset each backing cache so the next lookup repopulates them.
    for cache in (self._cache, self._cache_egg):
        cache.clear()
'Yield .dist-info and/or .egg(-info) distributions.'
def _yield_distributions(self):
seen = set() for path in self.path: finder = resources.finder_for_path(path) if (finder is None): continue r = finder.find(u'') if ((not r) or (not r.is_container)): continue rset = sorted(r.resources) for entry in rset: r = fin...
'Scan the path for distributions and populate the cache with those that are found.'
def _generate_cache(self):
gen_dist = (not self._cache.generated) gen_egg = (self._include_egg and (not self._cache_egg.generated)) if (gen_dist or gen_egg): for dist in self._yield_distributions(): if isinstance(dist, InstalledDistribution): self._cache.add(dist) else: ...
@classmethod
def distinfo_dirname(cls, name, version):
    """Return the filename-escaped ``.dist-info`` directory name.

    Any ``'-'`` characters in *name* are replaced with ``'_'``; the
    escaped name and *version* are joined with a single ``'-'`` and the
    standard dist-info suffix is appended.
    """
    safe_name = name.replace(u'-', u'_')
    return u'-'.join([safe_name, version]) + DISTINFO_EXT
'Provides an iterator that looks for distributions and returns :class:`InstalledDistribution` or :class:`EggInfoDistribution` instances for each one of them. :rtype: iterator of :class:`InstalledDistribution` and :class:`EggInfoDistribution` instances'
def get_distributions(self):
if (not self._cache_enabled): for dist in self._yield_distributions(): (yield dist) else: self._generate_cache() for dist in self._cache.path.values(): (yield dist) if self._include_egg: for dist in self._cache_egg.path.values(): ...
'Looks for a named distribution on the path. This function only returns the first result found, as no more than one value is expected. If nothing is found, ``None`` is returned. :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` or ``None``'
def get_distribution(self, name):
result = None name = name.lower() if (not self._cache_enabled): for dist in self._yield_distributions(): if (dist.key == name): result = dist break else: self._generate_cache() if (name in self._cache.name): result = self._c...
'Iterates over all distributions to find which distributions provide *name*. If a *version* is provided, it will be used to filter the results. This function only returns the first result found, since no more than one values are expected. If the directory is not found, returns ``None``. :parameter version: a version sp...
def provides_distribution(self, name, version=None):
matcher = None if (not (version is None)): try: matcher = self._scheme.matcher((u'%s (%s)' % (name, version))) except ValueError: raise DistlibException((u'invalid name or version: %r, %r' % (name, version))) for dist in self.get_distributions(): ...
def get_file_path(self, name, relative_path):
    """Return the path to a resource file of the named distribution.

    :raises LookupError: if no distribution called *name* is found.
    """
    dist = self.get_distribution(name)
    if dist is None:
        raise LookupError(u'no distribution named %r found' % name)
    return dist.get_resource_path(relative_path)
def get_exported_entries(self, category, name=None):
    """Yield the exported entries in a particular category.

    :param category: The category to search for entries.
    :param name: If specified, only the entry with that name is yielded.
    """
    for dist in self.get_distributions():
        exports = dist.exports
        if category not in exports:
            continue
        entries = exports[category]
        if name is None:
            # No filter: yield every entry in the category.
            for entry in entries.values():
                yield entry
        elif name in entries:
            yield entries[name]
def __init__(self, metadata):
    """Initialise an instance.

    :param metadata: The instance of :class:`Metadata` describing this
                     distribution.
    """
    self.metadata = metadata
    self.name = metadata.name
    self.key = self.name.lower()    # lower-cased name, used for lookups
    self.version = metadata.version
    # Locator bookkeeping — filled in when a locator finds this distribution.
    self.locator = None
    self.digest = None
    self.extras = None
    self.context = None
    self.download_urls = set()
    self.digests = {}
@property
def source_url(self):
    """The source archive download URL for this distribution."""
    return self.metadata.source_url
@property
def name_and_version(self):
    """A utility property: ``"name (version)"``."""
    return u'%s (%s)' % (self.name, self.version)
@property
def provides(self):
    """A list of "name (version)" strings provided by this distribution.

    The distribution's own "name (version)" entry is appended to the
    metadata-supplied list when not already present (note: this mutates
    the underlying metadata list in place).
    """
    provided = self.metadata.provides
    own_entry = u'%s (%s)' % (self.name, self.version)
    if own_entry not in provided:
        provided.append(own_entry)
    return provided
'Say if this instance matches (fulfills) a requirement. :param req: The requirement to match. :rtype req: str :return: True if it matches, else False.'
def matches_requirement(self, req):
r = parse_requirement(req) scheme = get_scheme(self.metadata.scheme) try: matcher = scheme.matcher(r.requirement) except UnsupportedVersionError: logger.warning(u'could not read version %r - using name only', req) name = req.split()[0] matcher = sc...
def __repr__(self):
    """Return a textual representation of this instance."""
    suffix = (u' [%s]' % self.source_url) if self.source_url else u''
    return u'<Distribution %s (%s)%s>' % (self.name, self.version, suffix)
def __eq__(self, other):
    """Equality: same concrete type, name, version and source_url."""
    if type(other) is not type(self):
        return False
    return (self.name == other.name and
            self.version == other.version and
            self.source_url == other.source_url)
def __hash__(self):
    """Compute a hash consistent with :meth:`__eq__` (sum of the three
    component hashes)."""
    return hash(self.name) + hash(self.version) + hash(self.source_url)
def __init__(self, metadata, path, env=None):
    """Initialise an instance.

    :param metadata: An instance of :class:`Metadata` describing the
                     distribution, normally initialised from metadata
                     found in ``path``.
    :param path: The path of the ``.dist-info`` or ``.egg-info``
                 directory for the distribution.
    :param env: The distribution path/environment this distribution was
                found in, if any.
    """
    super(BaseInstalledDistribution, self).__init__(metadata)
    self.path = path
    self.dist_path = env
'Get the hash of some data, using a particular hash algorithm, if specified. :param data: The data to be hashed. :type data: bytes :param hasher: The name of a hash implementation, supported by hashlib, or ``None``. Examples of valid values are ``\'sha1\'``, ``\'sha224\'``, ``\'sha384\'``, \'``sha256\'``, ``\'md5\'`` a...
def get_hash(self, data, hasher=None):
if (hasher is None): hasher = self.hasher if (hasher is None): hasher = hashlib.md5 prefix = u'' else: hasher = getattr(hashlib, hasher) prefix = (u'%s=' % self.hasher) digest = hasher(data).digest() digest = base64.urlsafe_b64encode(digest).rstrip('=').decode...
'Get the list of installed files for the distribution :return: A list of tuples of path, hash and size. Note that hash and size might be ``None`` for some entries. The path is exactly as stored in the file (which is as in PEP 376).'
def _get_records(self):
results = [] r = self.get_distinfo_resource(u'RECORD') with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as record_reader: for row in record_reader: missing = [None for i in range(len(row), 3)] (path, checksum, size) = (row + ...
@cached_property
def exports(self):
    """The exports declared by this distribution.

    :return: A dictionary mapping each export category to a dict of
             export entries keyed by name; empty when no EXPORTS
             resource is present.
    """
    if self.get_distinfo_resource(EXPORTS_FILENAME):
        return self.read_exports()
    return {}
def read_exports(self):
    """Read exports data from a .ini-format EXPORTS resource.

    :return: A dictionary mapping export categories to lists of export
             entries; empty when no EXPORTS resource is present.
    """
    result = {}
    resource = self.get_distinfo_resource(EXPORTS_FILENAME)
    if resource:
        # closing() guarantees the stream is released after parsing.
        with contextlib.closing(resource.as_stream()) as stream:
            result = read_exports(stream)
    return result
def write_exports(self, exports):
    """Write *exports* to the EXPORTS file in .ini format.

    :param exports: A dictionary mapping export categories to lists of
                    export entries.
    """
    target = self.get_distinfo_file(EXPORTS_FILENAME)
    with open(target, u'w') as f:
        write_exports(exports, f)
'NOTE: This API may change in the future. Return the absolute path to a resource file with the given relative path. :param relative_path: The path, relative to .dist-info, of the resource of interest. :return: The absolute path where the resource is to be found.'
def get_resource_path(self, relative_path):
r = self.get_distinfo_resource(u'RESOURCES') with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as resources_reader: for (relative, destination) in resources_reader: if (relative == relative_path): return destination raise ...
def list_installed_files(self):
    """Iterate over the ``RECORD`` entries.

    :returns: iterator of ``(path, hash, size)`` tuples; hash and size
              may be ``None`` for some entries.
    """
    for record in self._get_records():
        yield record
'Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any existing ``RECORD`` file is silently overwritten. prefix is used to determine when to write absolute paths.'
def write_installed_files(self, paths, prefix, dry_run=False):
prefix = os.path.join(prefix, u'') base = os.path.dirname(self.path) base_under_prefix = base.startswith(prefix) base = os.path.join(base, u'') record_path = self.get_distinfo_file(u'RECORD') logger.info(u'creating %s', record_path) if dry_run: return None with CSVWriter(recor...
'Checks that the hashes and sizes of the files in ``RECORD`` are matched by the files themselves. Returns a (possibly empty) list of mismatches. Each entry in the mismatch list will be a tuple consisting of the path, \'exists\', \'size\' or \'hash\' according to what didn\'t match (existence is checked first, then size...
def check_installed_files(self):
mismatches = [] base = os.path.dirname(self.path) record_path = self.get_distinfo_file(u'RECORD') for (path, hash_value, size) in self.list_installed_files(): if (not os.path.isabs(path)): path = os.path.join(base, path) if (path == record_path): continue ...
'A dictionary of shared locations whose keys are in the set \'prefix\', \'purelib\', \'platlib\', \'scripts\', \'headers\', \'data\' and \'namespace\'. The corresponding value is the absolute path of that category for this distribution, and takes into account any paths selected by the user at installation time (e.g. vi...
@cached_property def shared_locations(self):
result = {} shared_path = os.path.join(self.path, u'SHARED') if os.path.isfile(shared_path): with codecs.open(shared_path, u'r', encoding=u'utf-8') as f: lines = f.read().splitlines() for line in lines: (key, value) = line.split(u'=', 1) if (key == u'names...
'Write shared location information to the SHARED file in .dist-info. :param paths: A dictionary as described in the documentation for :meth:`shared_locations`. :param dry_run: If True, the action is logged but no file is actually written. :return: The path of the file written to.'
def write_shared_locations(self, paths, dry_run=False):
shared_path = os.path.join(self.path, u'SHARED') logger.info(u'creating %s', shared_path) if dry_run: return None lines = [] for key in (u'prefix', u'lib', u'headers', u'scripts', u'data'): path = paths[key] if os.path.isdir(paths[key]): lines.append((u'%s=%s' ...
'Returns a path located under the ``.dist-info`` directory. Returns a string representing the path. :parameter path: a ``\'/\'``-separated path relative to the ``.dist-info`` directory or an absolute path; If *path* is an absolute path and doesn\'t start with the ``.dist-info`` directory path, a :class:`DistlibExceptio...
def get_distinfo_file(self, path):
if (path.find(os.sep) >= 0): (distinfo_dirname, path) = path.split(os.sep)[(-2):] if (distinfo_dirname != self.path.split(os.sep)[(-1)]): raise DistlibException((u'dist-info file %r does not belong to the %r %s distribution' % (path, self.name, self.version)...
def list_distinfo_files(self):
    """Yield the ``RECORD`` paths located inside the ``.dist-info``
    directory or one of its subdirectories.

    :returns: iterator of (absolute) paths.
    """
    base = os.path.dirname(self.path)
    for entry, _checksum, _size in self._get_records():
        # Relative entries are resolved against the dist-info's parent.
        if not os.path.isabs(entry):
            entry = os.path.join(base, entry)
        if entry.startswith(self.path):
            yield entry
'Checks that the hashes and sizes of the files in ``RECORD`` are matched by the files themselves. Returns a (possibly empty) list of mismatches. Each entry in the mismatch list will be a tuple consisting of the path, \'exists\', \'size\' or \'hash\' according to what didn\'t match (existence is checked first, then size...
def check_installed_files(self):
mismatches = [] record_path = os.path.join(self.path, u'installed-files.txt') if os.path.exists(record_path): for (path, _, _) in self.list_installed_files(): if (path == record_path): continue if (not os.path.exists(path)): mismatches.append((...
'Iterates over the ``installed-files.txt`` entries and returns a tuple ``(path, hash, size)`` for each line. :returns: a list of (path, hash, size)'
def list_installed_files(self):
def _md5(path): f = open(path, u'rb') try: content = f.read() finally: f.close() return hashlib.md5(content).hexdigest() def _size(path): return os.stat(path).st_size record_path = os.path.join(self.path, u'installed-files.txt') result = []...
'Iterates over the ``installed-files.txt`` entries and returns paths for each line if the path is pointing to a file located in the ``.egg-info`` directory or one of its subdirectories. :parameter absolute: If *absolute* is ``True``, each returned path is transformed into a local absolute path. Otherwise the raw value ...
def list_distinfo_files(self, absolute=False):
record_path = os.path.join(self.path, u'installed-files.txt') skip = True with codecs.open(record_path, u'r', encoding=u'utf-8') as f: for line in f: line = line.strip() if (line == u'./'): skip = False continue if (not skip): ...
def add_distribution(self, distribution):
    """Register *distribution* as a graph node with no edges yet."""
    self.adjacency_list[distribution] = []
    self.reverse_list[distribution] = []
def add_edge(self, x, y, label=None):
    """Add a directed edge from distribution *x* to *y*, annotated with
    *label* (e.g. the requirement string)."""
    self.adjacency_list[x].append((y, label))
    # Record the reverse dependency at most once.
    if x not in self.reverse_list[y]:
        self.reverse_list[y].append(x)
def add_missing(self, distribution, requirement):
    """Record that *distribution* has an unsatisfied *requirement*."""
    logger.debug(u'%s missing %r', distribution, requirement)
    self.missing.setdefault(distribution, []).append(requirement)
'Prints only a subgraph'
def repr_node(self, dist, level=1):
output = [self._repr_dist(dist)] for (other, label) in self.adjacency_list[dist]: dist = self._repr_dist(other) if (label is not None): dist = (u'%s [%s]' % (dist, label)) output.append(((u' ' * level) + str(dist))) suboutput = self.repr_node(other, ...
'Writes a DOT output for the graph to the provided file *f*. If *skip_disconnected* is set to ``True``, then all distributions that are not dependent on any other distribution are skipped. :type f: has to support ``file``-like operations :type skip_disconnected: ``bool``'
def to_dot(self, f, skip_disconnected=True):
disconnected = [] f.write(u'digraph dependencies {\n') for (dist, adjs) in self.adjacency_list.items(): if ((len(adjs) == 0) and (not skip_disconnected)): disconnected.append(dist) for (other, label) in adjs: if (not (label is None)): f.write((u'...
'Perform a topological sort of the graph. :return: A tuple, the first element of which is a topologically sorted list of distributions, and the second element of which is a list of distributions that cannot be sorted because they have circular dependencies and so form a cycle.'
def topological_sort(self):
result = [] alist = {} for (k, v) in self.adjacency_list.items(): alist[k] = v[:] while True: to_remove = [] for (k, v) in list(alist.items())[:]: if (not v): to_remove.append(k) del alist[k] if (not to_remove): brea...
def __repr__(self):
    """Representation of the graph: one ``repr_node`` block per
    distribution, newline-separated."""
    lines = [self.repr_node(dist) for dist in self.adjacency_list]
    return u'\n'.join(lines)
def __init__(self, context=None):
    """Initialise an instance.

    :param context: If specified, names are looked up in this mapping.
    """
    self.context = context or {}
    self.source = None   # set later when evaluating a source string
def get_fragment(self, offset):
    """Return a repr of the slice of ``self.source`` at *offset* that is
    causing a problem, with a trailing ellipsis if truncated."""
    fragment_len = 10
    fragment = '%r' % self.source[offset:offset + fragment_len]
    if offset + fragment_len < len(self.source):
        fragment += '...'
    return fragment
def get_handler(self, node_type):
    """Return the ``do_<node_type>`` handler for an AST node type, or
    None when no such handler exists."""
    return getattr(self, 'do_%s' % node_type, None)
'Evaluate a source string or node, using ``filename`` when displaying errors.'
def evaluate(self, node, filename=None):
if isinstance(node, string_types): self.source = node kwargs = {'mode': 'eval'} if filename: kwargs['filename'] = filename try: node = ast.parse(node, **kwargs) except SyntaxError as e: s = self.get_fragment(e.offset) raise Synt...
'Check if the provided version matches the constraints. :param version: The version to match against this instance. :type version: Strring or :class:`Version` instance.'
def match(self, version):
if isinstance(version, string_types): version = self.version_class(version) for (operator, constraint, prefix) in self._parts: f = self._operators.get(operator) if isinstance(f, string_types): f = getattr(self, f) if (not f): msg = ('%r not implement...
def is_valid_constraint_list(self, s):
    """Validate a constraint list *s* (used for some metadata fields) by
    wrapping it in a dummy requirement and delegating to
    :meth:`is_valid_matcher`."""
    requirement = 'dummy_name (%s)' % s
    return self.is_valid_matcher(requirement)
def is_stale(self, resource, path):
    """Say whether the cached copy at *path* is stale for *resource*.

    This base implementation is conservative and always says yes.
    """
    return True
'Get a resource into the cache, :param resource: A :class:`Resource` instance. :return: The pathname of the resource in the cache.'
def get(self, resource):
(prefix, path) = resource.finder.get_cache_info(resource) if (prefix is None): result = path else: result = os.path.join(self.base, self.prefix_to_dir(prefix), path) dirname = os.path.dirname(result) if (not os.path.isdir(dirname)): os.makedirs(dirname) if...
def as_stream(self):
    """Return the resource as a stream.

    This is a method rather than a property to make it obvious that a
    new stream is returned on each call.
    """
    return self.finder.get_stream(self)
'Initialise an instance using a (valid) filename.'
def __init__(self, filename=None, sign=False, verify=False):
self.sign = sign self.should_verify = verify self.buildver = u'' self.pyver = [PYVER] self.abi = [u'none'] self.arch = [u'any'] self.dirname = os.getcwd() if (filename is None): self.name = u'dummy' self.version = u'0.1' self._filename = self.filename else: ...
'Build and return a filename from the various components.'
@property def filename(self):
if self.buildver: buildver = (u'-' + self.buildver) else: buildver = u'' pyver = u'.'.join(self.pyver) abi = u'.'.join(self.abi) arch = u'.'.join(self.arch) version = self.version.replace(u'-', u'_') return (u'%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver, abi,...
'Build a wheel from files in specified paths, and use any specified tags when determining the name of the wheel.'
def build(self, paths, tags=None, wheel_version=None):
if (tags is None): tags = {} libkey = list(filter((lambda o: (o in paths)), (u'purelib', u'platlib')))[0] if (libkey == u'platlib'): is_pure = u'false' default_pyver = [IMPVER] default_abi = [ABI] default_arch = [ARCH] else: is_pure = u'true' defau...
'Install a wheel to the specified paths. If kwarg ``warner`` is specified, it should be a callable, which will be called with two tuples indicating the wheel version of this software and the wheel version in the file, if there is a discrepancy in the versions. This can be used to issue any warnings to raise any excepti...
def install(self, paths, maker, **kwargs):
dry_run = maker.dry_run warner = kwargs.get(u'warner') lib_only = kwargs.get(u'lib_only', False) pathname = os.path.join(self.dirname, self.filename) name_ver = (u'%s-%s' % (self.name, self.version)) data_dir = (u'%s.data' % name_ver) info_dir = (u'%s.dist-info' % name_ver) metadata_name...
def is_compatible(self):
    """Determine if this wheel is compatible with the running system,
    delegating to the module-level ``is_compatible`` helper."""
    return is_compatible(self)
def is_mountable(self):
    """Determine if a wheel is asserted as mountable by its metadata.

    The base implementation always says yes.
    """
    return True
'Update the contents of a wheel in a generic way. The modifier should be a callable which expects a dictionary argument: its keys are archive-entry paths, and its values are absolute filesystem paths where the contents the corresponding archive entries can be found. The modifier is free to change the contents of the fi...
def update(self, modifier, dest_dir=None, **kwargs):
def get_version(path_map, info_dir): version = path = None key = (u'%s/%s' % (info_dir, METADATA_FILENAME)) if (key not in path_map): key = (u'%s/PKG-INFO' % info_dir) if (key in path_map): path = path_map[key] version = Metadata(path=path).version...
def __init__(self, base=None):
    """Initialise an instance.

    :param base: The base directory to explore under; defaults to the
                 current working directory.
    """
    self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
    self.prefix = self.base + os.sep
    self.allfiles = None   # populated lazily by findall()
    self.files = set()
'Find all files under the base and set ``allfiles`` to the absolute pathnames of files found.'
def findall(self):
from stat import S_ISREG, S_ISDIR, S_ISLNK self.allfiles = allfiles = [] root = self.base stack = [root] pop = stack.pop push = stack.append while stack: root = pop() names = os.listdir(root) for name in names: fullname = os.path.join(root, name) ...
def add(self, item):
    """Add a file to the manifest.

    :param item: The pathname to add; may be relative to the base.
    """
    if not item.startswith(self.prefix):
        # Relative path: anchor it at the base directory.
        item = os.path.join(self.base, item)
    self.files.add(os.path.normpath(item))
def add_many(self, items):
    """Add each of *items* to the manifest via :meth:`add`.

    :param items: Pathnames to add; each may be relative to the base.
    """
    for item in items:
        self.add(item)
'Return sorted files in directory order'
def sorted(self, wantdirs=False):
def add_dir(dirs, d): dirs.add(d) logger.debug('add_dir added %s', d) if (d != self.base): (parent, _) = os.path.split(d) assert (parent not in ('', '/')) add_dir(dirs, parent) result = set(self.files) if wantdirs: dirs = set() ...
def clear(self):
    """Clear all collected files."""
    self.files = set()
    self.allfiles = []
'Process a directive which either adds some files from ``allfiles`` to ``files``, or removes some files from ``files``. :param directive: The directive to process. This should be in a format compatible with distutils ``MANIFEST.in`` files: http://docs.python.org/distutils/sourcedist.html#commands'
def process_directive(self, directive):
(action, patterns, thedir, dirpattern) = self._parse_directive(directive) if (action == 'include'): for pattern in patterns: if (not self._include_pattern(pattern, anchor=True)): logger.warning('no files found matching %r', pattern) elif (action == 'exclude'):...
'Validate a directive. :param directive: The directive to validate. :return: A tuple of action, patterns, thedir, dir_patterns'
def _parse_directive(self, directive):
words = directive.split() if ((len(words) == 1) and (words[0] not in ('include', 'exclude', 'global-include', 'global-exclude', 'recursive-include', 'recursive-exclude', 'graft', 'prune'))): words.insert(0, 'include') action = words[0] patterns = thedir = dir_pattern = None if (action in ('i...
def _include_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
    """Add to ``files`` every entry of ``allfiles`` matching *pattern*
    (a Unix-style glob, translated via ``_translate_pattern``).

    :return: True if any file matched.
    """
    pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
    if self.allfiles is None:
        self.findall()   # lazily populate the file list
    matched = False
    for name in self.allfiles:
        if pattern_re.search(name):
            self.files.add(name)
            matched = True
    return matched
def _exclude_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
    """Remove from ``files`` every entry matching *pattern*; the set is
    modified in place.

    :return: True if any file was removed.
    """
    pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
    removed = False
    # Iterate a snapshot since we mutate the set during the loop.
    for name in list(self.files):
        if pattern_re.search(name):
            self.files.remove(name)
            removed = True
    return removed
'Translate a shell-like wildcard pattern to a compiled regular expression. Return the compiled regex. If \'is_regex\' true, then \'pattern\' is directly compiled to a regex (if it\'s a string) or just returned as-is (assumes it\'s a regex object).'
def _translate_pattern(self, pattern, anchor=True, prefix=None, is_regex=False):
if is_regex: if isinstance(pattern, str): return re.compile(pattern) else: return pattern if pattern: pattern_re = self._glob_to_re(pattern) else: pattern_re = '' base = re.escape(os.path.join(self.base, '')) if (prefix is not None): em...
def _glob_to_re(self, pattern):
    """Translate a shell-like glob pattern to a regex string.

    Differs from ``fnmatch.translate`` in that ``*`` does not match the
    (platform-specific) path separator.
    """
    pattern_re = fnmatch.translate(pattern)
    sep = os.sep
    if os.sep == '\\':
        # The separator itself must be escaped for the character class.
        sep = r'\\\\'
    escaped = r'\1[^%s]' % sep
    # Replace each non-escaped '.' with a "[^sep]" character class,
    # preserving any preceding (even-count) backslash runs.
    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
    return pattern_re
def get_fullname(self, filesafe=False):
    """Return the distribution name with version.

    :param filesafe: If true, return a filename-escaped form.
    """
    return _get_name_and_version(self[u'Name'], self[u'Version'], filesafe)
def is_field(self, name):
    """Return True if *name* is a valid metadata key."""
    return self._convert_name(name) in _ALL_FIELDS
def read(self, filepath):
    """Read the metadata values from the file at *filepath*.

    The file is opened as UTF-8 text; a ``with`` block (rather than the
    original try/finally) guarantees it is closed even when
    :meth:`read_file` raises.
    """
    with codecs.open(filepath, u'r', encoding=u'utf-8') as fp:
        self.read_file(fp)
'Read the metadata values from a file object.'
def read_file(self, fileob):
msg = message_from_file(fileob) self._fields[u'Metadata-Version'] = msg[u'metadata-version'] for field in _ALL_FIELDS: if (field not in msg): continue if (field in _LISTFIELDS): values = msg.get_all(field) if ((field in _LISTTUPLEFIELDS) and (values is not...
def write(self, filepath, skip_unknown=False):
    """Write the metadata fields to *filepath* as UTF-8 text.

    A ``with`` block (rather than the original try/finally) guarantees
    the file is closed even when :meth:`write_file` raises.

    :param skip_unknown: Passed through to :meth:`write_file`; skips
                         fields with unknown/empty values.
    """
    with codecs.open(filepath, u'w', encoding=u'utf-8') as fp:
        self.write_file(fp, skip_unknown)
'Write the PKG-INFO format data to a file object.'
def write_file(self, fileobject, skip_unknown=False):
self.set_metadata_version() for field in _version2fieldlist(self[u'Metadata-Version']): values = self.get(field) if (skip_unknown and (values in (u'UNKNOWN', [], [u'UNKNOWN']))): continue if (field in _ELEMENTSFIELD): self._write_field(fileobject, field, u','.join...
'Set metadata values from the given iterable `other` and kwargs. Behavior is like `dict.update`: If `other` has a ``keys`` method, they are looped over and ``self[key]`` is assigned ``other[key]``. Else, ``other`` is an iterable of ``(key, value)`` iterables. Keys that don\'t match a metadata field or that have an empt...
def update(self, other=None, **kwargs):
def _set(key, value): if ((key in _ATTR2FIELD) and value): self.set(self._convert_name(key), value) if (not other): pass elif hasattr(other, u'keys'): for k in other.keys(): _set(k, other[k]) else: for (k, v) in other: _set(k, v) if...
'Control then set a metadata field.'
def set(self, name, value):
name = self._convert_name(name) if (((name in _ELEMENTSFIELD) or (name == u'Platform')) and (not isinstance(value, (list, tuple)))): if isinstance(value, string_types): value = [v.strip() for v in value.split(u',')] else: value = [] elif ((name in _LISTFIELDS) and (no...
'Get a metadata field.'
def get(self, name, default=_MISSING):
name = self._convert_name(name) if (name not in self._fields): if (default is _MISSING): default = self._default_value(name) return default if (name in _UNICODEFIELDS): value = self._fields[name] return value elif (name in _LISTFIELDS): value = self._f...
'Check if the metadata is compliant. If strict is True then raise if no Name or Version are provided'
def check(self, strict=False):
self.set_metadata_version() (missing, warnings) = ([], []) for attr in (u'Name', u'Version'): if (attr not in self): missing.append(attr) if (strict and (missing != [])): msg = (u'missing required metadata: %s' % u', '.join(missing)) raise MetadataMissingE...
'Return fields as a dict. Field names will be converted to use the underscore-lowercase style instead of hyphen-mixed case (i.e. home_page instead of Home-page).'
def todict(self, skip_missing=False):
self.set_metadata_version() mapping_1_0 = ((u'metadata_version', u'Metadata-Version'), (u'name', u'Name'), (u'version', u'Version'), (u'summary', u'Summary'), (u'home_page', u'Home-page'), (u'author', u'Author'), (u'author_email', u'Author-email'), (u'license', u'License'), (u'description', u'Description'), (u'...
'Base method to get dependencies, given a set of extras to satisfy and an optional environment context. :param reqts: A list of sometimes-wanted dependencies, perhaps dependent on extras and environment. :param extras: A list of optional components being requested. :param env: An optional environment for marker evaluat...
def get_requirements(self, reqts, extras=None, env=None):
if self._legacy: result = reqts else: result = [] extras = get_extras((extras or []), self.extras) for d in reqts: if ((u'extra' not in d) and (u'environment' not in d)): include = True else: if (u'extra' not in d): ...
def make(self, specification, options=None):
    """Make a script.

    If *specification* is a valid export entry, generate a script from
    the callable it names; otherwise treat it as a source filename and
    copy it.

    :param options: A dictionary of options controlling script generation.
    :return: A list of absolute pathnames of files written.
    """
    written = []
    entry = get_export_entry(specification)
    if entry is None:
        self._copy_script(specification, written)
    else:
        self._make_script(entry, written, options=options)
    return written
def make_multiple(self, specifications, options=None):
    """Make scripts from each of *specifications* via :meth:`make`.

    :return: A list of all absolute pathnames written.
    """
    written = []
    for specification in specifications:
        written.extend(self.make(specification, options))
    return written
def __init__(self, scheme='default'):
    """Initialise an instance.

    :param scheme: The version scheme locators use when picking the most
                   recent version — ``'legacy'`` supports existing
                   distributions on PyPI.
    """
    self._cache = {}
    self.scheme = scheme
    # Opener that handles redirects the way locators need.
    self.opener = build_opener(RedirectHandler())
    self.matcher = None
def _get_project(self, name):
    """For a given project, get a dictionary mapping available versions
    to Distribution instances.

    This must be implemented in subclasses.  When called from a
    ``locate()`` request, ``self.matcher`` will be set to a matcher for
    the requirement to satisfy; otherwise it will be ``None``.
    """
    raise NotImplementedError('Please implement in the subclass')
def get_distribution_names(self):
    """Return all the distribution names known to this locator.

    Abstract: subclasses must override.
    """
    raise NotImplementedError('Please implement in the subclass')
def get_project(self, name):
    """For a given project, get a dictionary mapping available versions
    to Distribution instances.

    This delegates all the real work to :meth:`_get_project` and merely
    adds a caching layer on top (bypassed entirely when ``self._cache``
    is ``None``).
    """
    cache = self._cache
    if cache is None:
        # Caching disabled: always ask the subclass.
        return self._get_project(name)
    if name not in cache:
        cache[name] = self._get_project(name)
    return cache[name]
def score_url(self, url):
    """Give an URL a score which can be used to choose preferred URLs
    for a given project release.

    The score is a tuple built from the scheme, the host, whether the
    target is a wheel, wheel compatibility, and the basename.
    """
    parts = urlparse(url)
    filename = posixpath.basename(parts.path)
    wheel = filename.endswith('.whl')
    # Only wheels can be incompatible; anything else is treated as usable.
    usable = is_compatible(Wheel(filename), self.wheel_tags) if wheel else True
    return (parts.scheme != 'https',
            'pypi.python.org' in parts.netloc,
            wheel,
            usable,
            filename)
'Choose one of two URLs where both are candidates for distribution archives for the same version of a distribution (for example, .tar.gz vs. zip). The current implementation favours https:// URLs over http://, archives from PyPI over those from other locations, wheel compatibility (if a wheel) and then the archive name...
def prefer_url(self, url1, url2):
result = url2 if url1: s1 = self.score_url(url1) s2 = self.score_url(url2) if (s1 > s2): result = url1 if (result != url2): logger.debug('Not replacing %r with %r', url1, url2) else: logger.debug('Replacing %r with ...
def split_filename(self, filename, project_name):
    """Attempt to split a filename into project name, version and
    Python version, delegating to the module-level helper of the same
    name.
    """
    parts = split_filename(filename, project_name)
    return parts
'See if a URL is a candidate for a download URL for a project (the URL has typically been scraped from an HTML page). If it is, a dictionary is returned with keys "name", "version", "filename" and "url"; otherwise, None is returned.'
def convert_url_to_download_info(self, url, project_name):
def same_project(name1, name2): (name1, name2) = (name1.lower(), name2.lower()) if (name1 == name2): result = True else: result = (name1.replace('_', '-') == name2.replace('_', '-')) return result result = None (scheme, netloc, path, params, query, fra...
def _get_digest(self, info):
    """Get a digest from a dictionary by looking at keys of the form
    ``'algo_digest'``.

    Returns a 2-tuple ``(algo, digest)`` if found, else ``None``.
    Currently looks only for SHA256, then MD5.
    """
    # Preference order: stronger algorithm first.
    for algo in ('sha256', 'md5'):
        key = '%s_digest' % algo
        if key in info:
            return (algo, info[key])
    return None
'Update a result dictionary (the final result from _get_project) with a dictionary for a specific version, which typically holds information gleaned from a filename or URL for an archive for the distribution.'
def _update_version_data(self, result, info):
name = info.pop('name') version = info.pop('version') if (version in result): dist = result[version] md = dist.metadata else: dist = make_dist(name, version, scheme=self.scheme) md = dist.metadata dist.digest = digest = self._get_digest(info) url = info['url'] ...
'Find the most recent distribution which matches the given requirement. :param requirement: A requirement of the form \'foo (1.0)\' or perhaps \'foo (>= 1.0, < 2.0, != 1.3)\' :param prereleases: If ``True``, allow pre-release versions to be located. Otherwise, pre-release versions are not returned. :return: A :class:`D...
def locate(self, requirement, prereleases=False):
result = None r = parse_requirement(requirement) if (r is None): raise DistlibException(('Not a valid requirement: %r' % requirement)) scheme = get_scheme(self.scheme) self.matcher = matcher = scheme.matcher(r.requirement) logger.debug('matcher: %s (%s)', matcher, type(...
def __init__(self, url, **kwargs):
    """Initialise an instance.

    :param url: The URL to use for XML-RPC.
    :param kwargs: Passed to the superclass constructor.
    """
    super(PyPIRPCLocator, self).__init__(**kwargs)
    # XML-RPC proxy for the index, with a short network timeout.
    self.client = ServerProxy(url, timeout=3.0)
    self.base_url = url
def get_distribution_names(self):
    """Return all the distribution names known to this locator, as a
    set, by querying the XML-RPC client's package list.
    """
    return {name for name in self.client.list_packages()}
def get_distribution_names(self):
    """Return all the distribution names known to this locator.

    This locator cannot enumerate distributions, so the operation is
    unconditionally unsupported.
    """
    raise NotImplementedError('Not available from this locator')
def __init__(self, data, url):
    """Initialise an instance with the Unicode page contents and the
    URL they came from.

    If the page declares a base URL (matched by the class-level
    ``_base`` pattern), that value overrides *url* as ``base_url``.
    """
    self.data = data
    self.url = url
    self.base_url = url
    base_match = self._base.search(data)
    if base_match is not None:
        self.base_url = base_match.group(1)
'Return the URLs of all the links on a page together with information about their "rel" attribute, for determining which ones to treat as downloads and which ones to queue for further scraping.'
@cached_property def links(self):
def clean(url): 'Tidy up an URL.' (scheme, netloc, path, params, query, frag) = urlparse(url) return urlunparse((scheme, netloc, quote(path), params, query, frag)) result = set() for match in self._href.finditer(self.data): d = match.groupdict('') rel = (d['r...
'Initialise an instance. :param url: The root URL to use for scraping. :param timeout: The timeout, in seconds, to be applied to requests. This defaults to ``None`` (no timeout specified). :param num_workers: The number of worker threads you want to do I/O, This defaults to 10. :param kwargs: Passed to the superclass.'...
def __init__(self, url, timeout=None, num_workers=10, **kwargs):
super(SimpleScrapingLocator, self).__init__(**kwargs) self.base_url = ensure_slash(url) self.timeout = timeout self._page_cache = {} self._seen = set() self._to_fetch = queue.Queue() self._bad_hosts = set() self.skip_externals = False self.num_workers = num_workers self._lock = t...