+ (?P<release>[0-9]+(?:\.[0-9]+)*)  # release segment
+ (?P<pre>  # pre-release
+     [-_\.]?
+     (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+     [-_\.]?
+     (?P<pre_n>[0-9]+)?
+ )?
+ (?P<post>  # post release
+     (?:-(?P<post_n1>[0-9]+))
+     |
+     (?:
+         [-_\.]?
+         (?P<post_l>post|rev|r)
+         [-_\.]?
+         (?P<post_n2>[0-9]+)?
+     )
+ )?
+ (?P<dev>  # dev release
+     [-_\.]?
+     (?P<dev_l>dev)
+     [-_\.]?
+     (?P<dev_n>[0-9]+)?
+ )?
+ )
+ (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?  # local version
+"""
+
+VERSION_PATTERN = _VERSION_PATTERN
+"""
+A string containing the regular expression used to match a valid version.
+
+The pattern is not anchored at either end, and is intended for embedding in larger
+expressions (for example, matching a version number as part of a file name). The
+regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
+flags set.
+
+:meta hide-value:
+"""
+
+
+class Version(_BaseVersion):
+ """This class abstracts handling of a project's versions.
+
+ A :class:`Version` instance is comparison aware and can be compared and
+ sorted using the standard Python interfaces.
+
+ >>> v1 = Version("1.0a5")
+ >>> v2 = Version("1.0")
+ >>> v1
+ <Version('1.0a5')>
+ >>> v2
+ <Version('1.0')>
+ >>> v1 < v2
+ True
+ >>> v1 == v2
+ False
+ >>> v1 > v2
+ False
+ >>> v1 >= v2
+ False
+ >>> v1 <= v2
+ True
+ """
+
+ _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
+ _key: CmpKey
+
+ def __init__(self, version: str) -> None:
+ """Initialize a Version object.
+
+ :param version:
+ The string representation of a version which will be parsed and normalized
+ before use.
+ :raises InvalidVersion:
+ If the ``version`` does not conform to PEP 440 in any way, this
+ exception will be raised.
+ """
+
+ # Validate the version and parse it into pieces
+ match = self._regex.search(version)
+ if not match:
+ raise InvalidVersion(f"Invalid version: '{version}'")
+
+ # Store the parsed out pieces of the version
+ self._version = _Version(
+ epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+ release=tuple(int(i) for i in match.group("release").split(".")),
+ pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
+ post=_parse_letter_version(
+ match.group("post_l"), match.group("post_n1") or match.group("post_n2")
+ ),
+ dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
+ local=_parse_local_version(match.group("local")),
+ )
+
+ # Generate a key which will be used for sorting
+ self._key = _cmpkey(
+ self._version.epoch,
+ self._version.release,
+ self._version.pre,
+ self._version.post,
+ self._version.dev,
+ self._version.local,
+ )
+
+ def __repr__(self) -> str:
+ """A representation of the Version that shows all internal state.
+
+ >>> Version('1.0.0')
+ <Version('1.0.0')>
+ """
+ return f""
+
+ def __str__(self) -> str:
+ """A string representation of the version that can be rounded-tripped.
+
+ >>> str(Version("1.0a5"))
+ '1.0a5'
+ """
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ # Pre-release
+ if self.pre is not None:
+ parts.append("".join(str(x) for x in self.pre))
+
+ # Post-release
+ if self.post is not None:
+ parts.append(f".post{self.post}")
+
+ # Development release
+ if self.dev is not None:
+ parts.append(f".dev{self.dev}")
+
+ # Local version segment
+ if self.local is not None:
+ parts.append(f"+{self.local}")
+
+ return "".join(parts)
+
+ @property
+ def epoch(self) -> int:
+ """The epoch of the version.
+
+ >>> Version("2.0.0").epoch
+ 0
+ >>> Version("1!2.0.0").epoch
+ 1
+ """
+ return self._version.epoch
+
+ @property
+ def release(self) -> tuple[int, ...]:
+ """The components of the "release" segment of the version.
+
+ >>> Version("1.2.3").release
+ (1, 2, 3)
+ >>> Version("2.0.0").release
+ (2, 0, 0)
+ >>> Version("1!2.0.0.post0").release
+ (2, 0, 0)
+
+ Includes trailing zeroes but not the epoch or any pre-release / development /
+ post-release suffixes.
+ """
+ return self._version.release
+
+ @property
+ def pre(self) -> tuple[str, int] | None:
+ """The pre-release segment of the version.
+
+ >>> print(Version("1.2.3").pre)
+ None
+ >>> Version("1.2.3a1").pre
+ ('a', 1)
+ >>> Version("1.2.3b1").pre
+ ('b', 1)
+ >>> Version("1.2.3rc1").pre
+ ('rc', 1)
+ """
+ return self._version.pre
+
+ @property
+ def post(self) -> int | None:
+ """The post-release number of the version.
+
+ >>> print(Version("1.2.3").post)
+ None
+ >>> Version("1.2.3.post1").post
+ 1
+ """
+ return self._version.post[1] if self._version.post else None
+
+ @property
+ def dev(self) -> int | None:
+ """The development number of the version.
+
+ >>> print(Version("1.2.3").dev)
+ None
+ >>> Version("1.2.3.dev1").dev
+ 1
+ """
+ return self._version.dev[1] if self._version.dev else None
+
+ @property
+ def local(self) -> str | None:
+ """The local version segment of the version.
+
+ >>> print(Version("1.2.3").local)
+ None
+ >>> Version("1.2.3+abc").local
+ 'abc'
+ """
+ if self._version.local:
+ return ".".join(str(x) for x in self._version.local)
+ else:
+ return None
+
+ @property
+ def public(self) -> str:
+ """The public portion of the version.
+
+ >>> Version("1.2.3").public
+ '1.2.3'
+ >>> Version("1.2.3+abc").public
+ '1.2.3'
+ >>> Version("1.2.3+abc.dev1").public
+ '1.2.3'
+ """
+ return str(self).split("+", 1)[0]
+
+ @property
+ def base_version(self) -> str:
+ """The "base version" of the version.
+
+ >>> Version("1.2.3").base_version
+ '1.2.3'
+ >>> Version("1.2.3+abc").base_version
+ '1.2.3'
+ >>> Version("1!1.2.3+abc.dev1").base_version
+ '1!1.2.3'
+
+ The "base version" is the public version of the project without any pre or post
+ release markers.
+ """
+ parts = []
+
+ # Epoch
+ if self.epoch != 0:
+ parts.append(f"{self.epoch}!")
+
+ # Release segment
+ parts.append(".".join(str(x) for x in self.release))
+
+ return "".join(parts)
+
+ @property
+ def is_prerelease(self) -> bool:
+ """Whether this version is a pre-release.
+
+ >>> Version("1.2.3").is_prerelease
+ False
+ >>> Version("1.2.3a1").is_prerelease
+ True
+ >>> Version("1.2.3b1").is_prerelease
+ True
+ >>> Version("1.2.3rc1").is_prerelease
+ True
+ >>> Version("1.2.3dev1").is_prerelease
+ True
+ """
+ return self.dev is not None or self.pre is not None
+
+ @property
+ def is_postrelease(self) -> bool:
+ """Whether this version is a post-release.
+
+ >>> Version("1.2.3").is_postrelease
+ False
+ >>> Version("1.2.3.post1").is_postrelease
+ True
+ """
+ return self.post is not None
+
+ @property
+ def is_devrelease(self) -> bool:
+ """Whether this version is a development release.
+
+ >>> Version("1.2.3").is_devrelease
+ False
+ >>> Version("1.2.3.dev1").is_devrelease
+ True
+ """
+ return self.dev is not None
+
+ @property
+ def major(self) -> int:
+ """The first item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").major
+ 1
+ """
+ return self.release[0] if len(self.release) >= 1 else 0
+
+ @property
+ def minor(self) -> int:
+ """The second item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").minor
+ 2
+ >>> Version("1").minor
+ 0
+ """
+ return self.release[1] if len(self.release) >= 2 else 0
+
+ @property
+ def micro(self) -> int:
+ """The third item of :attr:`release` or ``0`` if unavailable.
+
+ >>> Version("1.2.3").micro
+ 3
+ >>> Version("1").micro
+ 0
+ """
+ return self.release[2] if len(self.release) >= 3 else 0
+
+
+def _parse_letter_version(
+ letter: str | None, number: str | bytes | SupportsInt | None
+) -> tuple[str, int] | None:
+ if letter:
+ # We consider there to be an implicit 0 in a pre-release if there is
+ # not a numeral associated with it.
+ if number is None:
+ number = 0
+
+ # We normalize any letters to their lower case form
+ letter = letter.lower()
+
+ # We consider some words to be alternate spellings of other words and
+ # in those cases we want to normalize the spellings to our preferred
+ # spelling.
+ if letter == "alpha":
+ letter = "a"
+ elif letter == "beta":
+ letter = "b"
+ elif letter in ["c", "pre", "preview"]:
+ letter = "rc"
+ elif letter in ["rev", "r"]:
+ letter = "post"
+
+ return letter, int(number)
+ if not letter and number:
+ # We assume that if we are given a number but no letter, then this is
+ # using the implicit post-release syntax (e.g. 1.0-1).
+ letter = "post"
+
+ return letter, int(number)
+
+ return None
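+
+# Normalization behavior, as an illustrative sketch (not part of the vendored
+# module):
+#
+#     _parse_letter_version("alpha", None)   # -> ("a", 0): implicit number 0
+#     _parse_letter_version("PREVIEW", "4")  # -> ("rc", 4): spelling normalized
+#     _parse_letter_version(None, "1")       # -> ("post", 1): implicit post (1.0-1)
+#     _parse_letter_version(None, None)      # -> None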
+
+
+_local_version_separators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local: str | None) -> LocalType | None:
+ """
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+ """
+ if local is not None:
+ return tuple(
+ part.lower() if not part.isdigit() else int(part)
+ for part in _local_version_separators.split(local)
+ )
+ return None
+
+
+def _cmpkey(
+ epoch: int,
+ release: tuple[int, ...],
+ pre: tuple[str, int] | None,
+ post: tuple[str, int] | None,
+ dev: tuple[str, int] | None,
+ local: LocalType | None,
+) -> CmpKey:
+ # When we compare a release version, we want to compare it with all of the
+ # trailing zeros removed. So we'll reverse the list, drop all the
+ # now-leading zeros until we come to something non-zero, then re-reverse
+ # the remainder back into the correct order, and use that tuple as our
+ # sorting key.
+ _release = tuple(
+ reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
+ )
+
+ # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+ # We'll do this by abusing the pre segment, but we _only_ want to do this
+ # if there is not a pre or a post segment. If we have one of those then
+ # the normal sorting rules will handle this case correctly.
+ if pre is None and post is None and dev is not None:
+ _pre: CmpPrePostDevType = NegativeInfinity
+ # Versions without a pre-release (except as noted above) should sort after
+ # those with one.
+ elif pre is None:
+ _pre = Infinity
+ else:
+ _pre = pre
+
+ # Versions without a post segment should sort before those with one.
+ if post is None:
+ _post: CmpPrePostDevType = NegativeInfinity
+
+ else:
+ _post = post
+
+ # Versions without a development segment should sort after those with one.
+ if dev is None:
+ _dev: CmpPrePostDevType = Infinity
+
+ else:
+ _dev = dev
+
+ if local is None:
+ # Versions without a local segment should sort before those with one.
+ _local: CmpLocalType = NegativeInfinity
+ else:
+ # Versions with a local segment need that segment parsed to implement
+ # the sorting rules in PEP440.
+ # - Alphanumeric segments sort before numeric segments
+ # - Alphanumeric segments sort lexicographically
+ # - Numeric segments sort numerically
+ # - Shorter versions sort before longer versions when the prefixes
+ # match exactly
+ _local = tuple(
+ (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
+ )
+
+ return epoch, _release, _pre, _post, _dev, _local
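+
+# A small illustrative check (not part of the vendored module): the infinity
+# sentinels above produce the PEP 440 ordering
+# 1.0.dev0 < 1.0a0 < 1.0 < 1.0+local < 1.0.post0:
+#
+#     versions = ["1.0.post0", "1.0.dev0", "1.0+local", "1.0", "1.0a0"]
+#     assert sorted(versions, key=Version) == [
+#         "1.0.dev0", "1.0a0", "1.0", "1.0+local", "1.0.post0"
+#     ]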
diff --git a/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
new file mode 100644
index 0000000..57ce7f1
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pip/_vendor/pkg_resources/__init__.py
@@ -0,0 +1,3676 @@
+# TODO: Add Generic type annotations to initialized collections.
+# For now we'd simply use implicit Any/Unknown which would add redundant annotations
+# mypy: disable-error-code="var-annotated"
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof. The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is. Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files. It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+
+This module is deprecated. Users are directed to :mod:`importlib.resources`,
+:mod:`importlib.metadata` and :pypi:`packaging` instead.
+"""
+
+from __future__ import annotations
+
+import sys
+
+if sys.version_info < (3, 8): # noqa: UP036 # Check for unsupported versions
+ raise RuntimeError("Python 3.8 or later is required")
+
+import os
+import io
+import time
+import re
+import types
+from typing import (
+ Any,
+ Literal,
+ Dict,
+ Iterator,
+ Mapping,
+ MutableSequence,
+ NamedTuple,
+ NoReturn,
+ Tuple,
+ Union,
+ TYPE_CHECKING,
+ Protocol,
+ Callable,
+ Iterable,
+ TypeVar,
+ overload,
+)
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import errno
+import tempfile
+import textwrap
+import inspect
+import ntpath
+import posixpath
+import importlib
+import importlib.abc
+import importlib.machinery
+from pkgutil import get_importer
+
+import _imp
+
+# capture these to bypass sandboxing
+from os import utime
+from os import open as os_open
+from os.path import isdir, split
+
+try:
+ from os import mkdir, rename, unlink
+
+ WRITE_SUPPORT = True
+except ImportError:
+ # no write support, probably under GAE
+ WRITE_SUPPORT = False
+
+from pip._internal.utils._jaraco_text import (
+ yield_lines,
+ drop_comment,
+ join_continuation,
+)
+from pip._vendor.packaging import markers as _packaging_markers
+from pip._vendor.packaging import requirements as _packaging_requirements
+from pip._vendor.packaging import utils as _packaging_utils
+from pip._vendor.packaging import version as _packaging_version
+from pip._vendor.platformdirs import user_cache_dir as _user_cache_dir
+
+if TYPE_CHECKING:
+ from _typeshed import BytesPath, StrPath, StrOrBytesPath
+ from pip._vendor.typing_extensions import Self
+
+
+# Patch: Remove deprecation warning from vendored pkg_resources.
+# Setting PYTHONWARNINGS=error to verify builds produce no warnings
+# causes immediate exceptions.
+# See https://github.com/pypa/pip/issues/12243
+
+
+_T = TypeVar("_T")
+_DistributionT = TypeVar("_DistributionT", bound="Distribution")
+# Type aliases
+_NestedStr = Union[str, Iterable[Union[str, Iterable["_NestedStr"]]]]
+_InstallerTypeT = Callable[["Requirement"], "_DistributionT"]
+_InstallerType = Callable[["Requirement"], Union["Distribution", None]]
+_PkgReqType = Union[str, "Requirement"]
+_EPDistType = Union["Distribution", _PkgReqType]
+_MetadataType = Union["IResourceProvider", None]
+_ResolvedEntryPoint = Any # Can be any attribute in the module
+_ResourceStream = Any # TODO / Incomplete: A readable file-like object
+# Any object works, but let's indicate we expect something like a module (optionally has __loader__ or __file__)
+_ModuleLike = Union[object, types.ModuleType]
+# Any: Should be _ModuleLike but we end up with issues where _ModuleLike doesn't have _ZipLoaderModule's __loader__
+_ProviderFactoryType = Callable[[Any], "IResourceProvider"]
+_DistFinderType = Callable[[_T, str, bool], Iterable["Distribution"]]
+_NSHandlerType = Callable[[_T, str, str, types.ModuleType], Union[str, None]]
+_AdapterT = TypeVar(
+ "_AdapterT", _DistFinderType[Any], _ProviderFactoryType, _NSHandlerType[Any]
+)
+
+
+# Use _typeshed.importlib.LoaderProtocol once available https://github.com/python/typeshed/pull/11890
+class _LoaderProtocol(Protocol):
+ def load_module(self, fullname: str, /) -> types.ModuleType: ...
+
+
+class _ZipLoaderModule(Protocol):
+ __loader__: zipimport.zipimporter
+
+
+_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)
+
+
+class PEP440Warning(RuntimeWarning):
+ """
+ Used when there is an issue with a version or specifier not complying with
+ PEP 440.
+ """
+
+
+parse_version = _packaging_version.Version
+
+
+_state_vars: dict[str, str] = {}
+
+
+def _declare_state(vartype: str, varname: str, initial_value: _T) -> _T:
+ _state_vars[varname] = vartype
+ return initial_value
+
+
+def __getstate__() -> dict[str, Any]:
+ state = {}
+ g = globals()
+ for k, v in _state_vars.items():
+ state[k] = g['_sget_' + v](g[k])
+ return state
+
+
+def __setstate__(state: dict[str, Any]) -> dict[str, Any]:
+ g = globals()
+ for k, v in state.items():
+ g['_sset_' + _state_vars[k]](k, g[k], v)
+ return state
+
+
+def _sget_dict(val):
+ return val.copy()
+
+
+def _sset_dict(key, ob, state):
+ ob.clear()
+ ob.update(state)
+
+
+def _sget_object(val):
+ return val.__getstate__()
+
+
+def _sset_object(key, ob, state):
+ ob.__setstate__(state)
+
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+ """Return this platform's maximum compatible version.
+
+ distutils.util.get_platform() normally reports the minimum version
+ of macOS that would be required to *use* extensions produced by
+ distutils. But what we want when checking compatibility is to know the
+ version of macOS that we are *running*. To allow usage of packages that
+ explicitly require a newer version of macOS, we must also know the
+ current version of the OS.
+
+ If this condition occurs for any other platform with a version in its
+ platform strings, this function should be extended accordingly.
+ """
+ plat = get_build_platform()
+ m = macosVersionString.match(plat)
+ if m is not None and sys.platform == "darwin":
+ try:
+ plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
+ except ValueError:
+ # not macOS
+ pass
+ return plat
+
+
+__all__ = [
+ # Basic resource access and distribution/entry point discovery
+ 'require',
+ 'run_script',
+ 'get_provider',
+ 'get_distribution',
+ 'load_entry_point',
+ 'get_entry_map',
+ 'get_entry_info',
+ 'iter_entry_points',
+ 'resource_string',
+ 'resource_stream',
+ 'resource_filename',
+ 'resource_listdir',
+ 'resource_exists',
+ 'resource_isdir',
+ # Environmental control
+ 'declare_namespace',
+ 'working_set',
+ 'add_activation_listener',
+ 'find_distributions',
+ 'set_extraction_path',
+ 'cleanup_resources',
+ 'get_default_cache',
+ # Primary implementation classes
+ 'Environment',
+ 'WorkingSet',
+ 'ResourceManager',
+ 'Distribution',
+ 'Requirement',
+ 'EntryPoint',
+ # Exceptions
+ 'ResolutionError',
+ 'VersionConflict',
+ 'DistributionNotFound',
+ 'UnknownExtra',
+ 'ExtractionError',
+ # Warnings
+ 'PEP440Warning',
+ # Parsing functions and string utilities
+ 'parse_requirements',
+ 'parse_version',
+ 'safe_name',
+ 'safe_version',
+ 'get_platform',
+ 'compatible_platforms',
+ 'yield_lines',
+ 'split_sections',
+ 'safe_extra',
+ 'to_filename',
+ 'invalid_marker',
+ 'evaluate_marker',
+ # filesystem utilities
+ 'ensure_directory',
+ 'normalize_path',
+ # Distribution "precedence" constants
+ 'EGG_DIST',
+ 'BINARY_DIST',
+ 'SOURCE_DIST',
+ 'CHECKOUT_DIST',
+ 'DEVELOP_DIST',
+ # "Provider" interfaces, implementations, and registration/lookup APIs
+ 'IMetadataProvider',
+ 'IResourceProvider',
+ 'FileMetadata',
+ 'PathMetadata',
+ 'EggMetadata',
+ 'EmptyProvider',
+ 'empty_provider',
+ 'NullProvider',
+ 'EggProvider',
+ 'DefaultProvider',
+ 'ZipProvider',
+ 'register_finder',
+ 'register_namespace_handler',
+ 'register_loader_type',
+ 'fixup_namespace_packages',
+ 'get_importer',
+ # Warnings
+ 'PkgResourcesDeprecationWarning',
+ # Deprecated/backward compatibility only
+ 'run_main',
+ 'AvailableDistributions',
+]
+
+
+class ResolutionError(Exception):
+ """Abstract base for dependency resolution errors"""
+
+ def __repr__(self):
+ return self.__class__.__name__ + repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+ """
+ An already-installed version conflicts with the requested version.
+
+ Should be initialized with the installed Distribution and the requested
+ Requirement.
+ """
+
+ _template = "{self.dist} is installed but {self.req} is required"
+
+ @property
+ def dist(self) -> Distribution:
+ return self.args[0]
+
+ @property
+ def req(self) -> Requirement:
+ return self.args[1]
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def with_context(self, required_by: set[Distribution | str]):
+ """
+ If required_by is non-empty, return a version of self that is a
+ ContextualVersionConflict.
+ """
+ if not required_by:
+ return self
+ args = self.args + (required_by,)
+ return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+ """
+ A VersionConflict that accepts a third parameter, the set of the
+ requirements that required the installed Distribution.
+ """
+
+ _template = VersionConflict._template + ' by {self.required_by}'
+
+ @property
+ def required_by(self) -> set[str]:
+ return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+ """A requested distribution was not found"""
+
+ _template = (
+ "The '{self.req}' distribution was not found "
+ "and is required by {self.requirers_str}"
+ )
+
+ @property
+ def req(self) -> Requirement:
+ return self.args[0]
+
+ @property
+ def requirers(self) -> set[str] | None:
+ return self.args[1]
+
+ @property
+ def requirers_str(self):
+ if not self.requirers:
+ return 'the application'
+ return ', '.join(self.requirers)
+
+ def report(self):
+ return self._template.format(**locals())
+
+ def __str__(self):
+ return self.report()
+
+
+class UnknownExtra(ResolutionError):
+ """Distribution doesn't have an "extra feature" of the given name"""
+
+
+_provider_factories: dict[type[_ModuleLike], _ProviderFactoryType] = {}
+
+PY_MAJOR = '{}.{}'.format(*sys.version_info)
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+
+def register_loader_type(
+ loader_type: type[_ModuleLike], provider_factory: _ProviderFactoryType
+):
+ """Register `provider_factory` to make providers for `loader_type`
+
+ `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+ and `provider_factory` is a function that, passed a *module* object,
+ returns an ``IResourceProvider`` for that module.
+ """
+ _provider_factories[loader_type] = provider_factory
+
+
+@overload
+def get_provider(moduleOrReq: str) -> IResourceProvider: ...
+@overload
+def get_provider(moduleOrReq: Requirement) -> Distribution: ...
+def get_provider(moduleOrReq: str | Requirement) -> IResourceProvider | Distribution:
+ """Return an IResourceProvider for the named module or requirement"""
+ if isinstance(moduleOrReq, Requirement):
+ return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+ try:
+ module = sys.modules[moduleOrReq]
+ except KeyError:
+ __import__(moduleOrReq)
+ module = sys.modules[moduleOrReq]
+ loader = getattr(module, '__loader__', None)
+ return _find_adapter(_provider_factories, loader)(module)
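+
+# Usage sketch (illustrative, not part of the vendored module): the returned
+# provider exposes the IResourceProvider API for the named module.
+#
+#     provider = get_provider("pip._vendor.packaging")
+#     if provider.has_resource("version.py"):
+#         data = provider.get_resource_string(ResourceManager(), "version.py")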
+
+
+@functools.lru_cache(maxsize=None)
+def _macos_vers():
+ version = platform.mac_ver()[0]
+ # fallback for MacPorts
+ if version == '':
+ plist = '/System/Library/CoreServices/SystemVersion.plist'
+ if os.path.exists(plist):
+ with open(plist, 'rb') as fh:
+ plist_content = plistlib.load(fh)
+ if 'ProductVersion' in plist_content:
+ version = plist_content['ProductVersion']
+ return version.split('.')
+
+
+def _macos_arch(machine):
+ return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+
+def get_build_platform():
+ """Return this platform's string for platform-specific distributions
+
+ XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+ needs some hacks for Linux and macOS.
+ """
+ from sysconfig import get_platform
+
+ plat = get_platform()
+ if sys.platform == "darwin" and not plat.startswith('macosx-'):
+ try:
+ version = _macos_vers()
+ machine = os.uname()[4].replace(" ", "_")
+ return "macosx-%d.%d-%s" % (
+ int(version[0]),
+ int(version[1]),
+ _macos_arch(machine),
+ )
+ except ValueError:
+ # if someone is running a non-Mac darwin system, this will fall
+ # through to the default implementation
+ pass
+ return plat
+
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided: str | None, required: str | None):
+ """Can code for the `provided` platform run on the `required` platform?
+
+ Returns true if either platform is ``None``, or the platforms are equal.
+
+ XXX Needs compatibility checks for Linux and other unixy OSes.
+ """
+ if provided is None or required is None or provided == required:
+ # easy case
+ return True
+
+ # macOS special cases
+ reqMac = macosVersionString.match(required)
+ if reqMac:
+ provMac = macosVersionString.match(provided)
+
+ # is this a Mac package?
+ if not provMac:
+ # this is backwards compatibility for packages built before
+ # setuptools 0.6. All packages built after this point will
+ # use the new macOS designation.
+ provDarwin = darwinVersionString.match(provided)
+ if provDarwin:
+ dversion = int(provDarwin.group(1))
+ macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+ if (
+ dversion == 7
+ and macosversion >= "10.3"
+ or dversion == 8
+ and macosversion >= "10.4"
+ ):
+ return True
+ # egg isn't macOS or legacy darwin
+ return False
+
+ # are they the same major version and machine type?
+ if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
+ return False
+
+ # is the required OS major update >= the provided one?
+ if int(provMac.group(2)) > int(reqMac.group(2)):
+ return False
+
+ return True
+
+ # XXX Linux and other platforms' special cases should go here
+ return False
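+
+# Behavior sketch (illustrative, not part of the vendored module):
+#
+#     compatible_platforms(None, "macosx-10.4-ppc")                # True: easy case
+#     compatible_platforms("macosx-10.3-ppc", "macosx-10.4-ppc")   # True: 10.3 <= 10.4
+#     compatible_platforms("macosx-10.4-ppc", "macosx-10.3-ppc")   # False: too new
+#     compatible_platforms("macosx-10.3-ppc", "macosx-10.3-i386")  # False: machine differs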
+
+
+@overload
+def get_distribution(dist: _DistributionT) -> _DistributionT: ...
+@overload
+def get_distribution(dist: _PkgReqType) -> Distribution: ...
+def get_distribution(dist: Distribution | _PkgReqType) -> Distribution:
+ """Return a current distribution object for a Requirement or string"""
+ if isinstance(dist, str):
+ dist = Requirement.parse(dist)
+ if isinstance(dist, Requirement):
+ # Bad type narrowing, dist has to be a Requirement here, so get_provider has to return Distribution
+ dist = get_provider(dist) # type: ignore[assignment]
+ if not isinstance(dist, Distribution):
+ raise TypeError("Expected str, Requirement, or Distribution", dist)
+ return dist
+
+
+def load_entry_point(dist: _EPDistType, group: str, name: str) -> _ResolvedEntryPoint:
+ """Return `name` entry point of `group` for `dist` or raise ImportError"""
+ return get_distribution(dist).load_entry_point(group, name)
+
+
+@overload
+def get_entry_map(
+ dist: _EPDistType, group: None = None
+) -> dict[str, dict[str, EntryPoint]]: ...
+@overload
+def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ...
+def get_entry_map(dist: _EPDistType, group: str | None = None):
+ """Return the entry point map for `group`, or the full entry map"""
+ return get_distribution(dist).get_entry_map(group)
+
+
+def get_entry_info(dist: _EPDistType, group: str, name: str):
+ """Return the EntryPoint object for `group`+`name`, or ``None``"""
+ return get_distribution(dist).get_entry_info(group, name)
+
+
+class IMetadataProvider(Protocol):
+ def has_metadata(self, name: str) -> bool:
+ """Does the package's distribution contain the named metadata?"""
+
+ def get_metadata(self, name: str) -> str:
+ """The named metadata resource as a string"""
+
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
+ """Yield named metadata resource as list of non-blank non-comment lines
+
+ Leading and trailing whitespace is stripped from each line, and lines
+ with ``#`` as the first non-blank character are omitted."""
+
+ def metadata_isdir(self, name: str) -> bool:
+ """Is the named metadata a directory? (like ``os.path.isdir()``)"""
+
+ def metadata_listdir(self, name: str) -> list[str]:
+ """List of metadata names in the directory (like ``os.listdir()``)"""
+
+ def run_script(self, script_name: str, namespace: dict[str, Any]) -> None:
+ """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider, Protocol):
+ """An object that provides access to package resources"""
+
+ def get_resource_filename(
+ self, manager: ResourceManager, resource_name: str
+ ) -> str:
+ """Return a true filesystem path for `resource_name`
+
+ `manager` must be a ``ResourceManager``"""
+
+ def get_resource_stream(
+ self, manager: ResourceManager, resource_name: str
+ ) -> _ResourceStream:
+ """Return a readable file-like object for `resource_name`
+
+ `manager` must be a ``ResourceManager``"""
+
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
+ """Return the contents of `resource_name` as :obj:`bytes`
+
+ `manager` must be a ``ResourceManager``"""
+
+ def has_resource(self, resource_name: str) -> bool:
+ """Does the package contain the named resource?"""
+
+ def resource_isdir(self, resource_name: str) -> bool:
+ """Is the named resource a directory? (like ``os.path.isdir()``)"""
+
+ def resource_listdir(self, resource_name: str) -> list[str]:
+ """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet:
+ """A collection of active distributions on sys.path (or a similar list)"""
+
+ def __init__(self, entries: Iterable[str] | None = None):
+ """Create working set from list of path entries (default=sys.path)"""
+ self.entries: list[str] = []
+ self.entry_keys = {}
+ self.by_key = {}
+ self.normalized_to_canonical_keys = {}
+ self.callbacks = []
+
+ if entries is None:
+ entries = sys.path
+
+ for entry in entries:
+ self.add_entry(entry)
+
+ @classmethod
+ def _build_master(cls):
+ """
+ Prepare the master working set.
+ """
+ ws = cls()
+ try:
+ from __main__ import __requires__
+ except ImportError:
+ # The main program does not list any requirements
+ return ws
+
+ # ensure the requirements are met
+ try:
+ ws.require(__requires__)
+ except VersionConflict:
+ return cls._build_from_requirements(__requires__)
+
+ return ws
+
+ @classmethod
+ def _build_from_requirements(cls, req_spec):
+ """
+ Build a working set from a requirement spec. Rewrites sys.path.
+ """
+ # try it without defaults already on sys.path
+ # by starting with an empty path
+ ws = cls([])
+ reqs = parse_requirements(req_spec)
+ dists = ws.resolve(reqs, Environment())
+ for dist in dists:
+ ws.add(dist)
+
+ # add any missing entries from sys.path
+ for entry in sys.path:
+ if entry not in ws.entries:
+ ws.add_entry(entry)
+
+ # then copy back to sys.path
+ sys.path[:] = ws.entries
+ return ws
+
+ def add_entry(self, entry: str):
+ """Add a path item to ``.entries``, finding any distributions on it
+
+ ``find_distributions(entry, True)`` is used to find distributions
+ corresponding to the path entry, and they are added. `entry` is
+ always appended to ``.entries``, even if it is already present.
+ (This is because ``sys.path`` can contain the same value more than
+ once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+ equal ``sys.path``.)
+ """
+ self.entry_keys.setdefault(entry, [])
+ self.entries.append(entry)
+ for dist in find_distributions(entry, True):
+ self.add(dist, entry, False)
+
+ def __contains__(self, dist: Distribution) -> bool:
+ """True if `dist` is the active distribution for its project"""
+ return self.by_key.get(dist.key) == dist
+
+ def find(self, req: Requirement) -> Distribution | None:
+ """Find a distribution matching requirement `req`
+
+ If there is an active distribution for the requested project, this
+ returns it as long as it meets the version requirement specified by
+ `req`. But, if there is an active distribution for the project and it
+ does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+ If there is no active distribution for the requested project, ``None``
+ is returned.
+ """
+ dist = self.by_key.get(req.key)
+
+ if dist is None:
+ canonical_key = self.normalized_to_canonical_keys.get(req.key)
+
+ if canonical_key is not None:
+ req.key = canonical_key
+ dist = self.by_key.get(canonical_key)
+
+ if dist is not None and dist not in req:
+ # XXX add more info
+ raise VersionConflict(dist, req)
+ return dist
+
+ def iter_entry_points(self, group: str, name: str | None = None):
+ """Yield entry point objects from `group` matching `name`
+
+ If `name` is None, yields all entry points in `group` from all
+ distributions in the working set, otherwise only ones matching
+ both `group` and `name` are yielded (in distribution order).
+ """
+ return (
+ entry
+ for dist in self
+ for entry in dist.get_entry_map(group).values()
+ if name is None or name == entry.name
+ )
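+
+ # Usage sketch (illustrative, not part of the vendored module):
+ #
+ #     for ep in working_set.iter_entry_points("console_scripts", "pip"):
+ #         main = ep.load()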
+
+ def run_script(self, requires: str, script_name: str):
+ """Locate distribution for `requires` and run `script_name` script"""
+ ns = sys._getframe(1).f_globals
+ name = ns['__name__']
+ ns.clear()
+ ns['__name__'] = name
+ self.require(requires)[0].run_script(script_name, ns)
+
+ def __iter__(self) -> Iterator[Distribution]:
+ """Yield distributions for non-duplicate projects in the working set
+
+ The yield order is the order in which the items' path entries were
+ added to the working set.
+ """
+ seen = set()
+ for item in self.entries:
+ if item not in self.entry_keys:
+ # workaround a cache issue
+ continue
+
+ for key in self.entry_keys[item]:
+ if key not in seen:
+ seen.add(key)
+ yield self.by_key[key]
+
+ def add(
+ self,
+ dist: Distribution,
+ entry: str | None = None,
+ insert: bool = True,
+ replace: bool = False,
+ ):
+ """Add `dist` to working set, associated with `entry`
+
+ If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+ On exit from this routine, `entry` is added to the end of the working
+ set's ``.entries`` (if it wasn't already present).
+
+ `dist` is only added to the working set if it's for a project that
+ doesn't already have a distribution in the set, unless `replace=True`.
+ If it's added, any callbacks registered with the ``subscribe()`` method
+ will be called.
+ """
+ if insert:
+ dist.insert_on(self.entries, entry, replace=replace)
+
+ if entry is None:
+ entry = dist.location
+ keys = self.entry_keys.setdefault(entry, [])
+ keys2 = self.entry_keys.setdefault(dist.location, [])
+ if not replace and dist.key in self.by_key:
+ # ignore hidden distros
+ return
+
+ self.by_key[dist.key] = dist
+ normalized_name = _packaging_utils.canonicalize_name(dist.key)
+ self.normalized_to_canonical_keys[normalized_name] = dist.key
+ if dist.key not in keys:
+ keys.append(dist.key)
+ if dist.key not in keys2:
+ keys2.append(dist.key)
+ self._added_new(dist)
+
+ @overload
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[_DistributionT]: ...
+ @overload
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ *,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[_DistributionT]: ...
+ @overload
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[Distribution]: ...
+ def resolve(
+ self,
+ requirements: Iterable[Requirement],
+ env: Environment | None = None,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ replace_conflicting: bool = False,
+ extras: tuple[str, ...] | None = None,
+ ) -> list[Distribution] | list[_DistributionT]:
+ """List all distributions needed to (recursively) meet `requirements`
+
+ `requirements` must be a sequence of ``Requirement`` objects. `env`,
+ if supplied, should be an ``Environment`` instance. If
+ not supplied, it defaults to all distributions available within any
+ entry or distribution in the working set. `installer`, if supplied,
+ will be invoked with each requirement that cannot be met by an
+ already-installed distribution; it should return a ``Distribution`` or
+ ``None``.
+
+ Unless `replace_conflicting=True`, raises a VersionConflict exception
+ if any requirements are found on the path that have the correct name
+ but the wrong version. Otherwise, if an `installer` is supplied it
+ will be invoked to obtain the correct version of the requirement and
+ activate it.
+
+ `extras` is a list of the extras to be used with these requirements.
+ This is important because extra requirements may look like `my_req;
+ extra = "my_extra"`, which would otherwise be interpreted as a purely
+ optional requirement. Instead, we want to be able to assert that these
+ requirements are truly required.
+ """
+
+ # set up the stack
+ requirements = list(requirements)[::-1]
+ # set of processed requirements
+ processed = set()
+ # key -> dist
+ best = {}
+ to_activate = []
+
+ req_extras = _ReqExtras()
+
+ # Mapping of requirement to set of distributions that required it;
+ # useful for reporting info about conflicts.
+ required_by = collections.defaultdict(set)
+
+ while requirements:
+ # process dependencies breadth-first
+ req = requirements.pop(0)
+ if req in processed:
+ # Ignore cyclic or redundant dependencies
+ continue
+
+ if not req_extras.markers_pass(req, extras):
+ continue
+
+ dist = self._resolve_dist(
+ req, best, replace_conflicting, env, installer, required_by, to_activate
+ )
+
+ # push the new requirements onto the stack
+ new_requirements = dist.requires(req.extras)[::-1]
+ requirements.extend(new_requirements)
+
+ # Register the new requirements needed by req
+ for new_requirement in new_requirements:
+ required_by[new_requirement].add(req.project_name)
+ req_extras[new_requirement] = req.extras
+
+ processed.add(req)
+
+ # return list of distros to activate
+ return to_activate
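+
+ # Illustrative sketch (not part of the vendored module); the project name
+ # below is hypothetical:
+ #
+ #     reqs = parse_requirements("examplepkg>=1.0")
+ #     for dist in working_set.resolve(list(reqs)):
+ #         print(dist.project_name, dist.version)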
+
+ def _resolve_dist(
+ self, req, best, replace_conflicting, env, installer, required_by, to_activate
+ ) -> Distribution:
+ dist = best.get(req.key)
+ if dist is None:
+ # Find the best distribution and add it to the map
+ dist = self.by_key.get(req.key)
+ if dist is None or (dist not in req and replace_conflicting):
+ ws = self
+ if env is None:
+ if dist is None:
+ env = Environment(self.entries)
+ else:
+ # Use an empty environment and workingset to avoid
+ # any further conflicts with the conflicting
+ # distribution
+ env = Environment([])
+ ws = WorkingSet([])
+ dist = best[req.key] = env.best_match(
+ req, ws, installer, replace_conflicting=replace_conflicting
+ )
+ if dist is None:
+ requirers = required_by.get(req, None)
+ raise DistributionNotFound(req, requirers)
+ to_activate.append(dist)
+ if dist not in req:
+ # Oops, the "best" so far conflicts with a dependency
+ dependent_req = required_by[req]
+ raise VersionConflict(dist, req).with_context(dependent_req)
+ return dist
+
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None,
+ installer: _InstallerTypeT[_DistributionT],
+ fallback: bool = True,
+ ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ *,
+ installer: _InstallerTypeT[_DistributionT],
+ fallback: bool = True,
+ ) -> tuple[list[_DistributionT], dict[Distribution, Exception]]: ...
+ @overload
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ fallback: bool = True,
+ ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ...
+ def find_plugins(
+ self,
+ plugin_env: Environment,
+ full_env: Environment | None = None,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ fallback: bool = True,
+ ) -> tuple[
+ list[Distribution] | list[_DistributionT],
+ dict[Distribution, Exception],
+ ]:
+ """Find all activatable distributions in `plugin_env`
+
+ Example usage::
+
+ distributions, errors = working_set.find_plugins(
+ Environment(plugin_dirlist)
+ )
+ # add plugins+libs to sys.path
+ map(working_set.add, distributions)
+ # display errors
+ print('Could not load', errors)
+
+ The `plugin_env` should be an ``Environment`` instance that contains
+ only distributions that are in the project's "plugin directory" or
+ directories. The `full_env`, if supplied, should be an ``Environment``
+ contains all currently-available distributions. If `full_env` is not
+ supplied, one is created automatically from the ``WorkingSet`` this
+ method is called on, which will typically mean that every directory on
+ ``sys.path`` will be scanned for distributions.
+
+ `installer` is a standard installer callback as used by the
+ ``resolve()`` method. The `fallback` flag indicates whether we should
+ attempt to resolve older versions of a plugin if the newest version
+ cannot be resolved.
+
+ This method returns a 2-tuple: (`distributions`, `error_info`), where
+ `distributions` is a list of the distributions found in `plugin_env`
+ that were loadable, along with any other distributions that are needed
+ to resolve their dependencies. `error_info` is a dictionary mapping
+ unloadable plugin distributions to an exception instance describing the
+ error that occurred. Usually this will be a ``DistributionNotFound`` or
+ ``VersionConflict`` instance.
+ """
+
+ plugin_projects = list(plugin_env)
+ # scan project names in alphabetic order
+ plugin_projects.sort()
+
+ error_info: dict[Distribution, Exception] = {}
+ distributions: dict[Distribution, Exception | None] = {}
+
+ if full_env is None:
+ env = Environment(self.entries)
+ env += plugin_env
+ else:
+ env = full_env + plugin_env
+
+ shadow_set = self.__class__([])
+ # put all our entries in shadow_set
+ list(map(shadow_set.add, self))
+
+ for project_name in plugin_projects:
+ for dist in plugin_env[project_name]:
+ req = [dist.as_requirement()]
+
+ try:
+ resolvees = shadow_set.resolve(req, env, installer)
+
+ except ResolutionError as v:
+ # save error info
+ error_info[dist] = v
+ if fallback:
+ # try the next older version of project
+ continue
+ else:
+ # give up on this project, keep going
+ break
+
+ else:
+ list(map(shadow_set.add, resolvees))
+ distributions.update(dict.fromkeys(resolvees))
+
+ # success, no need to try any more versions of this project
+ break
+
+ sorted_distributions = list(distributions)
+ sorted_distributions.sort()
+
+ return sorted_distributions, error_info
+
+ def require(self, *requirements: _NestedStr):
+ """Ensure that distributions matching `requirements` are activated
+
+ `requirements` must be a string or a (possibly-nested) sequence
+ thereof, specifying the distributions and versions required. The
+ return value is a sequence of the distributions that needed to be
+ activated to fulfill the requirements; all relevant distributions are
+ included, even if they were already activated in this working set.
+ """
+ needed = self.resolve(parse_requirements(requirements))
+
+ for dist in needed:
+ self.add(dist)
+
+ return needed
+
+ def subscribe(
+ self, callback: Callable[[Distribution], object], existing: bool = True
+ ):
+ """Invoke `callback` for all distributions
+
+ If `existing=True` (the default), also invoke `callback` for all
+ distributions already in the working set.
+ """
+ if callback in self.callbacks:
+ return
+ self.callbacks.append(callback)
+ if not existing:
+ return
+ for dist in self:
+ callback(dist)
+
+ def _added_new(self, dist):
+ for callback in self.callbacks:
+ callback(dist)
+
+ def __getstate__(self):
+ return (
+ self.entries[:],
+ self.entry_keys.copy(),
+ self.by_key.copy(),
+ self.normalized_to_canonical_keys.copy(),
+ self.callbacks[:],
+ )
+
+ def __setstate__(self, e_k_b_n_c):
+ entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
+ self.entries = entries[:]
+ self.entry_keys = keys.copy()
+ self.by_key = by_key.copy()
+ self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
+ self.callbacks = callbacks[:]
+
+
+class _ReqExtras(Dict["Requirement", Tuple[str, ...]]):
+ """
+ Map each requirement to the extras that demanded it.
+ """
+
+ def markers_pass(self, req: Requirement, extras: tuple[str, ...] | None = None):
+ """
+ Evaluate markers for req against each extra that
+ demanded it.
+
+ Return False if the req has a marker and fails
+ evaluation. Otherwise, return True.
+ """
+ extra_evals = (
+ req.marker.evaluate({'extra': extra})
+ for extra in self.get(req, ()) + (extras or (None,))
+ )
+ return not req.marker or any(extra_evals)
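+
+ # Behavior sketch (illustrative, not part of the vendored module); the
+ # requirement below is hypothetical:
+ #
+ #     req = Requirement.parse('examplepkg; extra == "tests"')
+ #     assert _ReqExtras({req: ("tests",)}).markers_pass(req)
+ #     assert not _ReqExtras().markers_pass(req)  # no extra demanded it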
+
+
+class Environment:
+ """Searchable snapshot of distributions on a search path"""
+
+ def __init__(
+ self,
+ search_path: Iterable[str] | None = None,
+ platform: str | None = get_supported_platform(),
+ python: str | None = PY_MAJOR,
+ ):
+ """Snapshot distributions available on a search path
+
+ Any distributions found on `search_path` are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used.
+
+ `platform` is an optional string specifying the name of the platform
+ that platform-specific distributions must be compatible with. If
+ unspecified, it defaults to the current platform. `python` is an
+ optional string naming the desired version of Python (e.g. ``'3.6'``);
+ it defaults to the current version.
+
+ You may explicitly set `platform` (and/or `python`) to ``None`` if you
+ wish to map *all* distributions, not just those compatible with the
+ running platform or Python version.
+ """
+ self._distmap = {}
+ self.platform = platform
+ self.python = python
+ self.scan(search_path)
+
+ def can_add(self, dist: Distribution):
+ """Is distribution `dist` acceptable for this environment?
+
+ The distribution must match the platform and python version
+ requirements specified when this environment was created, or False
+ is returned.
+ """
+ py_compat = (
+ self.python is None
+ or dist.py_version is None
+ or dist.py_version == self.python
+ )
+ return py_compat and compatible_platforms(dist.platform, self.platform)
+
+ def remove(self, dist: Distribution):
+ """Remove `dist` from the environment"""
+ self._distmap[dist.key].remove(dist)
+
+ def scan(self, search_path: Iterable[str] | None = None):
+ """Scan `search_path` for distributions usable in this environment
+
+ Any distributions found are added to the environment.
+ `search_path` should be a sequence of ``sys.path`` items. If not
+ supplied, ``sys.path`` is used. Only distributions conforming to
+ the platform/python version defined at initialization are added.
+ """
+ if search_path is None:
+ search_path = sys.path
+
+ for item in search_path:
+ for dist in find_distributions(item):
+ self.add(dist)
+
+ def __getitem__(self, project_name: str) -> list[Distribution]:
+ """Return a newest-to-oldest list of distributions for `project_name`
+
+ Uses case-insensitive `project_name` comparison, assuming all the
+ project's distributions use their project's name converted to all
+ lowercase as their key.
+
+ """
+ distribution_key = project_name.lower()
+ return self._distmap.get(distribution_key, [])
+
+ def add(self, dist: Distribution):
+ """Add `dist` if we ``can_add()`` it and it has not already been added"""
+ if self.can_add(dist) and dist.has_version():
+ dists = self._distmap.setdefault(dist.key, [])
+ if dist not in dists:
+ dists.append(dist)
+ dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+ @overload
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerTypeT[_DistributionT],
+ replace_conflicting: bool = False,
+ ) -> _DistributionT: ...
+ @overload
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerType | None = None,
+ replace_conflicting: bool = False,
+ ) -> Distribution | None: ...
+ def best_match(
+ self,
+ req: Requirement,
+ working_set: WorkingSet,
+ installer: _InstallerType | None | _InstallerTypeT[_DistributionT] = None,
+ replace_conflicting: bool = False,
+ ) -> Distribution | None:
+ """Find distribution best matching `req` and usable on `working_set`
+
+ This calls the ``find(req)`` method of the `working_set` to see if a
+ suitable distribution is already active. (This may raise
+ ``VersionConflict`` if an unsuitable version of the project is already
+ active in the specified `working_set`.) If a suitable distribution
+ isn't active, this method returns the newest distribution in the
+ environment that meets the ``Requirement`` in `req`. If no suitable
+ distribution is found, and `installer` is supplied, then the result of
+ calling the environment's ``obtain(req, installer)`` method will be
+ returned.
+ """
+ try:
+ dist = working_set.find(req)
+ except VersionConflict:
+ if not replace_conflicting:
+ raise
+ dist = None
+ if dist is not None:
+ return dist
+ for dist in self[req.key]:
+ if dist in req:
+ return dist
+ # try to download/install
+ return self.obtain(req, installer)
+
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: _InstallerTypeT[_DistributionT],
+ ) -> _DistributionT: ...
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: Callable[[Requirement], None] | None = None,
+ ) -> None: ...
+ @overload
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: _InstallerType | None = None,
+ ) -> Distribution | None: ...
+ def obtain(
+ self,
+ requirement: Requirement,
+ installer: Callable[[Requirement], None]
+ | _InstallerType
+ | None
+ | _InstallerTypeT[_DistributionT] = None,
+ ) -> Distribution | None:
+ """Obtain a distribution matching `requirement` (e.g. via download)
+
+ In the base ``Environment`` class, this routine just returns
+ ``installer(requirement)``, unless `installer` is None, in which case
+ None is returned instead. This method is a hook that allows subclasses
+ to attempt other ways of obtaining a distribution before falling back
+ to the `installer` argument."""
+ return installer(requirement) if installer else None
+
+ def __iter__(self) -> Iterator[str]:
+ """Yield the unique project names of the available distributions"""
+ for key in self._distmap.keys():
+ if self[key]:
+ yield key
+
+ def __iadd__(self, other: Distribution | Environment):
+ """In-place addition of a distribution or environment"""
+ if isinstance(other, Distribution):
+ self.add(other)
+ elif isinstance(other, Environment):
+ for project in other:
+ for dist in other[project]:
+ self.add(dist)
+ else:
+ raise TypeError("Can't add %r to environment" % (other,))
+ return self
+
+ def __add__(self, other: Distribution | Environment):
+ """Add an environment or distribution to an environment"""
+ new = self.__class__([], platform=None, python=None)
+ for env in self, other:
+ new += env
+ return new
+
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
+class ExtractionError(RuntimeError):
+ """An error occurred extracting a resource
+
+ The following attributes are available from instances of this exception:
+
+ manager
+ The resource manager that raised this exception
+
+ cache_path
+ The base directory for resource extraction
+
+ original_error
+ The exception instance that caused extraction to fail
+ """
+
+ manager: ResourceManager
+ cache_path: str
+ original_error: BaseException | None
+
+
+class ResourceManager:
+ """Manage resource extraction and packages"""
+
+ extraction_path: str | None = None
+
+ def __init__(self):
+ self.cached_files = {}
+
+ def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
+ """Does the named resource exist?"""
+ return get_provider(package_or_requirement).has_resource(resource_name)
+
+ def resource_isdir(self, package_or_requirement: _PkgReqType, resource_name: str):
+ """Is the named resource an existing directory?"""
+ return get_provider(package_or_requirement).resource_isdir(resource_name)
+
+ def resource_filename(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ):
+ """Return a true filesystem path for specified resource"""
+ return get_provider(package_or_requirement).get_resource_filename(
+ self, resource_name
+ )
+
+ def resource_stream(self, package_or_requirement: _PkgReqType, resource_name: str):
+ """Return a readable file-like object for specified resource"""
+ return get_provider(package_or_requirement).get_resource_stream(
+ self, resource_name
+ )
+
+ def resource_string(
+ self, package_or_requirement: _PkgReqType, resource_name: str
+ ) -> bytes:
+ """Return specified resource as :obj:`bytes`"""
+ return get_provider(package_or_requirement).get_resource_string(
+ self, resource_name
+ )
+
+ def resource_listdir(self, package_or_requirement: _PkgReqType, resource_name: str):
+ """List the contents of the named resource directory"""
+ return get_provider(package_or_requirement).resource_listdir(resource_name)
+
+ def extraction_error(self) -> NoReturn:
+ """Give an error message for problems extracting file(s)"""
+
+ old_exc = sys.exc_info()[1]
+ cache_path = self.extraction_path or get_default_cache()
+
+ tmpl = textwrap.dedent(
+ """
+ Can't extract file(s) to egg cache
+
+ The following error occurred while trying to extract file(s)
+ to the Python egg cache:
+
+ {old_exc}
+
+ The Python egg cache directory is currently set to:
+
+ {cache_path}
+
+ Perhaps your account does not have write access to this directory?
+ You can change the cache directory by setting the PYTHON_EGG_CACHE
+ environment variable to point to an accessible directory.
+ """
+ ).lstrip()
+ err = ExtractionError(tmpl.format(**locals()))
+ err.manager = self
+ err.cache_path = cache_path
+ err.original_error = old_exc
+ raise err
+
+ def get_cache_path(self, archive_name: str, names: Iterable[StrPath] = ()):
+ """Return absolute location in cache for `archive_name` and `names`
+
+ The parent directory of the resulting path will be created if it does
+ not already exist. `archive_name` should be the base filename of the
+ enclosing egg (which may not be the name of the enclosing zipfile!),
+ including its ".egg" extension. `names`, if provided, should be a
+ sequence of path name parts "under" the egg's extraction location.
+
+ This method should only be called by resource providers that need to
+ obtain an extraction location, and only for names they intend to
+ extract, as it tracks the generated names for possible cleanup later.
+ """
+ extract_path = self.extraction_path or get_default_cache()
+ target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
+ try:
+ _bypass_ensure_directory(target_path)
+ except Exception:
+ self.extraction_error()
+
+ self._warn_unsafe_extraction_path(extract_path)
+
+ self.cached_files[target_path] = True
+ return target_path
+
+ @staticmethod
+ def _warn_unsafe_extraction_path(path):
+ """
+ If the default extraction path is overridden and set to an insecure
+ location, such as /tmp, it opens up an opportunity for an attacker to
+ replace an extracted file with an unauthorized payload. Warn the user
+ if a known insecure location is used.
+
+ See Distribute #375 for more details.
+ """
+ if os.name == 'nt' and not path.startswith(os.environ['windir']):
+ # On Windows, permissions are generally restrictive by default
+ # and temp directories are not writable by other users, so
+ # bypass the warning.
+ return
+ mode = os.stat(path).st_mode
+ if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+ msg = (
+ "Extraction path is writable by group/others "
+ "and vulnerable to attack when "
+ "used with get_resource_filename ({path}). "
+ "Consider a more secure "
+ "location (set with .set_extraction_path or the "
+ "PYTHON_EGG_CACHE environment variable)."
+ ).format(**locals())
+ warnings.warn(msg, UserWarning)
+
+ def postprocess(self, tempname: StrOrBytesPath, filename: StrOrBytesPath):
+ """Perform any platform-specific postprocessing of `tempname`
+
+ This is where Mac header rewrites should be done; other platforms don't
+ have anything special they should do.
+
+ Resource providers should call this method ONLY after successfully
+ extracting a compressed resource. They must NOT call it on resources
+ that are already in the filesystem.
+
+ `tempname` is the current (temporary) name of the file, and `filename`
+ is the name it will be renamed to by the caller after this routine
+ returns.
+ """
+
+ if os.name == 'posix':
+ # Make the resource executable
+ mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+ os.chmod(tempname, mode)
+
+ def set_extraction_path(self, path: str):
+ """Set the base path where resources will be extracted to, if needed.
+
+ If you do not call this routine before any extractions take place, the
+ path defaults to the return value of ``get_default_cache()``. (Which
+ is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+ platform-specific fallbacks. See that routine's documentation for more
+ details.)
+
+ Resources are extracted to subdirectories of this path based upon
+ information given by the ``IResourceProvider``. You may set this to a
+ temporary directory, but then you must call ``cleanup_resources()`` to
+ delete the extracted files when done. There is no guarantee that
+ ``cleanup_resources()`` will be able to remove all extracted files.
+
+ (Note: you may not change the extraction path for a given resource
+ manager once resources have been extracted, unless you first call
+ ``cleanup_resources()``.)
+ """
+ if self.cached_files:
+ raise ValueError("Can't change extraction path, files already extracted")
+
+ self.extraction_path = path
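+
+ # Illustrative usage with a temporary directory (standard library only):
+ #
+ # import atexit, tempfile
+ # manager = ResourceManager()
+ # manager.set_extraction_path(tempfile.mkdtemp())
+ # atexit.register(manager.cleanup_resources)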
+
+ def cleanup_resources(self, force: bool = False) -> list[str]:
+ """
+ Delete all extracted resource files and directories, returning a list
+ of the file and directory names that could not be successfully removed.
+ This function does not have any concurrency protection, so it should
+ generally only be called when the extraction path is a temporary
+ directory exclusive to a single process. This method is not
+ automatically called; you must call it explicitly or register it as an
+ ``atexit`` function if you wish to ensure cleanup of a temporary
+ directory used for extractions.
+ """
+ # XXX
+ return []
+
+
+def get_default_cache() -> str:
+ """
+ Return the ``PYTHON_EGG_CACHE`` environment variable
+ or a platform-relevant user cache dir for an app
+ named "Python-Eggs".
+ """
+ return os.environ.get('PYTHON_EGG_CACHE') or _user_cache_dir(appname='Python-Eggs')
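+
+ # For example (illustrative): with PYTHON_EGG_CACHE=/opt/eggs this returns
+ # '/opt/eggs'; when the variable is unset it falls back to a per-user
+ # cache directory such as ~/.cache/Python-Eggs on Linux.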
+
+
+def safe_name(name: str):
+ """Convert an arbitrary string to a standard distribution name
+
+ Any runs of characters other than alphanumerics and '.' are replaced
+ with a single '-'.
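+
+ Illustrative examples:
+
+ >>> safe_name('pywin32_ctypes')
+ 'pywin32-ctypes'
+ >>> safe_name('my*package')
+ 'my-package'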
+ """
+ return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version: str):
+ """
+ Convert an arbitrary string to a standard version string
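+
+ Illustrative examples:
+
+ >>> safe_version('2.7.1')
+ '2.7.1'
+ >>> safe_version('1.0 beta 2')
+ '1.0.beta.2'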
+ """
+ try:
+ # normalize the version
+ return str(_packaging_version.Version(version))
+ except _packaging_version.InvalidVersion:
+ version = version.replace(' ', '.')
+ return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def _forgiving_version(version):
+ """Fallback when ``safe_version`` is not safe enough
+ >>> parse_version(_forgiving_version('0.23ubuntu1'))
+ <Version('0.23.dev0+sanitized.ubuntu1')>
+ >>> parse_version(_forgiving_version('0.23-'))
+ <Version('0.23.dev0+sanitized')>
+ >>> parse_version(_forgiving_version('0.-_'))
+ <Version('0.dev0+sanitized')>
+ >>> parse_version(_forgiving_version('42.+?1'))
+ <Version('42.dev0+sanitized.1')>
+ >>> parse_version(_forgiving_version('hello world'))
+ <Version('0.dev0+sanitized.hello.world')>
+ """
+ version = version.replace(' ', '.')
+ match = _PEP440_FALLBACK.search(version)
+ if match:
+ safe = match["safe"]
+ rest = version[len(safe) :]
+ else:
+ safe = "0"
+ rest = version
+ local = f"sanitized.{_safe_segment(rest)}".strip(".")
+ return f"{safe}.dev0+{local}"
+
+
+def _safe_segment(segment):
+ """Convert an arbitrary string into a safe segment"""
+ segment = re.sub('[^A-Za-z0-9.]+', '-', segment)
+ segment = re.sub('-[^A-Za-z0-9]+', '-', segment)
+ return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-")
+
+
+def safe_extra(extra: str):
+ """Convert an arbitrary string to a standard 'extra' name
+
+ Any runs of characters other than alphanumerics, '.', or '-' are
+ replaced with a single '_', and the result is always lowercased.
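+
+ Illustrative example:
+
+ >>> safe_extra('PDF Support')
+ 'pdf_support'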
+ """
+ return re.sub('[^A-Za-z0-9.-]+', '_', extra).lower()
+
+
+def to_filename(name: str):
+ """Convert a project or version name to its filename-escaped form
+
+ Any '-' characters are currently replaced with '_'.
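+
+ >>> to_filename('my-dist')
+ 'my_dist'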
+ """
+ return name.replace('-', '_')
+
+
+def invalid_marker(text: str):
+ """
+ Validate text as a PEP 508 environment marker; return a SyntaxError
+ instance if the marker is invalid, or False if it is valid.
+ """
+ try:
+ evaluate_marker(text)
+ except SyntaxError as e:
+ e.filename = None
+ e.lineno = None
+ return e
+ return False
+
+
+def evaluate_marker(text: str, extra: str | None = None) -> bool:
+ """
+ Evaluate a PEP 508 environment marker.
+ Return a boolean indicating the marker result in this environment.
+ Raise SyntaxError if marker is invalid.
+
+ This implementation delegates to the ``packaging.markers`` module.
+ """
+ try:
+ marker = _packaging_markers.Marker(text)
+ return marker.evaluate()
+ except _packaging_markers.InvalidMarker as e:
+ raise SyntaxError(e) from e
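+
+ # Illustrative examples (results depend on the running interpreter):
+ #
+ # evaluate_marker('python_version >= "3.8"') -> True on Python 3.8+
+ # invalid_marker('python_version >>> "3"') -> a SyntaxError instance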
+
+
+class NullProvider:
+ """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+ egg_name: str | None = None
+ egg_info: str | None = None
+ loader: _LoaderProtocol | None = None
+
+ def __init__(self, module: _ModuleLike):
+ self.loader = getattr(module, '__loader__', None)
+ self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
+ return self._fn(self.module_path, resource_name)
+
+ def get_resource_stream(self, manager: ResourceManager, resource_name: str):
+ return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+ def get_resource_string(
+ self, manager: ResourceManager, resource_name: str
+ ) -> bytes:
+ return self._get(self._fn(self.module_path, resource_name))
+
+ def has_resource(self, resource_name: str):
+ return self._has(self._fn(self.module_path, resource_name))
+
+ def _get_metadata_path(self, name):
+ return self._fn(self.egg_info, name)
+
+ def has_metadata(self, name: str) -> bool:
+ if not self.egg_info:
+ return False
+
+ path = self._get_metadata_path(name)
+ return self._has(path)
+
+ def get_metadata(self, name: str):
+ if not self.egg_info:
+ return ""
+ path = self._get_metadata_path(name)
+ value = self._get(path)
+ try:
+ return value.decode('utf-8')
+ except UnicodeDecodeError as exc:
+ # Include the path in the error message to simplify
+ # troubleshooting, and without changing the exception type.
+ exc.reason += ' in {} file at path: {}'.format(name, path)
+ raise
+
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
+ return yield_lines(self.get_metadata(name))
+
+ def resource_isdir(self, resource_name: str):
+ return self._isdir(self._fn(self.module_path, resource_name))
+
+ def metadata_isdir(self, name: str) -> bool:
+ return bool(self.egg_info and self._isdir(self._fn(self.egg_info, name)))
+
+ def resource_listdir(self, resource_name: str):
+ return self._listdir(self._fn(self.module_path, resource_name))
+
+ def metadata_listdir(self, name: str) -> list[str]:
+ if self.egg_info:
+ return self._listdir(self._fn(self.egg_info, name))
+ return []
+
+ def run_script(self, script_name: str, namespace: dict[str, Any]):
+ script = 'scripts/' + script_name
+ if not self.has_metadata(script):
+ raise ResolutionError(
+ "Script {script!r} not found in metadata at {self.egg_info!r}".format(
+ **locals()
+ ),
+ )
+
+ script_text = self.get_metadata(script).replace('\r\n', '\n')
+ script_text = script_text.replace('\r', '\n')
+ script_filename = self._fn(self.egg_info, script)
+ namespace['__file__'] = script_filename
+ if os.path.exists(script_filename):
+ source = _read_utf8_with_fallback(script_filename)
+ code = compile(source, script_filename, 'exec')
+ exec(code, namespace, namespace)
+ else:
+ from linecache import cache
+
+ cache[script_filename] = (
+ len(script_text),
+ 0,
+ script_text.split('\n'),
+ script_filename,
+ )
+ script_code = compile(script_text, script_filename, 'exec')
+ exec(script_code, namespace, namespace)
+
+ def _has(self, path) -> bool:
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _isdir(self, path) -> bool:
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _listdir(self, path) -> list[str]:
+ raise NotImplementedError(
+ "Can't perform this operation for unregistered loader type"
+ )
+
+ def _fn(self, base: str | None, resource_name: str):
+ if base is None:
+ raise TypeError(
+ "`base` parameter in `_fn` is `None`. Either override this method or check the parameter first."
+ )
+ self._validate_resource_path(resource_name)
+ if resource_name:
+ return os.path.join(base, *resource_name.split('/'))
+ return base
+
+ @staticmethod
+ def _validate_resource_path(path):
+ """
+ Validate the resource paths according to the docs.
+ https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access
+
+ >>> warned = getfixture('recwarn')
+ >>> warnings.simplefilter('always')
+ >>> vrp = NullProvider._validate_resource_path
+ >>> vrp('foo/bar.txt')
+ >>> bool(warned)
+ False
+ >>> vrp('../foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('/foo/bar.txt')
+ >>> bool(warned)
+ True
+ >>> vrp('foo/../../bar.txt')
+ >>> bool(warned)
+ True
+ >>> warned.clear()
+ >>> vrp('foo/f../bar.txt')
+ >>> bool(warned)
+ False
+
+ Windows path separators are straight-up disallowed.
+ >>> vrp(r'\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ >>> vrp(r'C:\\foo/bar.txt')
+ Traceback (most recent call last):
+ ...
+ ValueError: Use of .. or absolute path in a resource path \
+is not allowed.
+
+ Blank values are allowed
+
+ >>> vrp('')
+ >>> bool(warned)
+ False
+
+ Non-string values are not.
+
+ >>> vrp(None)
+ Traceback (most recent call last):
+ ...
+ AttributeError: ...
+ """
+ invalid = (
+ os.path.pardir in path.split(posixpath.sep)
+ or posixpath.isabs(path)
+ or ntpath.isabs(path)
+ or path.startswith("\\")
+ )
+ if not invalid:
+ return
+
+ msg = "Use of .. or absolute path in a resource path is not allowed."
+
+ # Aggressively disallow Windows absolute paths
+ if (path.startswith("\\") or ntpath.isabs(path)) and not posixpath.isabs(path):
+ raise ValueError(msg)
+
+ # for compatibility, warn; in future
+ # raise ValueError(msg)
+ issue_warning(
+ msg[:-1] + " and will raise exceptions in a future release.",
+ DeprecationWarning,
+ )
+
+ def _get(self, path) -> bytes:
+ if hasattr(self.loader, 'get_data') and self.loader:
+ # Already checked get_data exists
+ return self.loader.get_data(path) # type: ignore[attr-defined]
+ raise NotImplementedError(
+ "Can't perform this operation for loaders without 'get_data()'"
+ )
+
+
+register_loader_type(object, NullProvider)
+
+
+def _parents(path):
+ """
+ Yield ``path`` followed by each of its successive parent directories.
+ """
+ last = None
+ while path != last:
+ yield path
+ last = path
+ path, _ = os.path.split(path)
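+
+ # For example (illustrative, POSIX paths):
+ # list(_parents('/a/b/c')) -> ['/a/b/c', '/a/b', '/a', '/']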
+
+
+class EggProvider(NullProvider):
+ """Provider based on a virtual filesystem"""
+
+ def __init__(self, module: _ModuleLike):
+ super().__init__(module)
+ self._setup_prefix()
+
+ def _setup_prefix(self):
+ # Assume that metadata may be nested inside a "basket"
+ # of multiple eggs and use module_path instead of .archive.
+ eggs = filter(_is_egg_path, _parents(self.module_path))
+ egg = next(eggs, None)
+ if egg:
+ self._set_egg(egg)
+
+ def _set_egg(self, path: str):
+ self.egg_name = os.path.basename(path)
+ self.egg_info = os.path.join(path, 'EGG-INFO')
+ self.egg_root = path
+
+
+class DefaultProvider(EggProvider):
+ """Provides access to package resources in the filesystem"""
+
+ def _has(self, path) -> bool:
+ return os.path.exists(path)
+
+ def _isdir(self, path) -> bool:
+ return os.path.isdir(path)
+
+ def _listdir(self, path):
+ return os.listdir(path)
+
+ def get_resource_stream(self, manager: object, resource_name: str):
+ return open(self._fn(self.module_path, resource_name), 'rb')
+
+ def _get(self, path) -> bytes:
+ with open(path, 'rb') as stream:
+ return stream.read()
+
+ @classmethod
+ def _register(cls):
+ loader_names = (
+ 'SourceFileLoader',
+ 'SourcelessFileLoader',
+ )
+ for name in loader_names:
+ loader_cls = getattr(importlib.machinery, name, type(None))
+ register_loader_type(loader_cls, cls)
+
+
+DefaultProvider._register()
+
+
+class EmptyProvider(NullProvider):
+ """Provider that returns nothing for all requests"""
+
+ # A special case, we don't want all Providers inheriting from NullProvider to have a potentially None module_path
+ module_path: str | None = None # type: ignore[assignment]
+
+ _isdir = _has = lambda self, path: False
+
+ def _get(self, path) -> bytes:
+ return b''
+
+ def _listdir(self, path):
+ return []
+
+ def __init__(self):
+ pass
+
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(Dict[str, "MemoizedZipManifests.manifest_mod"]):
+ """
+ zip manifest builder
+ """
+
+ # `path` could be `StrPath | IO[bytes]` but that violates the LSP for `MemoizedZipManifests.load`
+ @classmethod
+ def build(cls, path: str):
+ """
+ Build a dictionary similar to the zipimport directory
+ caches, except instead of tuples, store ZipInfo objects.
+
+ Use a platform-specific path separator (os.sep) for the path keys
+ for compatibility with pypy on Windows.
+ """
+ with zipfile.ZipFile(path) as zfile:
+ items = (
+ (
+ name.replace('/', os.sep),
+ zfile.getinfo(name),
+ )
+ for name in zfile.namelist()
+ )
+ return dict(items)
+
+ load = build
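+
+ # Illustrative: ZipManifests.build('/path/to/some.egg') returns a dict
+ # mapping each member name (with '/' replaced by os.sep) to its
+ # zipfile.ZipInfo entry (path here is hypothetical).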
+
+
+class MemoizedZipManifests(ZipManifests):
+ """
+ Memoized zipfile manifests.
+ """
+
+ class manifest_mod(NamedTuple):
+ manifest: dict[str, zipfile.ZipInfo]
+ mtime: float
+
+ def load(self, path: str) -> dict[str, zipfile.ZipInfo]: # type: ignore[override] # ZipManifests.load is a classmethod
+ """
+ Load a manifest at path or return a suitable manifest already loaded.
+ """
+ path = os.path.normpath(path)
+ mtime = os.stat(path).st_mtime
+
+ if path not in self or self[path].mtime != mtime:
+ manifest = self.build(path)
+ self[path] = self.manifest_mod(manifest, mtime)
+
+ return self[path].manifest
+
+
+class ZipProvider(EggProvider):
+ """Resource support for zips and eggs"""
+
+ eagers: list[str] | None = None
+ _zip_manifests = MemoizedZipManifests()
+ # ZipProvider's loader should always be a zipimporter or equivalent
+ loader: zipimport.zipimporter
+
+ def __init__(self, module: _ZipLoaderModule):
+ super().__init__(module)
+ self.zip_pre = self.loader.archive + os.sep
+
+ def _zipinfo_name(self, fspath):
+ # Convert a virtual filename (full path to file) into a zipfile subpath
+ # usable with the zipimport directory cache for our target archive
+ fspath = fspath.rstrip(os.sep)
+ if fspath == self.loader.archive:
+ return ''
+ if fspath.startswith(self.zip_pre):
+ return fspath[len(self.zip_pre) :]
+ raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))
+
+ def _parts(self, zip_path):
+ # Convert a zipfile subpath into an egg-relative path part list.
+ # pseudo-fs path
+ fspath = self.zip_pre + zip_path
+ if fspath.startswith(self.egg_root + os.sep):
+ return fspath[len(self.egg_root) + 1 :].split(os.sep)
+ raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))
+
+ @property
+ def zipinfo(self):
+ return self._zip_manifests.load(self.loader.archive)
+
+ def get_resource_filename(self, manager: ResourceManager, resource_name: str):
+ if not self.egg_name:
+ raise NotImplementedError(
+ "resource_filename() only supported for .egg, not .zip"
+ )
+ # no need to lock for extraction, since we use temp names
+ zip_path = self._resource_to_zip(resource_name)
+ eagers = self._get_eager_resources()
+ if '/'.join(self._parts(zip_path)) in eagers:
+ for name in eagers:
+ self._extract_resource(manager, self._eager_to_zip(name))
+ return self._extract_resource(manager, zip_path)
+
+ @staticmethod
+ def _get_date_and_size(zip_stat):
+ size = zip_stat.file_size
+ # ZipInfo.date_time is (year, month, day, hour, minute, second); extend
+ # with (weekday, yearday, dst) placeholders as required by time.mktime()
+ date_time = zip_stat.date_time + (0, 0, -1)
+ # the zip format's 1980 epoch offset has already been applied
+ timestamp = time.mktime(date_time)
+ return timestamp, size
+
+ # FIXME: 'ZipProvider._extract_resource' is too complex (12)
+ def _extract_resource(self, manager: ResourceManager, zip_path) -> str: # noqa: C901
+ if zip_path in self._index():
+ for name in self._index()[zip_path]:
+ last = self._extract_resource(manager, os.path.join(zip_path, name))
+ # return the extracted directory name
+ return os.path.dirname(last)
+
+ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+ if not WRITE_SUPPORT:
+ raise OSError(
+ '"os.rename" and "os.unlink" are not supported on this platform'
+ )
+ try:
+ if not self.egg_name:
+ raise OSError(
+ '"egg_name" is empty. This likely means no egg could be found from the "module_path".'
+ )
+ real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))
+
+ if self._is_current(real_path, zip_path):
+ return real_path
+
+ outf, tmpnam = _mkstemp(
+ ".$extract",
+ dir=os.path.dirname(real_path),
+ )
+ os.write(outf, self.loader.get_data(zip_path))
+ os.close(outf)
+ utime(tmpnam, (timestamp, timestamp))
+ manager.postprocess(tmpnam, real_path)
+
+ try:
+ rename(tmpnam, real_path)
+
+ except OSError:
+ if os.path.isfile(real_path):
+ if self._is_current(real_path, zip_path):
+ # the file became current since it was checked above,
+ # so proceed.
+ return real_path
+ # Windows, del old file and retry
+ elif os.name == 'nt':
+ unlink(real_path)
+ rename(tmpnam, real_path)
+ return real_path
+ raise
+
+ except OSError:
+ # report a user-friendly error
+ manager.extraction_error()
+
+ return real_path
+
+ def _is_current(self, file_path, zip_path):
+ """
+ Return True if the file_path is current for this zip_path
+ """
+ timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+ if not os.path.isfile(file_path):
+ return False
+ stat = os.stat(file_path)
+ if stat.st_size != size or stat.st_mtime != timestamp:
+ return False
+ # check that the contents match
+ zip_contents = self.loader.get_data(zip_path)
+ with open(file_path, 'rb') as f:
+ file_contents = f.read()
+ return zip_contents == file_contents
+
+ def _get_eager_resources(self):
+ if self.eagers is None:
+ eagers = []
+ for name in ('native_libs.txt', 'eager_resources.txt'):
+ if self.has_metadata(name):
+ eagers.extend(self.get_metadata_lines(name))
+ self.eagers = eagers
+ return self.eagers
+
+ def _index(self):
+ try:
+ return self._dirindex
+ except AttributeError:
+ ind = {}
+ for path in self.zipinfo:
+ parts = path.split(os.sep)
+ while parts:
+ parent = os.sep.join(parts[:-1])
+ if parent in ind:
+ ind[parent].append(parts[-1])
+ break
+ else:
+ ind[parent] = [parts.pop()]
+ self._dirindex = ind
+ return ind
+
+ def _has(self, fspath) -> bool:
+ zip_path = self._zipinfo_name(fspath)
+ return zip_path in self.zipinfo or zip_path in self._index()
+
+ def _isdir(self, fspath) -> bool:
+ return self._zipinfo_name(fspath) in self._index()
+
+ def _listdir(self, fspath):
+ return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+ def _eager_to_zip(self, resource_name: str):
+ return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+ def _resource_to_zip(self, resource_name: str):
+ return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+class FileMetadata(EmptyProvider):
+ """Metadata handler for standalone PKG-INFO files
+
+ Usage::
+
+ metadata = FileMetadata("/path/to/PKG-INFO")
+
+ This provider rejects all data and metadata requests except for
+ PKG-INFO, which is treated as existing and is read from the file at
+ the provided location.
+ """
+
+ def __init__(self, path: StrPath):
+ self.path = path
+
+ def _get_metadata_path(self, name):
+ return self.path
+
+ def has_metadata(self, name: str) -> bool:
+ return name == 'PKG-INFO' and os.path.isfile(self.path)
+
+ def get_metadata(self, name: str):
+ if name != 'PKG-INFO':
+ raise KeyError("No metadata except PKG-INFO is available")
+
+ with open(self.path, encoding='utf-8', errors="replace") as f:
+ metadata = f.read()
+ self._warn_on_replacement(metadata)
+ return metadata
+
+ def _warn_on_replacement(self, metadata):
+ replacement_char = '�'
+ if replacement_char in metadata:
+ tmpl = "{self.path} could not be properly decoded in UTF-8"
+ msg = tmpl.format(**locals())
+ warnings.warn(msg)
+
+ def get_metadata_lines(self, name: str) -> Iterator[str]:
+ return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+ """Metadata provider for egg directories
+
+ Usage::
+
+ # Development eggs:
+
+ egg_info = "/path/to/PackageName.egg-info"
+ base_dir = os.path.dirname(egg_info)
+ metadata = PathMetadata(base_dir, egg_info)
+ dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+ dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+ # Unpacked egg directories:
+
+ egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+ metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
+ dist = Distribution.from_filename(egg_path, metadata=metadata)
+ """
+
+ def __init__(self, path: str, egg_info: str):
+ self.module_path = path
+ self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+ """Metadata provider for .egg files"""
+
+ def __init__(self, importer: zipimport.zipimporter):
+ """Create a metadata provider from a zipimporter"""
+
+ self.zip_pre = importer.archive + os.sep
+ self.loader = importer
+ if importer.prefix:
+ self.module_path = os.path.join(importer.archive, importer.prefix)
+ else:
+ self.module_path = importer.archive
+ self._setup_prefix()
+
+
+_distribution_finders: dict[type, _DistFinderType[Any]] = _declare_state(
+ 'dict', '_distribution_finders', {}
+)
+
+
+def register_finder(importer_type: type[_T], distribution_finder: _DistFinderType[_T]):
+ """Register `distribution_finder` to find distributions in sys.path items
+
+ `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+ handler), and `distribution_finder` is a callable that, passed a path
+ item and the importer instance, yields ``Distribution`` instances found on
+ that path item. See ``pkg_resources.find_on_path`` for an example."""
+ _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item: str, only: bool = False):
+ """Yield distributions accessible via `path_item`"""
+ importer = get_importer(path_item)
+ finder = _find_adapter(_distribution_finders, importer)
+ return finder(importer, path_item, only)
+
+
+def find_eggs_in_zip(
+ importer: zipimport.zipimporter, path_item: str, only: bool = False
+) -> Iterator[Distribution]:
+ """
+ Find eggs in zip files; possibly multiple nested eggs.
+ """
+ if importer.archive.endswith('.whl'):
+ # wheels are not supported with this finder
+ # they don't have PKG-INFO metadata, and won't ever contain eggs
+ return
+ metadata = EggMetadata(importer)
+ if metadata.has_metadata('PKG-INFO'):
+ yield Distribution.from_filename(path_item, metadata=metadata)
+ if only:
+ # don't yield nested distros
+ return
+ for subitem in metadata.resource_listdir(''):
+ if _is_egg_path(subitem):
+ subpath = os.path.join(path_item, subitem)
+ dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
+ yield from dists
+ elif subitem.lower().endswith(('.dist-info', '.egg-info')):
+ subpath = os.path.join(path_item, subitem)
+ submeta = EggMetadata(zipimport.zipimporter(subpath))
+ submeta.egg_info = subpath
+ yield Distribution.from_location(path_item, subitem, submeta)
+
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+
+def find_nothing(
+ importer: object | None, path_item: str | None, only: bool | None = False
+):
+ return ()
+
+
+register_finder(object, find_nothing)
+
+
+def find_on_path(importer: object | None, path_item, only=False):
+ """Yield distributions accessible on a sys.path directory"""
+ path_item = _normalize_cached(path_item)
+
+ if _is_unpacked_egg(path_item):
+ yield Distribution.from_filename(
+ path_item,
+ metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
+ )
+ return
+
+ entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))
+
+ # scan for .egg and .egg-info in directory
+ for entry in sorted(entries):
+ fullpath = os.path.join(path_item, entry)
+ factory = dist_factory(path_item, entry, only)
+ yield from factory(fullpath)
+
+
+def dist_factory(path_item, entry, only):
+ """Return a dist_factory for the given entry."""
+ lower = entry.lower()
+ is_egg_info = lower.endswith('.egg-info')
+ is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
+ os.path.join(path_item, entry)
+ )
+ is_meta = is_egg_info or is_dist_info
+ return (
+ distributions_from_metadata
+ if is_meta
+ else find_distributions
+ if not only and _is_egg_path(entry)
+ else resolve_egg_link
+ if not only and lower.endswith('.egg-link')
+ else NoDists()
+ )
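+
+ # Illustrative dispatch for directory entries (hypothetical names):
+ #
+ # 'Foo.egg-info' -> distributions_from_metadata
+ # 'Foo-1.0.egg' -> find_distributions (skipped when `only`)
+ # 'Foo.egg-link' -> resolve_egg_link (skipped when `only`)
+ # anything else -> NoDists()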
+
+
+class NoDists:
+ """
+ >>> bool(NoDists())
+ False
+
+ >>> list(NoDists()('anything'))
+ []
+ """
+
+ def __bool__(self):
+ return False
+
+ def __call__(self, fullpath):
+ return iter(())
+
+
+def safe_listdir(path: StrOrBytesPath):
+ """
+ Attempt to list contents of path, but suppress some exceptions.
+ """
+ try:
+ return os.listdir(path)
+ except (PermissionError, NotADirectoryError):
+ pass
+ except OSError as e:
+ # Ignore the directory if it does not exist, is not a directory, or
+ # if permission is denied
+ if e.errno not in (errno.ENOTDIR, errno.EACCES, errno.ENOENT):
+ raise
+ return ()
+
+
+def distributions_from_metadata(path: str):
+ root = os.path.dirname(path)
+ if os.path.isdir(path):
+ if len(os.listdir(path)) == 0:
+ # empty metadata dir; skip
+ return
+ metadata: _MetadataType = PathMetadata(root, path)
+ else:
+ metadata = FileMetadata(path)
+ entry = os.path.basename(path)
+ yield Distribution.from_location(
+ root,
+ entry,
+ metadata,
+ precedence=DEVELOP_DIST,
+ )
+
+
+def non_empty_lines(path):
+ """
+ Yield non-empty lines from file at path
+ """
+ for line in _read_utf8_with_fallback(path).splitlines():
+ line = line.strip()
+ if line:
+ yield line
+
+
+def resolve_egg_link(path):
+ """
+ Given a path to an .egg-link, resolve distributions
+ present in the referenced path.
+ """
+ referenced_paths = non_empty_lines(path)
+ resolved_paths = (
+ os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
+ )
+ dist_groups = map(find_distributions, resolved_paths)
+ return next(dist_groups, ())
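+
+ # Illustrative: an .egg-link file whose single non-empty line reads
+ # ../src/foo-pkg (a hypothetical path) yields the distributions found
+ # at that referenced location.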
+
+
+if hasattr(pkgutil, 'ImpImporter'):
+ register_finder(pkgutil.ImpImporter, find_on_path)
+
+register_finder(importlib.machinery.FileFinder, find_on_path)
+
+_namespace_handlers: dict[type, _NSHandlerType[Any]] = _declare_state(
+ 'dict', '_namespace_handlers', {}
+)
+_namespace_packages: dict[str | None, list[str]] = _declare_state(
+ 'dict', '_namespace_packages', {}
+)
+
+
+def register_namespace_handler(
+ importer_type: type[_T], namespace_handler: _NSHandlerType[_T]
+):
+ """Register `namespace_handler` to declare namespace packages
+
+ `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+ handler), and `namespace_handler` is a callable like this::
+
+ def namespace_handler(importer, path_entry, moduleName, module):
+ # return a path_entry to use for child packages
+
+ Namespace handlers are only called if the importer object has already
+ agreed that it can handle the relevant path item, and they should only
+ return a subpath if the module __path__ does not already contain an
+ equivalent subpath. For an example namespace handler, see
+ ``pkg_resources.file_ns_handler``.
+ """
+ _namespace_handlers[importer_type] = namespace_handler
+
+
+def _handle_ns(packageName, path_item):
+ """Ensure that named package includes a subpath of path_item (if needed)"""
+
+ importer = get_importer(path_item)
+ if importer is None:
+ return None
+
+ # use find_spec (PEP 451) and fall back to find_module (PEP 302)
+ try:
+ spec = importer.find_spec(packageName)
+ except AttributeError:
+ # capture warnings due to #1111
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ loader = importer.find_module(packageName)
+ else:
+ loader = spec.loader if spec else None
+
+ if loader is None:
+ return None
+ module = sys.modules.get(packageName)
+ if module is None:
+ module = sys.modules[packageName] = types.ModuleType(packageName)
+ module.__path__ = []
+ _set_parent_ns(packageName)
+ elif not hasattr(module, '__path__'):
+ raise TypeError("Not a package:", packageName)
+ handler = _find_adapter(_namespace_handlers, importer)
+ subpath = handler(importer, path_item, packageName, module)
+ if subpath is not None:
+ path = module.__path__
+ path.append(subpath)
+ importlib.import_module(packageName)
+ _rebuild_mod_path(path, packageName, module)
+ return subpath
+
+
+def _rebuild_mod_path(orig_path, package_name, module: types.ModuleType):
+ """
+ Rebuild module.__path__, ordering the entries to match their
+ positions in sys.path
+ """
+ sys_path = [_normalize_cached(p) for p in sys.path]
+
+ def safe_sys_path_index(entry):
+ """
+ Workaround for #520 and #513.
+ """
+ try:
+ return sys_path.index(entry)
+ except ValueError:
+ return float('inf')
+
+ def position_in_sys_path(path):
+ """
+ Return the ordinal of the path based on its position in sys.path
+ """
+ path_parts = path.split(os.sep)
+ module_parts = package_name.count('.') + 1
+ parts = path_parts[:-module_parts]
+ return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))
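+
+ # e.g. (illustrative, POSIX): for package 'a.b' and entry '/site/a/b',
+ # module_parts == 2, so parts == ['', 'site'] and the ordinal is the
+ # position of '/site' in sys.path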
+
+ new_path = sorted(orig_path, key=position_in_sys_path)
+ new_path = [_normalize_cached(p) for p in new_path]
+
+ if isinstance(module.__path__, list):
+ module.__path__[:] = new_path
+ else:
+ module.__path__ = new_path
+
+
+def declare_namespace(packageName: str):
+ """Declare that package 'packageName' is a namespace package"""
+
+ msg = (
+ f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
+ "Implementing implicit namespace packages (as specified in PEP 420) "
+ "is preferred to `pkg_resources.declare_namespace`. "
+ "See https://setuptools.pypa.io/en/latest/references/"
+ "keywords.html#keyword-namespace-packages"
+ )
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+
+ _imp.acquire_lock()
+ try:
+ if packageName in _namespace_packages:
+ return
+
+ path: MutableSequence[str] = sys.path
+ parent, _, _ = packageName.rpartition('.')
+
+ if parent:
+ declare_namespace(parent)
+ if parent not in _namespace_packages:
+ __import__(parent)
+ try:
+ path = sys.modules[parent].__path__
+ except AttributeError as e:
+ raise TypeError("Not a package:", parent) from e
+
+ # Track what packages are namespaces, so when new path items are added,
+ # they can be updated
+ _namespace_packages.setdefault(parent or None, []).append(packageName)
+ _namespace_packages.setdefault(packageName, [])
+
+ for path_item in path:
+ # Ensure all the parent's path items are reflected in the child,
+ # if they apply
+ _handle_ns(packageName, path_item)
+
+ finally:
+ _imp.release_lock()
+
+
+def fixup_namespace_packages(path_item: str, parent: str | None = None):
+ """Ensure that previously-declared namespace packages include path_item"""
+ _imp.acquire_lock()
+ try:
+ for package in _namespace_packages.get(parent, ()):
+ subpath = _handle_ns(package, path_item)
+ if subpath:
+ fixup_namespace_packages(subpath, package)
+ finally:
+ _imp.release_lock()
+
+
+def file_ns_handler(
+ importer: object,
+ path_item: StrPath,
+ packageName: str,
+ module: types.ModuleType,
+):
+ """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+ subpath = os.path.join(path_item, packageName.split('.')[-1])
+ normalized = _normalize_cached(subpath)
+ for item in module.__path__:
+ if _normalize_cached(item) == normalized:
+ break
+ else:
+ # Only return the path if it's not already there
+ return subpath
+
+
+if hasattr(pkgutil, 'ImpImporter'):
+ register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+register_namespace_handler(importlib.machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(
+ importer: object,
+ path_item: str | None,
+ packageName: str | None,
+ module: _ModuleLike | None,
+):
+ return None
+
+
+register_namespace_handler(object, null_ns_handler)
+
+
+@overload
+def normalize_path(filename: StrPath) -> str: ...
+@overload
+def normalize_path(filename: BytesPath) -> bytes: ...
+def normalize_path(filename: StrOrBytesPath):
+ """Normalize a file/dir name for comparison purposes"""
+ return os.path.normcase(os.path.realpath(os.path.normpath(_cygwin_patch(filename))))
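+
+ # For example (illustrative): normalize_path('/usr/local/../bin') and
+ # normalize_path('/usr/bin') produce the same string, so the two
+ # spellings compare equal.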
+
+
+def _cygwin_patch(filename: StrOrBytesPath): # pragma: nocover
+ """
+ Contrary to POSIX 2008, on Cygwin, getcwd(3) contains
+ symlink components. Using os.path.abspath() works around this
+ limitation. A fix in os.getcwd() would probably be better, in Cygwin
+ even more so, except that this seems to be by design...
+ """
+ return os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+
+
+if TYPE_CHECKING:
+ # https://github.com/python/mypy/issues/16261
+ # https://github.com/python/typeshed/issues/6347
+ @overload
+ def _normalize_cached(filename: StrPath) -> str: ...
+ @overload
+ def _normalize_cached(filename: BytesPath) -> bytes: ...
+ def _normalize_cached(filename: StrOrBytesPath) -> str | bytes: ...
+else:
+
+ @functools.lru_cache(maxsize=None)
+ def _normalize_cached(filename):
+ return normalize_path(filename)
+
+
+def _is_egg_path(path):
+ """
+ Determine whether the given path appears to be an egg.
+ """
+ return _is_zip_egg(path) or _is_unpacked_egg(path)
+
+
+def _is_zip_egg(path):
+ return (
+ path.lower().endswith('.egg')
+ and os.path.isfile(path)
+ and zipfile.is_zipfile(path)
+ )
+
+
+def _is_unpacked_egg(path):
+ """
+ Determine whether the given path appears to be an unpacked egg.
+ """
+ return path.lower().endswith('.egg') and os.path.isfile(
+ os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ )
+
+
+def _set_parent_ns(packageName):
+ parts = packageName.split('.')
+ name = parts.pop()
+ if parts:
+ parent = '.'.join(parts)
+ setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+ r"""
+ (?P<name>[^-]+) (
+ -(?P<ver>[^-]+) (
+ -py(?P<pyver>[^-]+) (
+ -(?P<plat>.+)
+ )?
+ )?
+ )?
+ """,
+ re.VERBOSE | re.IGNORECASE,
+).match
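+
+ # Illustrative parse of a hypothetical egg name:
+ #
+ # m = EGG_NAME('FooBar-1.0-py3.12-linux_x86_64')
+ # (m.group('name'), m.group('ver'), m.group('pyver'), m.group('plat'))
+ # -> ('FooBar', '1.0', '3.12', 'linux_x86_64')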
+
+
+class EntryPoint:
+ """Object representing an advertised importable object"""
+
+ def __init__(
+ self,
+ name: str,
+ module_name: str,
+ attrs: Iterable[str] = (),
+ extras: Iterable[str] = (),
+ dist: Distribution | None = None,
+ ):
+ if not MODULE(module_name):
+ raise ValueError("Invalid module name", module_name)
+ self.name = name
+ self.module_name = module_name
+ self.attrs = tuple(attrs)
+ self.extras = tuple(extras)
+ self.dist = dist
+
+ def __str__(self):
+ s = "%s = %s" % (self.name, self.module_name)
+ if self.attrs:
+ s += ':' + '.'.join(self.attrs)
+ if self.extras:
+ s += ' [%s]' % ','.join(self.extras)
+ return s
+
+ def __repr__(self):
+ return "EntryPoint.parse(%r)" % str(self)
+
+ @overload
+ def load(
+ self,
+ require: Literal[True] = True,
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ ) -> _ResolvedEntryPoint: ...
+ @overload
+ def load(
+ self,
+ require: Literal[False],
+ *args: Any,
+ **kwargs: Any,
+ ) -> _ResolvedEntryPoint: ...
+ def load(
+ self,
+ require: bool = True,
+ *args: Environment | _InstallerType | None,
+ **kwargs: Environment | _InstallerType | None,
+ ) -> _ResolvedEntryPoint:
+ """
+ Require packages for this EntryPoint, then resolve it.
+ """
+ if not require or args or kwargs:
+ warnings.warn(
+ "Parameters to load are deprecated. Call .resolve and "
+ ".require separately.",
+ PkgResourcesDeprecationWarning,
+ stacklevel=2,
+ )
+ if require:
+ # We could pass `env` and `installer` directly,
+ # but keeping `*args` and `**kwargs` for backwards compatibility
+ self.require(*args, **kwargs) # type: ignore
+ return self.resolve()
+
+ def resolve(self) -> _ResolvedEntryPoint:
+ """
+ Resolve the entry point from its module and attrs.
+ """
+ module = __import__(self.module_name, fromlist=['__name__'], level=0)
+ try:
+ return functools.reduce(getattr, self.attrs, module)
+ except AttributeError as exc:
+ raise ImportError(str(exc)) from exc
+
+ def require(
+ self,
+ env: Environment | None = None,
+ installer: _InstallerType | None = None,
+ ):
+ if not self.dist:
+ error_cls = UnknownExtra if self.extras else AttributeError
+ raise error_cls("Can't require() without a distribution", self)
+
+ # Get the requirements for this entry point with all its extras and
+ # then resolve them. We have to pass `extras` along when resolving so
+ # that the working set knows what extras we want. Otherwise, for
+ # dist-info distributions, the working set will assume that the
+ # requirements for that extra are purely optional and skip over them.
+ reqs = self.dist.requires(self.extras)
+ items = working_set.resolve(reqs, env, installer, extras=self.extras)
+ list(map(working_set.add, items))
+
+ pattern = re.compile(
+ r'\s*'
+ r'(?P<name>.+?)\s*'
+ r'=\s*'
+ r'(?P<module>[\w.]+)\s*'
+ r'(:\s*(?P<attr>[\w.]+))?\s*'
+ r'(?P