path: root/venv/lib/python3.11/site-packages/pip/_internal/resolution
author    cyfraeviolae <cyfraeviolae>  2024-04-03 03:17:55 -0400
committer cyfraeviolae <cyfraeviolae>  2024-04-03 03:17:55 -0400
commit    12cf076118570eebbff08c6b3090e0d4798447a1 (patch)
tree      3ba25e17e3c3a5e82316558ba3864b955919ff72 /venv/lib/python3.11/site-packages/pip/_internal/resolution
parent    c45662ff3923b34614ddcc8feb9195541166dcc5 (diff)
no venv
Diffstat (limited to 'venv/lib/python3.11/site-packages/pip/_internal/resolution')
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc | bin 208 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc | bin 1379 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py | 20
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc | bin 215 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc | bin 23672 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py | 598
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc | bin 219 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc | bin 9307 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc | bin 31401 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc | bin 35794 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc | bin 6767 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc | bin 11458 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc | bin 5449 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc | bin 12223 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc | bin 13460 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py | 141
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py | 597
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py | 812
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py | 155
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py | 255
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py | 80
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py | 166
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py | 317
26 files changed, 0 insertions(+), 3141 deletions(-)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 692bdf9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 9d8bba9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py
deleted file mode 100644
index 42dade1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from typing import Callable, List, Optional
-
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_set import RequirementSet
-
-InstallRequirementProvider = Callable[
- [str, Optional[InstallRequirement]], InstallRequirement
-]
-
-
-class BaseResolver:
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- raise NotImplementedError()
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- raise NotImplementedError()
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 124d601..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc
deleted file mode 100644
index 8e0796b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py
deleted file mode 100644
index 5ddb848..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py
+++ /dev/null
@@ -1,598 +0,0 @@
-"""Dependency Resolution
-
-The dependency resolution in pip is performed as follows:
-
-for top-level requirements:
- a. only one spec allowed per project, regardless of conflicts or not.
- otherwise a "double requirement" exception is raised
- b. they override sub-dependency requirements.
-for sub-dependencies
- a. "first found, wins" (where the order is breadth first)
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import logging
-import sys
-from collections import defaultdict
-from itertools import chain
-from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
-
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.requirements import Requirement
-
-from pip._internal.cache import WheelCache
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled,
- DistributionNotFound,
- HashError,
- HashErrors,
- InstallationError,
- NoneMetadataError,
- UnsupportedPythonVersion,
-)
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_invalid_constraint_type,
-)
-from pip._internal.req.req_set import RequirementSet
-from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
-from pip._internal.utils import compatibility_tags
-from pip._internal.utils.compatibility_tags import get_supported
-from pip._internal.utils.direct_url_helpers import direct_url_from_link
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import normalize_version_info
-from pip._internal.utils.packaging import check_requires_python
-
-logger = logging.getLogger(__name__)
-
-DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
-
-
-def _check_dist_requires_python(
- dist: BaseDistribution,
- version_info: Tuple[int, int, int],
- ignore_requires_python: bool = False,
-) -> None:
- """
- Check whether the given Python version is compatible with a distribution's
- "Requires-Python" value.
-
- :param version_info: A 3-tuple of ints representing the Python
- major-minor-micro version to check.
- :param ignore_requires_python: Whether to ignore the "Requires-Python"
- value if the given Python version isn't compatible.
-
- :raises UnsupportedPythonVersion: When the given Python version isn't
- compatible.
- """
- # This idiosyncratically converts the SpecifierSet to str and let
- # check_requires_python then parse it again into SpecifierSet. But this
- # is the legacy resolver so I'm just not going to bother refactoring.
- try:
- requires_python = str(dist.requires_python)
- except FileNotFoundError as e:
- raise NoneMetadataError(dist, str(e))
- try:
- is_compatible = check_requires_python(
- requires_python,
- version_info=version_info,
- )
- except specifiers.InvalidSpecifier as exc:
- logger.warning(
- "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
- )
- return
-
- if is_compatible:
- return
-
- version = ".".join(map(str, version_info))
- if ignore_requires_python:
- logger.debug(
- "Ignoring failed Requires-Python check for package %r: %s not in %r",
- dist.raw_name,
- version,
- requires_python,
- )
- return
-
- raise UnsupportedPythonVersion(
- "Package {!r} requires a different Python: {} not in {!r}".format(
- dist.raw_name, version, requires_python
- )
- )
-
-
-class Resolver(BaseResolver):
- """Resolves which packages need to be installed/uninstalled to perform \
- the requested operation without breaking the requirements of any package.
- """
-
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(
- self,
- preparer: RequirementPreparer,
- finder: PackageFinder,
- wheel_cache: Optional[WheelCache],
- make_install_req: InstallRequirementProvider,
- use_user_site: bool,
- ignore_dependencies: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- force_reinstall: bool,
- upgrade_strategy: str,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ) -> None:
- super().__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- if py_version_info is None:
- py_version_info = sys.version_info[:3]
- else:
- py_version_info = normalize_version_info(py_version_info)
-
- self._py_version_info = py_version_info
-
- self.preparer = preparer
- self.finder = finder
- self.wheel_cache = wheel_cache
-
- self.upgrade_strategy = upgrade_strategy
- self.force_reinstall = force_reinstall
- self.ignore_dependencies = ignore_dependencies
- self.ignore_installed = ignore_installed
- self.ignore_requires_python = ignore_requires_python
- self.use_user_site = use_user_site
- self._make_install_req = make_install_req
-
- self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
-
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- """Resolve what operations need to be done
-
- As a side-effect of this method, the packages (and their dependencies)
- are downloaded, unpacked and prepared for installation. This
- preparation is done by ``pip.operations.prepare``.
-
- Once PyPI has static dependency metadata available, it would be
- possible to move the preparation to become a step separated from
- dependency resolution.
- """
- requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
- for req in root_reqs:
- if req.constraint:
- check_invalid_constraint_type(req)
- self._add_requirement_to_set(requirement_set, req)
-
- # Actually prepare the files, and collect any exceptions. Most hash
- # exceptions cannot be checked ahead of time, because
- # _populate_link() needs to be called before we can make decisions
- # based on link type.
- discovered_reqs: List[InstallRequirement] = []
- hash_errors = HashErrors()
- for req in chain(requirement_set.all_requirements, discovered_reqs):
- try:
- discovered_reqs.extend(self._resolve_one(requirement_set, req))
- except HashError as exc:
- exc.req = req
- hash_errors.append(exc)
-
- if hash_errors:
- raise hash_errors
-
- return requirement_set
-
- def _add_requirement_to_set(
- self,
- requirement_set: RequirementSet,
- install_req: InstallRequirement,
- parent_req_name: Optional[str] = None,
- extras_requested: Optional[Iterable[str]] = None,
- ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
- """Add install_req as a requirement to install.
-
- :param parent_req_name: The name of the requirement that needed this
- added. The name is used because when multiple unnamed requirements
- resolve to the same name, we could otherwise end up with dependency
- links that point outside the Requirements set. parent_req must
- already be added. Note that None implies that this is a user
- supplied requirement, vs an inferred one.
- :param extras_requested: an iterable of extras used to evaluate the
- environment markers.
- :return: Additional requirements to scan. That is either [] if
- the requirement is not applicable, or [install_req] if the
- requirement is applicable and has just been added.
- """
- # If the markers do not match, ignore this requirement.
- if not install_req.match_markers(extras_requested):
- logger.info(
- "Ignoring %s: markers '%s' don't match your environment",
- install_req.name,
- install_req.markers,
- )
- return [], None
-
- # If the wheel is not supported, raise an error.
- # Should check this after filtering out based on environment markers to
- # allow specifying different wheels based on the environment/OS, in a
- # single requirements file.
- if install_req.link and install_req.link.is_wheel:
- wheel = Wheel(install_req.link.filename)
- tags = compatibility_tags.get_supported()
- if requirement_set.check_supported_wheels and not wheel.supported(tags):
- raise InstallationError(
- f"{wheel.filename} is not a supported wheel on this platform."
- )
-
- # This next bit is really a sanity check.
- assert (
- not install_req.user_supplied or parent_req_name is None
- ), "a user supplied req shouldn't have a parent"
-
- # Unnamed requirements are scanned again and the requirement won't be
- # added as a dependency until after scanning.
- if not install_req.name:
- requirement_set.add_unnamed_requirement(install_req)
- return [install_req], None
-
- try:
- existing_req: Optional[
- InstallRequirement
- ] = requirement_set.get_requirement(install_req.name)
- except KeyError:
- existing_req = None
-
- has_conflicting_requirement = (
- parent_req_name is None
- and existing_req
- and not existing_req.constraint
- and existing_req.extras == install_req.extras
- and existing_req.req
- and install_req.req
- and existing_req.req.specifier != install_req.req.specifier
- )
- if has_conflicting_requirement:
- raise InstallationError(
- "Double requirement given: {} (already in {}, name={!r})".format(
- install_req, existing_req, install_req.name
- )
- )
-
- # When no existing requirement exists, add the requirement as a
- # dependency and it will be scanned again after.
- if not existing_req:
- requirement_set.add_named_requirement(install_req)
- # We'd want to rescan this requirement later
- return [install_req], install_req
-
- # Assume there's no need to scan, and that we've already
- # encountered this for scanning.
- if install_req.constraint or not existing_req.constraint:
- return [], existing_req
-
- does_not_satisfy_constraint = install_req.link and not (
- existing_req.link and install_req.link.path == existing_req.link.path
- )
- if does_not_satisfy_constraint:
- raise InstallationError(
- f"Could not satisfy constraints for '{install_req.name}': "
- "installation from path or url cannot be "
- "constrained to a version"
- )
- # If we're now installing a constraint, mark the existing
- # object for real installation.
- existing_req.constraint = False
- # If we're now installing a user supplied requirement,
- # mark the existing object as such.
- if install_req.user_supplied:
- existing_req.user_supplied = True
- existing_req.extras = tuple(
- sorted(set(existing_req.extras) | set(install_req.extras))
- )
- logger.debug(
- "Setting %s extras to: %s",
- existing_req,
- existing_req.extras,
- )
- # Return the existing requirement for addition to the parent and
- # scanning again.
- return [existing_req], existing_req
-
- def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
- if self.upgrade_strategy == "to-satisfy-only":
- return False
- elif self.upgrade_strategy == "eager":
- return True
- else:
- assert self.upgrade_strategy == "only-if-needed"
- return req.user_supplied or req.constraint
-
- def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
- """
- Set a requirement to be installed.
- """
- # Don't uninstall the conflict if doing a user install and the
- # conflict is not a user install.
- if not self.use_user_site or req.satisfied_by.in_usersite:
- req.should_reinstall = True
- req.satisfied_by = None
-
- def _check_skip_installed(
- self, req_to_install: InstallRequirement
- ) -> Optional[str]:
- """Check if req_to_install should be skipped.
-
- This will check if the req is installed, and whether we should upgrade
- or reinstall it, taking into account all the relevant user options.
-
- After calling this req_to_install will only have satisfied_by set to
- None if the req_to_install is to be upgraded/reinstalled etc. Any
- other value will be a dist recording the current thing installed that
- satisfies the requirement.
-
- Note that for vcs urls and the like we can't assess skipping in this
- routine - we simply identify that we need to pull the thing down,
- then later on it is pulled down and introspected to assess upgrade/
- reinstalls etc.
-
- :return: A text reason for why it was skipped, or None.
- """
- if self.ignore_installed:
- return None
-
- req_to_install.check_if_exists(self.use_user_site)
- if not req_to_install.satisfied_by:
- return None
-
- if self.force_reinstall:
- self._set_req_to_reinstall(req_to_install)
- return None
-
- if not self._is_upgrade_allowed(req_to_install):
- if self.upgrade_strategy == "only-if-needed":
- return "already satisfied, skipping upgrade"
- return "already satisfied"
-
- # Check for the possibility of an upgrade. For link-based
- # requirements we have to pull the tree down and inspect to assess
- # the version #, so it's handled way down.
- if not req_to_install.link:
- try:
- self.finder.find_requirement(req_to_install, upgrade=True)
- except BestVersionAlreadyInstalled:
- # Then the best version is installed.
- return "already up-to-date"
- except DistributionNotFound:
- # No distribution found, so we squash the error. It will
- # be raised later when we re-try later to do the install.
- # Why don't we just raise here?
- pass
-
- self._set_req_to_reinstall(req_to_install)
- return None
-
- def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
- upgrade = self._is_upgrade_allowed(req)
- best_candidate = self.finder.find_requirement(req, upgrade)
- if not best_candidate:
- return None
-
- # Log a warning per PEP 592 if necessary before returning.
- link = best_candidate.link
- if link.is_yanked:
- reason = link.yanked_reason or "<none given>"
- msg = (
- # Mark this as a unicode string to prevent
- # "UnicodeEncodeError: 'ascii' codec can't encode character"
- # in Python 2 when the reason contains non-ascii characters.
- "The candidate selected for download or install is a "
- f"yanked version: {best_candidate}\n"
- f"Reason for being yanked: {reason}"
- )
- logger.warning(msg)
-
- return link
-
- def _populate_link(self, req: InstallRequirement) -> None:
- """Ensure that if a link can be found for this, that it is found.
-
- Note that req.link may still be None - if the requirement is already
- installed and not needed to be upgraded based on the return value of
- _is_upgrade_allowed().
-
- If preparer.require_hashes is True, don't use the wheel cache, because
- cached wheels, always built locally, have different hashes than the
- files downloaded from the index server and thus throw false hash
- mismatches. Furthermore, cached wheels at present have undeterministic
- contents due to file modification times.
- """
- if req.link is None:
- req.link = self._find_requirement_link(req)
-
- if self.wheel_cache is None or self.preparer.require_hashes:
- return
- cache_entry = self.wheel_cache.get_cache_entry(
- link=req.link,
- package_name=req.name,
- supported_tags=get_supported(),
- )
- if cache_entry is not None:
- logger.debug("Using cached wheel link: %s", cache_entry.link)
- if req.link is req.original_link and cache_entry.persistent:
- req.cached_wheel_source_link = req.link
- if cache_entry.origin is not None:
- req.download_info = cache_entry.origin
- else:
- # Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hashes field.
- req.download_info = direct_url_from_link(
- req.link, link_is_in_wheel_cache=cache_entry.persistent
- )
- req.link = cache_entry.link
-
- def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
- """Takes a InstallRequirement and returns a single AbstractDist \
- representing a prepared variant of the same.
- """
- if req.editable:
- return self.preparer.prepare_editable_requirement(req)
-
- # satisfied_by is only evaluated by calling _check_skip_installed,
- # so it must be None here.
- assert req.satisfied_by is None
- skip_reason = self._check_skip_installed(req)
-
- if req.satisfied_by:
- return self.preparer.prepare_installed_requirement(req, skip_reason)
-
- # We eagerly populate the link, since that's our "legacy" behavior.
- self._populate_link(req)
- dist = self.preparer.prepare_linked_requirement(req)
-
- # NOTE
- # The following portion is for determining if a certain package is
- # going to be re-installed/upgraded or not and reporting to the user.
- # This should probably get cleaned up in a future refactor.
-
- # req.req is only avail after unpack for URL
- # pkgs repeat check_if_exists to uninstall-on-upgrade
- # (#14)
- if not self.ignore_installed:
- req.check_if_exists(self.use_user_site)
-
- if req.satisfied_by:
- should_modify = (
- self.upgrade_strategy != "to-satisfy-only"
- or self.force_reinstall
- or self.ignore_installed
- or req.link.scheme == "file"
- )
- if should_modify:
- self._set_req_to_reinstall(req)
- else:
- logger.info(
- "Requirement already satisfied (use --upgrade to upgrade): %s",
- req,
- )
- return dist
-
- def _resolve_one(
- self,
- requirement_set: RequirementSet,
- req_to_install: InstallRequirement,
- ) -> List[InstallRequirement]:
- """Prepare a single requirements file.
-
- :return: A list of additional InstallRequirements to also install.
- """
- # Tell user what we are doing for this requirement:
- # obtain (editable), skipping, processing (local url), collecting
- # (remote url or package name)
- if req_to_install.constraint or req_to_install.prepared:
- return []
-
- req_to_install.prepared = True
-
- # Parse and return dependencies
- dist = self._get_dist_for(req_to_install)
- # This will raise UnsupportedPythonVersion if the given Python
- # version isn't compatible with the distribution's Requires-Python.
- _check_dist_requires_python(
- dist,
- version_info=self._py_version_info,
- ignore_requires_python=self.ignore_requires_python,
- )
-
- more_reqs: List[InstallRequirement] = []
-
- def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
- # This idiosyncratically converts the Requirement to str and let
- # make_install_req then parse it again into Requirement. But this is
- # the legacy resolver so I'm just not going to bother refactoring.
- sub_install_req = self._make_install_req(str(subreq), req_to_install)
- parent_req_name = req_to_install.name
- to_scan_again, add_to_parent = self._add_requirement_to_set(
- requirement_set,
- sub_install_req,
- parent_req_name=parent_req_name,
- extras_requested=extras_requested,
- )
- if parent_req_name and add_to_parent:
- self._discovered_dependencies[parent_req_name].append(add_to_parent)
- more_reqs.extend(to_scan_again)
-
- with indent_log():
- # We add req_to_install before its dependencies, so that we
- # can refer to it when adding dependencies.
- if not requirement_set.has_requirement(req_to_install.name):
- # 'unnamed' requirements will get added here
- # 'unnamed' requirements can only come from being directly
- # provided by the user.
- assert req_to_install.user_supplied
- self._add_requirement_to_set(
- requirement_set, req_to_install, parent_req_name=None
- )
-
- if not self.ignore_dependencies:
- if req_to_install.extras:
- logger.debug(
- "Installing extra requirements: %r",
- ",".join(req_to_install.extras),
- )
- missing_requested = sorted(
- set(req_to_install.extras) - set(dist.iter_provided_extras())
- )
- for missing in missing_requested:
- logger.warning(
- "%s %s does not provide the extra '%s'",
- dist.raw_name,
- dist.version,
- missing,
- )
-
- available_requested = sorted(
- set(dist.iter_provided_extras()) & set(req_to_install.extras)
- )
- for subreq in dist.iter_dependencies(available_requested):
- add_req(subreq, extras_requested=available_requested)
-
- return more_reqs
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- """Create the installation order.
-
- The installation order is topological - requirements are installed
- before the requiring thing. We break cycles at an arbitrary point,
- and make no other guarantees.
- """
- # The current implementation, which we may change at any point
- # installs the user specified things in the order given, except when
- # dependencies must come earlier to achieve topological order.
- order = []
- ordered_reqs: Set[InstallRequirement] = set()
-
- def schedule(req: InstallRequirement) -> None:
- if req.satisfied_by or req in ordered_reqs:
- return
- if req.constraint:
- return
- ordered_reqs.add(req)
- for dep in self._discovered_dependencies[req.name]:
- schedule(dep)
- order.append(req)
-
- for install_req in req_set.requirements.values():
- schedule(install_req)
- return order
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index ae5d57a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 74e9558..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc
deleted file mode 100644
index 0990251..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc
deleted file mode 100644
index 30dac5d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc
deleted file mode 100644
index 492b5bd..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc
deleted file mode 100644
index 03ae80a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc
deleted file mode 100644
index e8a1b66..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc
deleted file mode 100644
index fbe135d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc
deleted file mode 100644
index f79ed40..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py
deleted file mode 100644
index 9c0ef5c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from typing import FrozenSet, Iterable, Optional, Tuple, Union
-
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName
-from pip._vendor.packaging.version import LegacyVersion, Version
-
-from pip._internal.models.link import Link, links_equivalent
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.hashes import Hashes
-
-CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
-CandidateVersion = Union[LegacyVersion, Version]
-
-
-def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
- if not extras:
- return project
- extras_expr = ",".join(sorted(extras))
- return f"{project}[{extras_expr}]"
-
-
-class Constraint:
- def __init__(
- self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
- ) -> None:
- self.specifier = specifier
- self.hashes = hashes
- self.links = links
-
- @classmethod
- def empty(cls) -> "Constraint":
- return Constraint(SpecifierSet(), Hashes(), frozenset())
-
- @classmethod
- def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
- links = frozenset([ireq.link]) if ireq.link else frozenset()
- return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
-
- def __bool__(self) -> bool:
- return bool(self.specifier) or bool(self.hashes) or bool(self.links)
-
- def __and__(self, other: InstallRequirement) -> "Constraint":
- if not isinstance(other, InstallRequirement):
- return NotImplemented
- specifier = self.specifier & other.specifier
- hashes = self.hashes & other.hashes(trust_internet=False)
- links = self.links
- if other.link:
- links = links.union([other.link])
- return Constraint(specifier, hashes, links)
-
- def is_satisfied_by(self, candidate: "Candidate") -> bool:
- # Reject if there are any mismatched URL constraints on this package.
- if self.links and not all(_match_link(link, candidate) for link in self.links):
- return False
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- return self.specifier.contains(candidate.version, prereleases=True)
-
-
-class Requirement:
- @property
- def project_name(self) -> NormalizedName:
- """The "project name" of a requirement.
-
- This is different from ``name`` if this requirement contains extras,
- in which case ``name`` would contain the ``[...]`` part, while this
- refers to the name of the project.
- """
- raise NotImplementedError("Subclass should override")
-
- @property
- def name(self) -> str:
- """The name identifying this requirement in the resolver.
-
- This is different from ``project_name`` if this requirement contains
- extras, where ``project_name`` would not contain the ``[...]`` part.
- """
- raise NotImplementedError("Subclass should override")
-
- def is_satisfied_by(self, candidate: "Candidate") -> bool:
- return False
-
- def get_candidate_lookup(self) -> CandidateLookup:
- raise NotImplementedError("Subclass should override")
-
- def format_for_error(self) -> str:
- raise NotImplementedError("Subclass should override")
-
-
-def _match_link(link: Link, candidate: "Candidate") -> bool:
- if candidate.source_link:
- return links_equivalent(link, candidate.source_link)
- return False
-
-
-class Candidate:
- @property
- def project_name(self) -> NormalizedName:
- """The "project name" of the candidate.
-
- This is different from ``name`` if this candidate contains extras,
- in which case ``name`` would contain the ``[...]`` part, while this
- refers to the name of the project.
- """
- raise NotImplementedError("Override in subclass")
-
- @property
- def name(self) -> str:
- """The name identifying this candidate in the resolver.
-
- This is different from ``project_name`` if this candidate contains
- extras, where ``project_name`` would not contain the ``[...]`` part.
- """
- raise NotImplementedError("Override in subclass")
-
- @property
- def version(self) -> CandidateVersion:
- raise NotImplementedError("Override in subclass")
-
- @property
- def is_installed(self) -> bool:
- raise NotImplementedError("Override in subclass")
-
- @property
- def is_editable(self) -> bool:
- raise NotImplementedError("Override in subclass")
-
- @property
- def source_link(self) -> Optional[Link]:
- raise NotImplementedError("Override in subclass")
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- raise NotImplementedError("Override in subclass")
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- raise NotImplementedError("Override in subclass")
-
- def format_for_error(self) -> str:
- raise NotImplementedError("Subclass should override")
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py
deleted file mode 100644
index 4125cda..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py
+++ /dev/null
@@ -1,597 +0,0 @@
-import logging
-import sys
-from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
-
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import Version
-
-from pip._internal.exceptions import (
- HashError,
- InstallationSubprocessError,
- MetadataInconsistent,
-)
-from pip._internal.metadata import BaseDistribution
-from pip._internal.models.link import Link, links_equivalent
-from pip._internal.models.wheel import Wheel
-from pip._internal.req.constructors import (
- install_req_from_editable,
- install_req_from_line,
-)
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.direct_url_helpers import direct_url_from_link
-from pip._internal.utils.misc import normalize_version_info
-
-from .base import Candidate, CandidateVersion, Requirement, format_name
-
-if TYPE_CHECKING:
- from .factory import Factory
-
-logger = logging.getLogger(__name__)
-
-BaseCandidate = Union[
- "AlreadyInstalledCandidate",
- "EditableCandidate",
- "LinkCandidate",
-]
-
-# Avoid conflicting with the PyPI package "Python".
-REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
-
-
-def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
- """The runtime version of BaseCandidate."""
- base_candidate_classes = (
- AlreadyInstalledCandidate,
- EditableCandidate,
- LinkCandidate,
- )
- if isinstance(candidate, base_candidate_classes):
- return candidate
- return None
-
-
-def make_install_req_from_link(
- link: Link, template: InstallRequirement
-) -> InstallRequirement:
- assert not template.editable, "template is editable"
- if template.req:
- line = str(template.req)
- else:
- line = link.url
- ireq = install_req_from_line(
- line,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.original_link = template.original_link
- ireq.link = link
- ireq.extras = template.extras
- return ireq
-
-
-def make_install_req_from_editable(
- link: Link, template: InstallRequirement
-) -> InstallRequirement:
- assert template.editable, "template not editable"
- ireq = install_req_from_editable(
- link.url,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- permit_editable_wheels=template.permit_editable_wheels,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.extras = template.extras
- return ireq
-
-
-def _make_install_req_from_dist(
- dist: BaseDistribution, template: InstallRequirement
-) -> InstallRequirement:
- if template.req:
- line = str(template.req)
- elif template.link:
- line = f"{dist.canonical_name} @ {template.link.url}"
- else:
- line = f"{dist.canonical_name}=={dist.version}"
- ireq = install_req_from_line(
- line,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.satisfied_by = dist
- return ireq
-
-
-class _InstallRequirementBackedCandidate(Candidate):
- """A candidate backed by an ``InstallRequirement``.
-
- This represents a package request with the target not being already
- in the environment, and needs to be fetched and installed. The backing
- ``InstallRequirement`` is responsible for most of the leg work; this
- class exposes appropriate information to the resolver.
-
- :param link: The link passed to the ``InstallRequirement``. The backing
- ``InstallRequirement`` will use this link to fetch the distribution.
- :param source_link: The link this candidate "originates" from. This is
- different from ``link`` when the link is found in the wheel cache.
- ``link`` would point to the wheel cache, while this points to the
- found remote link (e.g. from pypi.org).
- """
-
- dist: BaseDistribution
- is_installed = False
-
- def __init__(
- self,
- link: Link,
- source_link: Link,
- ireq: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- self._link = link
- self._source_link = source_link
- self._factory = factory
- self._ireq = ireq
- self._name = name
- self._version = version
- self.dist = self._prepare()
-
- def __str__(self) -> str:
- return f"{self.name} {self.version}"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._link)!r})"
-
- def __hash__(self) -> int:
- return hash((self.__class__, self._link))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return links_equivalent(self._link, other._link)
- return False
-
- @property
- def source_link(self) -> Optional[Link]:
- return self._source_link
-
- @property
- def project_name(self) -> NormalizedName:
- """The normalised name of the project the candidate refers to"""
- if self._name is None:
- self._name = self.dist.canonical_name
- return self._name
-
- @property
- def name(self) -> str:
- return self.project_name
-
- @property
- def version(self) -> CandidateVersion:
- if self._version is None:
- self._version = self.dist.version
- return self._version
-
- def format_for_error(self) -> str:
- return "{} {} (from {})".format(
- self.name,
- self.version,
- self._link.file_path if self._link.is_file else self._link,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- raise NotImplementedError("Override in subclass")
-
- def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
- """Check for consistency of project name and version of dist."""
- if self._name is not None and self._name != dist.canonical_name:
- raise MetadataInconsistent(
- self._ireq,
- "name",
- self._name,
- dist.canonical_name,
- )
- if self._version is not None and self._version != dist.version:
- raise MetadataInconsistent(
- self._ireq,
- "version",
- str(self._version),
- str(dist.version),
- )
-
- def _prepare(self) -> BaseDistribution:
- try:
- dist = self._prepare_distribution()
- except HashError as e:
- # Provide HashError the underlying ireq that caused it. This
- # provides context for the resulting error message to show the
- # offending line to the user.
- e.req = self._ireq
- raise
- except InstallationSubprocessError as exc:
- # The output has been presented already, so don't duplicate it.
- exc.context = "See above for output."
- raise
-
- self._check_metadata_consistency(dist)
- return dist
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- requires = self.dist.iter_dependencies() if with_requires else ()
- for r in requires:
- yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
- yield self._factory.make_requires_python_requirement(self.dist.requires_python)
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return self._ireq
-
-
-class LinkCandidate(_InstallRequirementBackedCandidate):
- is_editable = False
-
- def __init__(
- self,
- link: Link,
- template: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- source_link = link
- cache_entry = factory.get_wheel_cache_entry(source_link, name)
- if cache_entry is not None:
- logger.debug("Using cached wheel link: %s", cache_entry.link)
- link = cache_entry.link
- ireq = make_install_req_from_link(link, template)
- assert ireq.link == link
- if ireq.link.is_wheel and not ireq.link.is_file:
- wheel = Wheel(ireq.link.filename)
- wheel_name = canonicalize_name(wheel.name)
- assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
- # Version may not be present for PEP 508 direct URLs
- if version is not None:
- wheel_version = Version(wheel.version)
- assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
- version, wheel_version, name
- )
-
- if cache_entry is not None:
- assert ireq.link.is_wheel
- assert ireq.link.is_file
- if cache_entry.persistent and template.link is template.original_link:
- ireq.cached_wheel_source_link = source_link
- if cache_entry.origin is not None:
- ireq.download_info = cache_entry.origin
- else:
- # Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hashes field.
- ireq.download_info = direct_url_from_link(
- source_link, link_is_in_wheel_cache=cache_entry.persistent
- )
-
- super().__init__(
- link=link,
- source_link=source_link,
- ireq=ireq,
- factory=factory,
- name=name,
- version=version,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- preparer = self._factory.preparer
- return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
-
-
-class EditableCandidate(_InstallRequirementBackedCandidate):
- is_editable = True
-
- def __init__(
- self,
- link: Link,
- template: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- super().__init__(
- link=link,
- source_link=link,
- ireq=make_install_req_from_editable(link, template),
- factory=factory,
- name=name,
- version=version,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- return self._factory.preparer.prepare_editable_requirement(self._ireq)
-
-
-class AlreadyInstalledCandidate(Candidate):
- is_installed = True
- source_link = None
-
- def __init__(
- self,
- dist: BaseDistribution,
- template: InstallRequirement,
- factory: "Factory",
- ) -> None:
- self.dist = dist
- self._ireq = _make_install_req_from_dist(dist, template)
- self._factory = factory
- self._version = None
-
- # This is just logging some messages, so we can do it eagerly.
- # The returned dist would be exactly the same as self.dist because we
- # set satisfied_by in _make_install_req_from_dist.
- # TODO: Supply reason based on force_reinstall and upgrade_strategy.
- skip_reason = "already satisfied"
- factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
-
- def __str__(self) -> str:
- return str(self.dist)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self.dist!r})"
-
- def __hash__(self) -> int:
- return hash((self.__class__, self.name, self.version))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return self.name == other.name and self.version == other.version
- return False
-
- @property
- def project_name(self) -> NormalizedName:
- return self.dist.canonical_name
-
- @property
- def name(self) -> str:
- return self.project_name
-
- @property
- def version(self) -> CandidateVersion:
- if self._version is None:
- self._version = self.dist.version
- return self._version
-
- @property
- def is_editable(self) -> bool:
- return self.dist.editable
-
- def format_for_error(self) -> str:
- return f"{self.name} {self.version} (Installed)"
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- if not with_requires:
- return
- for r in self.dist.iter_dependencies():
- yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return None
-
-
-class ExtrasCandidate(Candidate):
- """A candidate that has 'extras', indicating additional dependencies.
-
- Requirements can be for a project with dependencies, something like
- foo[extra]. The extras don't affect the project/version being installed
- directly, but indicate that we need additional dependencies. We model that
- by having an artificial ExtrasCandidate that wraps the "base" candidate.
-
- The ExtrasCandidate differs from the base in the following ways:
-
- 1. It has a unique name, of the form foo[extra]. This causes the resolver
- to treat it as a separate node in the dependency graph.
- 2. When we're getting the candidate's dependencies,
- a) We specify that we want the extra dependencies as well.
- b) We add a dependency on the base candidate.
- See below for why this is needed.
- 3. We return None for the underlying InstallRequirement, as the base
- candidate will provide it, and we don't want to end up with duplicates.
-
- The dependency on the base candidate is needed so that the resolver can't
- decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
- version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
- respectively forces the resolver to recognise that this is a conflict.
- """
-
- def __init__(
- self,
- base: BaseCandidate,
- extras: FrozenSet[str],
- *,
- comes_from: Optional[InstallRequirement] = None,
- ) -> None:
- """
- :param comes_from: the InstallRequirement that led to this candidate if it
- differs from the base's InstallRequirement. This will often be the
- case in the sense that this candidate's requirement has the extras
- while the base's does not. Unlike the InstallRequirement backed
- candidates, this requirement is used solely for reporting purposes,
- it does not do any leg work.
- """
- self.base = base
- self.extras = frozenset(canonicalize_name(e) for e in extras)
- # If any extras are requested in their non-normalized forms, keep track
- # of their raw values. This is needed when we look up dependencies
- # since PEP 685 has not been implemented for marker-matching, and using
- # the non-normalized extra for lookup ensures the user can select a
- # non-normalized extra in a package with its non-normalized form.
- # TODO: Remove this attribute when packaging is upgraded to support the
- # marker comparison logic specified in PEP 685.
- self._unnormalized_extras = extras.difference(self.extras)
- self._comes_from = comes_from if comes_from is not None else self.base._ireq
-
- def __str__(self) -> str:
- name, rest = str(self.base).split(" ", 1)
- return "{}[{}] {}".format(name, ",".join(self.extras), rest)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
-
- def __hash__(self) -> int:
- return hash((self.base, self.extras))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return self.base == other.base and self.extras == other.extras
- return False
-
- @property
- def project_name(self) -> NormalizedName:
- return self.base.project_name
-
- @property
- def name(self) -> str:
- """The normalised name of the project the candidate refers to"""
- return format_name(self.base.project_name, self.extras)
-
- @property
- def version(self) -> CandidateVersion:
- return self.base.version
-
- def format_for_error(self) -> str:
- return "{} [{}]".format(
- self.base.format_for_error(), ", ".join(sorted(self.extras))
- )
-
- @property
- def is_installed(self) -> bool:
- return self.base.is_installed
-
- @property
- def is_editable(self) -> bool:
- return self.base.is_editable
-
- @property
- def source_link(self) -> Optional[Link]:
- return self.base.source_link
-
- def _warn_invalid_extras(
- self,
- requested: FrozenSet[str],
- valid: FrozenSet[str],
- ) -> None:
- """Emit warnings for invalid extras being requested.
-
- This emits a warning for each requested extra that is not in the
- candidate's ``Provides-Extra`` list.
- """
- invalid_extras_to_warn = frozenset(
- extra
- for extra in requested
- if extra not in valid
- # If an extra is requested in an unnormalized form, skip warning
- # about the normalized form being missing.
- and extra in self.extras
- )
- if not invalid_extras_to_warn:
- return
- for extra in sorted(invalid_extras_to_warn):
- logger.warning(
- "%s %s does not provide the extra '%s'",
- self.base.name,
- self.version,
- extra,
- )
-
- def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
- """Get a list of valid extras requested by this candidate.
-
- The user (or upstream dependant) may have specified extras that the
- candidate doesn't support. Any unsupported extras are dropped, and each
- cause a warning to be logged here.
- """
- requested_extras = self.extras.union(self._unnormalized_extras)
- valid_extras = frozenset(
- extra
- for extra in requested_extras
- if self.base.dist.is_extra_provided(extra)
- )
- self._warn_invalid_extras(requested_extras, valid_extras)
- return valid_extras
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- factory = self.base._factory
-
- # Add a dependency on the exact base
- # (See note 2b in the class docstring)
- yield factory.make_requirement_from_candidate(self.base)
- if not with_requires:
- return
-
- valid_extras = self._calculate_valid_requested_extras()
- for r in self.base.dist.iter_dependencies(valid_extras):
- yield from factory.make_requirements_from_spec(
- str(r),
- self._comes_from,
- valid_extras,
- )
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- # We don't return anything here, because we always
- # depend on the base candidate, and we'll get the
- # install requirement from that.
- return None
-
-
-class RequiresPythonCandidate(Candidate):
- is_installed = False
- source_link = None
-
- def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
- if py_version_info is not None:
- version_info = normalize_version_info(py_version_info)
- else:
- version_info = sys.version_info[:3]
- self._version = Version(".".join(str(c) for c in version_info))
-
- # We don't need to implement __eq__() and __ne__() since there is always
- # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
- # The built-in object.__eq__() and object.__ne__() do exactly what we want.
-
- def __str__(self) -> str:
- return f"Python {self._version}"
-
- @property
- def project_name(self) -> NormalizedName:
- return REQUIRES_PYTHON_IDENTIFIER
-
- @property
- def name(self) -> str:
- return REQUIRES_PYTHON_IDENTIFIER
-
- @property
- def version(self) -> CandidateVersion:
- return self._version
-
- def format_for_error(self) -> str:
- return f"Python {self.version}"
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- return ()
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return None
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py
deleted file mode 100644
index 4adeb43..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py
+++ /dev/null
@@ -1,812 +0,0 @@
-import contextlib
-import functools
-import logging
-from typing import (
- TYPE_CHECKING,
- Dict,
- FrozenSet,
- Iterable,
- Iterator,
- List,
- Mapping,
- NamedTuple,
- Optional,
- Sequence,
- Set,
- Tuple,
- TypeVar,
- cast,
-)
-
-from pip._vendor.packaging.requirements import InvalidRequirement
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.resolvelib import ResolutionImpossible
-
-from pip._internal.cache import CacheEntry, WheelCache
-from pip._internal.exceptions import (
- DistributionNotFound,
- InstallationError,
- MetadataInconsistent,
- UnsupportedPythonVersion,
- UnsupportedWheel,
-)
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution, get_default_environment
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import (
- install_req_drop_extras,
- install_req_from_link_and_ireq,
-)
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_invalid_constraint_type,
-)
-from pip._internal.resolution.base import InstallRequirementProvider
-from pip._internal.utils.compatibility_tags import get_supported
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.packaging import get_requirement
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-from .base import Candidate, CandidateVersion, Constraint, Requirement
-from .candidates import (
- AlreadyInstalledCandidate,
- BaseCandidate,
- EditableCandidate,
- ExtrasCandidate,
- LinkCandidate,
- RequiresPythonCandidate,
- as_base_candidate,
-)
-from .found_candidates import FoundCandidates, IndexCandidateInfo
-from .requirements import (
- ExplicitRequirement,
- RequiresPythonRequirement,
- SpecifierRequirement,
- SpecifierWithoutExtrasRequirement,
- UnsatisfiableRequirement,
-)
-
-if TYPE_CHECKING:
- from typing import Protocol
-
- class ConflictCause(Protocol):
- requirement: RequiresPythonRequirement
- parent: Candidate
-
-
-logger = logging.getLogger(__name__)
-
-C = TypeVar("C")
-Cache = Dict[Link, C]
-
-
-class CollectedRootRequirements(NamedTuple):
- requirements: List[Requirement]
- constraints: Dict[str, Constraint]
- user_requested: Dict[str, int]
-
-
-class Factory:
- def __init__(
- self,
- finder: PackageFinder,
- preparer: RequirementPreparer,
- make_install_req: InstallRequirementProvider,
- wheel_cache: Optional[WheelCache],
- use_user_site: bool,
- force_reinstall: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ) -> None:
- self._finder = finder
- self.preparer = preparer
- self._wheel_cache = wheel_cache
- self._python_candidate = RequiresPythonCandidate(py_version_info)
- self._make_install_req_from_spec = make_install_req
- self._use_user_site = use_user_site
- self._force_reinstall = force_reinstall
- self._ignore_requires_python = ignore_requires_python
-
- self._build_failures: Cache[InstallationError] = {}
- self._link_candidate_cache: Cache[LinkCandidate] = {}
- self._editable_candidate_cache: Cache[EditableCandidate] = {}
- self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
- self._extras_candidate_cache: Dict[
- Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
- ] = {}
-
- if not ignore_installed:
- env = get_default_environment()
- self._installed_dists = {
- dist.canonical_name: dist
- for dist in env.iter_installed_distributions(local_only=False)
- }
- else:
- self._installed_dists = {}
-
- @property
- def force_reinstall(self) -> bool:
- return self._force_reinstall
-
- def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
- if not link.is_wheel:
- return
- wheel = Wheel(link.filename)
- if wheel.supported(self._finder.target_python.get_unsorted_tags()):
- return
- msg = f"{link.filename} is not a supported wheel on this platform."
- raise UnsupportedWheel(msg)
-
- def _make_extras_candidate(
- self,
- base: BaseCandidate,
- extras: FrozenSet[str],
- *,
- comes_from: Optional[InstallRequirement] = None,
- ) -> ExtrasCandidate:
- cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
- try:
- candidate = self._extras_candidate_cache[cache_key]
- except KeyError:
- candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
- self._extras_candidate_cache[cache_key] = candidate
- return candidate
-
- def _make_candidate_from_dist(
- self,
- dist: BaseDistribution,
- extras: FrozenSet[str],
- template: InstallRequirement,
- ) -> Candidate:
- try:
- base = self._installed_candidate_cache[dist.canonical_name]
- except KeyError:
- base = AlreadyInstalledCandidate(dist, template, factory=self)
- self._installed_candidate_cache[dist.canonical_name] = base
- if not extras:
- return base
- return self._make_extras_candidate(base, extras, comes_from=template)
-
- def _make_candidate_from_link(
- self,
- link: Link,
- extras: FrozenSet[str],
- template: InstallRequirement,
- name: Optional[NormalizedName],
- version: Optional[CandidateVersion],
- ) -> Optional[Candidate]:
- base: Optional[BaseCandidate] = self._make_base_candidate_from_link(
- link, template, name, version
- )
- if not extras or base is None:
- return base
- return self._make_extras_candidate(base, extras, comes_from=template)
-
- def _make_base_candidate_from_link(
- self,
- link: Link,
- template: InstallRequirement,
- name: Optional[NormalizedName],
- version: Optional[CandidateVersion],
- ) -> Optional[BaseCandidate]:
- # TODO: Check already installed candidate, and use it if the link and
- # editable flag match.
-
- if link in self._build_failures:
- # We already tried this candidate before, and it does not build.
- # Don't bother trying again.
- return None
-
- if template.editable:
- if link not in self._editable_candidate_cache:
- try:
- self._editable_candidate_cache[link] = EditableCandidate(
- link,
- template,
- factory=self,
- name=name,
- version=version,
- )
- except MetadataInconsistent as e:
- logger.info(
- "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
- link,
- e,
- extra={"markup": True},
- )
- self._build_failures[link] = e
- return None
-
- return self._editable_candidate_cache[link]
- else:
- if link not in self._link_candidate_cache:
- try:
- self._link_candidate_cache[link] = LinkCandidate(
- link,
- template,
- factory=self,
- name=name,
- version=version,
- )
- except MetadataInconsistent as e:
- logger.info(
- "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
- link,
- e,
- extra={"markup": True},
- )
- self._build_failures[link] = e
- return None
- return self._link_candidate_cache[link]
-
- def _iter_found_candidates(
- self,
- ireqs: Sequence[InstallRequirement],
- specifier: SpecifierSet,
- hashes: Hashes,
- prefers_installed: bool,
- incompatible_ids: Set[int],
- ) -> Iterable[Candidate]:
- if not ireqs:
- return ()
-
- # The InstallRequirement implementation requires us to give it a
- # "template". Here we just choose the first requirement to represent
- # all of them.
- # Hopefully the Project model can correct this mismatch in the future.
- template = ireqs[0]
- assert template.req, "Candidates found on index must be PEP 508"
- name = canonicalize_name(template.req.name)
-
- extras: FrozenSet[str] = frozenset()
- for ireq in ireqs:
- assert ireq.req, "Candidates found on index must be PEP 508"
- specifier &= ireq.req.specifier
- hashes &= ireq.hashes(trust_internet=False)
- extras |= frozenset(ireq.extras)
-
- def _get_installed_candidate() -> Optional[Candidate]:
- """Get the candidate for the currently-installed version."""
- # If --force-reinstall is set, we want the version from the index
- # instead, so we "pretend" there is nothing installed.
- if self._force_reinstall:
- return None
- try:
- installed_dist = self._installed_dists[name]
- except KeyError:
- return None
- # Don't use the installed distribution if its version does not fit
- # the current dependency graph.
- if not specifier.contains(installed_dist.version, prereleases=True):
- return None
- candidate = self._make_candidate_from_dist(
- dist=installed_dist,
- extras=extras,
- template=template,
- )
- # The candidate is a known incompatibility. Don't use it.
- if id(candidate) in incompatible_ids:
- return None
- return candidate
-
- def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
- result = self._finder.find_best_candidate(
- project_name=name,
- specifier=specifier,
- hashes=hashes,
- )
- icans = list(result.iter_applicable())
-
- # PEP 592: Yanked releases are ignored unless the specifier
- # explicitly pins a version (via '==' or '===') that can be
- # solely satisfied by a yanked release.
- all_yanked = all(ican.link.is_yanked for ican in icans)
-
- def is_pinned(specifier: SpecifierSet) -> bool:
- for sp in specifier:
- if sp.operator == "===":
- return True
- if sp.operator != "==":
- continue
- if sp.version.endswith(".*"):
- continue
- return True
- return False
-
- pinned = is_pinned(specifier)
-
- # PackageFinder returns earlier versions first, so we reverse.
- for ican in reversed(icans):
- if not (all_yanked and pinned) and ican.link.is_yanked:
- continue
- func = functools.partial(
- self._make_candidate_from_link,
- link=ican.link,
- extras=extras,
- template=template,
- name=name,
- version=ican.version,
- )
- yield ican.version, func
-
- return FoundCandidates(
- iter_index_candidate_infos,
- _get_installed_candidate(),
- prefers_installed,
- incompatible_ids,
- )
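A toy demonstration of the pin check used for the PEP 592 handling above; the standalone packaging library stands in for pip's vendored copy and the example specifiers are illustrative only:

# Yanked releases are only eligible when the specifier pins an exact version.
from packaging.specifiers import SpecifierSet

def is_pinned(specifier: SpecifierSet) -> bool:
    for sp in specifier:
        if sp.operator == "===":
            return True
        if sp.operator == "==" and not sp.version.endswith(".*"):
            return True
    return False

print(is_pinned(SpecifierSet("==1.2.3")))  # True  -> a yanked 1.2.3 may be used
print(is_pinned(SpecifierSet("==1.2.*")))  # False -> wildcard is not a pin
print(is_pinned(SpecifierSet(">=1.2")))    # False -> a range is never a pin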
-
- def _iter_explicit_candidates_from_base(
- self,
- base_requirements: Iterable[Requirement],
- extras: FrozenSet[str],
- ) -> Iterator[Candidate]:
- """Produce explicit candidates from the base given an extra-ed package.
-
- :param base_requirements: Requirements known to the resolver. The
- requirements are guaranteed to not have extras.
- :param extras: The extras to inject into the explicit requirements'
- candidates.
- """
- for req in base_requirements:
- lookup_cand, _ = req.get_candidate_lookup()
- if lookup_cand is None: # Not explicit.
- continue
- # We've stripped extras from the identifier, and should always
- # get a BaseCandidate here, unless there's a bug elsewhere.
- base_cand = as_base_candidate(lookup_cand)
- assert base_cand is not None, "no extras here"
- yield self._make_extras_candidate(base_cand, extras)
-
- def _iter_candidates_from_constraints(
- self,
- identifier: str,
- constraint: Constraint,
- template: InstallRequirement,
- ) -> Iterator[Candidate]:
- """Produce explicit candidates from constraints.
-
- This creates "fake" InstallRequirement objects that are basically clones
- of what "should" be the template, but with original_link set to link.
- """
- for link in constraint.links:
- self._fail_if_link_is_unsupported_wheel(link)
- candidate = self._make_base_candidate_from_link(
- link,
- template=install_req_from_link_and_ireq(link, template),
- name=canonicalize_name(identifier),
- version=None,
- )
- if candidate:
- yield candidate
-
- def find_candidates(
- self,
- identifier: str,
- requirements: Mapping[str, Iterable[Requirement]],
- incompatibilities: Mapping[str, Iterator[Candidate]],
- constraint: Constraint,
- prefers_installed: bool,
- ) -> Iterable[Candidate]:
- # Collect basic lookup information from the requirements.
- explicit_candidates: Set[Candidate] = set()
- ireqs: List[InstallRequirement] = []
- for req in requirements[identifier]:
- cand, ireq = req.get_candidate_lookup()
- if cand is not None:
- explicit_candidates.add(cand)
- if ireq is not None:
- ireqs.append(ireq)
-
- # If the current identifier contains extras, add requires and explicit
- # candidates from entries from extra-less identifier.
- with contextlib.suppress(InvalidRequirement):
- parsed_requirement = get_requirement(identifier)
- if parsed_requirement.name != identifier:
- explicit_candidates.update(
- self._iter_explicit_candidates_from_base(
- requirements.get(parsed_requirement.name, ()),
- frozenset(parsed_requirement.extras),
- ),
- )
- for req in requirements.get(parsed_requirement.name, []):
- _, ireq = req.get_candidate_lookup()
- if ireq is not None:
- ireqs.append(ireq)
-
- # Add explicit candidates from constraints. We only do this if there are
- # known ireqs, which represent requirements not already explicit. If
- # there are no ireqs, we're constraining already-explicit requirements,
- # which is handled later when we return the explicit candidates.
- if ireqs:
- try:
- explicit_candidates.update(
- self._iter_candidates_from_constraints(
- identifier,
- constraint,
- template=ireqs[0],
- ),
- )
- except UnsupportedWheel:
- # If we're constrained to install a wheel incompatible with the
- # target architecture, no candidates will ever be valid.
- return ()
-
- # Since we cache all the candidates, incompatibility identification
- # can be made quicker by comparing only the id() values.
- incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
-
- # If none of the requirements want an explicit candidate, we can ask
- # the finder for candidates.
- if not explicit_candidates:
- return self._iter_found_candidates(
- ireqs,
- constraint.specifier,
- constraint.hashes,
- prefers_installed,
- incompat_ids,
- )
-
- return (
- c
- for c in explicit_candidates
- if id(c) not in incompat_ids
- and constraint.is_satisfied_by(c)
- and all(req.is_satisfied_by(c) for req in requirements[identifier])
- )
-
- def _make_requirements_from_install_req(
- self, ireq: InstallRequirement, requested_extras: Iterable[str]
- ) -> Iterator[Requirement]:
- """
- Returns requirement objects associated with the given InstallRequirement. In
- most cases this will be a single object but the following special cases exist:
- - the InstallRequirement has markers that do not apply -> result is empty
- - the InstallRequirement has both a constraint (or link) and extras
- -> the result is split into two requirement objects: one with the constraint
- (or link) and one with the extras. This allows centralized constraint
- handling for the base, resulting in fewer candidate rejections.
- """
- if not ireq.match_markers(requested_extras):
- logger.info(
- "Ignoring %s: markers '%s' don't match your environment",
- ireq.name,
- ireq.markers,
- )
- elif not ireq.link:
- if ireq.extras and ireq.req is not None and ireq.req.specifier:
- yield SpecifierWithoutExtrasRequirement(ireq)
- yield SpecifierRequirement(ireq)
- else:
- self._fail_if_link_is_unsupported_wheel(ireq.link)
- # Always make the link candidate for the base requirement to make it
- # available to `find_candidates` for explicit candidate lookup for any
- # set of extras.
- # The extras are required separately via a second requirement.
- cand = self._make_base_candidate_from_link(
- ireq.link,
- template=install_req_drop_extras(ireq) if ireq.extras else ireq,
- name=canonicalize_name(ireq.name) if ireq.name else None,
- version=None,
- )
- if cand is None:
- # There's no way we can satisfy a URL requirement if the underlying
- # candidate fails to build. An unnamed URL must be user-supplied, so
- # we fail eagerly. If the URL is named, an unsatisfiable requirement
- # can make the resolver do the right thing, either backtrack (and
- # maybe find some other requirement that's buildable) or raise a
- # ResolutionImpossible eventually.
- if not ireq.name:
- raise self._build_failures[ireq.link]
- yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
- else:
- # require the base from the link
- yield self.make_requirement_from_candidate(cand)
- if ireq.extras:
- # require the extras on top of the base candidate
- yield self.make_requirement_from_candidate(
- self._make_extras_candidate(cand, frozenset(ireq.extras))
- )
-
- def collect_root_requirements(
- self, root_ireqs: List[InstallRequirement]
- ) -> CollectedRootRequirements:
- collected = CollectedRootRequirements([], {}, {})
- for i, ireq in enumerate(root_ireqs):
- if ireq.constraint:
- # Ensure we only accept valid constraints
- problem = check_invalid_constraint_type(ireq)
- if problem:
- raise InstallationError(problem)
- if not ireq.match_markers():
- continue
- assert ireq.name, "Constraint must be named"
- name = canonicalize_name(ireq.name)
- if name in collected.constraints:
- collected.constraints[name] &= ireq
- else:
- collected.constraints[name] = Constraint.from_ireq(ireq)
- else:
- reqs = list(
- self._make_requirements_from_install_req(
- ireq,
- requested_extras=(),
- )
- )
- if not reqs:
- continue
- template = reqs[0]
- if ireq.user_supplied and template.name not in collected.user_requested:
- collected.user_requested[template.name] = i
- collected.requirements.extend(reqs)
- # Put requirements with extras at the end of the root requires. This does not
- # affect resolvelib's picking preference but it does affect its initial criteria
- # population: by putting extras at the end we enable the candidate finder to
- # present resolvelib with a smaller set of candidates, already taking into
- # account any non-transient constraints on the associated base. This means
- # resolvelib will have fewer candidates to visit and reject.
- # Python's list sort is stable, meaning relative order is kept for objects with
- # the same key.
- collected.requirements.sort(key=lambda r: r.name != r.project_name)
- return collected
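The boolean-key stable sort above can be shown in isolation; the package names below are made up:

# Entries whose name differs from their project name (i.e. extras variants)
# move to the end, while relative order within each group is preserved.
reqs = [("pkg-a[test]", "pkg-a"), ("pkg-b", "pkg-b"),
        ("pkg-c[docs]", "pkg-c"), ("pkg-d", "pkg-d")]
reqs.sort(key=lambda r: r[0] != r[1])  # False (no extras) sorts before True
print(reqs)
# [('pkg-b', 'pkg-b'), ('pkg-d', 'pkg-d'), ('pkg-a[test]', 'pkg-a'), ('pkg-c[docs]', 'pkg-c')]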
-
- def make_requirement_from_candidate(
- self, candidate: Candidate
- ) -> ExplicitRequirement:
- return ExplicitRequirement(candidate)
-
- def make_requirements_from_spec(
- self,
- specifier: str,
- comes_from: Optional[InstallRequirement],
- requested_extras: Iterable[str] = (),
- ) -> Iterator[Requirement]:
- """
- Returns requirement objects associated with the given specifier. In most cases
- this will be a single object but the following special cases exist:
- - the specifier has markers that do not apply -> result is empty
- - the specifier has both a constraint and extras -> the result is split
- into two requirement objects: one with the constraint and one with the
- extras. This allows centralized constraint handling for the base,
- resulting in fewer candidate rejections.
- """
- ireq = self._make_install_req_from_spec(specifier, comes_from)
- return self._make_requirements_from_install_req(ireq, requested_extras)
-
- def make_requires_python_requirement(
- self,
- specifier: SpecifierSet,
- ) -> Optional[Requirement]:
- if self._ignore_requires_python:
- return None
- # Don't bother creating a dependency for an empty Requires-Python.
- if not str(specifier):
- return None
- return RequiresPythonRequirement(specifier, self._python_candidate)
-
- def get_wheel_cache_entry(
- self, link: Link, name: Optional[str]
- ) -> Optional[CacheEntry]:
- """Look up the link in the wheel cache.
-
- If ``preparer.require_hashes`` is True, don't use the wheel cache,
- because cached wheels, always built locally, have different hashes
- than the files downloaded from the index server and thus throw false
- hash mismatches. Furthermore, cached wheels at present have
- nondeterministic contents due to file modification times.
- """
- if self._wheel_cache is None:
- return None
- return self._wheel_cache.get_cache_entry(
- link=link,
- package_name=name,
- supported_tags=get_supported(),
- )
-
- def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
- # TODO: Are there more cases this needs to return True? Editable?
- dist = self._installed_dists.get(candidate.project_name)
- if dist is None: # Not installed, no uninstallation required.
- return None
-
- # We're installing into the global site. The current installation must
- # be uninstalled, no matter whether it's in the global or user site,
- # because the user site installation has precedence over the global one.
- if not self._use_user_site:
- return dist
-
- # We're installing into user site. Remove the user site installation.
- if dist.in_usersite:
- return dist
-
- # We're installing into user site, but the installed incompatible
- # package is in the global site. We can't uninstall that, and would let
- # the new user installation "shadow" it. But shadowing won't work
- # in virtual environments, so we error out.
- if running_under_virtualenv() and dist.in_site_packages:
- message = (
- f"Will not install to the user site because it will lack "
- f"sys.path precedence to {dist.raw_name} in {dist.location}"
- )
- raise InstallationError(message)
- return None
-
- def _report_requires_python_error(
- self, causes: Sequence["ConflictCause"]
- ) -> UnsupportedPythonVersion:
- assert causes, "Requires-Python error reported with no cause"
-
- version = self._python_candidate.version
-
- if len(causes) == 1:
- specifier = str(causes[0].requirement.specifier)
- message = (
- f"Package {causes[0].parent.name!r} requires a different "
- f"Python: {version} not in {specifier!r}"
- )
- return UnsupportedPythonVersion(message)
-
- message = f"Packages require a different Python. {version} not in:"
- for cause in causes:
- package = cause.parent.format_for_error()
- specifier = str(cause.requirement.specifier)
- message += f"\n{specifier!r} (required by {package})"
- return UnsupportedPythonVersion(message)
-
- def _report_single_requirement_conflict(
- self, req: Requirement, parent: Optional[Candidate]
- ) -> DistributionNotFound:
- if parent is None:
- req_disp = str(req)
- else:
- req_disp = f"{req} (from {parent.name})"
-
- cands = self._finder.find_all_candidates(req.project_name)
- skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
-
- versions_set: Set[CandidateVersion] = set()
- yanked_versions_set: Set[CandidateVersion] = set()
- for c in cands:
- is_yanked = c.link.is_yanked if c.link else False
- if is_yanked:
- yanked_versions_set.add(c.version)
- else:
- versions_set.add(c.version)
-
- versions = [str(v) for v in sorted(versions_set)]
- yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
-
- if yanked_versions:
- # Saying "version X is yanked" isn't entirely accurate.
- # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
- logger.critical(
- "Ignored the following yanked versions: %s",
- ", ".join(yanked_versions) or "none",
- )
- if skipped_by_requires_python:
- logger.critical(
- "Ignored the following versions that require a different python "
- "version: %s",
- "; ".join(skipped_by_requires_python) or "none",
- )
- logger.critical(
- "Could not find a version that satisfies the requirement %s "
- "(from versions: %s)",
- req_disp,
- ", ".join(versions) or "none",
- )
- if str(req) == "requirements.txt":
- logger.info(
- "HINT: You are attempting to install a package literally "
- 'named "requirements.txt" (which cannot exist). Consider '
- "using the '-r' flag to install the packages listed in "
- "requirements.txt"
- )
-
- return DistributionNotFound(f"No matching distribution found for {req}")
-
- def get_installation_error(
- self,
- e: "ResolutionImpossible[Requirement, Candidate]",
- constraints: Dict[str, Constraint],
- ) -> InstallationError:
- assert e.causes, "Installation error reported with no cause"
-
- # If one of the things we can't solve is "we need Python X.Y",
- # that is what we report.
- requires_python_causes = [
- cause
- for cause in e.causes
- if isinstance(cause.requirement, RequiresPythonRequirement)
- and not cause.requirement.is_satisfied_by(self._python_candidate)
- ]
- if requires_python_causes:
- # The comprehension above makes sure all Requirement instances are
- # RequiresPythonRequirement, so let's cast for convenience.
- return self._report_requires_python_error(
- cast("Sequence[ConflictCause]", requires_python_causes),
- )
-
- # Otherwise, we have a set of causes which can't all be satisfied
- # at once.
-
- # The simplest case is when we have *one* cause that can't be
- # satisfied. We just report that case.
- if len(e.causes) == 1:
- req, parent = e.causes[0]
- if req.name not in constraints:
- return self._report_single_requirement_conflict(req, parent)
-
- # OK, we now have a list of requirements that can't all be
- # satisfied at once.
-
- # A couple of formatting helpers
- def text_join(parts: List[str]) -> str:
- if len(parts) == 1:
- return parts[0]
-
- return ", ".join(parts[:-1]) + " and " + parts[-1]
-
- def describe_trigger(parent: Candidate) -> str:
- ireq = parent.get_install_requirement()
- if not ireq or not ireq.comes_from:
- return f"{parent.name}=={parent.version}"
- if isinstance(ireq.comes_from, InstallRequirement):
- return str(ireq.comes_from.name)
- return str(ireq.comes_from)
-
- triggers = set()
- for req, parent in e.causes:
- if parent is None:
- # This is a root requirement, so we can report it directly
- trigger = req.format_for_error()
- else:
- trigger = describe_trigger(parent)
- triggers.add(trigger)
-
- if triggers:
- info = text_join(sorted(triggers))
- else:
- info = "the requested packages"
-
- msg = (
- f"Cannot install {info} because these package versions "
- "have conflicting dependencies."
- )
- logger.critical(msg)
- msg = "\nThe conflict is caused by:"
-
- relevant_constraints = set()
- for req, parent in e.causes:
- if req.name in constraints:
- relevant_constraints.add(req.name)
- msg = msg + "\n "
- if parent:
- msg = msg + f"{parent.name} {parent.version} depends on "
- else:
- msg = msg + "The user requested "
- msg = msg + req.format_for_error()
- for key in relevant_constraints:
- spec = constraints[key].specifier
- msg += f"\n The user requested (constraint) {key}{spec}"
-
- msg = (
- msg
- + "\n\n"
- + "To fix this you could try to:\n"
- + "1. loosen the range of package versions you've specified\n"
- + "2. remove package versions to allow pip attempt to solve "
- + "the dependency conflict\n"
- )
-
- logger.info(msg)
-
- return DistributionNotFound(
- "ResolutionImpossible: for help visit "
- "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
- "#dealing-with-dependency-conflicts"
- )
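A toy rendering of the conflict summary assembled above, reusing the same joining rule as the text_join helper defined in get_installation_error; the trigger strings are hypothetical:

def text_join(parts):
    # "a", "a and b", "a, b and c"
    if len(parts) == 1:
        return parts[0]
    return ", ".join(parts[:-1]) + " and " + parts[-1]

triggers = sorted({"pkg-a==1.0", "pkg-b==2.3"})  # made-up triggers
print(f"Cannot install {text_join(triggers)} because these package versions "
      "have conflicting dependencies.")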
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
deleted file mode 100644
index 8663097..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""Utilities to lazily create and visit candidates found.
-
-Creating and visiting a candidate is a *very* costly operation. It involves
-fetching, extracting, potentially building modules from source, and verifying
-distribution metadata. It is therefore crucial for performance to keep
-everything here lazy all the way down, so we only touch candidates that we
-absolutely need, and not "download the world" when we only need one version of
-something.
-"""
-
-import functools
-from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
-
-from pip._vendor.packaging.version import _BaseVersion
-
-from .base import Candidate
-
-IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
-
-if TYPE_CHECKING:
- SequenceCandidate = Sequence[Candidate]
-else:
- # For compatibility: Python before 3.9 does not support using [] on the
- # Sequence class.
- #
- # >>> from collections.abc import Sequence
- # >>> Sequence[str]
- # Traceback (most recent call last):
- # File "<stdin>", line 1, in <module>
- # TypeError: 'ABCMeta' object is not subscriptable
- #
- # TODO: Remove this block after dropping Python 3.8 support.
- SequenceCandidate = Sequence
-
-
-def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the package is not already installed. Candidates
- from the index are yielded in their normal ordering.
- """
- versions_found: Set[_BaseVersion] = set()
- for version, func in infos:
- if version in versions_found:
- continue
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
-
-def _iter_built_with_prepended(
- installed: Candidate, infos: Iterator[IndexCandidateInfo]
-) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the resolver prefers the already-installed
- candidate and NOT to upgrade. The installed candidate is therefore
- always yielded first, and candidates from the index follow in their
- normal ordering, skipping any whose version matches the installed one.
- """
- yield installed
- versions_found: Set[_BaseVersion] = {installed.version}
- for version, func in infos:
- if version in versions_found:
- continue
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
-
-def _iter_built_with_inserted(
- installed: Candidate, infos: Iterator[IndexCandidateInfo]
-) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the resolver prefers to upgrade an
- already-installed package. Candidates from the index are returned in their
- normal ordering, except that the installed candidate replaces the index
- candidate with the same version.
-
- The implementation iterates through and yields other candidates, inserting
- the installed candidate exactly once before we start yielding older or
- equivalent candidates, or after all other candidates if they are all newer.
- """
- versions_found: Set[_BaseVersion] = set()
- for version, func in infos:
- if version in versions_found:
- continue
- # If the installed candidate is better, yield it first.
- if installed.version >= version:
- yield installed
- versions_found.add(installed.version)
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
- # If the installed candidate is older than all other candidates.
- if installed.version not in versions_found:
- yield installed
-
-
-class FoundCandidates(SequenceCandidate):
- """A lazy sequence to provide candidates to the resolver.
-
- The intended usage is to return this from `find_matches()` so the resolver
- can iterate through the sequence multiple times, but only access the index
- page when remote packages are actually needed. This improves performance
- when suitable candidates are already installed on disk.
- """
-
- def __init__(
- self,
- get_infos: Callable[[], Iterator[IndexCandidateInfo]],
- installed: Optional[Candidate],
- prefers_installed: bool,
- incompatible_ids: Set[int],
- ):
- self._get_infos = get_infos
- self._installed = installed
- self._prefers_installed = prefers_installed
- self._incompatible_ids = incompatible_ids
-
- def __getitem__(self, index: Any) -> Any:
- # Implemented to satisfy the ABC check. This is not needed by the
- # resolver, and should not be used by the provider either (for
- # performance reasons).
- raise NotImplementedError("don't do this")
-
- def __iter__(self) -> Iterator[Candidate]:
- infos = self._get_infos()
- if not self._installed:
- iterator = _iter_built(infos)
- elif self._prefers_installed:
- iterator = _iter_built_with_prepended(self._installed, infos)
- else:
- iterator = _iter_built_with_inserted(self._installed, infos)
- return (c for c in iterator if id(c) not in self._incompatible_ids)
-
- def __len__(self) -> int:
- # Implemented to satisfy the ABC check. This is not needed by the
- # resolver, and should not be used by the provider either (for
- # performance reasons).
- raise NotImplementedError("don't do this")
-
- @functools.lru_cache(maxsize=1)
- def __bool__(self) -> bool:
- if self._prefers_installed and self._installed:
- return True
- return any(self)
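A simplified illustration of the two merge strategies used above when an installed candidate exists; deduplication and lazy candidate construction are omitted, the versions are made up, and the standalone packaging library stands in for pip's vendored copy:

from packaging.version import Version

index = [Version(v) for v in ("2.1", "2.0", "1.5", "1.0")]  # newest first
installed = Version("1.5")

def prefer_installed(installed, index):
    # Installed candidate first, then index versions other than it.
    yield installed
    yield from (v for v in index if v != installed)

def prefer_upgrade(installed, index):
    # Index order, with the installed candidate slotted in by version.
    emitted = False
    for v in index:
        if not emitted and installed >= v:
            yield installed
            emitted = True
        if v != installed:
            yield v
    if not emitted:
        yield installed

print(list(prefer_installed(installed, index)))  # [1.5, 2.1, 2.0, 1.0]
print(list(prefer_upgrade(installed, index)))    # [2.1, 2.0, 1.5, 1.0]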
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py
deleted file mode 100644
index 315fb9c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import collections
-import math
-from typing import (
- TYPE_CHECKING,
- Dict,
- Iterable,
- Iterator,
- Mapping,
- Sequence,
- TypeVar,
- Union,
-)
-
-from pip._vendor.resolvelib.providers import AbstractProvider
-
-from .base import Candidate, Constraint, Requirement
-from .candidates import REQUIRES_PYTHON_IDENTIFIER
-from .factory import Factory
-
-if TYPE_CHECKING:
- from pip._vendor.resolvelib.providers import Preference
- from pip._vendor.resolvelib.resolvers import RequirementInformation
-
- PreferenceInformation = RequirementInformation[Requirement, Candidate]
-
- _ProviderBase = AbstractProvider[Requirement, Candidate, str]
-else:
- _ProviderBase = AbstractProvider
-
-# Notes on the relationship between the provider, the factory, and the
-# candidate and requirement classes.
-#
-# The provider is a direct implementation of the resolvelib class. Its role
-# is to deliver the API that resolvelib expects.
-#
-# Rather than work with completely abstract "requirement" and "candidate"
-# concepts as resolvelib does, pip has concrete classes implementing these two
-# ideas. The API of Requirement and Candidate objects are defined in the base
-# classes, but essentially map fairly directly to the equivalent provider
-# methods. In particular, `find_matches` and `is_satisfied_by` are
-# requirement methods, and `get_dependencies` is a candidate method.
-#
-# The factory is the interface to pip's internal mechanisms. It is stateless,
-# and is created by the resolver and held as a property of the provider. It is
-# responsible for creating Requirement and Candidate objects, and provides
-# services to those objects (access to pip's finder and preparer).
-
-
-D = TypeVar("D")
-V = TypeVar("V")
-
-
-def _get_with_identifier(
- mapping: Mapping[str, V],
- identifier: str,
- default: D,
-) -> Union[D, V]:
- """Get item from a package name lookup mapping with a resolver identifier.
-
- This extra logic is needed when the target mapping is keyed by package
- name, which cannot be directly looked up with an identifier (which may
- contain requested extras). As a fallback, the extras are stripped from the
- identifier and the lookup is retried with the bare project name.
- """
- if identifier in mapping:
- return mapping[identifier]
- # HACK: Theoretically we should check whether this identifier is a valid
- # "NAME[EXTRAS]" format, and parse out the name part with packaging or
- # some regular expression. But since pip's resolver only spits out three
- # kinds of identifiers: normalized PEP 503 names, normalized names plus
- # extras, and Requires-Python, we can cheat a bit here.
- name, open_bracket, _ = identifier.partition("[")
- if open_bracket and name in mapping:
- return mapping[name]
- return default
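A usage sketch for the identifier fallback above; the function repeats the same logic (minus comments) so the snippet runs standalone, and the package names are made up:

def get_with_identifier(mapping, identifier, default):
    if identifier in mapping:
        return mapping[identifier]
    # "NAME[EXTRAS]" falls back to the bare "NAME" key.
    name, open_bracket, _ = identifier.partition("[")
    if open_bracket and name in mapping:
        return mapping[name]
    return default

user_requested = {"requests": 0, "rich": 1}
print(get_with_identifier(user_requested, "requests[socks]", default=None))  # 0
print(get_with_identifier(user_requested, "flask", default=None))            # None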
-
-
-class PipProvider(_ProviderBase):
- """Pip's provider implementation for resolvelib.
-
- :params constraints: A mapping of constraints specified by the user. Keys
- are canonicalized project names.
- :params ignore_dependencies: Whether the user specified ``--no-deps``.
- :params upgrade_strategy: The user-specified upgrade strategy.
- :params user_requested: A set of canonicalized package names that the user
- supplied for pip to install/upgrade.
- """
-
- def __init__(
- self,
- factory: Factory,
- constraints: Dict[str, Constraint],
- ignore_dependencies: bool,
- upgrade_strategy: str,
- user_requested: Dict[str, int],
- ) -> None:
- self._factory = factory
- self._constraints = constraints
- self._ignore_dependencies = ignore_dependencies
- self._upgrade_strategy = upgrade_strategy
- self._user_requested = user_requested
- self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
-
- def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
- return requirement_or_candidate.name
-
- def get_preference(
- self,
- identifier: str,
- resolutions: Mapping[str, Candidate],
- candidates: Mapping[str, Iterator[Candidate]],
- information: Mapping[str, Iterable["PreferenceInformation"]],
- backtrack_causes: Sequence["PreferenceInformation"],
- ) -> "Preference":
- """Produce a sort key for given requirement based on preference.
-
- The lower the return value is, the more preferred this group of
- arguments is.
-
- Currently pip considers the following in order:
-
- * Prefer if any of the known requirements is "direct", e.g. points to an
- explicit URL.
- * If equal, prefer if any requirement is "pinned", i.e. contains
- operator ``===`` or ``==``.
- * If equal, calculate an approximate "depth" and resolve requirements
- closer to the user-specified requirements first. If the depth cannot
- be determined (e.g. due to no matching parents), it is considered
- infinite.
- * Order user-specified requirements by the order they are specified.
- * If equal, prefers "non-free" requirements, i.e. contains at least one
- operator, such as ``>=`` or ``<``.
- * If equal, order alphabetically for consistency (helps debuggability).
- """
- try:
- next(iter(information[identifier]))
- except StopIteration:
- # There is no information for this identifier, so there are no known
- # candidates.
- has_information = False
- else:
- has_information = True
-
- if has_information:
- lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
- candidate, ireqs = zip(*lookups)
- else:
- candidate, ireqs = None, ()
-
- operators = [
- specifier.operator
- for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
- for specifier in specifier_set
- ]
-
- direct = candidate is not None
- pinned = any(op[:2] == "==" for op in operators)
- unfree = bool(operators)
-
- try:
- requested_order: Union[int, float] = self._user_requested[identifier]
- except KeyError:
- requested_order = math.inf
- if has_information:
- parent_depths = (
- self._known_depths[parent.name] if parent is not None else 0.0
- for _, parent in information[identifier]
- )
- inferred_depth = min(d for d in parent_depths) + 1.0
- else:
- inferred_depth = math.inf
- else:
- inferred_depth = 1.0
- self._known_depths[identifier] = inferred_depth
-
- requested_order = self._user_requested.get(identifier, math.inf)
-
- # Requires-Python has only one candidate and the check is basically
- # free, so we always do it first to avoid needless work if it fails.
- requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
-
- # Prefer the causes of backtracking on the assumption that the problem
- # resolving the dependency tree is related to the failures that caused
- # the backtracking
- backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
-
- return (
- not requires_python,
- not direct,
- not pinned,
- not backtrack_cause,
- inferred_depth,
- requested_order,
- not unfree,
- identifier,
- )
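A synthetic demonstration of how the preference tuples returned above compare: Python orders them lexicographically, so min() picks the identifier resolved first. Every value below is invented for illustration.

import math

preference = {
    # (not requires_python, not direct, not pinned, not backtrack_cause,
    #  inferred_depth, requested_order, not unfree, identifier)
    "<Requires-Python>": (False, True, True, True, 1.0, math.inf, True, "<Requires-Python>"),
    "pinned-pkg": (True, True, False, True, 2.0, 0, False, "pinned-pkg"),
    "loose-pkg": (True, True, True, True, 2.0, math.inf, True, "loose-pkg"),
}
print(min(preference, key=preference.get))     # '<Requires-Python>' goes first
print(sorted(preference, key=preference.get))  # ['<Requires-Python>', 'pinned-pkg', 'loose-pkg']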
-
- def find_matches(
- self,
- identifier: str,
- requirements: Mapping[str, Iterator[Requirement]],
- incompatibilities: Mapping[str, Iterator[Candidate]],
- ) -> Iterable[Candidate]:
- def _eligible_for_upgrade(identifier: str) -> bool:
- """Are upgrades allowed for this project?
-
- This checks the upgrade strategy, and whether the project was one
- that the user specified on the command line, in order to decide
- whether we should upgrade if there's a newer version available.
-
- (Note that we don't need access to the `--upgrade` flag, because
- an upgrade strategy of "to-satisfy-only" means that `--upgrade`
- was not specified).
- """
- if self._upgrade_strategy == "eager":
- return True
- elif self._upgrade_strategy == "only-if-needed":
- user_order = _get_with_identifier(
- self._user_requested,
- identifier,
- default=None,
- )
- return user_order is not None
- return False
-
- constraint = _get_with_identifier(
- self._constraints,
- identifier,
- default=Constraint.empty(),
- )
- return self._factory.find_candidates(
- identifier=identifier,
- requirements=requirements,
- constraint=constraint,
- prefers_installed=(not _eligible_for_upgrade(identifier)),
- incompatibilities=incompatibilities,
- )
-
- def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
- return requirement.is_satisfied_by(candidate)
-
- def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
- with_requires = not self._ignore_dependencies
- return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
-
- @staticmethod
- def is_backtrack_cause(
- identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
- ) -> bool:
- for backtrack_cause in backtrack_causes:
- if identifier == backtrack_cause.requirement.name:
- return True
- if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
- return True
- return False
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py
deleted file mode 100644
index 12adeff..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from collections import defaultdict
-from logging import getLogger
-from typing import Any, DefaultDict
-
-from pip._vendor.resolvelib.reporters import BaseReporter
-
-from .base import Candidate, Requirement
-
-logger = getLogger(__name__)
-
-
-class PipReporter(BaseReporter):
- def __init__(self) -> None:
- self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
-
- self._messages_at_reject_count = {
- 1: (
- "pip is looking at multiple versions of {package_name} to "
- "determine which version is compatible with other "
- "requirements. This could take a while."
- ),
- 8: (
- "pip is still looking at multiple versions of {package_name} to "
- "determine which version is compatible with other "
- "requirements. This could take a while."
- ),
- 13: (
- "This is taking longer than usual. You might need to provide "
- "the dependency resolver with stricter constraints to reduce "
- "runtime. See https://pip.pypa.io/warnings/backtracking for "
- "guidance. If you want to abort this run, press Ctrl + C."
- ),
- }
-
- def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
- self.reject_count_by_package[candidate.name] += 1
-
- count = self.reject_count_by_package[candidate.name]
- if count not in self._messages_at_reject_count:
- return
-
- message = self._messages_at_reject_count[count]
- logger.info("INFO: %s", message.format(package_name=candidate.name))
-
- msg = "Will try a different candidate, due to conflict:"
- for req_info in criterion.information:
- req, parent = req_info.requirement, req_info.parent
- # Inspired by Factory.get_installation_error
- msg += "\n "
- if parent:
- msg += f"{parent.name} {parent.version} depends on "
- else:
- msg += "The user requested "
- msg += req.format_for_error()
- logger.debug(msg)
-
-
-class PipDebuggingReporter(BaseReporter):
- """A reporter that does an info log for every event it sees."""
-
- def starting(self) -> None:
- logger.info("Reporter.starting()")
-
- def starting_round(self, index: int) -> None:
- logger.info("Reporter.starting_round(%r)", index)
-
- def ending_round(self, index: int, state: Any) -> None:
- logger.info("Reporter.ending_round(%r, state)", index)
-
- def ending(self, state: Any) -> None:
- logger.info("Reporter.ending(%r)", state)
-
- def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
- logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
-
- def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
- logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)
-
- def pinning(self, candidate: Candidate) -> None:
- logger.info("Reporter.pinning(%r)", candidate)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py
deleted file mode 100644
index 4af4a9f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py
+++ /dev/null
@@ -1,166 +0,0 @@
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-
-from pip._internal.req.constructors import install_req_drop_extras
-from pip._internal.req.req_install import InstallRequirement
-
-from .base import Candidate, CandidateLookup, Requirement, format_name
-
-
-class ExplicitRequirement(Requirement):
- def __init__(self, candidate: Candidate) -> None:
- self.candidate = candidate
-
- def __str__(self) -> str:
- return str(self.candidate)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self.candidate!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- # No need to canonicalize - the candidate did this
- return self.candidate.project_name
-
- @property
- def name(self) -> str:
- # No need to canonicalize - the candidate did this
- return self.candidate.name
-
- def format_for_error(self) -> str:
- return self.candidate.format_for_error()
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return self.candidate, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- return candidate == self.candidate
-
-
-class SpecifierRequirement(Requirement):
- def __init__(self, ireq: InstallRequirement) -> None:
- assert ireq.link is None, "This is a link, not a specifier"
- self._ireq = ireq
- self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
-
- def __str__(self) -> str:
- return str(self._ireq.req)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
- return canonicalize_name(self._ireq.req.name)
-
- @property
- def name(self) -> str:
- return format_name(self.project_name, self._extras)
-
- def format_for_error(self) -> str:
- # Convert comma-separated specifiers into "A, B, ..., F and G"
- # This makes the specifier a bit more "human readable", without
- # risking a change in meaning. (Hopefully! Not all edge cases have
- # been checked)
- parts = [s.strip() for s in str(self).split(",")]
- if len(parts) == 0:
- return ""
- elif len(parts) == 1:
- return parts[0]
-
- return ", ".join(parts[:-1]) + " and " + parts[-1]
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return None, self._ireq
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- assert candidate.name == self.name, (
- f"Internal issue: Candidate is not for this requirement "
- f"{candidate.name} vs {self.name}"
- )
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
- spec = self._ireq.req.specifier
- return spec.contains(candidate.version, prereleases=True)
-
-
-class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
- """
- Requirement backed by an install requirement on a base package.
- Trims extras from its install requirement if there are any.
- """
-
- def __init__(self, ireq: InstallRequirement) -> None:
- assert ireq.link is None, "This is a link, not a specifier"
- self._ireq = install_req_drop_extras(ireq)
- self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
-
-
-class RequiresPythonRequirement(Requirement):
- """A requirement representing Requires-Python metadata."""
-
- def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
- self.specifier = specifier
- self._candidate = match
-
- def __str__(self) -> str:
- return f"Python {self.specifier}"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self.specifier)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- return self._candidate.project_name
-
- @property
- def name(self) -> str:
- return self._candidate.name
-
- def format_for_error(self) -> str:
- return str(self)
-
- def get_candidate_lookup(self) -> CandidateLookup:
- if self.specifier.contains(self._candidate.version, prereleases=True):
- return self._candidate, None
- return None, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- assert candidate.name == self._candidate.name, "Not Python candidate"
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- return self.specifier.contains(candidate.version, prereleases=True)
-
-
-class UnsatisfiableRequirement(Requirement):
- """A requirement that cannot be satisfied."""
-
- def __init__(self, name: NormalizedName) -> None:
- self._name = name
-
- def __str__(self) -> str:
- return f"{self._name} (unavailable)"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._name)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- return self._name
-
- @property
- def name(self) -> str:
- return self._name
-
- def format_for_error(self) -> str:
- return str(self)
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return None, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- return False
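A small demonstration of the prereleases=True behaviour relied on in the is_satisfied_by methods above, using the standalone packaging library; the versions are examples:

# By default a SpecifierSet excludes pre-releases unless the specifier itself
# asks for one, so the resolver opts back in explicitly.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=1.0")
print(spec.contains(Version("2.0a1")))                    # False
print(spec.contains(Version("2.0a1"), prereleases=True))  # True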
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py
deleted file mode 100644
index c12beef..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py
+++ /dev/null
@@ -1,317 +0,0 @@
-import contextlib
-import functools
-import logging
-import os
-from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
-from pip._vendor.resolvelib import Resolver as RLResolver
-from pip._vendor.resolvelib.structs import DirectedGraph
-
-from pip._internal.cache import WheelCache
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import install_req_extend_extras
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_set import RequirementSet
-from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
-from pip._internal.resolution.resolvelib.provider import PipProvider
-from pip._internal.resolution.resolvelib.reporter import (
- PipDebuggingReporter,
- PipReporter,
-)
-from pip._internal.utils.packaging import get_requirement
-
-from .base import Candidate, Requirement
-from .factory import Factory
-
-if TYPE_CHECKING:
- from pip._vendor.resolvelib.resolvers import Result as RLResult
-
- Result = RLResult[Requirement, Candidate, str]
-
-
-logger = logging.getLogger(__name__)
-
-
-class Resolver(BaseResolver):
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(
- self,
- preparer: RequirementPreparer,
- finder: PackageFinder,
- wheel_cache: Optional[WheelCache],
- make_install_req: InstallRequirementProvider,
- use_user_site: bool,
- ignore_dependencies: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- force_reinstall: bool,
- upgrade_strategy: str,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ):
- super().__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- self.factory = Factory(
- finder=finder,
- preparer=preparer,
- make_install_req=make_install_req,
- wheel_cache=wheel_cache,
- use_user_site=use_user_site,
- force_reinstall=force_reinstall,
- ignore_installed=ignore_installed,
- ignore_requires_python=ignore_requires_python,
- py_version_info=py_version_info,
- )
- self.ignore_dependencies = ignore_dependencies
- self.upgrade_strategy = upgrade_strategy
- self._result: Optional[Result] = None
-
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- collected = self.factory.collect_root_requirements(root_reqs)
- provider = PipProvider(
- factory=self.factory,
- constraints=collected.constraints,
- ignore_dependencies=self.ignore_dependencies,
- upgrade_strategy=self.upgrade_strategy,
- user_requested=collected.user_requested,
- )
- if "PIP_RESOLVER_DEBUG" in os.environ:
- reporter: BaseReporter = PipDebuggingReporter()
- else:
- reporter = PipReporter()
- resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
- provider,
- reporter,
- )
-
- try:
- limit_how_complex_resolution_can_be = 200000
- result = self._result = resolver.resolve(
- collected.requirements, max_rounds=limit_how_complex_resolution_can_be
- )
-
- except ResolutionImpossible as e:
- error = self.factory.get_installation_error(
- cast("ResolutionImpossible[Requirement, Candidate]", e),
- collected.constraints,
- )
- raise error from e
-
- req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
- # Process candidates with extras last to ensure their base equivalent is
- # already in the req_set if appropriate.
- # Python's sort is stable so using a binary key function keeps relative order
- # within both subsets.
- for candidate in sorted(
- result.mapping.values(), key=lambda c: c.name != c.project_name
- ):
- ireq = candidate.get_install_requirement()
- if ireq is None:
- if candidate.name != candidate.project_name:
- # extend existing req's extras
- with contextlib.suppress(KeyError):
- req = req_set.get_requirement(candidate.project_name)
- req_set.add_named_requirement(
- install_req_extend_extras(
- req, get_requirement(candidate.name).extras
- )
- )
- continue
-
- # Check if there is already an installation under the same name,
- # and set a flag for later stages to uninstall it, if needed.
- installed_dist = self.factory.get_dist_to_uninstall(candidate)
- if installed_dist is None:
- # There is no existing installation -- nothing to uninstall.
- ireq.should_reinstall = False
- elif self.factory.force_reinstall:
- # The --force-reinstall flag is set -- reinstall.
- ireq.should_reinstall = True
- elif installed_dist.version != candidate.version:
- # The installation is different in version -- reinstall.
- ireq.should_reinstall = True
- elif candidate.is_editable or installed_dist.editable:
- # The incoming distribution is editable, or different in
- # editable-ness to installation -- reinstall.
- ireq.should_reinstall = True
- elif candidate.source_link and candidate.source_link.is_file:
- # The incoming distribution is under file://
- if candidate.source_link.is_wheel:
- # is a local wheel -- do nothing.
- logger.info(
- "%s is already installed with the same version as the "
- "provided wheel. Use --force-reinstall to force an "
- "installation of the wheel.",
- ireq.name,
- )
- continue
-
- # is a local sdist or path -- reinstall
- ireq.should_reinstall = True
- else:
- continue
-
- link = candidate.source_link
- if link and link.is_yanked:
- # The reason can contain non-ASCII characters; Unicode
- # is required for Python 2.
- msg = (
- "The candidate selected for download or install is a "
- "yanked version: {name!r} candidate (version {version} "
- "at {link})\nReason for being yanked: {reason}"
- ).format(
- name=candidate.name,
- version=candidate.version,
- link=link,
- reason=link.yanked_reason or "<none given>",
- )
- logger.warning(msg)
-
- req_set.add_named_requirement(ireq)
-
- reqs = req_set.all_requirements
- self.factory.preparer.prepare_linked_requirements_more(reqs)
- for req in reqs:
- req.prepared = True
- req.needs_more_preparation = False
- return req_set
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- """Get order for installation of requirements in RequirementSet.
-
- The returned list contains a requirement before another that depends on
- it. This helps ensure that the environment is kept consistent as they
- get installed one-by-one.
-
- The current implementation creates a topological ordering of the
- dependency graph, giving more weight to packages with fewer
- or no dependencies, while breaking any cycles in the graph at
- arbitrary points. We make no guarantees about where the cycle
- would be broken, other than that it *would* be broken.
- """
- assert self._result is not None, "must call resolve() first"
-
- if not req_set.requirements:
- # Nothing is left to install, so we do not need an order.
- return []
-
- graph = self._result.graph
- weights = get_topological_weights(graph, set(req_set.requirements.keys()))
-
- sorted_items = sorted(
- req_set.requirements.items(),
- key=functools.partial(_req_set_item_sorter, weights=weights),
- reverse=True,
- )
- return [ireq for _, ireq in sorted_items]
-
-
-def get_topological_weights(
- graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
-) -> Dict[Optional[str], int]:
- """Assign weights to each node based on how "deep" they are.
-
- This implementation may change at any point in the future without prior
- notice.
-
- We first simplify the dependency graph by pruning any leaves and giving them
- the highest weight: a package without any dependencies should be installed
- first. This is done again and again in the same way, giving ever less weight
- to the newly found leaves. The loop stops when no leaves are left: all
- remaining packages have at least one dependency left in the graph.
-
- Then we continue with the remaining graph, taking the length of the longest
- path from the root to each node, ignoring any paths that contain a single
- node twice (i.e. cycles). This is done through a depth-first search through
- the graph, while keeping track of the path to the node.
-
- Cycles in the graph would result in a node being revisited while also
- being on its own path. In this case, take no action. This helps ensure we
- don't get stuck in a cycle.
-
- When assigning weight, the longer path (i.e. larger length) is preferred.
-
- We are only interested in the weights of packages that are in the
- requirement_keys.
- """
- path: Set[Optional[str]] = set()
- weights: Dict[Optional[str], int] = {}
-
- def visit(node: Optional[str]) -> None:
- if node in path:
- # We hit a cycle, so we'll break it here.
- return
-
- # Time to visit the children!
- path.add(node)
- for child in graph.iter_children(node):
- visit(child)
- path.remove(node)
-
- if node not in requirement_keys:
- return
-
- last_known_parent_count = weights.get(node, 0)
- weights[node] = max(last_known_parent_count, len(path))
-
- # Simplify the graph, pruning leaves that have no dependencies.
- # This is needed for large graphs (say over 200 packages) because the
- # `visit` function becomes exponentially slower in that case, taking minutes.
- # See https://github.com/pypa/pip/issues/10557
- # We will loop until we explicitly break the loop.
- while True:
- leaves = set()
- for key in graph:
- if key is None:
- continue
- for _child in graph.iter_children(key):
- # This means we have at least one child
- break
- else:
- # No child.
- leaves.add(key)
- if not leaves:
- # We are done simplifying.
- break
- # Calculate the weight for the leaves.
- weight = len(graph) - 1
- for leaf in leaves:
- if leaf not in requirement_keys:
- continue
- weights[leaf] = weight
- # Remove the leaves from the graph, making it simpler.
- for leaf in leaves:
- graph.remove(leaf)
-
- # Visit the remaining graph.
- # `None` is guaranteed to be the root node by resolvelib.
- visit(None)
-
- # Sanity check: all requirement keys should be in the weights,
- # and no other keys should be in the weights.
- difference = set(weights.keys()).difference(requirement_keys)
- assert not difference, difference
-
- return weights
-
-
-def _req_set_item_sorter(
- item: Tuple[str, InstallRequirement],
- weights: Dict[Optional[str], int],
-) -> Tuple[int, str]:
- """Key function used to sort install requirements for installation.
-
- Based on the "weight" mapping calculated in ``get_installation_order()``.
- The canonical package name is returned as the second member as a tie-
- breaker to ensure the result is predictable, which is useful in tests.
- """
- name = canonicalize_name(item[0])
- return weights[name], name
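A hypothetical example of the final ordering step: larger weights (dependency-free leaves receive the largest) sort first under reverse=True, with the canonical name as a deterministic tie-breaker. The weights and names below are made up, and "<ireq>" is a placeholder for an InstallRequirement.

from packaging.utils import canonicalize_name

weights = {"idna": 5, "urllib3": 5, "requests": 1}  # made-up weights
items = [("requests", "<ireq>"), ("urllib3", "<ireq>"), ("idna", "<ireq>")]

def sort_key(item):
    name = canonicalize_name(item[0])
    return weights[name], name

print([name for name, _ in sorted(items, key=sort_key, reverse=True)])
# ['urllib3', 'idna', 'requests'] -- leaves install before the package that needs them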