Diffstat (limited to 'venv/lib/python3.11/site-packages/pip/_internal')
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__init__.py | 18
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc | bin 890 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc | bin 16079 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc | bin 14395 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc | bin 19788 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc | bin 37449 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc | bin 759 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc | bin 5617 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc | bin 11774 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc | bin 15177 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/build_env.py | 311
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cache.py | 290
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py | 4
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc | bin 294 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc | bin 10249 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc | bin 11868 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc | bin 33774 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc | bin 2116 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc | bin 2586 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc | bin 5530 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc | bin 16956 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc | bin 3178 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc | bin 20333 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc | bin 8843 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc | bin 382 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py | 172
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py | 236
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py | 1074
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py | 27
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/main.py | 79
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py | 134
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py | 294
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py | 68
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py | 505
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py | 159
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py | 6
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__init__.py | 132
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc | bin 4462 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc | bin 10882 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc | bin 2417 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc | bin 5632 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc | bin 14860 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc | bin 12205 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc | bin 7951 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc | bin 4667 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc | bin 3364 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc | bin 1976 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc | bin 7730 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc | bin 4452 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc | bin 31153 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc | bin 17256 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc | bin 8958 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc | bin 11301 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc | bin 5152 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc | bin 9408 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py | 225
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/check.py | 54
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py | 130
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py | 280
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py | 201
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/download.py | 147
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py | 108
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py | 59
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/help.py | 41
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/index.py | 139
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/inspect.py | 92
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/install.py | 774
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/list.py | 368
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/search.py | 174
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/show.py | 189
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/uninstall.py | 113
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/commands/wheel.py | 183
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/configuration.py | 383
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py | 21
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc | bin 1044 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc | bin 3136 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc | bin 1853 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc | bin 9377 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc | bin 2445 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py | 51
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py | 29
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py | 156
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py | 40
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/exceptions.py | 728
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py | 2
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc | bin 248 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc | bin 24593 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc | bin 44157 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc | bin 13964 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/collector.py | 507
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py | 1027
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/index/sources.py | 285
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/__init__.py | 467
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc | bin 18186 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc | bin 7560 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc | bin 8890 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc | bin 4011 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/_distutils.py | 172
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/_sysconfig.py | 213
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/locations/base.py | 81
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/main.py | 12
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/__init__.py | 128
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc | bin 6510 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc | bin 3573 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc | bin 38687 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc | bin 17544 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/_json.py | 84
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py | 702
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py | 6
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc | bin 399 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc | bin 3572 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc | bin 14882 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc | bin 12515 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py | 55
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_dists.py | 227
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py | 189
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/metadata/pkg_resources.py | 278
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py | 2
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc | bin 282 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc | bin 2090 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc | bin 12775 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc | bin 4636 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc | bin 1907 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc | bin 2613 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc | bin 28627 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc | bin 1273 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc | bin 5836 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc | bin 2004 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc | bin 5303 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc | bin 6429 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py | 30
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py | 235
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py | 78
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/index.py | 28
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py | 56
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/link.py | 579
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/scheme.py | 31
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/search_scope.py | 132
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/selection_prefs.py | 51
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/target_python.py | 122
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/models/wheel.py | 92
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__init__.py | 2
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc | bin 270 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc | bin 23997 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc | bin 7938 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc | bin 9548 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc | bin 13031 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc | bin 21447 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc | bin 2419 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc | bin 3257 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/auth.py | 561
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/cache.py | 106
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/download.py | 186
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/lazy_wheel.py | 210
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/session.py | 520
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/utils.py | 96
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/network/xmlrpc.py | 62
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc | bin 208 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc | bin 8471 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc | bin 11604 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc | bin 27833 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc | bin 214 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc | bin 8935 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc | bin 2285 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc | bin 2321 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc | bin 3721 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc | bin 1951 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc | bin 2395 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc | bin 4502 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py | 139
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py | 39
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py | 41
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py | 74
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py | 37
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py | 46
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py | 102
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/check.py | 187
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py | 255
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py | 2
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc | bin 282 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc | bin 2195 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc | bin 40176 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/editable_legacy.py | 46
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/install/wheel.py | 734
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/operations/prepare.py | 730
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/pyproject.py | 179
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__init__.py | 92
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc | bin 4396 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc | bin 23404 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc | bin 23116 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc | bin 40263 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc | bin 7964 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc | bin 37329 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/constructors.py | 576
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/req_file.py | 554
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/req_install.py | 923
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/req_set.py | 119
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/req/req_uninstall.py | 649
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc | bin 208 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc | bin 1379 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py | 20
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc | bin 215 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc | bin 23672 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py | 598
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc | bin 219 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc | bin 9307 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc | bin 31401 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc | bin 35794 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc | bin 6767 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc | bin 11458 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc | bin 5449 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc | bin 12223 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc | bin 13460 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py | 141
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py | 597
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py | 812
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py | 155
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py | 255
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py | 80
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py | 166
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py | 317
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/self_outdated_check.py | 248
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__init__.py | 0
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc | bin 203 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc | bin 4767 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc | bin 2024 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc | bin 2562 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc | bin 2270 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc | bin 6762 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc | bin 720 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc | bin 4689 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc | bin 3726 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc | bin 3558 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc | bin 2326 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc | bin 4248 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc | bin 8233 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc | bin 1319 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc | bin 2615 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc | bin 8774 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc | bin 15375 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc | bin 38620 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc | bin 2943 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc | bin 2810 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc | bin 4875 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc | bin 9904 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc | bin 13387 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc | bin 12899 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc | bin 2696 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc | bin 4943 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc | bin 7031 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py | 109
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/_log.py | 38
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/appdirs.py | 52
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/compat.py | 63
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py | 165
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/datetime.py | 11
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/deprecation.py | 120
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py | 87
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/egg_link.py | 80
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/encoding.py | 36
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py | 84
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/filesystem.py | 153
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/filetypes.py | 27
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py | 88
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/hashes.py | 151
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/logging.py | 348
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/misc.py | 783
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/models.py | 39
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/packaging.py | 57
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py | 146
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py | 260
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py | 296
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/unpacking.py | 257
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/urls.py | 62
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py | 104
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/utils/wheel.py | 134
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__init__.py | 15
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc | bin 638 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc | bin 5863 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc | bin 21378 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc | bin 8731 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc | bin 14606 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc | bin 31770 -> 0 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/bazaar.py | 112
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/git.py | 526
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/mercurial.py | 163
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/subversion.py | 324
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/vcs/versioncontrol.py | 705
-rw-r--r--  venv/lib/python3.11/site-packages/pip/_internal/wheel_builder.py | 354
294 files changed, 0 insertions, 30670 deletions
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/__init__.py
deleted file mode 100644
index 96c6b88..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__init__.py
+++ /dev/null
@@ -1,18 +0,0 @@
-from typing import List, Optional
-
-from pip._internal.utils import _log
-
-# init_logging() must be called before any call to logging.getLogger()
-# which happens at import of most modules.
-_log.init_logging()
-
-
-def main(args: Optional[List[str]] = None) -> int:
- """This is preserved for old console scripts that may still be referencing
- it.
-
- For additional details, see https://github.com/pypa/pip/issues/7498.
- """
- from pip._internal.utils.entrypoints import _wrapper
-
- return _wrapper(args)
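For context on the hunk above: the deleted main() shim exists because console-script
wrappers generated by older installers import it directly (see pypa/pip#7498). A minimal
sketch of such a legacy wrapper, hypothetical but matching the pattern that issue describes:

    import re
    import sys

    from pip._internal import main  # the shim removed by this commit

    if __name__ == "__main__":
        # Strip the "-script.py"/".exe" suffix that old setuptools stubs carried.
        sys.argv[0] = re.sub(r"(-script\.pyw?|\.exe)?$", "", sys.argv[0])
        sys.exit(main())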
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index e35d715..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc
deleted file mode 100644
index 6f98c9d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/build_env.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc
deleted file mode 100644
index a04467c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/cache.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc
deleted file mode 100644
index 205147f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/configuration.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc
deleted file mode 100644
index 9531979..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/exceptions.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc
deleted file mode 100644
index 886229c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/main.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc
deleted file mode 100644
index 434e1e0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/pyproject.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc
deleted file mode 100644
index f4a3f5c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc
deleted file mode 100644
index e466f22..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/build_env.py b/venv/lib/python3.11/site-packages/pip/_internal/build_env.py
deleted file mode 100644
index 4f704a3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/build_env.py
+++ /dev/null
@@ -1,311 +0,0 @@
-"""Build Environment used for isolation during sdist building
-"""
-
-import logging
-import os
-import pathlib
-import site
-import sys
-import textwrap
-from collections import OrderedDict
-from types import TracebackType
-from typing import TYPE_CHECKING, Iterable, List, Optional, Set, Tuple, Type, Union
-
-from pip._vendor.certifi import where
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.version import Version
-
-from pip import __file__ as pip_location
-from pip._internal.cli.spinners import open_spinner
-from pip._internal.locations import get_platlib, get_purelib, get_scheme
-from pip._internal.metadata import get_default_environment, get_environment
-from pip._internal.utils.subprocess import call_subprocess
-from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
-
-if TYPE_CHECKING:
- from pip._internal.index.package_finder import PackageFinder
-
-logger = logging.getLogger(__name__)
-
-
-def _dedup(a: str, b: str) -> Union[Tuple[str], Tuple[str, str]]:
- return (a, b) if a != b else (a,)
-
-
-class _Prefix:
- def __init__(self, path: str) -> None:
- self.path = path
- self.setup = False
- scheme = get_scheme("", prefix=path)
- self.bin_dir = scheme.scripts
- self.lib_dirs = _dedup(scheme.purelib, scheme.platlib)
-
-
-def get_runnable_pip() -> str:
- """Get a file to pass to a Python executable, to run the currently-running pip.
-
- This is used to run a pip subprocess, for installing requirements into the build
- environment.
- """
- source = pathlib.Path(pip_location).resolve().parent
-
- if not source.is_dir():
- # This would happen if someone is using pip from inside a zip file. In that
- # case, we can use that directly.
- return str(source)
-
- return os.fsdecode(source / "__pip-runner__.py")
-
-
-def _get_system_sitepackages() -> Set[str]:
- """Get system site packages
-
- Usually from site.getsitepackages,
- but fallback on `get_purelib()/get_platlib()` if unavailable
- (e.g. in a virtualenv created by virtualenv<20)
-
- Returns normalized set of strings.
- """
- if hasattr(site, "getsitepackages"):
- system_sites = site.getsitepackages()
- else:
- # virtualenv < 20 overwrites site.py without getsitepackages
- # fallback on get_purelib/get_platlib.
- # this is known to miss things, but shouldn't in the cases
- # where getsitepackages() has been removed (inside a virtualenv)
- system_sites = [get_purelib(), get_platlib()]
- return {os.path.normcase(path) for path in system_sites}
-
-
-class BuildEnvironment:
- """Creates and manages an isolated environment to install build deps"""
-
- def __init__(self) -> None:
- temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
-
- self._prefixes = OrderedDict(
- (name, _Prefix(os.path.join(temp_dir.path, name)))
- for name in ("normal", "overlay")
- )
-
- self._bin_dirs: List[str] = []
- self._lib_dirs: List[str] = []
- for prefix in reversed(list(self._prefixes.values())):
- self._bin_dirs.append(prefix.bin_dir)
- self._lib_dirs.extend(prefix.lib_dirs)
-
- # Customize site to:
- # - ensure .pth files are honored
- # - prevent access to system site packages
- system_sites = _get_system_sitepackages()
-
- self._site_dir = os.path.join(temp_dir.path, "site")
- if not os.path.exists(self._site_dir):
- os.mkdir(self._site_dir)
- with open(
- os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
- ) as fp:
- fp.write(
- textwrap.dedent(
- """
- import os, site, sys
-
- # First, drop system-sites related paths.
- original_sys_path = sys.path[:]
- known_paths = set()
- for path in {system_sites!r}:
- site.addsitedir(path, known_paths=known_paths)
- system_paths = set(
- os.path.normcase(path)
- for path in sys.path[len(original_sys_path):]
- )
- original_sys_path = [
- path for path in original_sys_path
- if os.path.normcase(path) not in system_paths
- ]
- sys.path = original_sys_path
-
- # Second, add lib directories.
- # ensuring .pth file are processed.
- for path in {lib_dirs!r}:
- assert not path in sys.path
- site.addsitedir(path)
- """
- ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
- )
-
- def __enter__(self) -> None:
- self._save_env = {
- name: os.environ.get(name, None)
- for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
- }
-
- path = self._bin_dirs[:]
- old_path = self._save_env["PATH"]
- if old_path:
- path.extend(old_path.split(os.pathsep))
-
- pythonpath = [self._site_dir]
-
- os.environ.update(
- {
- "PATH": os.pathsep.join(path),
- "PYTHONNOUSERSITE": "1",
- "PYTHONPATH": os.pathsep.join(pythonpath),
- }
- )
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> None:
- for varname, old_value in self._save_env.items():
- if old_value is None:
- os.environ.pop(varname, None)
- else:
- os.environ[varname] = old_value
-
- def check_requirements(
- self, reqs: Iterable[str]
- ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
- """Return 2 sets:
- - conflicting requirements: set of (installed, wanted) reqs tuples
- - missing requirements: set of reqs
- """
- missing = set()
- conflicting = set()
- if reqs:
- env = (
- get_environment(self._lib_dirs)
- if hasattr(self, "_lib_dirs")
- else get_default_environment()
- )
- for req_str in reqs:
- req = Requirement(req_str)
- # We're explicitly evaluating with an empty extra value, since build
- # environments are not provided any mechanism to select specific extras.
- if req.marker is not None and not req.marker.evaluate({"extra": ""}):
- continue
- dist = env.get_distribution(req.name)
- if not dist:
- missing.add(req_str)
- continue
- if isinstance(dist.version, Version):
- installed_req_str = f"{req.name}=={dist.version}"
- else:
- installed_req_str = f"{req.name}==={dist.version}"
- if not req.specifier.contains(dist.version, prereleases=True):
- conflicting.add((installed_req_str, req_str))
- # FIXME: Consider direct URL?
- return conflicting, missing
-
- def install_requirements(
- self,
- finder: "PackageFinder",
- requirements: Iterable[str],
- prefix_as_string: str,
- *,
- kind: str,
- ) -> None:
- prefix = self._prefixes[prefix_as_string]
- assert not prefix.setup
- prefix.setup = True
- if not requirements:
- return
- self._install_requirements(
- get_runnable_pip(),
- finder,
- requirements,
- prefix,
- kind=kind,
- )
-
- @staticmethod
- def _install_requirements(
- pip_runnable: str,
- finder: "PackageFinder",
- requirements: Iterable[str],
- prefix: _Prefix,
- *,
- kind: str,
- ) -> None:
- args: List[str] = [
- sys.executable,
- pip_runnable,
- "install",
- "--ignore-installed",
- "--no-user",
- "--prefix",
- prefix.path,
- "--no-warn-script-location",
- ]
- if logger.getEffectiveLevel() <= logging.DEBUG:
- args.append("-v")
- for format_control in ("no_binary", "only_binary"):
- formats = getattr(finder.format_control, format_control)
- args.extend(
- (
- "--" + format_control.replace("_", "-"),
- ",".join(sorted(formats or {":none:"})),
- )
- )
-
- index_urls = finder.index_urls
- if index_urls:
- args.extend(["-i", index_urls[0]])
- for extra_index in index_urls[1:]:
- args.extend(["--extra-index-url", extra_index])
- else:
- args.append("--no-index")
- for link in finder.find_links:
- args.extend(["--find-links", link])
-
- for host in finder.trusted_hosts:
- args.extend(["--trusted-host", host])
- if finder.allow_all_prereleases:
- args.append("--pre")
- if finder.prefer_binary:
- args.append("--prefer-binary")
- args.append("--")
- args.extend(requirements)
- extra_environ = {"_PIP_STANDALONE_CERT": where()}
- with open_spinner(f"Installing {kind}") as spinner:
- call_subprocess(
- args,
- command_desc=f"pip subprocess to install {kind}",
- spinner=spinner,
- extra_environ=extra_environ,
- )
-
-
-class NoOpBuildEnvironment(BuildEnvironment):
- """A no-op drop-in replacement for BuildEnvironment"""
-
- def __init__(self) -> None:
- pass
-
- def __enter__(self) -> None:
- pass
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> None:
- pass
-
- def cleanup(self) -> None:
- pass
-
- def install_requirements(
- self,
- finder: "PackageFinder",
- requirements: Iterable[str],
- prefix_as_string: str,
- *,
- kind: str,
- ) -> None:
-        raise NotImplementedError()
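For context, the deleted BuildEnvironment above is a context manager: entering it rewrites
PATH and PYTHONPATH to point at the temporary prefixes, and the generated sitecustomize.py
hides system site-packages from subprocesses such as a PEP 517 build backend. A hedged,
self-contained sketch against the API shown in this hunk (in real use, install_requirements()
would first be called with a configured PackageFinder; this vintage of pip also requires the
global tempdir manager to be active):

    from pip._internal.build_env import BuildEnvironment
    from pip._internal.utils.temp_dir import global_tempdir_manager

    with global_tempdir_manager():  # needed for globally_managed temp dirs
        build_env = BuildEnvironment()  # creates the prefixes + sitecustomize.py
        with build_env:
            # Inside the context only the isolated lib dirs are consulted,
            # so a fresh environment reports the requirement as missing.
            conflicting, missing = build_env.check_requirements(["setuptools>=40.8.0"])
        print("conflicting:", conflicting)
        print("missing:", missing)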
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cache.py b/venv/lib/python3.11/site-packages/pip/_internal/cache.py
deleted file mode 100644
index f45ac23..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cache.py
+++ /dev/null
@@ -1,290 +0,0 @@
-"""Cache Management
-"""
-
-import hashlib
-import json
-import logging
-import os
-from pathlib import Path
-from typing import Any, Dict, List, Optional
-
-from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.exceptions import InvalidWheelFilename
-from pip._internal.models.direct_url import DirectUrl
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
-from pip._internal.utils.urls import path_to_url
-
-logger = logging.getLogger(__name__)
-
-ORIGIN_JSON_NAME = "origin.json"
-
-
-def _hash_dict(d: Dict[str, str]) -> str:
- """Return a stable sha224 of a dictionary."""
- s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
- return hashlib.sha224(s.encode("ascii")).hexdigest()
-
-
-class Cache:
- """An abstract class - provides cache directories for data from links
-
- :param cache_dir: The root of the cache.
- """
-
- def __init__(self, cache_dir: str) -> None:
- super().__init__()
- assert not cache_dir or os.path.isabs(cache_dir)
- self.cache_dir = cache_dir or None
-
- def _get_cache_path_parts(self, link: Link) -> List[str]:
-        """Get the parts of the path that must be os.path.joined with cache_dir"""
-
- # We want to generate an url to use as our cache key, we don't want to
- # just re-use the URL because it might have other items in the fragment
- # and we don't care about those.
- key_parts = {"url": link.url_without_fragment}
- if link.hash_name is not None and link.hash is not None:
- key_parts[link.hash_name] = link.hash
- if link.subdirectory_fragment:
- key_parts["subdirectory"] = link.subdirectory_fragment
-
- # Include interpreter name, major and minor version in cache key
- # to cope with ill-behaved sdists that build a different wheel
- # depending on the python version their setup.py is being run on,
- # and don't encode the difference in compatibility tags.
- # https://github.com/pypa/pip/issues/7296
- key_parts["interpreter_name"] = interpreter_name()
- key_parts["interpreter_version"] = interpreter_version()
-
- # Encode our key url with sha224, we'll use this because it has similar
- # security properties to sha256, but with a shorter total output (and
- # thus less secure). However the differences don't make a lot of
- # difference for our use case here.
- hashed = _hash_dict(key_parts)
-
- # We want to nest the directories some to prevent having a ton of top
- # level directories where we might run out of sub directories on some
- # FS.
- parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
-
- return parts
-
- def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
- can_not_cache = not self.cache_dir or not canonical_package_name or not link
- if can_not_cache:
- return []
-
- path = self.get_path_for_link(link)
- if os.path.isdir(path):
- return [(candidate, path) for candidate in os.listdir(path)]
- return []
-
- def get_path_for_link(self, link: Link) -> str:
- """Return a directory to store cached items in for link."""
- raise NotImplementedError()
-
- def get(
- self,
- link: Link,
- package_name: Optional[str],
- supported_tags: List[Tag],
- ) -> Link:
- """Returns a link to a cached item if it exists, otherwise returns the
- passed link.
- """
- raise NotImplementedError()
-
-
-class SimpleWheelCache(Cache):
- """A cache of wheels for future installs."""
-
- def __init__(self, cache_dir: str) -> None:
- super().__init__(cache_dir)
-
- def get_path_for_link(self, link: Link) -> str:
- """Return a directory to store cached wheels for link
-
- Because there are M wheels for any one sdist, we provide a directory
- to cache them in, and then consult that directory when looking up
- cache hits.
-
- We only insert things into the cache if they have plausible version
- numbers, so that we don't contaminate the cache with things that were
- not unique. E.g. ./package might have dozens of installs done for it
- and build a version of 0.0...and if we built and cached a wheel, we'd
- end up using the same wheel even if the source has been edited.
-
- :param link: The link of the sdist for which this will cache wheels.
- """
- parts = self._get_cache_path_parts(link)
- assert self.cache_dir
- # Store wheels within the root cache_dir
- return os.path.join(self.cache_dir, "wheels", *parts)
-
- def get(
- self,
- link: Link,
- package_name: Optional[str],
- supported_tags: List[Tag],
- ) -> Link:
- candidates = []
-
- if not package_name:
- return link
-
- canonical_package_name = canonicalize_name(package_name)
- for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
- try:
- wheel = Wheel(wheel_name)
- except InvalidWheelFilename:
- continue
- if canonicalize_name(wheel.name) != canonical_package_name:
- logger.debug(
- "Ignoring cached wheel %s for %s as it "
- "does not match the expected distribution name %s.",
- wheel_name,
- link,
- package_name,
- )
- continue
- if not wheel.supported(supported_tags):
- # Built for a different python/arch/etc
- continue
- candidates.append(
- (
- wheel.support_index_min(supported_tags),
- wheel_name,
- wheel_dir,
- )
- )
-
- if not candidates:
- return link
-
- _, wheel_name, wheel_dir = min(candidates)
- return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
-
-
-class EphemWheelCache(SimpleWheelCache):
-    """A SimpleWheelCache that creates its own temporary cache directory"""
-
- def __init__(self) -> None:
- self._temp_dir = TempDirectory(
- kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
- globally_managed=True,
- )
-
- super().__init__(self._temp_dir.path)
-
-
-class CacheEntry:
- def __init__(
- self,
- link: Link,
- persistent: bool,
- ):
- self.link = link
- self.persistent = persistent
- self.origin: Optional[DirectUrl] = None
- origin_direct_url_path = Path(self.link.file_path).parent / ORIGIN_JSON_NAME
- if origin_direct_url_path.exists():
- try:
- self.origin = DirectUrl.from_json(
- origin_direct_url_path.read_text(encoding="utf-8")
- )
- except Exception as e:
- logger.warning(
- "Ignoring invalid cache entry origin file %s for %s (%s)",
- origin_direct_url_path,
- link.filename,
- e,
- )
-
-
-class WheelCache(Cache):
- """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
-
-    This Cache allows for graceful degradation, using the ephem wheel cache
- when a certain link is not found in the simple wheel cache first.
- """
-
- def __init__(self, cache_dir: str) -> None:
- super().__init__(cache_dir)
- self._wheel_cache = SimpleWheelCache(cache_dir)
- self._ephem_cache = EphemWheelCache()
-
- def get_path_for_link(self, link: Link) -> str:
- return self._wheel_cache.get_path_for_link(link)
-
- def get_ephem_path_for_link(self, link: Link) -> str:
- return self._ephem_cache.get_path_for_link(link)
-
- def get(
- self,
- link: Link,
- package_name: Optional[str],
- supported_tags: List[Tag],
- ) -> Link:
- cache_entry = self.get_cache_entry(link, package_name, supported_tags)
- if cache_entry is None:
- return link
- return cache_entry.link
-
- def get_cache_entry(
- self,
- link: Link,
- package_name: Optional[str],
- supported_tags: List[Tag],
- ) -> Optional[CacheEntry]:
- """Returns a CacheEntry with a link to a cached item if it exists or
- None. The cache entry indicates if the item was found in the persistent
- or ephemeral cache.
- """
- retval = self._wheel_cache.get(
- link=link,
- package_name=package_name,
- supported_tags=supported_tags,
- )
- if retval is not link:
- return CacheEntry(retval, persistent=True)
-
- retval = self._ephem_cache.get(
- link=link,
- package_name=package_name,
- supported_tags=supported_tags,
- )
- if retval is not link:
- return CacheEntry(retval, persistent=False)
-
- return None
-
- @staticmethod
- def record_download_origin(cache_dir: str, download_info: DirectUrl) -> None:
- origin_path = Path(cache_dir) / ORIGIN_JSON_NAME
- if origin_path.exists():
- try:
- origin = DirectUrl.from_json(origin_path.read_text(encoding="utf-8"))
- except Exception as e:
- logger.warning(
- "Could not read origin file %s in cache entry (%s). "
- "Will attempt to overwrite it.",
- origin_path,
- e,
- )
- else:
- # TODO: use DirectUrl.equivalent when
- # https://github.com/pypa/pip/pull/10564 is merged.
- if origin.url != download_info.url:
- logger.warning(
- "Origin URL %s in cache entry %s does not match download URL "
- "%s. This is likely a pip bug or a cache corruption issue. "
- "Will overwrite it with the new value.",
- origin.url,
- cache_dir,
- download_info.url,
- )
- origin_path.write_text(download_info.to_json(), encoding="utf-8")
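The cache-key scheme deleted above is easy to reproduce in isolation: a stable sha224 over
a canonical JSON dump of the key parts, then nested two hex characters per directory level
so no single directory accumulates too many entries. A self-contained sketch using only the
standard library (the URL is a placeholder, not a real link):

    import hashlib
    import json

    key_parts = {
        "url": "https://files.example.invalid/demo-1.0.tar.gz",  # hypothetical link
        "interpreter_name": "cp",
        "interpreter_version": "311",
    }
    s = json.dumps(key_parts, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    hashed = hashlib.sha224(s.encode("ascii")).hexdigest()
    # e.g. ['ab', 'cd', 'ef', '0123...'] -> <cache_dir>/wheels/ab/cd/ef/0123...
    parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
    print(parts)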
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py
deleted file mode 100644
index e589bb9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-"""Subpackage containing all of pip's command line interface related code
-"""
-
-# This file intentionally does not import submodules
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index c128c8f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc
deleted file mode 100644
index ec67e9b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc
deleted file mode 100644
index abc1003..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc
deleted file mode 100644
index 5645357..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc
deleted file mode 100644
index 3637274..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc
deleted file mode 100644
index db86b76..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc
deleted file mode 100644
index 5d6d691..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc
deleted file mode 100644
index 3f08e46..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/parser.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc
deleted file mode 100644
index 5d98c0d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc
deleted file mode 100644
index 280592a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc
deleted file mode 100644
index a722088..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc
deleted file mode 100644
index 7d94fde..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py
deleted file mode 100644
index e5950b9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/autocompletion.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""Logic that powers autocompletion installed by ``pip completion``.
-"""
-
-import optparse
-import os
-import sys
-from itertools import chain
-from typing import Any, Iterable, List, Optional
-
-from pip._internal.cli.main_parser import create_main_parser
-from pip._internal.commands import commands_dict, create_command
-from pip._internal.metadata import get_default_environment
-
-
-def autocomplete() -> None:
- """Entry Point for completion of main and subcommand options."""
- # Don't complete if user hasn't sourced bash_completion file.
- if "PIP_AUTO_COMPLETE" not in os.environ:
- return
- cwords = os.environ["COMP_WORDS"].split()[1:]
- cword = int(os.environ["COMP_CWORD"])
- try:
- current = cwords[cword - 1]
- except IndexError:
- current = ""
-
- parser = create_main_parser()
- subcommands = list(commands_dict)
- options = []
-
- # subcommand
- subcommand_name: Optional[str] = None
- for word in cwords:
- if word in subcommands:
- subcommand_name = word
- break
- # subcommand options
- if subcommand_name is not None:
- # special case: 'help' subcommand has no options
- if subcommand_name == "help":
- sys.exit(1)
- # special case: list locally installed dists for show and uninstall
- should_list_installed = not current.startswith("-") and subcommand_name in [
- "show",
- "uninstall",
- ]
- if should_list_installed:
- env = get_default_environment()
- lc = current.lower()
- installed = [
- dist.canonical_name
- for dist in env.iter_installed_distributions(local_only=True)
- if dist.canonical_name.startswith(lc)
- and dist.canonical_name not in cwords[1:]
- ]
- # if there are no dists installed, fall back to option completion
- if installed:
- for dist in installed:
- print(dist)
- sys.exit(1)
-
- should_list_installables = (
- not current.startswith("-") and subcommand_name == "install"
- )
- if should_list_installables:
- for path in auto_complete_paths(current, "path"):
- print(path)
- sys.exit(1)
-
- subcommand = create_command(subcommand_name)
-
- for opt in subcommand.parser.option_list_all:
- if opt.help != optparse.SUPPRESS_HELP:
- options += [
- (opt_str, opt.nargs) for opt_str in opt._long_opts + opt._short_opts
- ]
-
- # filter out previously specified options from available options
- prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
- options = [(x, v) for (x, v) in options if x not in prev_opts]
- # filter options by current input
- options = [(k, v) for k, v in options if k.startswith(current)]
- # get completion type given cwords and available subcommand options
- completion_type = get_path_completion_type(
- cwords,
- cword,
- subcommand.parser.option_list_all,
- )
- # get completion files and directories if ``completion_type`` is
- # ``<file>``, ``<dir>`` or ``<path>``
- if completion_type:
- paths = auto_complete_paths(current, completion_type)
- options = [(path, 0) for path in paths]
- for option in options:
- opt_label = option[0]
- # append '=' to options which require args
- if option[1] and option[0][:2] == "--":
- opt_label += "="
- print(opt_label)
- else:
- # show main parser options only when necessary
-
- opts = [i.option_list for i in parser.option_groups]
- opts.append(parser.option_list)
- flattened_opts = chain.from_iterable(opts)
- if current.startswith("-"):
- for opt in flattened_opts:
- if opt.help != optparse.SUPPRESS_HELP:
- subcommands += opt._long_opts + opt._short_opts
- else:
- # get completion type given cwords and all available options
- completion_type = get_path_completion_type(cwords, cword, flattened_opts)
- if completion_type:
- subcommands = list(auto_complete_paths(current, completion_type))
-
- print(" ".join([x for x in subcommands if x.startswith(current)]))
- sys.exit(1)
-
-
-def get_path_completion_type(
- cwords: List[str], cword: int, opts: Iterable[Any]
-) -> Optional[str]:
- """Get the type of path completion (``file``, ``dir``, ``path`` or None)
-
-    :param cwords: same as the environment variable ``COMP_WORDS``
-    :param cword: same as the environment variable ``COMP_CWORD``
- :param opts: The available options to check
- :return: path completion type (``file``, ``dir``, ``path`` or None)
- """
- if cword < 2 or not cwords[cword - 2].startswith("-"):
- return None
- for opt in opts:
- if opt.help == optparse.SUPPRESS_HELP:
- continue
- for o in str(opt).split("/"):
- if cwords[cword - 2].split("=")[0] == o:
- if not opt.metavar or any(
- x in ("path", "file", "dir") for x in opt.metavar.split("/")
- ):
- return opt.metavar
- return None
-
-
-def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
- """If ``completion_type`` is ``file`` or ``path``, list all regular files
- and directories starting with ``current``; otherwise only list directories
- starting with ``current``.
-
- :param current: The word to be completed
-    :param completion_type: path completion type (``file``, ``path`` or ``dir``)
- :return: A generator of regular files and/or directories
- """
- directory, filename = os.path.split(current)
- current_path = os.path.abspath(directory)
- # Don't complete paths if they can't be accessed
- if not os.access(current_path, os.R_OK):
- return
- filename = os.path.normcase(filename)
- # list all files that start with ``filename``
- file_list = (
- x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
- )
- for f in file_list:
- opt = os.path.join(current_path, f)
- comp_file = os.path.normcase(os.path.join(directory, f))
-        # complete regular files when the option is not followed by ``<dir>``
-        # complete directories when the option is followed by ``<file>``,
-        # ``<path>`` or ``<dir>``
- if completion_type != "dir" and os.path.isfile(opt):
- yield comp_file
- elif os.path.isdir(opt):
- yield os.path.join(comp_file, "")
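For reference, the deleted completion logic is driven entirely by environment variables exported by the shell script that ``pip completion`` installs. A minimal sketch of that protocol, assuming a ``pip`` executable on PATH; the variable names are exactly the ones ``autocomplete()`` reads above:

import os
import subprocess

# Simulate what the installed bash completion script does: export the gate
# variable plus the words typed so far and the index of the word to complete.
env = dict(
    os.environ,
    PIP_AUTO_COMPLETE="1",  # without this, autocomplete() returns immediately
    COMP_WORDS="pip ins",   # the full command line, whitespace-separated
    COMP_CWORD="1",         # complete the word at index 1 ("ins")
)
# pip prints the matching subcommand ("install") to stdout and exits with
# status 1 via sys.exit(1), so we read stdout rather than the return code.
result = subprocess.run(["pip"], env=env, capture_output=True, text=True)
print(result.stdout.split())  # ['install']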
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py
deleted file mode 100644
index db9d5cc..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/base_command.py
+++ /dev/null
@@ -1,236 +0,0 @@
-"""Base Command class, and related routines"""
-
-import functools
-import logging
-import logging.config
-import optparse
-import os
-import sys
-import traceback
-from optparse import Values
-from typing import Any, Callable, List, Optional, Tuple
-
-from pip._vendor.rich import traceback as rich_traceback
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.command_context import CommandContextMixIn
-from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
-from pip._internal.cli.status_codes import (
- ERROR,
- PREVIOUS_BUILD_DIR_ERROR,
- UNKNOWN_ERROR,
- VIRTUALENV_NOT_FOUND,
-)
-from pip._internal.exceptions import (
- BadCommand,
- CommandError,
- DiagnosticPipError,
- InstallationError,
- NetworkConnectionError,
- PreviousBuildDirError,
- UninstallationError,
-)
-from pip._internal.utils.filesystem import check_path_owner
-from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
-from pip._internal.utils.misc import get_prog, normalize_path
-from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
-from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-__all__ = ["Command"]
-
-logger = logging.getLogger(__name__)
-
-
-class Command(CommandContextMixIn):
- usage: str = ""
- ignore_require_venv: bool = False
-
- def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
- super().__init__()
-
- self.name = name
- self.summary = summary
- self.parser = ConfigOptionParser(
- usage=self.usage,
- prog=f"{get_prog()} {name}",
- formatter=UpdatingDefaultsHelpFormatter(),
- add_help_option=False,
- name=name,
- description=self.__doc__,
- isolated=isolated,
- )
-
- self.tempdir_registry: Optional[TempDirRegistry] = None
-
- # Commands should add options to this option group
- optgroup_name = f"{self.name.capitalize()} Options"
- self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
-
- # Add the general options
- gen_opts = cmdoptions.make_option_group(
- cmdoptions.general_group,
- self.parser,
- )
- self.parser.add_option_group(gen_opts)
-
- self.add_options()
-
- def add_options(self) -> None:
- pass
-
- def handle_pip_version_check(self, options: Values) -> None:
- """
- This is a no-op so that commands by default do not do the pip version
- check.
- """
- # Make sure we do the pip version check if the index_group options
- # are present.
- assert not hasattr(options, "no_index")
-
- def run(self, options: Values, args: List[str]) -> int:
- raise NotImplementedError
-
- def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
- # factored out for testability
- return self.parser.parse_args(args)
-
- def main(self, args: List[str]) -> int:
- try:
- with self.main_context():
- return self._main(args)
- finally:
- logging.shutdown()
-
- def _main(self, args: List[str]) -> int:
- # We must initialize this before the tempdir manager, otherwise the
- # configuration would not be accessible by the time we clean up the
- # tempdir manager.
- self.tempdir_registry = self.enter_context(tempdir_registry())
- # Intentionally set as early as possible so globally-managed temporary
- # directories are available to the rest of the code.
- self.enter_context(global_tempdir_manager())
-
- options, args = self.parse_args(args)
-
- # Set verbosity so that it can be used elsewhere.
- self.verbosity = options.verbose - options.quiet
-
- level_number = setup_logging(
- verbosity=self.verbosity,
- no_color=options.no_color,
- user_log_file=options.log,
- )
-
- always_enabled_features = set(options.features_enabled) & set(
- cmdoptions.ALWAYS_ENABLED_FEATURES
- )
- if always_enabled_features:
- logger.warning(
- "The following features are always enabled: %s. ",
- ", ".join(sorted(always_enabled_features)),
- )
-
- # Make sure that the --python argument isn't specified after the
- # subcommand. We can tell, because if --python was specified,
- # we should only reach this point if we're running in the created
- # subprocess, which has the _PIP_RUNNING_IN_SUBPROCESS environment
- # variable set.
- if options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
- logger.critical(
- "The --python option must be placed before the pip subcommand name"
- )
- sys.exit(ERROR)
-
- # TODO: Try to get these passing down from the command?
- # without resorting to os.environ to hold these.
- # This also affects isolated builds and it should.
-
- if options.no_input:
- os.environ["PIP_NO_INPUT"] = "1"
-
- if options.exists_action:
- os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
-
- if options.require_venv and not self.ignore_require_venv:
- # If a venv is required check if it can really be found
- if not running_under_virtualenv():
- logger.critical("Could not find an activated virtualenv (required).")
- sys.exit(VIRTUALENV_NOT_FOUND)
-
- if options.cache_dir:
- options.cache_dir = normalize_path(options.cache_dir)
- if not check_path_owner(options.cache_dir):
- logger.warning(
- "The directory '%s' or its parent directory is not owned "
- "or is not writable by the current user. The cache "
- "has been disabled. Check the permissions and owner of "
- "that directory. If executing pip with sudo, you should "
- "use sudo's -H flag.",
- options.cache_dir,
- )
- options.cache_dir = None
-
- def intercepts_unhandled_exc(
- run_func: Callable[..., int]
- ) -> Callable[..., int]:
- @functools.wraps(run_func)
- def exc_logging_wrapper(*args: Any) -> int:
- try:
- status = run_func(*args)
- assert isinstance(status, int)
- return status
- except DiagnosticPipError as exc:
- logger.error("%s", exc, extra={"rich": True})
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except PreviousBuildDirError as exc:
- logger.critical(str(exc))
- logger.debug("Exception information:", exc_info=True)
-
- return PREVIOUS_BUILD_DIR_ERROR
- except (
- InstallationError,
- UninstallationError,
- BadCommand,
- NetworkConnectionError,
- ) as exc:
- logger.critical(str(exc))
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except CommandError as exc:
- logger.critical("%s", exc)
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except BrokenStdoutLoggingError:
- # Bypass our logger and write any remaining messages to
- # stderr because stdout no longer works.
- print("ERROR: Pipe to stdout was broken", file=sys.stderr)
- if level_number <= logging.DEBUG:
- traceback.print_exc(file=sys.stderr)
-
- return ERROR
- except KeyboardInterrupt:
- logger.critical("Operation cancelled by user")
- logger.debug("Exception information:", exc_info=True)
-
- return ERROR
- except BaseException:
- logger.critical("Exception:", exc_info=True)
-
- return UNKNOWN_ERROR
-
- return exc_logging_wrapper
-
- try:
- if not options.debug_mode:
- run = intercepts_unhandled_exc(self.run)
- else:
- run = self.run
- rich_traceback.install(show_locals=True)
- return run(options, args)
- finally:
- self.handle_pip_version_check(options)
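Every concrete pip subcommand fills in the template above: ``add_options()`` populates ``self.cmd_opts``, and ``run()`` returns one of the status codes, while ``_main()`` supplies logging, option parsing, and the exception-to-exit-code mapping. A minimal hypothetical subclass (``HelloCommand`` is invented for illustration; real commands are registered in ``pip._internal.commands``):

from optparse import Values
from typing import List

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS


class HelloCommand(Command):
    """Print a greeting."""  # the docstring becomes the parser description

    usage = "%prog [options]"

    def add_options(self) -> None:
        # Command-specific options live on self.cmd_opts; the group is then
        # inserted ahead of the shared "General Options" group.
        self.cmd_opts.add_option(cmdoptions.pre())
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        print("hello")
        return SUCCESS

Calling ``HelloCommand("hello", "Print a greeting.").main([])`` would run the full pipeline, including the exception interception installed by ``_main()``.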
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py
deleted file mode 100644
index d05e502..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/cmdoptions.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-"""
-shared options and groups
-
-The principle here is to define options once, but *not* instantiate them
-globally. One reason is that options with action='append' can carry state
-between parses. pip parses general options twice internally, and shouldn't
-pass on state. To be consistent, all options will follow this design.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import importlib.util
-import logging
-import os
-import textwrap
-from functools import partial
-from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
-from textwrap import dedent
-from typing import Any, Callable, Dict, Optional, Tuple
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.parser import ConfigOptionParser
-from pip._internal.exceptions import CommandError
-from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.index import PyPI
-from pip._internal.models.target_python import TargetPython
-from pip._internal.utils.hashes import STRONG_HASHES
-from pip._internal.utils.misc import strtobool
-
-logger = logging.getLogger(__name__)
-
-
-def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
- """
- Raise an option parsing error using parser.error().
-
- Args:
- parser: an OptionParser instance.
- option: an Option instance.
- msg: the error text.
- """
- msg = f"{option} error: {msg}"
- msg = textwrap.fill(" ".join(msg.split()))
- parser.error(msg)
-
-
-def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
- """
- Return an OptionGroup object
- group -- assumed to be dict with 'name' and 'options' keys
- parser -- an optparse Parser
- """
- option_group = OptionGroup(parser, group["name"])
- for option in group["options"]:
- option_group.add_option(option())
- return option_group
-
-
-def check_dist_restriction(options: Values, check_target: bool = False) -> None:
- """Function for determining if custom platform options are allowed.
-
- :param options: The OptionParser options.
- :param check_target: Whether or not to check if --target is being used.
- """
- dist_restriction_set = any(
- [
- options.python_version,
- options.platforms,
- options.abis,
- options.implementation,
- ]
- )
-
- binary_only = FormatControl(set(), {":all:"})
- sdist_dependencies_allowed = (
- options.format_control != binary_only and not options.ignore_dependencies
- )
-
- # Installations or downloads using dist restrictions must not combine
- # source distributions and dist-specific wheels, as they are not
- # guaranteed to be locally compatible.
- if dist_restriction_set and sdist_dependencies_allowed:
- raise CommandError(
- "When restricting platform and interpreter constraints using "
- "--python-version, --platform, --abi, or --implementation, "
- "either --no-deps must be set, or --only-binary=:all: must be "
- "set and --no-binary must not be set (or must be set to "
- ":none:)."
- )
-
- if check_target:
- if not options.dry_run and dist_restriction_set and not options.target_dir:
- raise CommandError(
- "Can not use any platform or abi specific options unless "
- "installing via '--target' or using '--dry-run'"
- )
-
-
-def _path_option_check(option: Option, opt: str, value: str) -> str:
- return os.path.expanduser(value)
-
-
-def _package_name_option_check(option: Option, opt: str, value: str) -> str:
- return canonicalize_name(value)
-
-
-class PipOption(Option):
- TYPES = Option.TYPES + ("path", "package_name")
- TYPE_CHECKER = Option.TYPE_CHECKER.copy()
- TYPE_CHECKER["package_name"] = _package_name_option_check
- TYPE_CHECKER["path"] = _path_option_check
-
-
-###########
-# options #
-###########
-
-help_: Callable[..., Option] = partial(
- Option,
- "-h",
- "--help",
- dest="help",
- action="help",
- help="Show help.",
-)
-
-debug_mode: Callable[..., Option] = partial(
- Option,
- "--debug",
- dest="debug_mode",
- action="store_true",
- default=False,
- help=(
- "Let unhandled exceptions propagate outside the main subroutine, "
- "instead of logging them to stderr."
- ),
-)
-
-isolated_mode: Callable[..., Option] = partial(
- Option,
- "--isolated",
- dest="isolated_mode",
- action="store_true",
- default=False,
- help=(
- "Run pip in an isolated mode, ignoring environment variables and user "
- "configuration."
- ),
-)
-
-require_virtualenv: Callable[..., Option] = partial(
- Option,
- "--require-virtualenv",
- "--require-venv",
- dest="require_venv",
- action="store_true",
- default=False,
- help=(
- "Allow pip to only run in a virtual environment; "
- "exit with an error otherwise."
- ),
-)
-
-override_externally_managed: Callable[..., Option] = partial(
- Option,
- "--break-system-packages",
- dest="override_externally_managed",
- action="store_true",
- help="Allow pip to modify an EXTERNALLY-MANAGED Python installation",
-)
-
-python: Callable[..., Option] = partial(
- Option,
- "--python",
- dest="python",
- help="Run pip with the specified Python interpreter.",
-)
-
-verbose: Callable[..., Option] = partial(
- Option,
- "-v",
- "--verbose",
- dest="verbose",
- action="count",
- default=0,
- help="Give more output. Option is additive, and can be used up to 3 times.",
-)
-
-no_color: Callable[..., Option] = partial(
- Option,
- "--no-color",
- dest="no_color",
- action="store_true",
- default=False,
- help="Suppress colored output.",
-)
-
-version: Callable[..., Option] = partial(
- Option,
- "-V",
- "--version",
- dest="version",
- action="store_true",
- help="Show version and exit.",
-)
-
-quiet: Callable[..., Option] = partial(
- Option,
- "-q",
- "--quiet",
- dest="quiet",
- action="count",
- default=0,
- help=(
- "Give less output. Option is additive, and can be used up to 3"
- " times (corresponding to WARNING, ERROR, and CRITICAL logging"
- " levels)."
- ),
-)
-
-progress_bar: Callable[..., Option] = partial(
- Option,
- "--progress-bar",
- dest="progress_bar",
- type="choice",
- choices=["on", "off"],
- default="on",
- help="Specify whether the progress bar should be used [on, off] (default: on)",
-)
-
-log: Callable[..., Option] = partial(
- PipOption,
- "--log",
- "--log-file",
- "--local-log",
- dest="log",
- metavar="path",
- type="path",
- help="Path to a verbose appending log.",
-)
-
-no_input: Callable[..., Option] = partial(
- Option,
- # Don't ask for input
- "--no-input",
- dest="no_input",
- action="store_true",
- default=False,
- help="Disable prompting for input.",
-)
-
-keyring_provider: Callable[..., Option] = partial(
- Option,
- "--keyring-provider",
- dest="keyring_provider",
- choices=["auto", "disabled", "import", "subprocess"],
- default="auto",
- help=(
- "Enable the credential lookup via the keyring library if user input is allowed."
- " Specify which mechanism to use [disabled, import, subprocess]."
- " (default: disabled)"
- ),
-)
-
-proxy: Callable[..., Option] = partial(
- Option,
- "--proxy",
- dest="proxy",
- type="str",
- default="",
- help="Specify a proxy in the form scheme://[user:passwd@]proxy.server:port.",
-)
-
-retries: Callable[..., Option] = partial(
- Option,
- "--retries",
- dest="retries",
- type="int",
- default=5,
- help="Maximum number of retries each connection should attempt "
- "(default %default times).",
-)
-
-timeout: Callable[..., Option] = partial(
- Option,
- "--timeout",
- "--default-timeout",
- metavar="sec",
- dest="timeout",
- type="float",
- default=15,
- help="Set the socket timeout (default %default seconds).",
-)
-
-
-def exists_action() -> Option:
- return Option(
- # Option when path already exist
- "--exists-action",
- dest="exists_action",
- type="choice",
- choices=["s", "i", "w", "b", "a"],
- default=[],
- action="append",
- metavar="action",
- help="Default action when a path already exists: "
- "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
- )
-
-
-cert: Callable[..., Option] = partial(
- PipOption,
- "--cert",
- dest="cert",
- type="path",
- metavar="path",
- help=(
- "Path to PEM-encoded CA certificate bundle. "
- "If provided, overrides the default. "
- "See 'SSL Certificate Verification' in pip documentation "
- "for more information."
- ),
-)
-
-client_cert: Callable[..., Option] = partial(
- PipOption,
- "--client-cert",
- dest="client_cert",
- type="path",
- default=None,
- metavar="path",
- help="Path to SSL client certificate, a single file containing the "
- "private key and the certificate in PEM format.",
-)
-
-index_url: Callable[..., Option] = partial(
- Option,
- "-i",
- "--index-url",
- "--pypi-url",
- dest="index_url",
- metavar="URL",
- default=PyPI.simple_url,
- help="Base URL of the Python Package Index (default %default). "
- "This should point to a repository compliant with PEP 503 "
- "(the simple repository API) or a local directory laid out "
- "in the same format.",
-)
-
-
-def extra_index_url() -> Option:
- return Option(
- "--extra-index-url",
- dest="extra_index_urls",
- metavar="URL",
- action="append",
- default=[],
- help="Extra URLs of package indexes to use in addition to "
- "--index-url. Should follow the same rules as "
- "--index-url.",
- )
-
-
-no_index: Callable[..., Option] = partial(
- Option,
- "--no-index",
- dest="no_index",
- action="store_true",
- default=False,
- help="Ignore package index (only looking at --find-links URLs instead).",
-)
-
-
-def find_links() -> Option:
- return Option(
- "-f",
- "--find-links",
- dest="find_links",
- action="append",
- default=[],
- metavar="url",
- help="If a URL or path to an html file, then parse for links to "
- "archives such as sdist (.tar.gz) or wheel (.whl) files. "
- "If a local path or file:// URL that's a directory, "
- "then look for archives in the directory listing. "
- "Links to VCS project URLs are not supported.",
- )
-
-
-def trusted_host() -> Option:
- return Option(
- "--trusted-host",
- dest="trusted_hosts",
- action="append",
- metavar="HOSTNAME",
- default=[],
- help="Mark this host or host:port pair as trusted, even though it "
- "does not have valid or any HTTPS.",
- )
-
-
-def constraints() -> Option:
- return Option(
- "-c",
- "--constraint",
- dest="constraints",
- action="append",
- default=[],
- metavar="file",
- help="Constrain versions using the given constraints file. "
- "This option can be used multiple times.",
- )
-
-
-def requirements() -> Option:
- return Option(
- "-r",
- "--requirement",
- dest="requirements",
- action="append",
- default=[],
- metavar="file",
- help="Install from the given requirements file. "
- "This option can be used multiple times.",
- )
-
-
-def editable() -> Option:
- return Option(
- "-e",
- "--editable",
- dest="editables",
- action="append",
- default=[],
- metavar="path/url",
- help=(
- "Install a project in editable mode (i.e. setuptools "
- '"develop mode") from a local project path or a VCS url.'
- ),
- )
-
-
-def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
- value = os.path.abspath(value)
- setattr(parser.values, option.dest, value)
-
-
-src: Callable[..., Option] = partial(
- PipOption,
- "--src",
- "--source",
- "--source-dir",
- "--source-directory",
- dest="src_dir",
- type="path",
- metavar="dir",
- default=get_src_prefix(),
- action="callback",
- callback=_handle_src,
- help="Directory to check out editable projects into. "
- 'The default in a virtualenv is "<venv path>/src". '
- 'The default for global installs is "<current dir>/src".',
-)
-
-
-def _get_format_control(values: Values, option: Option) -> Any:
- """Get a format_control object."""
- return getattr(values, option.dest)
-
-
-def _handle_no_binary(
- option: Option, opt_str: str, value: str, parser: OptionParser
-) -> None:
- existing = _get_format_control(parser.values, option)
- FormatControl.handle_mutual_excludes(
- value,
- existing.no_binary,
- existing.only_binary,
- )
-
-
-def _handle_only_binary(
- option: Option, opt_str: str, value: str, parser: OptionParser
-) -> None:
- existing = _get_format_control(parser.values, option)
- FormatControl.handle_mutual_excludes(
- value,
- existing.only_binary,
- existing.no_binary,
- )
-
-
-def no_binary() -> Option:
- format_control = FormatControl(set(), set())
- return Option(
- "--no-binary",
- dest="format_control",
- action="callback",
- callback=_handle_no_binary,
- type="str",
- default=format_control,
- help="Do not use binary packages. Can be supplied multiple times, and "
- 'each time adds to the existing value. Accepts either ":all:" to '
- 'disable all binary packages, ":none:" to empty the set (notice '
- "the colons), or one or more package names with commas between "
- "them (no colons). Note that some packages are tricky to compile "
- "and may fail to install when this option is used on them.",
- )
-
-
-def only_binary() -> Option:
- format_control = FormatControl(set(), set())
- return Option(
- "--only-binary",
- dest="format_control",
- action="callback",
- callback=_handle_only_binary,
- type="str",
- default=format_control,
- help="Do not use source packages. Can be supplied multiple times, and "
- 'each time adds to the existing value. Accepts either ":all:" to '
- 'disable all source packages, ":none:" to empty the set, or one '
- "or more package names with commas between them. Packages "
- "without binary distributions will fail to install when this "
- "option is used on them.",
- )
-
-
-platforms: Callable[..., Option] = partial(
- Option,
- "--platform",
- dest="platforms",
- metavar="platform",
- action="append",
- default=None,
- help=(
- "Only use wheels compatible with <platform>. Defaults to the "
- "platform of the running system. Use this option multiple times to "
- "specify multiple platforms supported by the target interpreter."
- ),
-)
-
-
-# This was made a separate function for unit-testing purposes.
-def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
- """
- Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
-
- :return: A 2-tuple (version_info, error_msg), where `error_msg` is
- non-None if and only if there was a parsing error.
- """
- if not value:
- # The empty string is the same as not providing a value.
- return (None, None)
-
- parts = value.split(".")
- if len(parts) > 3:
- return ((), "at most three version parts are allowed")
-
- if len(parts) == 1:
- # Then we are in the case of "3" or "37".
- value = parts[0]
- if len(value) > 1:
- parts = [value[0], value[1:]]
-
- try:
- version_info = tuple(int(part) for part in parts)
- except ValueError:
- return ((), "each version part must be an integer")
-
- return (version_info, None)
-
-
-def _handle_python_version(
- option: Option, opt_str: str, value: str, parser: OptionParser
-) -> None:
- """
- Handle a provided --python-version value.
- """
- version_info, error_msg = _convert_python_version(value)
- if error_msg is not None:
- msg = f"invalid --python-version value: {value!r}: {error_msg}"
- raise_option_error(parser, option=option, msg=msg)
-
- parser.values.python_version = version_info
-
-
-python_version: Callable[..., Option] = partial(
- Option,
- "--python-version",
- dest="python_version",
- metavar="python_version",
- action="callback",
- callback=_handle_python_version,
- type="str",
- default=None,
- help=dedent(
- """\
- The Python interpreter version to use for wheel and "Requires-Python"
- compatibility checks. Defaults to a version derived from the running
- interpreter. The version can be specified using up to three dot-separated
- integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
- version can also be given as a string without dots (e.g. "37" for 3.7.0).
- """
- ),
-)
-
-
-implementation: Callable[..., Option] = partial(
- Option,
- "--implementation",
- dest="implementation",
- metavar="implementation",
- default=None,
- help=(
- "Only use wheels compatible with Python "
- "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
- " or 'ip'. If not specified, then the current "
- "interpreter implementation is used. Use 'py' to force "
- "implementation-agnostic wheels."
- ),
-)
-
-
-abis: Callable[..., Option] = partial(
- Option,
- "--abi",
- dest="abis",
- metavar="abi",
- action="append",
- default=None,
- help=(
- "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
- "If not specified, then the current interpreter abi tag is used. "
- "Use this option multiple times to specify multiple abis supported "
- "by the target interpreter. Generally you will need to specify "
- "--implementation, --platform, and --python-version when using this "
- "option."
- ),
-)
-
-
-def add_target_python_options(cmd_opts: OptionGroup) -> None:
- cmd_opts.add_option(platforms())
- cmd_opts.add_option(python_version())
- cmd_opts.add_option(implementation())
- cmd_opts.add_option(abis())
-
-
-def make_target_python(options: Values) -> TargetPython:
- target_python = TargetPython(
- platforms=options.platforms,
- py_version_info=options.python_version,
- abis=options.abis,
- implementation=options.implementation,
- )
-
- return target_python
-
-
-def prefer_binary() -> Option:
- return Option(
- "--prefer-binary",
- dest="prefer_binary",
- action="store_true",
- default=False,
- help=(
- "Prefer binary packages over source packages, even if the "
- "source packages are newer."
- ),
- )
-
-
-cache_dir: Callable[..., Option] = partial(
- PipOption,
- "--cache-dir",
- dest="cache_dir",
- default=USER_CACHE_DIR,
- metavar="dir",
- type="path",
- help="Store the cache data in <dir>.",
-)
-
-
-def _handle_no_cache_dir(
- option: Option, opt: str, value: str, parser: OptionParser
-) -> None:
- """
- Process a value provided for the --no-cache-dir option.
-
- This is an optparse.Option callback for the --no-cache-dir option.
- """
- # The value argument will be None if --no-cache-dir is passed via the
- # command-line, since the option doesn't accept arguments. However,
- # the value can be non-None if the option is triggered e.g. by an
- # environment variable, like PIP_NO_CACHE_DIR=true.
- if value is not None:
- # Then parse the string value to get argument error-checking.
- try:
- strtobool(value)
- except ValueError as exc:
- raise_option_error(parser, option=option, msg=str(exc))
-
- # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
- # converted to 0 (like "false" or "no") caused cache_dir to be disabled
- # rather than enabled (logic would say the latter). Thus, we disable
- # the cache directory not just on values that parse to True, but (for
- # backwards compatibility reasons) also on values that parse to False.
- # In other words, always set it to False if the option is provided in
- # some (valid) form.
- parser.values.cache_dir = False
-
-
-no_cache: Callable[..., Option] = partial(
- Option,
- "--no-cache-dir",
- dest="cache_dir",
- action="callback",
- callback=_handle_no_cache_dir,
- help="Disable the cache.",
-)
-
-no_deps: Callable[..., Option] = partial(
- Option,
- "--no-deps",
- "--no-dependencies",
- dest="ignore_dependencies",
- action="store_true",
- default=False,
- help="Don't install package dependencies.",
-)
-
-ignore_requires_python: Callable[..., Option] = partial(
- Option,
- "--ignore-requires-python",
- dest="ignore_requires_python",
- action="store_true",
- help="Ignore the Requires-Python information.",
-)
-
-no_build_isolation: Callable[..., Option] = partial(
- Option,
- "--no-build-isolation",
- dest="build_isolation",
- action="store_false",
- default=True,
- help="Disable isolation when building a modern source distribution. "
- "Build dependencies specified by PEP 518 must be already installed "
- "if this option is used.",
-)
-
-check_build_deps: Callable[..., Option] = partial(
- Option,
- "--check-build-dependencies",
- dest="check_build_deps",
- action="store_true",
- default=False,
- help="Check the build dependencies when PEP517 is used.",
-)
-
-
-def _handle_no_use_pep517(
- option: Option, opt: str, value: str, parser: OptionParser
-) -> None:
- """
- Process a value provided for the --no-use-pep517 option.
-
- This is an optparse.Option callback for the no_use_pep517 option.
- """
- # Since --no-use-pep517 doesn't accept arguments, the value argument
- # will be None if --no-use-pep517 is passed via the command-line.
- # However, the value can be non-None if the option is triggered e.g.
- # by an environment variable, for example "PIP_NO_USE_PEP517=true".
- if value is not None:
- msg = """A value was passed for --no-use-pep517,
- probably using either the PIP_NO_USE_PEP517 environment variable
- or the "no-use-pep517" config file option. Use an appropriate value
- of the PIP_USE_PEP517 environment variable or the "use-pep517"
- config file option instead.
- """
- raise_option_error(parser, option=option, msg=msg)
-
-    # If the user doesn't wish to use PEP 517, check that setuptools and wheel
-    # are installed and raise an error if they are not.
- packages = ("setuptools", "wheel")
- if not all(importlib.util.find_spec(package) for package in packages):
- msg = (
- f"It is not possible to use --no-use-pep517 "
- f"without {' and '.join(packages)} installed."
- )
- raise_option_error(parser, option=option, msg=msg)
-
- # Otherwise, --no-use-pep517 was passed via the command-line.
- parser.values.use_pep517 = False
-
-
-use_pep517: Any = partial(
- Option,
- "--use-pep517",
- dest="use_pep517",
- action="store_true",
- default=None,
- help="Use PEP 517 for building source distributions "
- "(use --no-use-pep517 to force legacy behaviour).",
-)
-
-no_use_pep517: Any = partial(
- Option,
- "--no-use-pep517",
- dest="use_pep517",
- action="callback",
- callback=_handle_no_use_pep517,
- default=None,
- help=SUPPRESS_HELP,
-)
-
-
-def _handle_config_settings(
- option: Option, opt_str: str, value: str, parser: OptionParser
-) -> None:
- key, sep, val = value.partition("=")
- if sep != "=":
- parser.error(f"Arguments to {opt_str} must be of the form KEY=VAL")
- dest = getattr(parser.values, option.dest)
- if dest is None:
- dest = {}
- setattr(parser.values, option.dest, dest)
- if key in dest:
- if isinstance(dest[key], list):
- dest[key].append(val)
- else:
- dest[key] = [dest[key], val]
- else:
- dest[key] = val
-
-
-config_settings: Callable[..., Option] = partial(
- Option,
- "-C",
- "--config-settings",
- dest="config_settings",
- type=str,
- action="callback",
- callback=_handle_config_settings,
- metavar="settings",
- help="Configuration settings to be passed to the PEP 517 build backend. "
- "Settings take the form KEY=VALUE. Use multiple --config-settings options "
- "to pass multiple keys to the backend.",
-)
-
-build_options: Callable[..., Option] = partial(
- Option,
- "--build-option",
- dest="build_options",
- metavar="options",
- action="append",
- help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
-)
-
-global_options: Callable[..., Option] = partial(
- Option,
- "--global-option",
- dest="global_options",
- action="append",
- metavar="options",
- help="Extra global options to be supplied to the setup.py "
- "call before the install or bdist_wheel command.",
-)
-
-no_clean: Callable[..., Option] = partial(
- Option,
- "--no-clean",
- action="store_true",
- default=False,
- help="Don't clean up build directories.",
-)
-
-pre: Callable[..., Option] = partial(
- Option,
- "--pre",
- action="store_true",
- default=False,
- help="Include pre-release and development versions. By default, "
- "pip only finds stable versions.",
-)
-
-disable_pip_version_check: Callable[..., Option] = partial(
- Option,
- "--disable-pip-version-check",
- dest="disable_pip_version_check",
- action="store_true",
- default=False,
- help="Don't periodically check PyPI to determine whether a new version "
- "of pip is available for download. Implied with --no-index.",
-)
-
-root_user_action: Callable[..., Option] = partial(
- Option,
- "--root-user-action",
- dest="root_user_action",
- default="warn",
- choices=["warn", "ignore"],
- help="Action if pip is run as a root user. By default, a warning message is shown.",
-)
-
-
-def _handle_merge_hash(
- option: Option, opt_str: str, value: str, parser: OptionParser
-) -> None:
- """Given a value spelled "algo:digest", append the digest to a list
- pointed to in a dict by the algo name."""
- if not parser.values.hashes:
- parser.values.hashes = {}
- try:
- algo, digest = value.split(":", 1)
- except ValueError:
- parser.error(
- f"Arguments to {opt_str} must be a hash name "
- "followed by a value, like --hash=sha256:"
- "abcde..."
- )
- if algo not in STRONG_HASHES:
- parser.error(
- "Allowed hash algorithms for {} are {}.".format(
- opt_str, ", ".join(STRONG_HASHES)
- )
- )
- parser.values.hashes.setdefault(algo, []).append(digest)
-
-
-hash: Callable[..., Option] = partial(
- Option,
- "--hash",
- # Hash values eventually end up in InstallRequirement.hashes due to
- # __dict__ copying in process_line().
- dest="hashes",
- action="callback",
- callback=_handle_merge_hash,
- type="string",
- help="Verify that the package's archive matches this "
- "hash before installing. Example: --hash=sha256:abcdef...",
-)
-
-
-require_hashes: Callable[..., Option] = partial(
- Option,
- "--require-hashes",
- dest="require_hashes",
- action="store_true",
- default=False,
- help="Require a hash to check each requirement against, for "
- "repeatable installs. This option is implied when any package in a "
- "requirements file has a --hash option.",
-)
-
-
-list_path: Callable[..., Option] = partial(
- PipOption,
- "--path",
- dest="path",
- type="path",
- action="append",
- help="Restrict to the specified installation path for listing "
- "packages (can be used multiple times).",
-)
-
-
-def check_list_path_option(options: Values) -> None:
- if options.path and (options.user or options.local):
- raise CommandError("Cannot combine '--path' with '--user' or '--local'")
-
-
-list_exclude: Callable[..., Option] = partial(
- PipOption,
- "--exclude",
- dest="excludes",
- action="append",
- metavar="package",
- type="package_name",
- help="Exclude specified package from the output",
-)
-
-
-no_python_version_warning: Callable[..., Option] = partial(
- Option,
- "--no-python-version-warning",
- dest="no_python_version_warning",
- action="store_true",
- default=False,
- help="Silence deprecation warnings for upcoming unsupported Pythons.",
-)
-
-
-# Features that are now always on. A warning is printed if they are used.
-ALWAYS_ENABLED_FEATURES = [
- "no-binary-enable-wheel-cache", # always on since 23.1
-]
-
-use_new_feature: Callable[..., Option] = partial(
- Option,
- "--use-feature",
- dest="features_enabled",
- metavar="feature",
- action="append",
- default=[],
- choices=[
- "fast-deps",
- "truststore",
- ]
- + ALWAYS_ENABLED_FEATURES,
- help="Enable new functionality, that may be backward incompatible.",
-)
-
-use_deprecated_feature: Callable[..., Option] = partial(
- Option,
- "--use-deprecated",
- dest="deprecated_features_enabled",
- metavar="feature",
- action="append",
- default=[],
- choices=[
- "legacy-resolver",
- ],
- help=("Enable deprecated functionality, that will be removed in the future."),
-)
-
-
-##########
-# groups #
-##########
-
-general_group: Dict[str, Any] = {
- "name": "General Options",
- "options": [
- help_,
- debug_mode,
- isolated_mode,
- require_virtualenv,
- python,
- verbose,
- version,
- quiet,
- log,
- no_input,
- keyring_provider,
- proxy,
- retries,
- timeout,
- exists_action,
- trusted_host,
- cert,
- client_cert,
- cache_dir,
- no_cache,
- disable_pip_version_check,
- no_color,
- no_python_version_warning,
- use_new_feature,
- use_deprecated_feature,
- ],
-}
-
-index_group: Dict[str, Any] = {
- "name": "Package Index Options",
- "options": [
- index_url,
- extra_index_url,
- no_index,
- find_links,
- ],
-}
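The docstring above names the design rule for this whole module: options are defined once as factories (``partial(Option, ...)`` or zero-argument functions) and instantiated per parse, so stateful actions like ``append`` never leak between the two parses pip performs. A minimal sketch of the pattern with plain ``optparse``, using an invented ``verbose`` factory:

from functools import partial
from optparse import Option, OptionParser

verbose = partial(
    Option,
    "-v",
    "--verbose",
    dest="verbose",
    action="count",
    default=0,
    help="Give more output.",
)

# Each call builds a fresh Option, so the two parsers share no option state.
parser_a = OptionParser()
parser_a.add_option(verbose())
parser_b = OptionParser()
parser_b.add_option(verbose())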
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py
deleted file mode 100644
index 139995a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/command_context.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from contextlib import ExitStack, contextmanager
-from typing import ContextManager, Generator, TypeVar
-
-_T = TypeVar("_T", covariant=True)
-
-
-class CommandContextMixIn:
- def __init__(self) -> None:
- super().__init__()
- self._in_main_context = False
- self._main_context = ExitStack()
-
- @contextmanager
- def main_context(self) -> Generator[None, None, None]:
- assert not self._in_main_context
-
- self._in_main_context = True
- try:
- with self._main_context:
- yield
- finally:
- self._in_main_context = False
-
- def enter_context(self, context_provider: ContextManager[_T]) -> _T:
- assert self._in_main_context
-
- return self._main_context.enter_context(context_provider)
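The mix-in above gives each command a single ``ExitStack``: ``enter_context()`` is only legal while ``main_context()`` is active, and everything registered is unwound when the with-block exits. A minimal sketch, with ``resource`` standing in for pip's temp-dir managers:

from contextlib import contextmanager

from pip._internal.cli.command_context import CommandContextMixIn


@contextmanager
def resource(name):
    # stand-in for tempdir_registry() / global_tempdir_manager()
    print(f"open {name}")
    try:
        yield name
    finally:
        print(f"close {name}")


cmd = CommandContextMixIn()
with cmd.main_context():
    cmd.enter_context(resource("tempdir"))  # closed when the block exits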
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/main.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/main.py
deleted file mode 100644
index 7e061f5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/main.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""Primary application entrypoint.
-"""
-import locale
-import logging
-import os
-import sys
-import warnings
-from typing import List, Optional
-
-from pip._internal.cli.autocompletion import autocomplete
-from pip._internal.cli.main_parser import parse_command
-from pip._internal.commands import create_command
-from pip._internal.exceptions import PipError
-from pip._internal.utils import deprecation
-
-logger = logging.getLogger(__name__)
-
-
-# Do not import and use main() directly! Using it directly is actively
-# discouraged by pip's maintainers. The name, location and behavior of
-# this function is subject to change, so calling it directly is not
-# portable across different pip versions.
-
-# In addition, running pip in-process is unsupported and unsafe. This is
-# elaborated in detail at
-# https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
-# That document also provides suggestions that should work for nearly
-# all users that are considering importing and using main() directly.
-
-# However, we know that certain users will still want to invoke pip
-# in-process. If you understand and accept the implications of using pip
-# in an unsupported manner, the best approach is to use runpy to avoid
-# depending on the exact location of this entry point.
-
-# The following example shows how to use runpy to invoke pip in that
-# case:
-#
-# sys.argv = ["pip", your, args, here]
-# runpy.run_module("pip", run_name="__main__")
-#
-# Note that this will exit the process after running, unlike a direct
-# call to main. As it is not safe to do any processing after calling
-# main, this should not be an issue in practice.
-
-
-def main(args: Optional[List[str]] = None) -> int:
- if args is None:
- args = sys.argv[1:]
-
- # Suppress the pkg_resources deprecation warning
- # Note - we use a module of .*pkg_resources to cover
- # the normal case (pip._vendor.pkg_resources) and the
- # devendored case (a bare pkg_resources)
- warnings.filterwarnings(
- action="ignore", category=DeprecationWarning, module=".*pkg_resources"
- )
-
- # Configure our deprecation warnings to be sent through loggers
- deprecation.install_warning_logger()
-
- autocomplete()
-
- try:
- cmd_name, cmd_args = parse_command(args)
- except PipError as exc:
- sys.stderr.write(f"ERROR: {exc}")
- sys.stderr.write(os.linesep)
- sys.exit(1)
-
- # Needed for locale.getpreferredencoding(False) to work
- # in pip._internal.utils.encoding.auto_decode
- try:
- locale.setlocale(locale.LC_ALL, "")
- except locale.Error as e:
-        # setlocale can apparently crash if locales are uninitialized
- logger.debug("Ignoring error %s when setting locale", e)
- command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
-
- return command.main(cmd_args)
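The long comment above already spells out the supported alternative to importing ``main()``; as a runnable sketch, the ``runpy`` pattern it recommends looks like this (note that ``run_module`` ends the process with pip's exit status via ``SystemExit``):

import runpy
import sys

# Equivalent to running "pip --version" from a shell; this avoids importing
# pip._internal.cli.main.main() directly, which the maintainers discourage.
sys.argv = ["pip", "--version"]
runpy.run_module("pip", run_name="__main__")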
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py
deleted file mode 100644
index 5ade356..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/main_parser.py
+++ /dev/null
@@ -1,134 +0,0 @@
-"""A single place for constructing and exposing the main parser
-"""
-
-import os
-import subprocess
-import sys
-from typing import List, Optional, Tuple
-
-from pip._internal.build_env import get_runnable_pip
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
-from pip._internal.commands import commands_dict, get_similar_commands
-from pip._internal.exceptions import CommandError
-from pip._internal.utils.misc import get_pip_version, get_prog
-
-__all__ = ["create_main_parser", "parse_command"]
-
-
-def create_main_parser() -> ConfigOptionParser:
- """Creates and returns the main parser for pip's CLI"""
-
- parser = ConfigOptionParser(
- usage="\n%prog <command> [options]",
- add_help_option=False,
- formatter=UpdatingDefaultsHelpFormatter(),
- name="global",
- prog=get_prog(),
- )
- parser.disable_interspersed_args()
-
- parser.version = get_pip_version()
-
- # add the general options
- gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
- parser.add_option_group(gen_opts)
-
- # so the help formatter knows
- parser.main = True # type: ignore
-
- # create command listing for description
- description = [""] + [
- f"{name:27} {command_info.summary}"
- for name, command_info in commands_dict.items()
- ]
- parser.description = "\n".join(description)
-
- return parser
-
-
-def identify_python_interpreter(python: str) -> Optional[str]:
- # If the named file exists, use it.
- # If it's a directory, assume it's a virtual environment and
- # look for the environment's Python executable.
- if os.path.exists(python):
- if os.path.isdir(python):
- # bin/python for Unix, Scripts/python.exe for Windows
-            # Try both to cover odd cases like Cygwin.
- for exe in ("bin/python", "Scripts/python.exe"):
- py = os.path.join(python, exe)
- if os.path.exists(py):
- return py
- else:
- return python
-
- # Could not find the interpreter specified
- return None
-
-
-def parse_command(args: List[str]) -> Tuple[str, List[str]]:
- parser = create_main_parser()
-
- # Note: parser calls disable_interspersed_args(), so the result of this
- # call is to split the initial args into the general options before the
- # subcommand and everything else.
- # For example:
- # args: ['--timeout=5', 'install', '--user', 'INITools']
-    # general_options: ['--timeout=5']
- # args_else: ['install', '--user', 'INITools']
- general_options, args_else = parser.parse_args(args)
-
- # --python
- if general_options.python and "_PIP_RUNNING_IN_SUBPROCESS" not in os.environ:
- # Re-invoke pip using the specified Python interpreter
- interpreter = identify_python_interpreter(general_options.python)
- if interpreter is None:
- raise CommandError(
- f"Could not locate Python interpreter {general_options.python}"
- )
-
- pip_cmd = [
- interpreter,
- get_runnable_pip(),
- ]
- pip_cmd.extend(args)
-
- # Set a flag so the child doesn't re-invoke itself, causing
- # an infinite loop.
- os.environ["_PIP_RUNNING_IN_SUBPROCESS"] = "1"
- returncode = 0
- try:
- proc = subprocess.run(pip_cmd)
- returncode = proc.returncode
- except (subprocess.SubprocessError, OSError) as exc:
- raise CommandError(f"Failed to run pip under {interpreter}: {exc}")
- sys.exit(returncode)
-
- # --version
- if general_options.version:
- sys.stdout.write(parser.version)
- sys.stdout.write(os.linesep)
- sys.exit()
-
- # pip || pip help -> print_help()
- if not args_else or (args_else[0] == "help" and len(args_else) == 1):
- parser.print_help()
- sys.exit()
-
- # the subcommand name
- cmd_name = args_else[0]
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = [f'unknown command "{cmd_name}"']
- if guess:
- msg.append(f'maybe you meant "{guess}"')
-
- raise CommandError(" - ".join(msg))
-
- # all the args without the subcommand
- cmd_args = args[:]
- cmd_args.remove(cmd_name)
-
- return cmd_name, cmd_args
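The worked example in the comments of ``parse_command()`` above can be exercised directly: only the subcommand name is removed from the returned argument list, so the general options are re-parsed later by the subcommand's own parser:

from pip._internal.cli.main_parser import parse_command

cmd_name, cmd_args = parse_command(
    ["--timeout=5", "install", "--user", "INITools"]
)
assert cmd_name == "install"
assert cmd_args == ["--timeout=5", "--user", "INITools"]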
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py
deleted file mode 100644
index ae554b2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/parser.py
+++ /dev/null
@@ -1,294 +0,0 @@
-"""Base option parser setup"""
-
-import logging
-import optparse
-import shutil
-import sys
-import textwrap
-from contextlib import suppress
-from typing import Any, Dict, Generator, List, Tuple
-
-from pip._internal.cli.status_codes import UNKNOWN_ERROR
-from pip._internal.configuration import Configuration, ConfigurationError
-from pip._internal.utils.misc import redact_auth_from_url, strtobool
-
-logger = logging.getLogger(__name__)
-
-
-class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
- """A prettier/less verbose help formatter for optparse."""
-
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- # help position must be aligned with __init__.parseopts.description
- kwargs["max_help_position"] = 30
- kwargs["indent_increment"] = 1
- kwargs["width"] = shutil.get_terminal_size()[0] - 2
- super().__init__(*args, **kwargs)
-
- def format_option_strings(self, option: optparse.Option) -> str:
- return self._format_option_strings(option)
-
- def _format_option_strings(
- self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
- ) -> str:
- """
- Return a comma-separated list of option strings and metavars.
-
-        :param option: the option whose strings are formatted, e.g. ('-f', '--format')
- :param mvarfmt: metavar format string
- :param optsep: separator
- """
- opts = []
-
- if option._short_opts:
- opts.append(option._short_opts[0])
- if option._long_opts:
- opts.append(option._long_opts[0])
- if len(opts) > 1:
- opts.insert(1, optsep)
-
- if option.takes_value():
- assert option.dest is not None
- metavar = option.metavar or option.dest.lower()
- opts.append(mvarfmt.format(metavar.lower()))
-
- return "".join(opts)
-
- def format_heading(self, heading: str) -> str:
- if heading == "Options":
- return ""
- return heading + ":\n"
-
- def format_usage(self, usage: str) -> str:
- """
- Ensure there is only one newline between usage and the first heading
- if there is no description.
- """
- msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
- return msg
-
- def format_description(self, description: str) -> str:
- # leave full control over description to us
- if description:
- if hasattr(self.parser, "main"):
- label = "Commands"
- else:
- label = "Description"
- # some doc strings have initial newlines, some don't
- description = description.lstrip("\n")
- # some doc strings have final newlines and spaces, some don't
- description = description.rstrip()
- # dedent, then reindent
- description = self.indent_lines(textwrap.dedent(description), " ")
- description = f"{label}:\n{description}\n"
- return description
- else:
- return ""
-
- def format_epilog(self, epilog: str) -> str:
- # leave full control over epilog to us
- if epilog:
- return epilog
- else:
- return ""
-
- def indent_lines(self, text: str, indent: str) -> str:
- new_lines = [indent + line for line in text.split("\n")]
- return "\n".join(new_lines)
-
-
-class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
- """Custom help formatter for use in ConfigOptionParser.
-
-    This updates the defaults before expanding them, allowing
-    them to show up correctly in the help listing.
-
-    Also redacts auth from URL-type options.
- """
-
- def expand_default(self, option: optparse.Option) -> str:
- default_values = None
- if self.parser is not None:
- assert isinstance(self.parser, ConfigOptionParser)
- self.parser._update_defaults(self.parser.defaults)
- assert option.dest is not None
- default_values = self.parser.defaults.get(option.dest)
- help_text = super().expand_default(option)
-
- if default_values and option.metavar == "URL":
- if isinstance(default_values, str):
- default_values = [default_values]
-
-            # If it's not a list, we should abort and just return the help text
- if not isinstance(default_values, list):
- default_values = []
-
- for val in default_values:
- help_text = help_text.replace(val, redact_auth_from_url(val))
-
- return help_text
-
-
-class CustomOptionParser(optparse.OptionParser):
- def insert_option_group(
- self, idx: int, *args: Any, **kwargs: Any
- ) -> optparse.OptionGroup:
- """Insert an OptionGroup at a given position."""
- group = self.add_option_group(*args, **kwargs)
-
- self.option_groups.pop()
- self.option_groups.insert(idx, group)
-
- return group
-
- @property
- def option_list_all(self) -> List[optparse.Option]:
- """Get a list of all options, including those in option groups."""
- res = self.option_list[:]
- for i in self.option_groups:
- res.extend(i.option_list)
-
- return res
-
-
-class ConfigOptionParser(CustomOptionParser):
- """Custom option parser which updates its defaults by checking the
-    configuration files and environment variables"""
-
- def __init__(
- self,
- *args: Any,
- name: str,
- isolated: bool = False,
- **kwargs: Any,
- ) -> None:
- self.name = name
- self.config = Configuration(isolated)
-
- assert self.name
- super().__init__(*args, **kwargs)
-
- def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
- try:
- return option.check_value(key, val)
- except optparse.OptionValueError as exc:
- print(f"An error occurred during configuration: {exc}")
- sys.exit(3)
-
- def _get_ordered_configuration_items(
- self,
- ) -> Generator[Tuple[str, Any], None, None]:
- # Configuration gives keys in an unordered manner. Order them.
- override_order = ["global", self.name, ":env:"]
-
- # Pool the options into different groups
- section_items: Dict[str, List[Tuple[str, Any]]] = {
- name: [] for name in override_order
- }
- for section_key, val in self.config.items():
- # ignore empty values
- if not val:
- logger.debug(
- "Ignoring configuration key '%s' as it's value is empty.",
- section_key,
- )
- continue
-
- section, key = section_key.split(".", 1)
- if section in override_order:
- section_items[section].append((key, val))
-
- # Yield each group in their override order
- for section in override_order:
- for key, val in section_items[section]:
- yield key, val
-
- def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
- """Updates the given defaults with values from the config files and
- the environ. Does a little special handling for certain types of
- options (lists)."""
-
- # Accumulate complex default state.
- self.values = optparse.Values(self.defaults)
- late_eval = set()
- # Then set the options with those values
- for key, val in self._get_ordered_configuration_items():
- # '--' because configuration supports only long names
- option = self.get_option("--" + key)
-
- # Ignore options not present in this parser. E.g. non-globals put
- # in [global] by users that want them to apply to all applicable
- # commands.
- if option is None:
- continue
-
- assert option.dest is not None
-
- if option.action in ("store_true", "store_false"):
- try:
- val = strtobool(val)
- except ValueError:
- self.error(
- f"{val} is not a valid value for {key} option, "
- "please specify a boolean value like yes/no, "
- "true/false or 1/0 instead."
- )
- elif option.action == "count":
- with suppress(ValueError):
- val = strtobool(val)
- with suppress(ValueError):
- val = int(val)
- if not isinstance(val, int) or val < 0:
- self.error(
- f"{val} is not a valid value for {key} option, "
- "please instead specify either a non-negative integer "
- "or a boolean value like yes/no or false/true "
- "which is equivalent to 1/0."
- )
- elif option.action == "append":
- val = val.split()
- val = [self.check_default(option, key, v) for v in val]
- elif option.action == "callback":
- assert option.callback is not None
- late_eval.add(option.dest)
- opt_str = option.get_opt_string()
- val = option.convert_value(opt_str, val)
- # From take_action
- args = option.callback_args or ()
- kwargs = option.callback_kwargs or {}
- option.callback(option, opt_str, val, self, *args, **kwargs)
- else:
- val = self.check_default(option, key, val)
-
- defaults[option.dest] = val
-
- for key in late_eval:
- defaults[key] = getattr(self.values, key)
- self.values = None
- return defaults
-
- def get_default_values(self) -> optparse.Values:
- """Overriding to make updating the defaults after instantiation of
- the option parser possible; _update_defaults() does the dirty work."""
- if not self.process_default_values:
- # Old, pre-Optik 1.5 behaviour.
- return optparse.Values(self.defaults)
-
- # Load the configuration, or error out if loading fails
- try:
- self.config.load()
- except ConfigurationError as err:
- self.exit(UNKNOWN_ERROR, str(err))
-
- defaults = self._update_defaults(self.defaults.copy()) # ours
- for option in self._get_all_options():
- assert option.dest is not None
- default = defaults.get(option.dest)
- if isinstance(default, str):
- opt_str = option.get_opt_string()
- defaults[option.dest] = option.check_value(opt_str, default)
- return optparse.Values(defaults)
-
- def error(self, msg: str) -> None:
- self.print_usage(sys.stderr)
- self.exit(UNKNOWN_ERROR, f"{msg}\n")
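ConfigOptionParser applies configuration sources in the order ["global", <command name>, ":env:"], so later sources overwrite earlier ones as _update_defaults() writes into the defaults dict. A minimal, self-contained sketch of that precedence; the `install.timeout` values here are fabricated for illustration:

    # Illustration only -- mirrors the ordering in
    # _get_ordered_configuration_items(); the config values are made up.
    override_order = ["global", "install", ":env:"]  # ":env:" = PIP_* variables
    config_items = {
        "global.timeout": "15",
        "install.timeout": "30",
        ":env:.timeout": "60",  # e.g. PIP_TIMEOUT=60
    }
    defaults = {}
    for section in override_order:
        for section_key, val in config_items.items():
            sec, key = section_key.split(".", 1)
            if sec == section:
                defaults[key] = val
    print(defaults)  # {'timeout': '60'} -- the environment variable wins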
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py
deleted file mode 100644
index 0ad1403..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/progress_bars.py
+++ /dev/null
@@ -1,68 +0,0 @@
-import functools
-from typing import Callable, Generator, Iterable, Iterator, Optional, Tuple
-
-from pip._vendor.rich.progress import (
- BarColumn,
- DownloadColumn,
- FileSizeColumn,
- Progress,
- ProgressColumn,
- SpinnerColumn,
- TextColumn,
- TimeElapsedColumn,
- TimeRemainingColumn,
- TransferSpeedColumn,
-)
-
-from pip._internal.utils.logging import get_indentation
-
-DownloadProgressRenderer = Callable[[Iterable[bytes]], Iterator[bytes]]
-
-
-def _rich_progress_bar(
- iterable: Iterable[bytes],
- *,
- bar_type: str,
- size: int,
-) -> Generator[bytes, None, None]:
- assert bar_type == "on", "This should only be used in the default mode."
-
- if not size:
- total = float("inf")
- columns: Tuple[ProgressColumn, ...] = (
- TextColumn("[progress.description]{task.description}"),
- SpinnerColumn("line", speed=1.5),
- FileSizeColumn(),
- TransferSpeedColumn(),
- TimeElapsedColumn(),
- )
- else:
- total = size
- columns = (
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- DownloadColumn(),
- TransferSpeedColumn(),
- TextColumn("eta"),
- TimeRemainingColumn(),
- )
-
- progress = Progress(*columns, refresh_per_second=30)
- task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
- with progress:
- for chunk in iterable:
- yield chunk
- progress.update(task_id, advance=len(chunk))
-
-
-def get_download_progress_renderer(
- *, bar_type: str, size: Optional[int] = None
-) -> DownloadProgressRenderer:
- """Get an object that can be used to render the download progress.
-
- Returns a callable that takes an iterable to "wrap".
- """
- if bar_type == "on":
- return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
- else:
- return iter # no-op when passed an iterator
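A usage sketch for the renderer factory above; the chunk source is fabricated, and a real caller would feed it an HTTP response body (this assumes pip and its vendored rich are importable):

    from pip._internal.cli.progress_bars import get_download_progress_renderer

    chunks = (b"x" * 1024 for _ in range(100))  # hypothetical 100 KiB download
    renderer = get_download_progress_renderer(bar_type="on", size=100 * 1024)
    for chunk in renderer(chunks):
        pass  # a real caller would write each chunk to disk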
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py
deleted file mode 100644
index 6f2f79c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/req_command.py
+++ /dev/null
@@ -1,505 +0,0 @@
-"""Contains the Command base classes that depend on PipSession.
-
-These classes live in a separate module so that commands which do not need
-download / PackageFinder capability don't unnecessarily import the
-PackageFinder machinery and all its vendored dependencies.
-"""
-
-import logging
-import os
-import sys
-from functools import partial
-from optparse import Values
-from typing import TYPE_CHECKING, Any, List, Optional, Tuple
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.command_context import CommandContextMixIn
-from pip._internal.exceptions import CommandError, PreviousBuildDirError
-from pip._internal.index.collector import LinkCollector
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.models.target_python import TargetPython
-from pip._internal.network.session import PipSession
-from pip._internal.operations.build.build_tracker import BuildTracker
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import (
- install_req_from_editable,
- install_req_from_line,
- install_req_from_parsed_requirement,
- install_req_from_req_string,
-)
-from pip._internal.req.req_file import parse_requirements
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.resolution.base import BaseResolver
-from pip._internal.self_outdated_check import pip_self_version_check
-from pip._internal.utils.temp_dir import (
- TempDirectory,
- TempDirectoryTypeRegistry,
- tempdir_kinds,
-)
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-if TYPE_CHECKING:
- from ssl import SSLContext
-
-logger = logging.getLogger(__name__)
-
-
-def _create_truststore_ssl_context() -> Optional["SSLContext"]:
- if sys.version_info < (3, 10):
- raise CommandError("The truststore feature is only available for Python 3.10+")
-
- try:
- import ssl
- except ImportError:
- logger.warning("Disabling truststore since ssl support is missing")
- return None
-
- try:
- from pip._vendor import truststore
- except ImportError as e:
- raise CommandError(f"The truststore feature is unavailable: {e}")
-
- return truststore.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
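A sketch of the caller-side pattern, mirroring what _build_session() does further down: treat a failure to build the truststore context as a signal to fall back to the bundled CA set.

    try:
        ssl_context = _create_truststore_ssl_context()
    except CommandError:
        ssl_context = None  # fall back to the bundled certifi CA bundle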
-
-
-class SessionCommandMixin(CommandContextMixIn):
-
- """
- A class mixin for command classes needing _build_session().
- """
-
- def __init__(self) -> None:
- super().__init__()
- self._session: Optional[PipSession] = None
-
- @classmethod
- def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
- """Return a list of index urls from user-provided options."""
- index_urls = []
- if not getattr(options, "no_index", False):
- url = getattr(options, "index_url", None)
- if url:
- index_urls.append(url)
- urls = getattr(options, "extra_index_urls", None)
- if urls:
- index_urls.extend(urls)
- # Return None rather than an empty list
- return index_urls or None
-
- def get_default_session(self, options: Values) -> PipSession:
- """Get a default-managed session."""
- if self._session is None:
- self._session = self.enter_context(self._build_session(options))
- # there's no type annotation on requests.Session, so it's
- # automatically ContextManager[Any] and self._session becomes Any,
- # then https://github.com/python/mypy/issues/7696 kicks in
- assert self._session is not None
- return self._session
-
- def _build_session(
- self,
- options: Values,
- retries: Optional[int] = None,
- timeout: Optional[int] = None,
- fallback_to_certifi: bool = False,
- ) -> PipSession:
- cache_dir = options.cache_dir
- assert not cache_dir or os.path.isabs(cache_dir)
-
- if "truststore" in options.features_enabled:
- try:
- ssl_context = _create_truststore_ssl_context()
- except Exception:
- if not fallback_to_certifi:
- raise
- ssl_context = None
- else:
- ssl_context = None
-
- session = PipSession(
- cache=os.path.join(cache_dir, "http-v2") if cache_dir else None,
- retries=retries if retries is not None else options.retries,
- trusted_hosts=options.trusted_hosts,
- index_urls=self._get_index_urls(options),
- ssl_context=ssl_context,
- )
-
- # Handle custom ca-bundles from the user
- if options.cert:
- session.verify = options.cert
-
- # Handle SSL client certificate
- if options.client_cert:
- session.cert = options.client_cert
-
- # Handle timeouts
- if options.timeout or timeout:
- session.timeout = timeout if timeout is not None else options.timeout
-
- # Handle configured proxies
- if options.proxy:
- session.proxies = {
- "http": options.proxy,
- "https": options.proxy,
- }
-
- # Determine if we can prompt the user for authentication or not
- session.auth.prompting = not options.no_input
- session.auth.keyring_provider = options.keyring_provider
-
- return session
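Commands normally go through get_default_session() rather than calling _build_session() directly, so the session is built once and closed with the command context. A hedged sketch from inside a mixin subclass's run(), assuming PipSession behaves like a requests.Session here:

    session = self.get_default_session(options)
    response = session.get("https://pypi.org/simple/", timeout=options.timeout)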
-
-
-class IndexGroupCommand(Command, SessionCommandMixin):
-
- """
- Abstract base class for commands with the index_group options.
-
- This also corresponds to the commands that permit the pip version check.
- """
-
- def handle_pip_version_check(self, options: Values) -> None:
- """
- Do the pip version check if not disabled.
-
- This overrides the default behavior of not doing the check.
- """
- # Make sure the index_group options are present.
- assert hasattr(options, "no_index")
-
- if options.disable_pip_version_check or options.no_index:
- return
-
- # Otherwise, check if we're using the latest version of pip available.
- session = self._build_session(
- options,
- retries=0,
- timeout=min(5, options.timeout),
- # This is set to ensure the function does not fail when truststore is
- # specified in use-feature but cannot be loaded. This usually raises a
- # CommandError and shows a nice user-facing error, but this function is not
- # called in that try-except block.
- fallback_to_certifi=True,
- )
- with session:
- pip_self_version_check(session, options)
-
-
-KEEPABLE_TEMPDIR_TYPES = [
- tempdir_kinds.BUILD_ENV,
- tempdir_kinds.EPHEM_WHEEL_CACHE,
- tempdir_kinds.REQ_BUILD,
-]
-
-
-def warn_if_run_as_root() -> None:
- """Output a warning for sudo users on Unix.
-
- In a virtual environment, sudo pip still writes to virtualenv.
- On Windows, users may run pip as Administrator without issues.
- This warning only applies to Unix root users outside of virtualenv.
- """
- if running_under_virtualenv():
- return
- if not hasattr(os, "getuid"):
- return
- # On Windows, there are no "system managed" Python packages. Installing as
- # Administrator via pip is the correct way of updating system environments.
- #
- # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
- # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
- if sys.platform == "win32" or sys.platform == "cygwin":
- return
-
- if os.getuid() != 0:
- return
-
- logger.warning(
- "Running pip as the 'root' user can result in broken permissions and "
- "conflicting behaviour with the system package manager. "
- "It is recommended to use a virtual environment instead: "
- "https://pip.pypa.io/warnings/venv"
- )
-
-
-def with_cleanup(func: Any) -> Any:
- """Decorator for common logic related to managing temporary
- directories.
- """
-
- def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
- for t in KEEPABLE_TEMPDIR_TYPES:
- registry.set_delete(t, False)
-
- def wrapper(
- self: RequirementCommand, options: Values, args: List[Any]
- ) -> Optional[int]:
- assert self.tempdir_registry is not None
- if options.no_clean:
- configure_tempdir_registry(self.tempdir_registry)
-
- try:
- return func(self, options, args)
- except PreviousBuildDirError:
- # This kind of conflict can occur when the user passes an explicit
- # build directory with a pre-existing folder. In that case we do
- # not want to accidentally remove it.
- configure_tempdir_registry(self.tempdir_registry)
- raise
-
- return wrapper
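A sketch of how the decorator is applied; ExampleCommand is hypothetical, but pip's requirement commands (see RequirementCommand below) wrap run() the same way:

    class ExampleCommand(RequirementCommand):  # hypothetical command
        @with_cleanup
        def run(self, options: Values, args: List[Any]) -> int:
            # a PreviousBuildDirError raised here keeps the temp dirs on disk
            return 0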
-
-
-class RequirementCommand(IndexGroupCommand):
- def __init__(self, *args: Any, **kw: Any) -> None:
- super().__init__(*args, **kw)
-
- self.cmd_opts.add_option(cmdoptions.no_clean())
-
- @staticmethod
- def determine_resolver_variant(options: Values) -> str:
- """Determines which resolver should be used, based on the given options."""
- if "legacy-resolver" in options.deprecated_features_enabled:
- return "legacy"
-
- return "resolvelib"
-
- @classmethod
- def make_requirement_preparer(
- cls,
- temp_build_dir: TempDirectory,
- options: Values,
- build_tracker: BuildTracker,
- session: PipSession,
- finder: PackageFinder,
- use_user_site: bool,
- download_dir: Optional[str] = None,
- verbosity: int = 0,
- ) -> RequirementPreparer:
- """
- Create a RequirementPreparer instance for the given parameters.
- """
- temp_build_dir_path = temp_build_dir.path
- assert temp_build_dir_path is not None
- legacy_resolver = False
-
- resolver_variant = cls.determine_resolver_variant(options)
- if resolver_variant == "resolvelib":
- lazy_wheel = "fast-deps" in options.features_enabled
- if lazy_wheel:
- logger.warning(
- "pip is using lazily downloaded wheels using HTTP "
- "range requests to obtain dependency information. "
- "This experimental feature is enabled through "
- "--use-feature=fast-deps and it is not ready for "
- "production."
- )
- else:
- legacy_resolver = True
- lazy_wheel = False
- if "fast-deps" in options.features_enabled:
- logger.warning(
- "fast-deps has no effect when used with the legacy resolver."
- )
-
- return RequirementPreparer(
- build_dir=temp_build_dir_path,
- src_dir=options.src_dir,
- download_dir=download_dir,
- build_isolation=options.build_isolation,
- check_build_deps=options.check_build_deps,
- build_tracker=build_tracker,
- session=session,
- progress_bar=options.progress_bar,
- finder=finder,
- require_hashes=options.require_hashes,
- use_user_site=use_user_site,
- lazy_wheel=lazy_wheel,
- verbosity=verbosity,
- legacy_resolver=legacy_resolver,
- )
-
- @classmethod
- def make_resolver(
- cls,
- preparer: RequirementPreparer,
- finder: PackageFinder,
- options: Values,
- wheel_cache: Optional[WheelCache] = None,
- use_user_site: bool = False,
- ignore_installed: bool = True,
- ignore_requires_python: bool = False,
- force_reinstall: bool = False,
- upgrade_strategy: str = "to-satisfy-only",
- use_pep517: Optional[bool] = None,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ) -> BaseResolver:
- """
- Create a Resolver instance for the given parameters.
- """
- make_install_req = partial(
- install_req_from_req_string,
- isolated=options.isolated_mode,
- use_pep517=use_pep517,
- )
- resolver_variant = cls.determine_resolver_variant(options)
- # The long import name and duplicated invocation are needed to convince
- # Mypy to typecheck this correctly. Otherwise it would complain about the
- # "Resolver" class being redefined.
- if resolver_variant == "resolvelib":
- import pip._internal.resolution.resolvelib.resolver
-
- return pip._internal.resolution.resolvelib.resolver.Resolver(
- preparer=preparer,
- finder=finder,
- wheel_cache=wheel_cache,
- make_install_req=make_install_req,
- use_user_site=use_user_site,
- ignore_dependencies=options.ignore_dependencies,
- ignore_installed=ignore_installed,
- ignore_requires_python=ignore_requires_python,
- force_reinstall=force_reinstall,
- upgrade_strategy=upgrade_strategy,
- py_version_info=py_version_info,
- )
- import pip._internal.resolution.legacy.resolver
-
- return pip._internal.resolution.legacy.resolver.Resolver(
- preparer=preparer,
- finder=finder,
- wheel_cache=wheel_cache,
- make_install_req=make_install_req,
- use_user_site=use_user_site,
- ignore_dependencies=options.ignore_dependencies,
- ignore_installed=ignore_installed,
- ignore_requires_python=ignore_requires_python,
- force_reinstall=force_reinstall,
- upgrade_strategy=upgrade_strategy,
- py_version_info=py_version_info,
- )
-
- def get_requirements(
- self,
- args: List[str],
- options: Values,
- finder: PackageFinder,
- session: PipSession,
- ) -> List[InstallRequirement]:
- """
- Parse command-line arguments into the corresponding requirements.
- """
- requirements: List[InstallRequirement] = []
- for filename in options.constraints:
- for parsed_req in parse_requirements(
- filename,
- constraint=True,
- finder=finder,
- options=options,
- session=session,
- ):
- req_to_add = install_req_from_parsed_requirement(
- parsed_req,
- isolated=options.isolated_mode,
- user_supplied=False,
- )
- requirements.append(req_to_add)
-
- for req in args:
- req_to_add = install_req_from_line(
- req,
- comes_from=None,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517,
- user_supplied=True,
- config_settings=getattr(options, "config_settings", None),
- )
- requirements.append(req_to_add)
-
- for req in options.editables:
- req_to_add = install_req_from_editable(
- req,
- user_supplied=True,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517,
- config_settings=getattr(options, "config_settings", None),
- )
- requirements.append(req_to_add)
-
- # NOTE: options.require_hashes may be set if --require-hashes is True
- for filename in options.requirements:
- for parsed_req in parse_requirements(
- filename, finder=finder, options=options, session=session
- ):
- req_to_add = install_req_from_parsed_requirement(
- parsed_req,
- isolated=options.isolated_mode,
- use_pep517=options.use_pep517,
- user_supplied=True,
- config_settings=parsed_req.options.get("config_settings")
- if parsed_req.options
- else None,
- )
- requirements.append(req_to_add)
-
- # If any requirement has hash options, enable hash checking.
- if any(req.has_hash_options for req in requirements):
- options.require_hashes = True
-
- if not (args or options.editables or options.requirements):
- opts = {"name": self.name}
- if options.find_links:
- raise CommandError(
- "You must give at least one requirement to {name} "
- '(maybe you meant "pip {name} {links}"?)'.format(
- **dict(opts, links=" ".join(options.find_links))
- )
- )
- else:
- raise CommandError(
- "You must give at least one requirement to {name} "
- '(see "pip help {name}")'.format(**opts)
- )
-
- return requirements
-
- @staticmethod
- def trace_basic_info(finder: PackageFinder) -> None:
- """
- Trace basic information about the provided objects.
- """
- # Display where finder is looking for packages
- search_scope = finder.search_scope
- locations = search_scope.get_formatted_locations()
- if locations:
- logger.info(locations)
-
- def _build_package_finder(
- self,
- options: Values,
- session: PipSession,
- target_python: Optional[TargetPython] = None,
- ignore_requires_python: Optional[bool] = None,
- ) -> PackageFinder:
- """
- Create a package finder appropriate to this requirement command.
-
- :param ignore_requires_python: Whether to ignore incompatible
- "Requires-Python" values in links. Defaults to False.
- """
- link_collector = LinkCollector.create(session, options=options)
- selection_prefs = SelectionPreferences(
- allow_yanked=True,
- format_control=options.format_control,
- allow_all_prereleases=options.pre,
- prefer_binary=options.prefer_binary,
- ignore_requires_python=ignore_requires_python,
- )
-
- return PackageFinder.create(
- link_collector=link_collector,
- selection_prefs=selection_prefs,
- target_python=target_python,
- )
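Taken together, the helpers above give a requirement command a standard pipeline. A hedged sketch of the typical wiring inside run(); every name matches a method defined in this file:

    session = self.get_default_session(options)
    finder = self._build_package_finder(options, session)
    reqs = self.get_requirements(args, options, finder, session)
    self.trace_basic_info(finder)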
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py
deleted file mode 100644
index cf2b976..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/spinners.py
+++ /dev/null
@@ -1,159 +0,0 @@
-import contextlib
-import itertools
-import logging
-import sys
-import time
-from typing import IO, Generator, Optional
-
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.logging import get_indentation
-
-logger = logging.getLogger(__name__)
-
-
-class SpinnerInterface:
- def spin(self) -> None:
- raise NotImplementedError()
-
- def finish(self, final_status: str) -> None:
- raise NotImplementedError()
-
-
-class InteractiveSpinner(SpinnerInterface):
- def __init__(
- self,
- message: str,
- file: Optional[IO[str]] = None,
- spin_chars: str = "-\\|/",
- # Empirically, 8 updates/second looks nice
- min_update_interval_seconds: float = 0.125,
- ):
- self._message = message
- if file is None:
- file = sys.stdout
- self._file = file
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._finished = False
-
- self._spin_cycle = itertools.cycle(spin_chars)
-
- self._file.write(" " * get_indentation() + self._message + " ... ")
- self._width = 0
-
- def _write(self, status: str) -> None:
- assert not self._finished
- # Erase what we wrote before by backspacing to the beginning, writing
- # spaces to overwrite the old text, and then backspacing again
- backup = "\b" * self._width
- self._file.write(backup + " " * self._width + backup)
- # Now we have a blank slate to add our status
- self._file.write(status)
- self._width = len(status)
- self._file.flush()
- self._rate_limiter.reset()
-
- def spin(self) -> None:
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._write(next(self._spin_cycle))
-
- def finish(self, final_status: str) -> None:
- if self._finished:
- return
- self._write(final_status)
- self._file.write("\n")
- self._file.flush()
- self._finished = True
-
-
-# Used for dumb terminals, non-interactive installs (no tty), etc.
-# We still print updates occasionally (once every 60 seconds by default) to
-# act as a keep-alive for systems like Travis-CI that take lack-of-output as
-# an indication that a task has frozen.
-class NonInteractiveSpinner(SpinnerInterface):
- def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
- self._message = message
- self._finished = False
- self._rate_limiter = RateLimiter(min_update_interval_seconds)
- self._update("started")
-
- def _update(self, status: str) -> None:
- assert not self._finished
- self._rate_limiter.reset()
- logger.info("%s: %s", self._message, status)
-
- def spin(self) -> None:
- if self._finished:
- return
- if not self._rate_limiter.ready():
- return
- self._update("still running...")
-
- def finish(self, final_status: str) -> None:
- if self._finished:
- return
- self._update(f"finished with status '{final_status}'")
- self._finished = True
-
-
-class RateLimiter:
- def __init__(self, min_update_interval_seconds: float) -> None:
- self._min_update_interval_seconds = min_update_interval_seconds
- self._last_update: float = 0
-
- def ready(self) -> bool:
- now = time.time()
- delta = now - self._last_update
- return delta >= self._min_update_interval_seconds
-
- def reset(self) -> None:
- self._last_update = time.time()
-
-
-@contextlib.contextmanager
-def open_spinner(message: str) -> Generator[SpinnerInterface, None, None]:
- # Interactive spinner goes directly to sys.stdout rather than being routed
- # through the logging system, but it acts like it has level INFO,
- # i.e. it's only displayed if we're at level INFO or better.
- # Non-interactive spinner goes through the logging system, so it is always
- # in sync with logging configuration.
- if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
- spinner: SpinnerInterface = InteractiveSpinner(message)
- else:
- spinner = NonInteractiveSpinner(message)
- try:
- with hidden_cursor(sys.stdout):
- yield spinner
- except KeyboardInterrupt:
- spinner.finish("canceled")
- raise
- except Exception:
- spinner.finish("error")
- raise
- else:
- spinner.finish("done")
-
-
-HIDE_CURSOR = "\x1b[?25l"
-SHOW_CURSOR = "\x1b[?25h"
-
-
-@contextlib.contextmanager
-def hidden_cursor(file: IO[str]) -> Generator[None, None, None]:
- # The Windows terminal does not support the hide/show cursor ANSI codes,
- # even via colorama. So don't even try.
- if WINDOWS:
- yield
- # We don't want to clutter the output with control characters if we're
- # writing to a file, or if the user is running with --quiet.
- # See https://github.com/pypa/pip/issues/3418
- elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
- yield
- else:
- file.write(HIDE_CURSOR)
- try:
- yield
- finally:
- file.write(SHOW_CURSOR)
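A usage sketch for the spinner machinery above; do_step() is a stand-in for a unit of work:

    from pip._internal.cli.spinners import open_spinner

    with open_spinner("Building wheel") as spinner:
        for _ in range(10):
            do_step()       # hypothetical unit of work
            spinner.spin()  # rate-limited, so it is cheap to call often
    # On a TTY this renders "Building wheel ... done"; otherwise it logs
    # "Building wheel: started" then "finished with status 'done'".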
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py b/venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py
deleted file mode 100644
index 5e29502..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/cli/status_codes.py
+++ /dev/null
@@ -1,6 +0,0 @@
-SUCCESS = 0
-ERROR = 1
-UNKNOWN_ERROR = 2
-VIRTUALENV_NOT_FOUND = 3
-PREVIOUS_BUILD_DIR_ERROR = 4
-NO_MATCHES_FOUND = 23
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/__init__.py
deleted file mode 100644
index 858a410..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__init__.py
+++ /dev/null
@@ -1,132 +0,0 @@
-"""
-Package containing all pip commands
-"""
-
-import importlib
-from collections import namedtuple
-from typing import Any, Dict, Optional
-
-from pip._internal.cli.base_command import Command
-
-CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")
-
-# This dictionary does a bunch of heavy lifting for help output:
-# - Enables avoiding additional (costly) imports for presenting `--help`.
-# - The ordering matters for help display.
-#
-# Even though the module path starts with the same "pip._internal.commands"
-# prefix, the full path makes testing easier (specifically when modifying
-# `commands_dict` in test setup / teardown).
-commands_dict: Dict[str, CommandInfo] = {
- "install": CommandInfo(
- "pip._internal.commands.install",
- "InstallCommand",
- "Install packages.",
- ),
- "download": CommandInfo(
- "pip._internal.commands.download",
- "DownloadCommand",
- "Download packages.",
- ),
- "uninstall": CommandInfo(
- "pip._internal.commands.uninstall",
- "UninstallCommand",
- "Uninstall packages.",
- ),
- "freeze": CommandInfo(
- "pip._internal.commands.freeze",
- "FreezeCommand",
- "Output installed packages in requirements format.",
- ),
- "inspect": CommandInfo(
- "pip._internal.commands.inspect",
- "InspectCommand",
- "Inspect the python environment.",
- ),
- "list": CommandInfo(
- "pip._internal.commands.list",
- "ListCommand",
- "List installed packages.",
- ),
- "show": CommandInfo(
- "pip._internal.commands.show",
- "ShowCommand",
- "Show information about installed packages.",
- ),
- "check": CommandInfo(
- "pip._internal.commands.check",
- "CheckCommand",
- "Verify installed packages have compatible dependencies.",
- ),
- "config": CommandInfo(
- "pip._internal.commands.configuration",
- "ConfigurationCommand",
- "Manage local and global configuration.",
- ),
- "search": CommandInfo(
- "pip._internal.commands.search",
- "SearchCommand",
- "Search PyPI for packages.",
- ),
- "cache": CommandInfo(
- "pip._internal.commands.cache",
- "CacheCommand",
- "Inspect and manage pip's wheel cache.",
- ),
- "index": CommandInfo(
- "pip._internal.commands.index",
- "IndexCommand",
- "Inspect information available from package indexes.",
- ),
- "wheel": CommandInfo(
- "pip._internal.commands.wheel",
- "WheelCommand",
- "Build wheels from your requirements.",
- ),
- "hash": CommandInfo(
- "pip._internal.commands.hash",
- "HashCommand",
- "Compute hashes of package archives.",
- ),
- "completion": CommandInfo(
- "pip._internal.commands.completion",
- "CompletionCommand",
- "A helper command used for command completion.",
- ),
- "debug": CommandInfo(
- "pip._internal.commands.debug",
- "DebugCommand",
- "Show information useful for debugging.",
- ),
- "help": CommandInfo(
- "pip._internal.commands.help",
- "HelpCommand",
- "Show help for commands.",
- ),
-}
-
-
-def create_command(name: str, **kwargs: Any) -> Command:
- """
- Create an instance of the Command class with the given name.
- """
- module_path, class_name, summary = commands_dict[name]
- module = importlib.import_module(module_path)
- command_class = getattr(module, class_name)
- command = command_class(name=name, summary=summary, **kwargs)
-
- return command
-
-
-def get_similar_commands(name: str) -> Optional[str]:
- """Command name auto-correct."""
- from difflib import get_close_matches
-
- name = name.lower()
-
- close_commands = get_close_matches(name, commands_dict.keys())
-
- if close_commands:
- return close_commands[0]
- else:
- return None
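A usage sketch for the two helpers above; the extra keyword argument is simply forwarded to the command class's constructor:

    from pip._internal.commands import create_command, get_similar_commands

    cmd = create_command("install", isolated=False)  # kwargs reach Command.__init__
    print(get_similar_commands("instal"))  # most likely prints "install"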
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index a6dc288..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc
deleted file mode 100644
index f0a8329..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/cache.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc
deleted file mode 100644
index 25a1351..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/check.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc
deleted file mode 100644
index bde80a0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/completion.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc
deleted file mode 100644
index ddef073..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc
deleted file mode 100644
index cd6954d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/debug.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc
deleted file mode 100644
index d0a2f02..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/download.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc
deleted file mode 100644
index 399e976..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc
deleted file mode 100644
index 6abf8bc..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/hash.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc
deleted file mode 100644
index 4d3eb4e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/help.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc
deleted file mode 100644
index 6493e2d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/index.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc
deleted file mode 100644
index 5b8c2f9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/inspect.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc
deleted file mode 100644
index 121e229..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/install.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc
deleted file mode 100644
index 22c139e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/list.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc
deleted file mode 100644
index 481a62f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/search.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc
deleted file mode 100644
index a733863..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/show.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc
deleted file mode 100644
index a940a04..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index cbe067f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py
deleted file mode 100644
index 3283361..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/cache.py
+++ /dev/null
@@ -1,225 +0,0 @@
-import os
-import textwrap
-from optparse import Values
-from typing import Any, List
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.exceptions import CommandError, PipError
-from pip._internal.utils import filesystem
-from pip._internal.utils.logging import getLogger
-
-logger = getLogger(__name__)
-
-
-class CacheCommand(Command):
- """
- Inspect and manage pip's wheel cache.
-
- Subcommands:
-
- - dir: Show the cache directory.
- - info: Show information about the cache.
- - list: List filenames of packages stored in the cache.
- - remove: Remove one or more packages from the cache.
- - purge: Remove all items from the cache.
-
- ``<pattern>`` can be a glob expression or a package name.
- """
-
- ignore_require_venv = True
- usage = """
- %prog dir
- %prog info
- %prog list [<pattern>] [--format=[human, abspath]]
- %prog remove <pattern>
- %prog purge
- """
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "--format",
- action="store",
- dest="list_format",
- default="human",
- choices=("human", "abspath"),
- help="Select the output format among: human (default) or abspath",
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- handlers = {
- "dir": self.get_cache_dir,
- "info": self.get_cache_info,
- "list": self.list_cache_items,
- "remove": self.remove_cache_items,
- "purge": self.purge_cache,
- }
-
- if not options.cache_dir:
- logger.error("pip cache commands can not function since cache is disabled.")
- return ERROR
-
- # Determine action
- if not args or args[0] not in handlers:
- logger.error(
- "Need an action (%s) to perform.",
- ", ".join(sorted(handlers)),
- )
- return ERROR
-
- action = args[0]
-
- # Error handling happens here, not in the action-handlers.
- try:
- handlers[action](options, args[1:])
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- return SUCCESS
-
- def get_cache_dir(self, options: Values, args: List[Any]) -> None:
- if args:
- raise CommandError("Too many arguments")
-
- logger.info(options.cache_dir)
-
- def get_cache_info(self, options: Values, args: List[Any]) -> None:
- if args:
- raise CommandError("Too many arguments")
-
- num_http_files = len(self._find_http_files(options))
- num_packages = len(self._find_wheels(options, "*"))
-
- http_cache_location = self._cache_dir(options, "http-v2")
- old_http_cache_location = self._cache_dir(options, "http")
- wheels_cache_location = self._cache_dir(options, "wheels")
- http_cache_size = filesystem.format_size(
- filesystem.directory_size(http_cache_location)
- + filesystem.directory_size(old_http_cache_location)
- )
- wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)
-
- message = (
- textwrap.dedent(
- """
- Package index page cache location (pip v23.3+): {http_cache_location}
- Package index page cache location (older pips): {old_http_cache_location}
- Package index page cache size: {http_cache_size}
- Number of HTTP files: {num_http_files}
- Locally built wheels location: {wheels_cache_location}
- Locally built wheels size: {wheels_cache_size}
- Number of locally built wheels: {package_count}
- """ # noqa: E501
- )
- .format(
- http_cache_location=http_cache_location,
- old_http_cache_location=old_http_cache_location,
- http_cache_size=http_cache_size,
- num_http_files=num_http_files,
- wheels_cache_location=wheels_cache_location,
- package_count=num_packages,
- wheels_cache_size=wheels_cache_size,
- )
- .strip()
- )
-
- logger.info(message)
-
- def list_cache_items(self, options: Values, args: List[Any]) -> None:
- if len(args) > 1:
- raise CommandError("Too many arguments")
-
- if args:
- pattern = args[0]
- else:
- pattern = "*"
-
- files = self._find_wheels(options, pattern)
- if options.list_format == "human":
- self.format_for_human(files)
- else:
- self.format_for_abspath(files)
-
- def format_for_human(self, files: List[str]) -> None:
- if not files:
- logger.info("No locally built wheels cached.")
- return
-
- results = []
- for filename in files:
- wheel = os.path.basename(filename)
- size = filesystem.format_file_size(filename)
- results.append(f" - {wheel} ({size})")
- logger.info("Cache contents:\n")
- logger.info("\n".join(sorted(results)))
-
- def format_for_abspath(self, files: List[str]) -> None:
- if files:
- logger.info("\n".join(sorted(files)))
-
- def remove_cache_items(self, options: Values, args: List[Any]) -> None:
- if len(args) > 1:
- raise CommandError("Too many arguments")
-
- if not args:
- raise CommandError("Please provide a pattern")
-
- files = self._find_wheels(options, args[0])
-
- no_matching_msg = "No matching packages"
- if args[0] == "*":
- # Only fetch http files if no specific pattern given
- files += self._find_http_files(options)
- else:
- # Add the pattern to the log message
- no_matching_msg += f' for pattern "{args[0]}"'
-
- if not files:
- logger.warning(no_matching_msg)
-
- for filename in files:
- os.unlink(filename)
- logger.verbose("Removed %s", filename)
- logger.info("Files removed: %s", len(files))
-
- def purge_cache(self, options: Values, args: List[Any]) -> None:
- if args:
- raise CommandError("Too many arguments")
-
- return self.remove_cache_items(options, ["*"])
-
- def _cache_dir(self, options: Values, subdir: str) -> str:
- return os.path.join(options.cache_dir, subdir)
-
- def _find_http_files(self, options: Values) -> List[str]:
- old_http_dir = self._cache_dir(options, "http")
- new_http_dir = self._cache_dir(options, "http-v2")
- return filesystem.find_files(old_http_dir, "*") + filesystem.find_files(
- new_http_dir, "*"
- )
-
- def _find_wheels(self, options: Values, pattern: str) -> List[str]:
- wheel_dir = self._cache_dir(options, "wheels")
-
- # The wheel filename format, as specified in PEP 427, is:
- # {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
- #
- # Additionally, non-alphanumeric values in the distribution are
- # normalized to underscores (_), meaning hyphens can never occur
- # before `-{version}`.
- #
- # Given that information:
- # - If the pattern we're given contains a hyphen (-), the user is
- # providing at least the version. Thus, we can just append `*.whl`
- # to match the rest of it.
- # - If the pattern we're given doesn't contain a hyphen (-), the
- # user is only providing the name. Thus, we append `-*.whl` to
- # match the hyphen before the version, followed by anything else.
- #
- # PEP 427: https://www.python.org/dev/peps/pep-0427/
- pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")
-
- return filesystem.find_files(wheel_dir, pattern)
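The pattern-to-glob rule in _find_wheels() can be restated as a tiny pure function; a sketch with both cases spelled out:

    def wheel_glob(pattern: str) -> str:
        # hyphen present -> user gave at least a version; otherwise name only
        return pattern + ("*.whl" if "-" in pattern else "-*.whl")

    assert wheel_glob("requests") == "requests-*.whl"
    assert wheel_glob("requests-2.31") == "requests-2.31*.whl"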
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/check.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/check.py
deleted file mode 100644
index 5efd0a3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/check.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import logging
-from optparse import Values
-from typing import List
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.operations.check import (
- check_package_set,
- create_package_set_from_installed,
- warn_legacy_versions_and_specifiers,
-)
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class CheckCommand(Command):
- """Verify installed packages have compatible dependencies."""
-
- usage = """
- %prog [options]"""
-
- def run(self, options: Values, args: List[str]) -> int:
- package_set, parsing_probs = create_package_set_from_installed()
- warn_legacy_versions_and_specifiers(package_set)
- missing, conflicting = check_package_set(package_set)
-
- for project_name in missing:
- version = package_set[project_name].version
- for dependency in missing[project_name]:
- write_output(
- "%s %s requires %s, which is not installed.",
- project_name,
- version,
- dependency[0],
- )
-
- for project_name in conflicting:
- version = package_set[project_name].version
- for dep_name, dep_version, req in conflicting[project_name]:
- write_output(
- "%s %s has requirement %s, but you have %s %s.",
- project_name,
- version,
- req,
- dep_name,
- dep_version,
- )
-
- if missing or conflicting or parsing_probs:
- return ERROR
- else:
- write_output("No broken requirements found.")
- return SUCCESS
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py
deleted file mode 100644
index 9e89e27..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py
+++ /dev/null
@@ -1,130 +0,0 @@
-import sys
-import textwrap
-from optparse import Values
-from typing import List
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.utils.misc import get_prog
-
-BASE_COMPLETION = """
-# pip {shell} completion start{script}# pip {shell} completion end
-"""
-
-COMPLETION_SCRIPTS = {
- "bash": """
- _pip_completion()
- {{
- COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
- COMP_CWORD=$COMP_CWORD \\
- PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
- }}
- complete -o default -F _pip_completion {prog}
- """,
- "zsh": """
- #compdef -P pip[0-9.]#
- __pip() {{
- compadd $( COMP_WORDS="$words[*]" \\
- COMP_CWORD=$((CURRENT-1)) \\
- PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null )
- }}
- if [[ $zsh_eval_context[-1] == loadautofunc ]]; then
- # autoload from fpath, call function directly
- __pip "$@"
- else
- # eval/source/. command, register function for later
- compdef __pip -P 'pip[0-9.]#'
- fi
- """,
- "fish": """
- function __fish_complete_pip
- set -lx COMP_WORDS (commandline -o) ""
- set -lx COMP_CWORD ( \\
- math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
- )
- set -lx PIP_AUTO_COMPLETE 1
- string split \\ -- (eval $COMP_WORDS[1])
- end
- complete -fa "(__fish_complete_pip)" -c {prog}
- """,
- "powershell": """
- if ((Test-Path Function:\\TabExpansion) -and -not `
- (Test-Path Function:\\_pip_completeBackup)) {{
- Rename-Item Function:\\TabExpansion _pip_completeBackup
- }}
- function TabExpansion($line, $lastWord) {{
- $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
- if ($lastBlock.StartsWith("{prog} ")) {{
- $Env:COMP_WORDS=$lastBlock
- $Env:COMP_CWORD=$lastBlock.Split().Length - 1
- $Env:PIP_AUTO_COMPLETE=1
- (& {prog}).Split()
- Remove-Item Env:COMP_WORDS
- Remove-Item Env:COMP_CWORD
- Remove-Item Env:PIP_AUTO_COMPLETE
- }}
- elseif (Test-Path Function:\\_pip_completeBackup) {{
- # Fall back on existing tab expansion
- _pip_completeBackup $line $lastWord
- }}
- }}
- """,
-}
-
-
-class CompletionCommand(Command):
- """A helper command to be used for command completion."""
-
- ignore_require_venv = True
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "--bash",
- "-b",
- action="store_const",
- const="bash",
- dest="shell",
- help="Emit completion code for bash",
- )
- self.cmd_opts.add_option(
- "--zsh",
- "-z",
- action="store_const",
- const="zsh",
- dest="shell",
- help="Emit completion code for zsh",
- )
- self.cmd_opts.add_option(
- "--fish",
- "-f",
- action="store_const",
- const="fish",
- dest="shell",
- help="Emit completion code for fish",
- )
- self.cmd_opts.add_option(
- "--powershell",
- "-p",
- action="store_const",
- const="powershell",
- dest="shell",
- help="Emit completion code for powershell",
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- """Prints the completion code of the given shell"""
- shells = COMPLETION_SCRIPTS.keys()
- shell_options = ["--" + shell for shell in sorted(shells)]
- if options.shell in shells:
- script = textwrap.dedent(
- COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
- )
- print(BASE_COMPLETION.format(script=script, shell=options.shell))
- return SUCCESS
- else:
- sys.stderr.write(
- "ERROR: You must pass {}\n".format(" or ".join(shell_options))
- )
- return SUCCESS
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py
deleted file mode 100644
index 1a1dc6b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/configuration.py
+++ /dev/null
@@ -1,280 +0,0 @@
-import logging
-import os
-import subprocess
-from optparse import Values
-from typing import Any, List, Optional
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.configuration import (
- Configuration,
- Kind,
- get_configuration_files,
- kinds,
-)
-from pip._internal.exceptions import PipError
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import get_prog, write_output
-
-logger = logging.getLogger(__name__)
-
-
-class ConfigurationCommand(Command):
- """
- Manage local and global configuration.
-
- Subcommands:
-
- - list: List the active configuration (or from the file specified)
- - edit: Edit the configuration file in an editor
- - get: Get the value associated with command.option
- - set: Set the command.option=value
- - unset: Unset the value associated with command.option
- - debug: List the configuration files and values defined under them
-
- Configuration keys should be dot separated command and option name,
- with the special prefix "global" affecting any command. For example,
- "pip config set global.index-url https://example.org/" would configure
- the index url for all commands, but "pip config set download.timeout 10"
- would configure a 10 second timeout only for "pip download" commands.
-
- If none of --user, --global and --site are passed, a virtual
- environment configuration file is used if one is active and the file
- exists. Otherwise, all modifications happen to the user file by
- default.
- """
-
- ignore_require_venv = True
- usage = """
- %prog [<file-option>] list
- %prog [<file-option>] [--editor <editor-path>] edit
-
- %prog [<file-option>] get command.option
- %prog [<file-option>] set command.option value
- %prog [<file-option>] unset command.option
- %prog [<file-option>] debug
- """
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "--editor",
- dest="editor",
- action="store",
- default=None,
- help=(
- "Editor to use to edit the file. Uses VISUAL or EDITOR "
- "environment variables if not provided."
- ),
- )
-
- self.cmd_opts.add_option(
- "--global",
- dest="global_file",
- action="store_true",
- default=False,
- help="Use the system-wide configuration file only",
- )
-
- self.cmd_opts.add_option(
- "--user",
- dest="user_file",
- action="store_true",
- default=False,
- help="Use the user configuration file only",
- )
-
- self.cmd_opts.add_option(
- "--site",
- dest="site_file",
- action="store_true",
- default=False,
- help="Use the current environment configuration file only",
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- handlers = {
- "list": self.list_values,
- "edit": self.open_in_editor,
- "get": self.get_name,
- "set": self.set_name_value,
- "unset": self.unset_name,
- "debug": self.list_config_values,
- }
-
- # Determine action
- if not args or args[0] not in handlers:
- logger.error(
- "Need an action (%s) to perform.",
- ", ".join(sorted(handlers)),
- )
- return ERROR
-
- action = args[0]
-
- # Determine which configuration files are to be loaded
- # Depends on whether the command is modifying.
- try:
- load_only = self._determine_file(
- options, need_value=(action in ["get", "set", "unset", "edit"])
- )
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- # Load a new configuration
- self.configuration = Configuration(
- isolated=options.isolated_mode, load_only=load_only
- )
- self.configuration.load()
-
- # Error handling happens here, not in the action-handlers.
- try:
- handlers[action](options, args[1:])
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- return SUCCESS
-
- def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
- file_options = [
- key
- for key, value in (
- (kinds.USER, options.user_file),
- (kinds.GLOBAL, options.global_file),
- (kinds.SITE, options.site_file),
- )
- if value
- ]
-
- if not file_options:
- if not need_value:
- return None
- # Default to user, unless there's a site file.
- elif any(
- os.path.exists(site_config_file)
- for site_config_file in get_configuration_files()[kinds.SITE]
- ):
- return kinds.SITE
- else:
- return kinds.USER
- elif len(file_options) == 1:
- return file_options[0]
-
- raise PipError(
- "Need exactly one file to operate upon "
- "(--user, --site, --global) to perform."
- )
-
- def list_values(self, options: Values, args: List[str]) -> None:
- self._get_n_args(args, "list", n=0)
-
- for key, value in sorted(self.configuration.items()):
- write_output("%s=%r", key, value)
-
- def get_name(self, options: Values, args: List[str]) -> None:
- key = self._get_n_args(args, "get [name]", n=1)
- value = self.configuration.get_value(key)
-
- write_output("%s", value)
-
- def set_name_value(self, options: Values, args: List[str]) -> None:
- key, value = self._get_n_args(args, "set [name] [value]", n=2)
- self.configuration.set_value(key, value)
-
- self._save_configuration()
-
- def unset_name(self, options: Values, args: List[str]) -> None:
- key = self._get_n_args(args, "unset [name]", n=1)
- self.configuration.unset_value(key)
-
- self._save_configuration()
-
- def list_config_values(self, options: Values, args: List[str]) -> None:
- """List config key-value pairs across different config files"""
- self._get_n_args(args, "debug", n=0)
-
- self.print_env_var_values()
- # Iterate over config files and print if they exist, and the
- # key-value pairs present in them if they do
- for variant, files in sorted(self.configuration.iter_config_files()):
- write_output("%s:", variant)
- for fname in files:
- with indent_log():
- file_exists = os.path.exists(fname)
- write_output("%s, exists: %r", fname, file_exists)
- if file_exists:
- self.print_config_file_values(variant)
-
- def print_config_file_values(self, variant: Kind) -> None:
- """Get key-value pairs from the file of a variant"""
- for name, value in self.configuration.get_values_in_config(variant).items():
- with indent_log():
- write_output("%s: %s", name, value)
-
- def print_env_var_values(self) -> None:
- """Get key-values pairs present as environment variables"""
- write_output("%s:", "env_var")
- with indent_log():
- for key, value in sorted(self.configuration.get_environ_vars()):
- env_var = f"PIP_{key.upper()}"
- write_output("%s=%r", env_var, value)
-
- def open_in_editor(self, options: Values, args: List[str]) -> None:
- editor = self._determine_editor(options)
-
- fname = self.configuration.get_file_to_edit()
- if fname is None:
- raise PipError("Could not determine appropriate file.")
- elif '"' in fname:
- # This shouldn't happen, unless we see a username like that.
- # If that happens, we'd appreciate a pull request fixing this.
- raise PipError(
- f'Cannot open an editor for a file name containing "\n{fname}'
- )
-
- try:
- subprocess.check_call(f'{editor} "{fname}"', shell=True)
- except FileNotFoundError as e:
- if not e.filename:
- e.filename = editor
- raise
- except subprocess.CalledProcessError as e:
- raise PipError(f"Editor Subprocess exited with exit code {e.returncode}")
-
- def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
- """Helper to make sure the command got the right number of arguments"""
- if len(args) != n:
- msg = (
- f"Got unexpected number of arguments, expected {n}. "
- f'(example: "{get_prog()} config {example}")'
- )
- raise PipError(msg)
-
- if n == 1:
- return args[0]
- else:
- return args
-
- def _save_configuration(self) -> None:
- # We successfully ran a modifying command. Need to save the
- # configuration.
- try:
- self.configuration.save()
- except Exception:
- logger.exception(
- "Unable to save configuration. Please report this as a bug."
- )
- raise PipError("Internal Error.")
-
- def _determine_editor(self, options: Values) -> str:
- if options.editor is not None:
- return options.editor
- elif "VISUAL" in os.environ:
- return os.environ["VISUAL"]
- elif "EDITOR" in os.environ:
- return os.environ["EDITOR"]
- else:
- raise PipError("Could not determine editor to use.")
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py
deleted file mode 100644
index 7e5271c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/debug.py
+++ /dev/null
@@ -1,201 +0,0 @@
-import importlib.resources
-import locale
-import logging
-import os
-import sys
-from optparse import Values
-from types import ModuleType
-from typing import Any, Dict, List, Optional
-
-import pip._vendor
-from pip._vendor.certifi import where
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.configuration import Configuration
-from pip._internal.metadata import get_environment
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import get_pip_version
-
-logger = logging.getLogger(__name__)
-
-
-def show_value(name: str, value: Any) -> None:
- logger.info("%s: %s", name, value)
-
-
-def show_sys_implementation() -> None:
- logger.info("sys.implementation:")
- implementation_name = sys.implementation.name
- with indent_log():
- show_value("name", implementation_name)
-
-
-def create_vendor_txt_map() -> Dict[str, str]:
- with importlib.resources.open_text("pip._vendor", "vendor.txt") as f:
-        # Keep only the lines that pin a version (i.e. contain "==") and
-        # strip surrounding whitespace and trailing comments.
- lines = [
- line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
- ]
-
- # Transform into "module" -> version dict.
- return dict(line.split("==", 1) for line in lines)
-
-
-def get_module_from_module_name(module_name: str) -> Optional[ModuleType]:
- # Module name can be uppercase in vendor.txt for some reason...
- module_name = module_name.lower().replace("-", "_")
- # PATCH: setuptools is actually only pkg_resources.
- if module_name == "setuptools":
- module_name = "pkg_resources"
-
- try:
- __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
- return getattr(pip._vendor, module_name)
- except ImportError:
- # We allow 'truststore' to fail to import due
- # to being unavailable on Python 3.9 and earlier.
- if module_name == "truststore" and sys.version_info < (3, 10):
- return None
- raise
-
-
-def get_vendor_version_from_module(module_name: str) -> Optional[str]:
- module = get_module_from_module_name(module_name)
- version = getattr(module, "__version__", None)
-
- if module and not version:
- # Try to find version in debundled module info.
- assert module.__file__ is not None
- env = get_environment([os.path.dirname(module.__file__)])
- dist = env.get_distribution(module_name)
- if dist:
- version = str(dist.version)
-
- return version
-
-
-def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
- """Log the actual version and print extra info if there is
- a conflict or if the actual version could not be imported.
- """
- for module_name, expected_version in vendor_txt_versions.items():
- extra_message = ""
- actual_version = get_vendor_version_from_module(module_name)
- if not actual_version:
- extra_message = (
- " (Unable to locate actual module version, using"
- " vendor.txt specified version)"
- )
- actual_version = expected_version
- elif parse_version(actual_version) != parse_version(expected_version):
- extra_message = (
- " (CONFLICT: vendor.txt suggests version should"
- f" be {expected_version})"
- )
- logger.info("%s==%s%s", module_name, actual_version, extra_message)
-
-
-def show_vendor_versions() -> None:
- logger.info("vendored library versions:")
-
- vendor_txt_versions = create_vendor_txt_map()
- with indent_log():
- show_actual_vendor_versions(vendor_txt_versions)
-
-
-def show_tags(options: Values) -> None:
- tag_limit = 10
-
- target_python = make_target_python(options)
- tags = target_python.get_sorted_tags()
-
- # Display the target options that were explicitly provided.
- formatted_target = target_python.format_given()
- suffix = ""
- if formatted_target:
- suffix = f" (target: {formatted_target})"
-
- msg = f"Compatible tags: {len(tags)}{suffix}"
- logger.info(msg)
-
- if options.verbose < 1 and len(tags) > tag_limit:
- tags_limited = True
- tags = tags[:tag_limit]
- else:
- tags_limited = False
-
- with indent_log():
- for tag in tags:
- logger.info(str(tag))
-
- if tags_limited:
- msg = f"...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
- logger.info(msg)
-
-
-def ca_bundle_info(config: Configuration) -> str:
- levels = {key.split(".", 1)[0] for key, _ in config.items()}
- if not levels:
- return "Not specified"
-
- levels_that_override_global = ["install", "wheel", "download"]
- global_overriding_level = [
- level for level in levels if level in levels_that_override_global
- ]
- if not global_overriding_level:
- return "global"
-
- if "global" in levels:
- levels.remove("global")
- return ", ".join(levels)
-
-
-class DebugCommand(Command):
- """
- Display debug information.
- """
-
- usage = """
- %prog <options>"""
- ignore_require_venv = True
-
- def add_options(self) -> None:
- cmdoptions.add_target_python_options(self.cmd_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
- self.parser.config.load()
-
- def run(self, options: Values, args: List[str]) -> int:
- logger.warning(
- "This command is only meant for debugging. "
- "Do not use this with automation for parsing and getting these "
- "details, since the output and options of this command may "
- "change without notice."
- )
- show_value("pip version", get_pip_version())
- show_value("sys.version", sys.version)
- show_value("sys.executable", sys.executable)
- show_value("sys.getdefaultencoding", sys.getdefaultencoding())
- show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
- show_value(
- "locale.getpreferredencoding",
- locale.getpreferredencoding(),
- )
- show_value("sys.platform", sys.platform)
- show_sys_implementation()
-
- show_value("'cert' config value", ca_bundle_info(self.parser.config))
- show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
- show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
- show_value("pip._vendor.certifi.where()", where())
- show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)
-
- show_vendor_versions()
-
- show_tags(options)
-
- return SUCCESS
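The `vendor.txt` parsing in `create_vendor_txt_map()` is compact; run against a made-up sample it behaves like this (the package names and versions below are illustrative, not pip's actual vendor list):

```python
# Illustrative mirror of create_vendor_txt_map(), applied to sample input.
sample = """\
CacheControl==0.13.1
distlib==0.3.6  # trailing comment is dropped by split(" ", 1)
six  # no pinned version, so this line is skipped
"""
lines = [
    line.strip().split(" ", 1)[0]
    for line in sample.splitlines()
    if "==" in line
]
print(dict(line.split("==", 1) for line in lines))
# {'CacheControl': '0.13.1', 'distlib': '0.3.6'}
```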
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/download.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/download.py
deleted file mode 100644
index 54247a7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/download.py
+++ /dev/null
@@ -1,147 +0,0 @@
-import logging
-import os
-from optparse import Values
-from typing import List
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.req_command import RequirementCommand, with_cleanup
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.operations.build.build_tracker import get_build_tracker
-from pip._internal.req.req_install import check_legacy_setup_py_options
-from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-class DownloadCommand(RequirementCommand):
- """
- Download packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports downloading from "requirements files", which provide
- an easy way to specify a whole environment to be downloaded.
- """
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] <vcs project url> ...
- %prog [options] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(cmdoptions.constraints())
- self.cmd_opts.add_option(cmdoptions.requirements())
- self.cmd_opts.add_option(cmdoptions.no_deps())
- self.cmd_opts.add_option(cmdoptions.global_options())
- self.cmd_opts.add_option(cmdoptions.no_binary())
- self.cmd_opts.add_option(cmdoptions.only_binary())
- self.cmd_opts.add_option(cmdoptions.prefer_binary())
- self.cmd_opts.add_option(cmdoptions.src())
- self.cmd_opts.add_option(cmdoptions.pre())
- self.cmd_opts.add_option(cmdoptions.require_hashes())
- self.cmd_opts.add_option(cmdoptions.progress_bar())
- self.cmd_opts.add_option(cmdoptions.no_build_isolation())
- self.cmd_opts.add_option(cmdoptions.use_pep517())
- self.cmd_opts.add_option(cmdoptions.no_use_pep517())
- self.cmd_opts.add_option(cmdoptions.check_build_deps())
- self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
-
- self.cmd_opts.add_option(
- "-d",
- "--dest",
- "--destination-dir",
- "--destination-directory",
- dest="download_dir",
- metavar="dir",
- default=os.curdir,
- help="Download packages into <dir>.",
- )
-
- cmdoptions.add_target_python_options(self.cmd_opts)
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
-
- @with_cleanup
- def run(self, options: Values, args: List[str]) -> int:
- options.ignore_installed = True
- # editable doesn't really make sense for `pip download`, but the bowels
- # of the RequirementSet code require that property.
- options.editables = []
-
- cmdoptions.check_dist_restriction(options)
-
- options.download_dir = normalize_path(options.download_dir)
- ensure_dir(options.download_dir)
-
- session = self.get_default_session(options)
-
- target_python = make_target_python(options)
- finder = self._build_package_finder(
- options=options,
- session=session,
- target_python=target_python,
- ignore_requires_python=options.ignore_requires_python,
- )
-
- build_tracker = self.enter_context(get_build_tracker())
-
- directory = TempDirectory(
- delete=not options.no_clean,
- kind="download",
- globally_managed=True,
- )
-
- reqs = self.get_requirements(args, options, finder, session)
- check_legacy_setup_py_options(options, reqs)
-
- preparer = self.make_requirement_preparer(
- temp_build_dir=directory,
- options=options,
- build_tracker=build_tracker,
- session=session,
- finder=finder,
- download_dir=options.download_dir,
- use_user_site=False,
- verbosity=self.verbosity,
- )
-
- resolver = self.make_resolver(
- preparer=preparer,
- finder=finder,
- options=options,
- ignore_requires_python=options.ignore_requires_python,
- use_pep517=options.use_pep517,
- py_version_info=options.python_version,
- )
-
- self.trace_basic_info(finder)
-
- requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
-
- downloaded: List[str] = []
- for req in requirement_set.requirements.values():
- if req.satisfied_by is None:
- assert req.name is not None
- preparer.save_linked_requirement(req)
- downloaded.append(req.name)
-
- preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
- requirement_set.warn_legacy_versions_and_specifiers()
-
- if downloaded:
- write_output("Successfully downloaded %s", " ".join(downloaded))
-
- return SUCCESS
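A minimal way to exercise this command is through the stable `python -m pip` entry point rather than the internal classes. A sketch: the package name and destination directory are placeholders, and the call needs network access:

```python
import subprocess
import sys

# Download "requests" and its dependencies into ./wheels, matching the
# -d/--dest option defined above. Both values are arbitrary examples.
subprocess.run(
    [sys.executable, "-m", "pip", "download", "requests", "--dest", "./wheels"],
    check=True,
)
```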
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py
deleted file mode 100644
index fd9d88a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/freeze.py
+++ /dev/null
@@ -1,108 +0,0 @@
-import sys
-from optparse import Values
-from typing import AbstractSet, List
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.operations.freeze import freeze
-from pip._internal.utils.compat import stdlib_pkgs
-
-
-def _should_suppress_build_backends() -> bool:
- return sys.version_info < (3, 12)
-
-
-def _dev_pkgs() -> AbstractSet[str]:
- pkgs = {"pip"}
-
- if _should_suppress_build_backends():
- pkgs |= {"setuptools", "distribute", "wheel"}
-
- return pkgs
-
-
-class FreezeCommand(Command):
- """
- Output installed packages in requirements format.
-
-    Packages are listed in a case-insensitive sorted order.
- """
-
- usage = """
- %prog [options]"""
- log_streams = ("ext://sys.stderr", "ext://sys.stderr")
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-r",
- "--requirement",
- dest="requirements",
- action="append",
- default=[],
- metavar="file",
- help=(
- "Use the order in the given requirements file and its "
- "comments when generating output. This option can be "
- "used multiple times."
- ),
- )
- self.cmd_opts.add_option(
- "-l",
- "--local",
- dest="local",
- action="store_true",
- default=False,
- help=(
- "If in a virtualenv that has global access, do not output "
- "globally-installed packages."
- ),
- )
- self.cmd_opts.add_option(
- "--user",
- dest="user",
- action="store_true",
- default=False,
- help="Only output packages installed in user-site.",
- )
- self.cmd_opts.add_option(cmdoptions.list_path())
- self.cmd_opts.add_option(
- "--all",
- dest="freeze_all",
- action="store_true",
- help=(
- "Do not skip these packages in the output:"
- " {}".format(", ".join(_dev_pkgs()))
- ),
- )
- self.cmd_opts.add_option(
- "--exclude-editable",
- dest="exclude_editable",
- action="store_true",
- help="Exclude editable package from output.",
- )
- self.cmd_opts.add_option(cmdoptions.list_exclude())
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- skip = set(stdlib_pkgs)
- if not options.freeze_all:
- skip.update(_dev_pkgs())
-
- if options.excludes:
- skip.update(options.excludes)
-
- cmdoptions.check_list_path_option(options)
-
- for line in freeze(
- requirement=options.requirements,
- local_only=options.local,
- user_only=options.user,
- paths=options.path,
- isolated=options.isolated_mode,
- skip=skip,
- exclude_editable=options.exclude_editable,
- ):
- sys.stdout.write(line + "\n")
- return SUCCESS
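The skip-set logic above is easy to mirror standalone. A stdlib-only sketch, with a single made-up name standing in for pip's `stdlib_pkgs` set:

```python
import sys


def dev_pkgs() -> set:
    # Mirrors _dev_pkgs() above: build backends are suppressed only on
    # interpreters where ensurepip still bundles them alongside pip.
    pkgs = {"pip"}
    if sys.version_info < (3, 12):
        pkgs |= {"setuptools", "distribute", "wheel"}
    return pkgs


skip = dev_pkgs()
skip.update({"argparse"})  # stand-in for pip's stdlib_pkgs
print(sorted(skip))
```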
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py
deleted file mode 100644
index 042dac8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/hash.py
+++ /dev/null
@@ -1,59 +0,0 @@
-import hashlib
-import logging
-import sys
-from optparse import Values
-from typing import List
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
-from pip._internal.utils.misc import read_chunks, write_output
-
-logger = logging.getLogger(__name__)
-
-
-class HashCommand(Command):
- """
- Compute a hash of a local package archive.
-
- These can be used with --hash in a requirements file to do repeatable
- installs.
- """
-
- usage = "%prog [options] <file> ..."
- ignore_require_venv = True
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-a",
- "--algorithm",
- dest="algorithm",
- choices=STRONG_HASHES,
- action="store",
- default=FAVORITE_HASH,
- help="The hash algorithm to use: one of {}".format(
- ", ".join(STRONG_HASHES)
- ),
- )
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- if not args:
- self.parser.print_usage(sys.stderr)
- return ERROR
-
- algorithm = options.algorithm
- for path in args:
- write_output(
- "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
- )
- return SUCCESS
-
-
-def _hash_of_file(path: str, algorithm: str) -> str:
- """Return the hash digest of a file."""
- with open(path, "rb") as archive:
-        digest = hashlib.new(algorithm)  # avoid shadowing the hash() builtin
-        for chunk in read_chunks(archive):
-            digest.update(chunk)
-    return digest.hexdigest()
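The same digest computation works without pip's `read_chunks()` helper. A self-contained sketch with a fixed 8 KiB chunk size (the commented-out path is a placeholder):

```python
import hashlib


def hash_of_file(path: str, algorithm: str = "sha256") -> str:
    # Stand-in for _hash_of_file() above, reading the archive in
    # fixed-size chunks so large files never load into memory at once.
    digest = hashlib.new(algorithm)
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest()


# print(hash_of_file("some-package.tar.gz"))  # placeholder path
```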
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/help.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/help.py
deleted file mode 100644
index 6206631..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/help.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from optparse import Values
-from typing import List
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import CommandError
-
-
-class HelpCommand(Command):
- """Show help for commands"""
-
- usage = """
- %prog <command>"""
- ignore_require_venv = True
-
- def run(self, options: Values, args: List[str]) -> int:
- from pip._internal.commands import (
- commands_dict,
- create_command,
- get_similar_commands,
- )
-
- try:
- # 'pip help' with no args is handled by pip.__init__.parseopt()
- cmd_name = args[0] # the command we need help for
- except IndexError:
- return SUCCESS
-
- if cmd_name not in commands_dict:
- guess = get_similar_commands(cmd_name)
-
- msg = [f'unknown command "{cmd_name}"']
- if guess:
- msg.append(f'maybe you meant "{guess}"')
-
- raise CommandError(" - ".join(msg))
-
- command = create_command(cmd_name)
- command.parser.print_help()
-
- return SUCCESS
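`HelpCommand` defers to `get_similar_commands` for the 'maybe you meant' guess. A plausible stdlib sketch of that kind of fuzzy suggestion (an assumption about the approach, not pip's actual implementation) uses `difflib`:

```python
import difflib


def similar_command(name: str, commands: list[str]) -> str:
    # Return the closest known command name, or "" if nothing is close.
    close = difflib.get_close_matches(name, commands)
    return close[0] if close else ""


print(similar_command("instal", ["install", "download", "freeze"]))  # install
```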
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/index.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/index.py
deleted file mode 100644
index f55e9e4..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/index.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import logging
-from optparse import Values
-from typing import Any, Iterable, List, Optional, Union
-
-from pip._vendor.packaging.version import LegacyVersion, Version
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import IndexGroupCommand
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.commands.search import print_dist_installation_info
-from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
-from pip._internal.index.collector import LinkCollector
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.models.target_python import TargetPython
-from pip._internal.network.session import PipSession
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class IndexCommand(IndexGroupCommand):
- """
- Inspect information available from package indexes.
- """
-
- ignore_require_venv = True
- usage = """
- %prog versions <package>
- """
-
- def add_options(self) -> None:
- cmdoptions.add_target_python_options(self.cmd_opts)
-
- self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
- self.cmd_opts.add_option(cmdoptions.pre())
- self.cmd_opts.add_option(cmdoptions.no_binary())
- self.cmd_opts.add_option(cmdoptions.only_binary())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- handlers = {
- "versions": self.get_available_package_versions,
- }
-
- logger.warning(
- "pip index is currently an experimental command. "
- "It may be removed/changed in a future release "
- "without prior warning."
- )
-
- # Determine action
- if not args or args[0] not in handlers:
- logger.error(
- "Need an action (%s) to perform.",
- ", ".join(sorted(handlers)),
- )
- return ERROR
-
- action = args[0]
-
- # Error handling happens here, not in the action-handlers.
- try:
- handlers[action](options, args[1:])
- except PipError as e:
- logger.error(e.args[0])
- return ERROR
-
- return SUCCESS
-
- def _build_package_finder(
- self,
- options: Values,
- session: PipSession,
- target_python: Optional[TargetPython] = None,
- ignore_requires_python: Optional[bool] = None,
- ) -> PackageFinder:
- """
- Create a package finder appropriate to the index command.
- """
- link_collector = LinkCollector.create(session, options=options)
-
- # Pass allow_yanked=False to ignore yanked versions.
- selection_prefs = SelectionPreferences(
- allow_yanked=False,
- allow_all_prereleases=options.pre,
- ignore_requires_python=ignore_requires_python,
- )
-
- return PackageFinder.create(
- link_collector=link_collector,
- selection_prefs=selection_prefs,
- target_python=target_python,
- )
-
- def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
- if len(args) != 1:
- raise CommandError("You need to specify exactly one argument")
-
- target_python = cmdoptions.make_target_python(options)
- query = args[0]
-
- with self._build_session(options) as session:
- finder = self._build_package_finder(
- options=options,
- session=session,
- target_python=target_python,
- ignore_requires_python=options.ignore_requires_python,
- )
-
- versions: Iterable[Union[LegacyVersion, Version]] = (
- candidate.version for candidate in finder.find_all_candidates(query)
- )
-
- if not options.pre:
- # Remove prereleases
- versions = (
- version for version in versions if not version.is_prerelease
- )
- versions = set(versions)
-
- if not versions:
- raise DistributionNotFound(
- f"No matching distribution found for {query}"
- )
-
- formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
- latest = formatted_versions[0]
-
- write_output(f"{query} ({latest})")
- write_output("Available versions: {}".format(", ".join(formatted_versions)))
- print_dist_installation_info(query, latest)
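The prerelease filtering and descending sort in `get_available_package_versions` can be demonstrated on literal version strings. This sketch reuses the vendored packaging parser the file already imports, purely for illustration:

```python
from pip._vendor.packaging.version import parse

# Mirror of the filtering/ordering above, applied to literal strings
# instead of finder candidates; the version list is made up.
raw = ["1.0", "2.0b1", "1.5", "2.0"]
versions = {parse(v) for v in raw if not parse(v).is_prerelease}
formatted = [str(v) for v in sorted(versions, reverse=True)]
print(formatted)  # ['2.0', '1.5', '1.0']
```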
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/inspect.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/inspect.py
deleted file mode 100644
index 27c8fa3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/inspect.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import logging
-from optparse import Values
-from typing import Any, Dict, List
-
-from pip._vendor.packaging.markers import default_environment
-from pip._vendor.rich import print_json
-
-from pip import __version__
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import Command
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.metadata import BaseDistribution, get_environment
-from pip._internal.utils.compat import stdlib_pkgs
-from pip._internal.utils.urls import path_to_url
-
-logger = logging.getLogger(__name__)
-
-
-class InspectCommand(Command):
- """
- Inspect the content of a Python environment and produce a report in JSON format.
- """
-
- ignore_require_venv = True
- usage = """
- %prog [options]"""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "--local",
- action="store_true",
- default=False,
- help=(
- "If in a virtualenv that has global access, do not list "
- "globally-installed packages."
- ),
- )
- self.cmd_opts.add_option(
- "--user",
- dest="user",
- action="store_true",
- default=False,
- help="Only output packages installed in user-site.",
- )
- self.cmd_opts.add_option(cmdoptions.list_path())
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- cmdoptions.check_list_path_option(options)
- dists = get_environment(options.path).iter_installed_distributions(
- local_only=options.local,
- user_only=options.user,
- skip=set(stdlib_pkgs),
- )
- output = {
- "version": "1",
- "pip_version": __version__,
- "installed": [self._dist_to_dict(dist) for dist in dists],
- "environment": default_environment(),
- # TODO tags? scheme?
- }
- print_json(data=output)
- return SUCCESS
-
- def _dist_to_dict(self, dist: BaseDistribution) -> Dict[str, Any]:
- res: Dict[str, Any] = {
- "metadata": dist.metadata_dict,
- "metadata_location": dist.info_location,
- }
- # direct_url. Note that we don't have download_info (as in the installation
- # report) since it is not recorded in installed metadata.
- direct_url = dist.direct_url
- if direct_url is not None:
- res["direct_url"] = direct_url.to_dict()
- else:
- # Emulate direct_url for legacy editable installs.
- editable_project_location = dist.editable_project_location
- if editable_project_location is not None:
- res["direct_url"] = {
- "url": path_to_url(editable_project_location),
- "dir_info": {
- "editable": True,
- },
- }
-        # installer
-        installer = dist.installer
-        if installer:
-            res["installer"] = installer
- # requested
- if dist.installed_with_dist_info:
- res["requested"] = dist.requested
- return res
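To consume the report this command produces, it is safer to shell out to the stable CLI than to call `InspectCommand` directly. A sketch, assuming a pip new enough to ship `pip inspect` (22.2+); the metadata keys follow the report layout built above:

```python
import json
import subprocess
import sys

out = subprocess.run(
    [sys.executable, "-m", "pip", "inspect"],
    capture_output=True, text=True, check=True,
).stdout
report = json.loads(out)
for dist in report["installed"][:5]:
    meta = dist["metadata"]
    print(meta["name"], meta["version"])
```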
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/install.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/install.py
deleted file mode 100644
index e944bb9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/install.py
+++ /dev/null
@@ -1,774 +0,0 @@
-import errno
-import json
-import operator
-import os
-import shutil
-import site
-from optparse import SUPPRESS_HELP, Values
-from typing import List, Optional
-
-from pip._vendor.rich import print_json
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.cmdoptions import make_target_python
-from pip._internal.cli.req_command import (
- RequirementCommand,
- warn_if_run_as_root,
- with_cleanup,
-)
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.exceptions import CommandError, InstallationError
-from pip._internal.locations import get_scheme
-from pip._internal.metadata import get_environment
-from pip._internal.models.installation_report import InstallationReport
-from pip._internal.operations.build.build_tracker import get_build_tracker
-from pip._internal.operations.check import ConflictDetails, check_install_conflicts
-from pip._internal.req import install_given_reqs
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_legacy_setup_py_options,
-)
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.filesystem import test_writable_dir
-from pip._internal.utils.logging import getLogger
-from pip._internal.utils.misc import (
- check_externally_managed,
- ensure_dir,
- get_pip_version,
- protect_pip_from_modification_on_windows,
- write_output,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.virtualenv import (
- running_under_virtualenv,
- virtualenv_no_global,
-)
-from pip._internal.wheel_builder import build, should_build_for_install_command
-
-logger = getLogger(__name__)
-
-
-class InstallCommand(RequirementCommand):
- """
- Install packages from:
-
- - PyPI (and other indexes) using requirement specifiers.
- - VCS project urls.
- - Local project directories.
- - Local or remote source archives.
-
- pip also supports installing from "requirements files", which provide
- an easy way to specify a whole environment to be installed.
- """
-
- usage = """
- %prog [options] <requirement specifier> [package-index-options] ...
- %prog [options] -r <requirements file> [package-index-options] ...
- %prog [options] [-e] <vcs project url> ...
- %prog [options] [-e] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(cmdoptions.requirements())
- self.cmd_opts.add_option(cmdoptions.constraints())
- self.cmd_opts.add_option(cmdoptions.no_deps())
- self.cmd_opts.add_option(cmdoptions.pre())
-
- self.cmd_opts.add_option(cmdoptions.editable())
- self.cmd_opts.add_option(
- "--dry-run",
- action="store_true",
- dest="dry_run",
- default=False,
- help=(
- "Don't actually install anything, just print what would be. "
- "Can be used in combination with --ignore-installed "
- "to 'resolve' the requirements."
- ),
- )
- self.cmd_opts.add_option(
- "-t",
- "--target",
- dest="target_dir",
- metavar="dir",
- default=None,
- help=(
- "Install packages into <dir>. "
- "By default this will not replace existing files/folders in "
- "<dir>. Use --upgrade to replace existing packages in <dir> "
- "with new versions."
- ),
- )
- cmdoptions.add_target_python_options(self.cmd_opts)
-
- self.cmd_opts.add_option(
- "--user",
- dest="use_user_site",
- action="store_true",
- help=(
- "Install to the Python user install directory for your "
- "platform. Typically ~/.local/, or %APPDATA%\\Python on "
- "Windows. (See the Python documentation for site.USER_BASE "
- "for full details.)"
- ),
- )
- self.cmd_opts.add_option(
- "--no-user",
- dest="use_user_site",
- action="store_false",
- help=SUPPRESS_HELP,
- )
- self.cmd_opts.add_option(
- "--root",
- dest="root_path",
- metavar="dir",
- default=None,
- help="Install everything relative to this alternate root directory.",
- )
- self.cmd_opts.add_option(
- "--prefix",
- dest="prefix_path",
- metavar="dir",
- default=None,
- help=(
- "Installation prefix where lib, bin and other top-level "
- "folders are placed. Note that the resulting installation may "
- "contain scripts and other resources which reference the "
- "Python interpreter of pip, and not that of ``--prefix``. "
- "See also the ``--python`` option if the intention is to "
- "install packages into another (possibly pip-free) "
- "environment."
- ),
- )
-
- self.cmd_opts.add_option(cmdoptions.src())
-
- self.cmd_opts.add_option(
- "-U",
- "--upgrade",
- dest="upgrade",
- action="store_true",
- help=(
- "Upgrade all specified packages to the newest available "
- "version. The handling of dependencies depends on the "
- "upgrade-strategy used."
- ),
- )
-
- self.cmd_opts.add_option(
- "--upgrade-strategy",
- dest="upgrade_strategy",
- default="only-if-needed",
- choices=["only-if-needed", "eager"],
- help=(
- "Determines how dependency upgrading should be handled "
- "[default: %default]. "
- '"eager" - dependencies are upgraded regardless of '
- "whether the currently installed version satisfies the "
- "requirements of the upgraded package(s). "
- '"only-if-needed" - are upgraded only when they do not '
- "satisfy the requirements of the upgraded package(s)."
- ),
- )
-
- self.cmd_opts.add_option(
- "--force-reinstall",
- dest="force_reinstall",
- action="store_true",
- help="Reinstall all packages even if they are already up-to-date.",
- )
-
- self.cmd_opts.add_option(
- "-I",
- "--ignore-installed",
- dest="ignore_installed",
- action="store_true",
- help=(
- "Ignore the installed packages, overwriting them. "
- "This can break your system if the existing package "
- "is of a different version or was installed "
- "with a different package manager!"
- ),
- )
-
- self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
- self.cmd_opts.add_option(cmdoptions.no_build_isolation())
- self.cmd_opts.add_option(cmdoptions.use_pep517())
- self.cmd_opts.add_option(cmdoptions.no_use_pep517())
- self.cmd_opts.add_option(cmdoptions.check_build_deps())
- self.cmd_opts.add_option(cmdoptions.override_externally_managed())
-
- self.cmd_opts.add_option(cmdoptions.config_settings())
- self.cmd_opts.add_option(cmdoptions.global_options())
-
- self.cmd_opts.add_option(
- "--compile",
- action="store_true",
- dest="compile",
- default=True,
- help="Compile Python source files to bytecode",
- )
-
- self.cmd_opts.add_option(
- "--no-compile",
- action="store_false",
- dest="compile",
- help="Do not compile Python source files to bytecode",
- )
-
- self.cmd_opts.add_option(
- "--no-warn-script-location",
- action="store_false",
- dest="warn_script_location",
- default=True,
- help="Do not warn when installing scripts outside PATH",
- )
- self.cmd_opts.add_option(
- "--no-warn-conflicts",
- action="store_false",
- dest="warn_about_conflicts",
- default=True,
- help="Do not warn about broken dependencies",
- )
- self.cmd_opts.add_option(cmdoptions.no_binary())
- self.cmd_opts.add_option(cmdoptions.only_binary())
- self.cmd_opts.add_option(cmdoptions.prefer_binary())
- self.cmd_opts.add_option(cmdoptions.require_hashes())
- self.cmd_opts.add_option(cmdoptions.progress_bar())
- self.cmd_opts.add_option(cmdoptions.root_user_action())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
-
- self.cmd_opts.add_option(
- "--report",
- dest="json_report_file",
- metavar="file",
- default=None,
- help=(
- "Generate a JSON file describing what pip did to install "
- "the provided requirements. "
- "Can be used in combination with --dry-run and --ignore-installed "
- "to 'resolve' the requirements. "
- "When - is used as file name it writes to stdout. "
- "When writing to stdout, please combine with the --quiet option "
- "to avoid mixing pip logging output with JSON output."
- ),
- )
-
- @with_cleanup
- def run(self, options: Values, args: List[str]) -> int:
- if options.use_user_site and options.target_dir is not None:
- raise CommandError("Can not combine '--user' and '--target'")
-
- # Check whether the environment we're installing into is externally
- # managed, as specified in PEP 668. Specifying --root, --target, or
- # --prefix disables the check, since there's no reliable way to locate
- # the EXTERNALLY-MANAGED file for those cases. An exception is also
- # made specifically for "--dry-run --report" for convenience.
- installing_into_current_environment = (
- not (options.dry_run and options.json_report_file)
- and options.root_path is None
- and options.target_dir is None
- and options.prefix_path is None
- )
- if (
- installing_into_current_environment
- and not options.override_externally_managed
- ):
- check_externally_managed()
-
- upgrade_strategy = "to-satisfy-only"
- if options.upgrade:
- upgrade_strategy = options.upgrade_strategy
-
- cmdoptions.check_dist_restriction(options, check_target=True)
-
- logger.verbose("Using %s", get_pip_version())
- options.use_user_site = decide_user_install(
- options.use_user_site,
- prefix_path=options.prefix_path,
- target_dir=options.target_dir,
- root_path=options.root_path,
- isolated_mode=options.isolated_mode,
- )
-
- target_temp_dir: Optional[TempDirectory] = None
- target_temp_dir_path: Optional[str] = None
- if options.target_dir:
- options.ignore_installed = True
- options.target_dir = os.path.abspath(options.target_dir)
- if (
- # fmt: off
- os.path.exists(options.target_dir) and
- not os.path.isdir(options.target_dir)
- # fmt: on
- ):
- raise CommandError(
- "Target path exists but is not a directory, will not continue."
- )
-
- # Create a target directory for using with the target option
- target_temp_dir = TempDirectory(kind="target")
- target_temp_dir_path = target_temp_dir.path
- self.enter_context(target_temp_dir)
-
- global_options = options.global_options or []
-
- session = self.get_default_session(options)
-
- target_python = make_target_python(options)
- finder = self._build_package_finder(
- options=options,
- session=session,
- target_python=target_python,
- ignore_requires_python=options.ignore_requires_python,
- )
- build_tracker = self.enter_context(get_build_tracker())
-
- directory = TempDirectory(
- delete=not options.no_clean,
- kind="install",
- globally_managed=True,
- )
-
- try:
- reqs = self.get_requirements(args, options, finder, session)
- check_legacy_setup_py_options(options, reqs)
-
- wheel_cache = WheelCache(options.cache_dir)
-
- # Only when installing is it permitted to use PEP 660.
- # In other circumstances (pip wheel, pip download) we generate
- # regular (i.e. non editable) metadata and wheels.
- for req in reqs:
- req.permit_editable_wheels = True
-
- preparer = self.make_requirement_preparer(
- temp_build_dir=directory,
- options=options,
- build_tracker=build_tracker,
- session=session,
- finder=finder,
- use_user_site=options.use_user_site,
- verbosity=self.verbosity,
- )
- resolver = self.make_resolver(
- preparer=preparer,
- finder=finder,
- options=options,
- wheel_cache=wheel_cache,
- use_user_site=options.use_user_site,
- ignore_installed=options.ignore_installed,
- ignore_requires_python=options.ignore_requires_python,
- force_reinstall=options.force_reinstall,
- upgrade_strategy=upgrade_strategy,
- use_pep517=options.use_pep517,
- )
-
- self.trace_basic_info(finder)
-
- requirement_set = resolver.resolve(
- reqs, check_supported_wheels=not options.target_dir
- )
-
- if options.json_report_file:
- report = InstallationReport(requirement_set.requirements_to_install)
- if options.json_report_file == "-":
- print_json(data=report.to_dict())
- else:
- with open(options.json_report_file, "w", encoding="utf-8") as f:
- json.dump(report.to_dict(), f, indent=2, ensure_ascii=False)
-
- if options.dry_run:
-                # In non-dry-run mode, the legacy versions and specifiers
-                # check is done as part of conflict detection.
- requirement_set.warn_legacy_versions_and_specifiers()
- would_install_items = sorted(
- (r.metadata["name"], r.metadata["version"])
- for r in requirement_set.requirements_to_install
- )
- if would_install_items:
- write_output(
- "Would install %s",
- " ".join("-".join(item) for item in would_install_items),
- )
- return SUCCESS
-
- try:
- pip_req = requirement_set.get_requirement("pip")
- except KeyError:
- modifying_pip = False
- else:
- # If we're not replacing an already installed pip,
- # we're not modifying it.
- modifying_pip = pip_req.satisfied_by is None
- protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
-
- reqs_to_build = [
- r
- for r in requirement_set.requirements.values()
- if should_build_for_install_command(r)
- ]
-
- _, build_failures = build(
- reqs_to_build,
- wheel_cache=wheel_cache,
- verify=True,
- build_options=[],
- global_options=global_options,
- )
-
- if build_failures:
- raise InstallationError(
- "Could not build wheels for {}, which is required to "
- "install pyproject.toml-based projects".format(
- ", ".join(r.name for r in build_failures) # type: ignore
- )
- )
-
- to_install = resolver.get_installation_order(requirement_set)
-
- # Check for conflicts in the package set we're installing.
- conflicts: Optional[ConflictDetails] = None
- should_warn_about_conflicts = (
- not options.ignore_dependencies and options.warn_about_conflicts
- )
- if should_warn_about_conflicts:
- conflicts = self._determine_conflicts(to_install)
-
- # Don't warn about script install locations if
- # --target or --prefix has been specified
- warn_script_location = options.warn_script_location
- if options.target_dir or options.prefix_path:
- warn_script_location = False
-
- installed = install_given_reqs(
- to_install,
- global_options,
- root=options.root_path,
- home=target_temp_dir_path,
- prefix=options.prefix_path,
- warn_script_location=warn_script_location,
- use_user_site=options.use_user_site,
- pycompile=options.compile,
- )
-
- lib_locations = get_lib_location_guesses(
- user=options.use_user_site,
- home=target_temp_dir_path,
- root=options.root_path,
- prefix=options.prefix_path,
- isolated=options.isolated_mode,
- )
- env = get_environment(lib_locations)
-
- installed.sort(key=operator.attrgetter("name"))
- items = []
- for result in installed:
- item = result.name
- try:
- installed_dist = env.get_distribution(item)
- if installed_dist is not None:
- item = f"{item}-{installed_dist.version}"
- except Exception:
- pass
- items.append(item)
-
- if conflicts is not None:
- self._warn_about_conflicts(
- conflicts,
- resolver_variant=self.determine_resolver_variant(options),
- )
-
- installed_desc = " ".join(items)
- if installed_desc:
- write_output(
- "Successfully installed %s",
- installed_desc,
- )
- except OSError as error:
- show_traceback = self.verbosity >= 1
-
- message = create_os_error_message(
- error,
- show_traceback,
- options.use_user_site,
- )
- logger.error(message, exc_info=show_traceback)
-
- return ERROR
-
- if options.target_dir:
- assert target_temp_dir
- self._handle_target_dir(
- options.target_dir, target_temp_dir, options.upgrade
- )
- if options.root_user_action == "warn":
- warn_if_run_as_root()
- return SUCCESS
-
- def _handle_target_dir(
- self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
- ) -> None:
- ensure_dir(target_dir)
-
- # Checking both purelib and platlib directories for installed
- # packages to be moved to target directory
- lib_dir_list = []
-
- scheme = get_scheme("", home=target_temp_dir.path)
- purelib_dir = scheme.purelib
- platlib_dir = scheme.platlib
- data_dir = scheme.data
-
- if os.path.exists(purelib_dir):
- lib_dir_list.append(purelib_dir)
- if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
- lib_dir_list.append(platlib_dir)
- if os.path.exists(data_dir):
- lib_dir_list.append(data_dir)
-
- for lib_dir in lib_dir_list:
- for item in os.listdir(lib_dir):
- if lib_dir == data_dir:
- ddir = os.path.join(data_dir, item)
- if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
- continue
- target_item_dir = os.path.join(target_dir, item)
- if os.path.exists(target_item_dir):
- if not upgrade:
- logger.warning(
- "Target directory %s already exists. Specify "
- "--upgrade to force replacement.",
- target_item_dir,
- )
- continue
- if os.path.islink(target_item_dir):
- logger.warning(
- "Target directory %s already exists and is "
- "a link. pip will not automatically replace "
- "links, please remove if replacement is "
- "desired.",
- target_item_dir,
- )
- continue
- if os.path.isdir(target_item_dir):
- shutil.rmtree(target_item_dir)
- else:
- os.remove(target_item_dir)
-
- shutil.move(os.path.join(lib_dir, item), target_item_dir)
-
- def _determine_conflicts(
- self, to_install: List[InstallRequirement]
- ) -> Optional[ConflictDetails]:
- try:
- return check_install_conflicts(to_install)
- except Exception:
- logger.exception(
- "Error while checking for conflicts. Please file an issue on "
- "pip's issue tracker: https://github.com/pypa/pip/issues/new"
- )
- return None
-
- def _warn_about_conflicts(
- self, conflict_details: ConflictDetails, resolver_variant: str
- ) -> None:
- package_set, (missing, conflicting) = conflict_details
- if not missing and not conflicting:
- return
-
- parts: List[str] = []
- if resolver_variant == "legacy":
- parts.append(
- "pip's legacy dependency resolver does not consider dependency "
- "conflicts when selecting packages. This behaviour is the "
- "source of the following dependency conflicts."
- )
- else:
- assert resolver_variant == "resolvelib"
- parts.append(
- "pip's dependency resolver does not currently take into account "
- "all the packages that are installed. This behaviour is the "
- "source of the following dependency conflicts."
- )
-
- # NOTE: There is some duplication here, with commands/check.py
- for project_name in missing:
- version = package_set[project_name][0]
- for dependency in missing[project_name]:
- message = (
- f"{project_name} {version} requires {dependency[1]}, "
- "which is not installed."
- )
- parts.append(message)
-
- for project_name in conflicting:
- version = package_set[project_name][0]
- for dep_name, dep_version, req in conflicting[project_name]:
- message = (
- "{name} {version} requires {requirement}, but {you} have "
- "{dep_name} {dep_version} which is incompatible."
- ).format(
- name=project_name,
- version=version,
- requirement=req,
- dep_name=dep_name,
- dep_version=dep_version,
- you=("you" if resolver_variant == "resolvelib" else "you'll"),
- )
- parts.append(message)
-
- logger.critical("\n".join(parts))
-
-
-def get_lib_location_guesses(
- user: bool = False,
- home: Optional[str] = None,
- root: Optional[str] = None,
- isolated: bool = False,
- prefix: Optional[str] = None,
-) -> List[str]:
- scheme = get_scheme(
- "",
- user=user,
- home=home,
- root=root,
- isolated=isolated,
- prefix=prefix,
- )
- return [scheme.purelib, scheme.platlib]
-
-
-def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
- return all(
- test_writable_dir(d)
- for d in set(get_lib_location_guesses(root=root, isolated=isolated))
- )
-
-
-def decide_user_install(
- use_user_site: Optional[bool],
- prefix_path: Optional[str] = None,
- target_dir: Optional[str] = None,
- root_path: Optional[str] = None,
- isolated_mode: bool = False,
-) -> bool:
- """Determine whether to do a user install based on the input options.
-
- If use_user_site is False, no additional checks are done.
- If use_user_site is True, it is checked for compatibility with other
- options.
- If use_user_site is None, the default behaviour depends on the environment,
- which is provided by the other arguments.
- """
- # In some cases (config from tox), use_user_site can be set to an integer
- # rather than a bool, which 'use_user_site is False' wouldn't catch.
- if (use_user_site is not None) and (not use_user_site):
- logger.debug("Non-user install by explicit request")
- return False
-
- if use_user_site:
- if prefix_path:
- raise CommandError(
- "Can not combine '--user' and '--prefix' as they imply "
- "different installation locations"
- )
- if virtualenv_no_global():
- raise InstallationError(
- "Can not perform a '--user' install. User site-packages "
- "are not visible in this virtualenv."
- )
- logger.debug("User install by explicit request")
- return True
-
- # If we are here, user installs have not been explicitly requested/avoided
- assert use_user_site is None
-
- # user install incompatible with --prefix/--target
- if prefix_path or target_dir:
- logger.debug("Non-user install due to --prefix or --target option")
- return False
-
- # If user installs are not enabled, choose a non-user install
- if not site.ENABLE_USER_SITE:
- logger.debug("Non-user install because user site-packages disabled")
- return False
-
- # If we have permission for a non-user install, do that,
- # otherwise do a user install.
- if site_packages_writable(root=root_path, isolated=isolated_mode):
- logger.debug("Non-user install because site-packages writeable")
- return False
-
- logger.info(
- "Defaulting to user installation because normal site-packages "
- "is not writeable"
- )
- return True
-
-
-def create_os_error_message(
- error: OSError, show_traceback: bool, using_user_site: bool
-) -> str:
- """Format an error message for an OSError
-
- It may occur anytime during the execution of the install command.
- """
- parts = []
-
- # Mention the error if we are not going to show a traceback
- parts.append("Could not install packages due to an OSError")
- if not show_traceback:
- parts.append(": ")
- parts.append(str(error))
- else:
- parts.append(".")
-
-    # Split the error indication from a helper message (if any)
- parts[-1] += "\n"
-
- # Suggest useful actions to the user:
- # (1) using user site-packages or (2) verifying the permissions
- if error.errno == errno.EACCES:
- user_option_part = "Consider using the `--user` option"
- permissions_part = "Check the permissions"
-
- if not running_under_virtualenv() and not using_user_site:
- parts.extend(
- [
- user_option_part,
- " or ",
- permissions_part.lower(),
- ]
- )
- else:
- parts.append(permissions_part)
- parts.append(".\n")
-
-    # Suggest enabling Long Paths on Windows when the path length
-    # exceeds the 260-character limit
- if (
- WINDOWS
- and error.errno == errno.ENOENT
- and error.filename
- and len(error.filename) > 260
- ):
- parts.append(
- "HINT: This error might have occurred since "
- "this system does not have Windows Long Path "
- "support enabled. You can find information on "
- "how to enable this at "
- "https://pip.pypa.io/warnings/enable-long-paths\n"
- )
-
- return "".join(parts).strip() + "\n"
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/list.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/list.py
deleted file mode 100644
index e551dda..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/list.py
+++ /dev/null
@@ -1,368 +0,0 @@
-import json
-import logging
-from optparse import Values
-from typing import TYPE_CHECKING, Generator, List, Optional, Sequence, Tuple, cast
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import IndexGroupCommand
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import CommandError
-from pip._internal.index.collector import LinkCollector
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution, get_environment
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.network.session import PipSession
-from pip._internal.utils.compat import stdlib_pkgs
-from pip._internal.utils.misc import tabulate, write_output
-
-if TYPE_CHECKING:
- from pip._internal.metadata.base import DistributionVersion
-
- class _DistWithLatestInfo(BaseDistribution):
- """Give the distribution object a couple of extra fields.
-
- These will be populated during ``get_outdated()``. This is dirty but
- makes the rest of the code much cleaner.
- """
-
- latest_version: DistributionVersion
- latest_filetype: str
-
- _ProcessedDists = Sequence[_DistWithLatestInfo]
-
-
-logger = logging.getLogger(__name__)
-
-
-class ListCommand(IndexGroupCommand):
- """
- List installed packages, including editables.
-
- Packages are listed in a case-insensitive sorted order.
- """
-
- ignore_require_venv = True
- usage = """
- %prog [options]"""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-o",
- "--outdated",
- action="store_true",
- default=False,
- help="List outdated packages",
- )
- self.cmd_opts.add_option(
- "-u",
- "--uptodate",
- action="store_true",
- default=False,
- help="List uptodate packages",
- )
- self.cmd_opts.add_option(
- "-e",
- "--editable",
- action="store_true",
- default=False,
- help="List editable projects.",
- )
- self.cmd_opts.add_option(
- "-l",
- "--local",
- action="store_true",
- default=False,
- help=(
- "If in a virtualenv that has global access, do not list "
- "globally-installed packages."
- ),
- )
- self.cmd_opts.add_option(
- "--user",
- dest="user",
- action="store_true",
- default=False,
- help="Only output packages installed in user-site.",
- )
- self.cmd_opts.add_option(cmdoptions.list_path())
- self.cmd_opts.add_option(
- "--pre",
- action="store_true",
- default=False,
- help=(
- "Include pre-release and development versions. By default, "
- "pip only finds stable versions."
- ),
- )
-
- self.cmd_opts.add_option(
- "--format",
- action="store",
- dest="list_format",
- default="columns",
- choices=("columns", "freeze", "json"),
- help=(
- "Select the output format among: columns (default), freeze, or json. "
- "The 'freeze' format cannot be used with the --outdated option."
- ),
- )
-
- self.cmd_opts.add_option(
- "--not-required",
- action="store_true",
- dest="not_required",
- help="List packages that are not dependencies of installed packages.",
- )
-
- self.cmd_opts.add_option(
- "--exclude-editable",
- action="store_false",
- dest="include_editable",
- help="Exclude editable package from output.",
- )
- self.cmd_opts.add_option(
- "--include-editable",
- action="store_true",
- dest="include_editable",
- help="Include editable package from output.",
- default=True,
- )
- self.cmd_opts.add_option(cmdoptions.list_exclude())
- index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def _build_package_finder(
- self, options: Values, session: PipSession
- ) -> PackageFinder:
- """
- Create a package finder appropriate to this list command.
- """
- link_collector = LinkCollector.create(session, options=options)
-
- # Pass allow_yanked=False to ignore yanked versions.
- selection_prefs = SelectionPreferences(
- allow_yanked=False,
- allow_all_prereleases=options.pre,
- )
-
- return PackageFinder.create(
- link_collector=link_collector,
- selection_prefs=selection_prefs,
- )
-
- def run(self, options: Values, args: List[str]) -> int:
- if options.outdated and options.uptodate:
- raise CommandError("Options --outdated and --uptodate cannot be combined.")
-
- if options.outdated and options.list_format == "freeze":
- raise CommandError(
- "List format 'freeze' cannot be used with the --outdated option."
- )
-
- cmdoptions.check_list_path_option(options)
-
- skip = set(stdlib_pkgs)
- if options.excludes:
- skip.update(canonicalize_name(n) for n in options.excludes)
-
- packages: "_ProcessedDists" = [
- cast("_DistWithLatestInfo", d)
- for d in get_environment(options.path).iter_installed_distributions(
- local_only=options.local,
- user_only=options.user,
- editables_only=options.editable,
- include_editables=options.include_editable,
- skip=skip,
- )
- ]
-
-        # get_not_required must be called first so that all dependencies are
-        # found and filtered out correctly. Otherwise a package may not be
-        # identified as a requirement because its parent packages were
-        # filtered out earlier.
- if options.not_required:
- packages = self.get_not_required(packages, options)
-
- if options.outdated:
- packages = self.get_outdated(packages, options)
- elif options.uptodate:
- packages = self.get_uptodate(packages, options)
-
- self.output_package_listing(packages, options)
- return SUCCESS
-
- def get_outdated(
- self, packages: "_ProcessedDists", options: Values
- ) -> "_ProcessedDists":
- return [
- dist
- for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version > dist.version
- ]
-
- def get_uptodate(
- self, packages: "_ProcessedDists", options: Values
- ) -> "_ProcessedDists":
- return [
- dist
- for dist in self.iter_packages_latest_infos(packages, options)
- if dist.latest_version == dist.version
- ]
-
- def get_not_required(
- self, packages: "_ProcessedDists", options: Values
- ) -> "_ProcessedDists":
- dep_keys = {
- canonicalize_name(dep.name)
- for dist in packages
- for dep in (dist.iter_dependencies() or ())
- }
-
- # Create a set to remove duplicate packages, and cast it to a list
- # to keep the return type consistent with get_outdated and
- # get_uptodate
- return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})
-
- def iter_packages_latest_infos(
- self, packages: "_ProcessedDists", options: Values
- ) -> Generator["_DistWithLatestInfo", None, None]:
- with self._build_session(options) as session:
- finder = self._build_package_finder(options, session)
-
- def latest_info(
- dist: "_DistWithLatestInfo",
- ) -> Optional["_DistWithLatestInfo"]:
- all_candidates = finder.find_all_candidates(dist.canonical_name)
- if not options.pre:
- # Remove prereleases
- all_candidates = [
- candidate
- for candidate in all_candidates
- if not candidate.version.is_prerelease
- ]
-
- evaluator = finder.make_candidate_evaluator(
- project_name=dist.canonical_name,
- )
- best_candidate = evaluator.sort_best_candidate(all_candidates)
- if best_candidate is None:
- return None
-
- remote_version = best_candidate.version
- if best_candidate.link.is_wheel:
- typ = "wheel"
- else:
- typ = "sdist"
- dist.latest_version = remote_version
- dist.latest_filetype = typ
- return dist
-
- for dist in map(latest_info, packages):
- if dist is not None:
- yield dist
-
- def output_package_listing(
- self, packages: "_ProcessedDists", options: Values
- ) -> None:
- packages = sorted(
- packages,
- key=lambda dist: dist.canonical_name,
- )
- if options.list_format == "columns" and packages:
- data, header = format_for_columns(packages, options)
- self.output_package_listing_columns(data, header)
- elif options.list_format == "freeze":
- for dist in packages:
- if options.verbose >= 1:
- write_output(
- "%s==%s (%s)", dist.raw_name, dist.version, dist.location
- )
- else:
- write_output("%s==%s", dist.raw_name, dist.version)
- elif options.list_format == "json":
- write_output(format_for_json(packages, options))
-
- def output_package_listing_columns(
- self, data: List[List[str]], header: List[str]
- ) -> None:
- # insert the header first: we need to know the size of column names
- if len(data) > 0:
- data.insert(0, header)
-
- pkg_strings, sizes = tabulate(data)
-
- # Create and add a separator.
- if len(data) > 0:
- pkg_strings.insert(1, " ".join("-" * x for x in sizes))
-
- for val in pkg_strings:
- write_output(val)
-
-
-def format_for_columns(
- pkgs: "_ProcessedDists", options: Values
-) -> Tuple[List[List[str]], List[str]]:
- """
- Convert the package data into something usable
- by output_package_listing_columns.
- """
- header = ["Package", "Version"]
-
- running_outdated = options.outdated
- if running_outdated:
- header.extend(["Latest", "Type"])
-
- has_editables = any(x.editable for x in pkgs)
- if has_editables:
- header.append("Editable project location")
-
-    if options.verbose >= 1:
-        header.append("Location")
-        header.append("Installer")
-
- data = []
- for proj in pkgs:
- # if we're working on the 'outdated' list, separate out the
- # latest_version and type
- row = [proj.raw_name, str(proj.version)]
-
- if running_outdated:
- row.append(str(proj.latest_version))
- row.append(proj.latest_filetype)
-
- if has_editables:
- row.append(proj.editable_project_location or "")
-
- if options.verbose >= 1:
- row.append(proj.location or "")
- if options.verbose >= 1:
- row.append(proj.installer)
-
- data.append(row)
-
- return data, header
-
-
-def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
- data = []
- for dist in packages:
- info = {
- "name": dist.raw_name,
- "version": str(dist.version),
- }
- if options.verbose >= 1:
- info["location"] = dist.location or ""
- info["installer"] = dist.installer
- if options.outdated:
- info["latest_version"] = str(dist.latest_version)
- info["latest_filetype"] = dist.latest_filetype
- editable_project_location = dist.editable_project_location
- if editable_project_location:
- info["editable_project_location"] = editable_project_location
- data.append(info)
- return json.dumps(data)
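For reference, format_for_json serializes a plain list of dicts; the shape it emits for `pip list --format=json` reduces to the following (names and versions here are made up; "location"/"installer" appear only with -v, and the "latest_*" keys only with --outdated):

import json

sample = [
    {"name": "requests", "version": "2.31.0"},
    {"name": "example", "version": "1.0",
     "latest_version": "2.0", "latest_filetype": "wheel"},
]
print(json.dumps(sample))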
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/search.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/search.py
deleted file mode 100644
index 03ed925..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/search.py
+++ /dev/null
@@ -1,174 +0,0 @@
-import logging
-import shutil
-import sys
-import textwrap
-import xmlrpc.client
-from collections import OrderedDict
-from optparse import Values
-from typing import TYPE_CHECKING, Dict, List, Optional
-
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.req_command import SessionCommandMixin
-from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
-from pip._internal.exceptions import CommandError
-from pip._internal.metadata import get_default_environment
-from pip._internal.models.index import PyPI
-from pip._internal.network.xmlrpc import PipXmlrpcTransport
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import write_output
-
-if TYPE_CHECKING:
- from typing import TypedDict
-
- class TransformedHit(TypedDict):
- name: str
- summary: str
- versions: List[str]
-
-
-logger = logging.getLogger(__name__)
-
-
-class SearchCommand(Command, SessionCommandMixin):
- """Search for PyPI packages whose name or summary contains <query>."""
-
- usage = """
- %prog [options] <query>"""
- ignore_require_venv = True
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-i",
- "--index",
- dest="index",
- metavar="URL",
- default=PyPI.pypi_url,
- help="Base URL of Python Package Index (default %default)",
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- if not args:
- raise CommandError("Missing required argument (search query).")
- query = args
- pypi_hits = self.search(query, options)
- hits = transform_hits(pypi_hits)
-
- terminal_width = None
- if sys.stdout.isatty():
- terminal_width = shutil.get_terminal_size()[0]
-
- print_results(hits, terminal_width=terminal_width)
- if pypi_hits:
- return SUCCESS
- return NO_MATCHES_FOUND
-
- def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
- index_url = options.index
-
- session = self.get_default_session(options)
-
- transport = PipXmlrpcTransport(index_url, session)
- pypi = xmlrpc.client.ServerProxy(index_url, transport)
- try:
- hits = pypi.search({"name": query, "summary": query}, "or")
- except xmlrpc.client.Fault as fault:
- message = "XMLRPC request failed [code: {code}]\n{string}".format(
- code=fault.faultCode,
- string=fault.faultString,
- )
- raise CommandError(message)
- assert isinstance(hits, list)
- return hits
-
-
-def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
- """
- The list from PyPI is really a list of versions. We want a list of
- packages with the list of versions stored inline. This converts the
- list from PyPI into one we can use.
- """
- packages: Dict[str, "TransformedHit"] = OrderedDict()
- for hit in hits:
- name = hit["name"]
- summary = hit["summary"]
- version = hit["version"]
-
- if name not in packages:
- packages[name] = {
- "name": name,
- "summary": summary,
- "versions": [version],
- }
- else:
- packages[name]["versions"].append(version)
-
- # if this is the highest version, replace summary and score
- if version == highest_version(packages[name]["versions"]):
- packages[name]["summary"] = summary
-
- return list(packages.values())
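A condensed, standalone illustration of the grouping transform_hits performs (the max() call is a crude stand-in for highest_version; it works here only because these sample versions also compare correctly as strings):

hits = [
    {"name": "foo", "summary": "first release", "version": "1.0"},
    {"name": "foo", "summary": "latest release", "version": "2.0"},
    {"name": "bar", "summary": "only release", "version": "0.1"},
]
packages = {}
for hit in hits:
    entry = packages.setdefault(
        hit["name"], {"name": hit["name"], "summary": hit["summary"], "versions": []}
    )
    entry["versions"].append(hit["version"])
    if hit["version"] == max(entry["versions"]):  # stand-in for highest_version
        entry["summary"] = hit["summary"]
print(list(packages.values()))
# [{'name': 'foo', 'summary': 'latest release', 'versions': ['1.0', '2.0']},
#  {'name': 'bar', 'summary': 'only release', 'versions': ['0.1']}]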
-
-
-def print_dist_installation_info(name: str, latest: str) -> None:
- env = get_default_environment()
- dist = env.get_distribution(name)
- if dist is not None:
- with indent_log():
- if dist.version == latest:
- write_output("INSTALLED: %s (latest)", dist.version)
- else:
- write_output("INSTALLED: %s", dist.version)
- if parse_version(latest).pre:
- write_output(
- "LATEST: %s (pre-release; install"
- " with `pip install --pre`)",
- latest,
- )
- else:
- write_output("LATEST: %s", latest)
-
-
-def print_results(
- hits: List["TransformedHit"],
- name_column_width: Optional[int] = None,
- terminal_width: Optional[int] = None,
-) -> None:
- if not hits:
- return
- if name_column_width is None:
- name_column_width = (
- max(
- [
- len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
- for hit in hits
- ]
- )
- + 4
- )
-
- for hit in hits:
- name = hit["name"]
- summary = hit["summary"] or ""
- latest = highest_version(hit.get("versions", ["-"]))
- if terminal_width is not None:
- target_width = terminal_width - name_column_width - 5
- if target_width > 10:
- # wrap and indent summary to fit terminal
- summary_lines = textwrap.wrap(summary, target_width)
- summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
-
- name_latest = f"{name} ({latest})"
- line = f"{name_latest:{name_column_width}} - {summary}"
- try:
- write_output(line)
- print_dist_installation_info(name, latest)
- except UnicodeEncodeError:
- pass
-
-
-def highest_version(versions: List[str]) -> str:
- return max(versions, key=parse_version)
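highest_version orders by PEP 440 semantics rather than string comparison, which matters for versions like these. A standalone check (assumes the `packaging` distribution is available in place of pip._vendor.packaging):

from packaging.version import parse as parse_version

versions = ["1.9", "1.10"]
print(max(versions, key=parse_version))  # 1.10 -- PEP 440 ordering
print(max(versions))                     # 1.9  -- lexicographic, and wrong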
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/show.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/show.py
deleted file mode 100644
index 3f10701..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/show.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import logging
-from optparse import Values
-from typing import Generator, Iterable, Iterator, List, NamedTuple, Optional
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.status_codes import ERROR, SUCCESS
-from pip._internal.metadata import BaseDistribution, get_default_environment
-from pip._internal.utils.misc import write_output
-
-logger = logging.getLogger(__name__)
-
-
-class ShowCommand(Command):
- """
- Show information about one or more installed packages.
-
- The output is in RFC-compliant mail header format.
- """
-
- usage = """
- %prog [options] <package> ..."""
- ignore_require_venv = True
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-f",
- "--files",
- dest="files",
- action="store_true",
- default=False,
- help="Show the full list of installed files for each package.",
- )
-
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- if not args:
- logger.warning("ERROR: Please provide a package name or names.")
- return ERROR
- query = args
-
- results = search_packages_info(query)
- if not print_results(
- results, list_files=options.files, verbose=options.verbose
- ):
- return ERROR
- return SUCCESS
-
-
-class _PackageInfo(NamedTuple):
- name: str
- version: str
- location: str
- editable_project_location: Optional[str]
- requires: List[str]
- required_by: List[str]
- installer: str
- metadata_version: str
- classifiers: List[str]
- summary: str
- homepage: str
- project_urls: List[str]
- author: str
- author_email: str
- license: str
- entry_points: List[str]
- files: Optional[List[str]]
-
-
-def search_packages_info(query: List[str]) -> Generator[_PackageInfo, None, None]:
- """
- Gather details from installed distributions. Print distribution name,
- version, location, and installed files. Listing installed files
- requires a pip-generated 'installed-files.txt' in the distribution's
- '.egg-info' directory.
- """
- env = get_default_environment()
-
- installed = {dist.canonical_name: dist for dist in env.iter_all_distributions()}
- query_names = [canonicalize_name(name) for name in query]
- missing = sorted(
- [name for name, pkg in zip(query, query_names) if pkg not in installed]
- )
- if missing:
- logger.warning("Package(s) not found: %s", ", ".join(missing))
-
- def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
- return (
- dist.metadata["Name"] or "UNKNOWN"
- for dist in installed.values()
- if current_dist.canonical_name
- in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
- )
-
- for query_name in query_names:
- try:
- dist = installed[query_name]
- except KeyError:
- continue
-
- requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
- required_by = sorted(_get_requiring_packages(dist), key=str.lower)
-
- try:
- entry_points_text = dist.read_text("entry_points.txt")
- entry_points = entry_points_text.splitlines(keepends=False)
- except FileNotFoundError:
- entry_points = []
-
- files_iter = dist.iter_declared_entries()
- if files_iter is None:
- files: Optional[List[str]] = None
- else:
- files = sorted(files_iter)
-
- metadata = dist.metadata
-
- yield _PackageInfo(
- name=dist.raw_name,
- version=str(dist.version),
- location=dist.location or "",
- editable_project_location=dist.editable_project_location,
- requires=requires,
- required_by=required_by,
- installer=dist.installer,
- metadata_version=dist.metadata_version or "",
- classifiers=metadata.get_all("Classifier", []),
- summary=metadata.get("Summary", ""),
- homepage=metadata.get("Home-page", ""),
- project_urls=metadata.get_all("Project-URL", []),
- author=metadata.get("Author", ""),
- author_email=metadata.get("Author-email", ""),
- license=metadata.get("License", ""),
- entry_points=entry_points,
- files=files,
- )
-
-
-def print_results(
- distributions: Iterable[_PackageInfo],
- list_files: bool,
- verbose: bool,
-) -> bool:
- """
- Print the information from installed distributions found.
- """
- results_printed = False
- for i, dist in enumerate(distributions):
- results_printed = True
- if i > 0:
- write_output("---")
-
- write_output("Name: %s", dist.name)
- write_output("Version: %s", dist.version)
- write_output("Summary: %s", dist.summary)
- write_output("Home-page: %s", dist.homepage)
- write_output("Author: %s", dist.author)
- write_output("Author-email: %s", dist.author_email)
- write_output("License: %s", dist.license)
- write_output("Location: %s", dist.location)
- if dist.editable_project_location is not None:
- write_output(
- "Editable project location: %s", dist.editable_project_location
- )
- write_output("Requires: %s", ", ".join(dist.requires))
- write_output("Required-by: %s", ", ".join(dist.required_by))
-
- if verbose:
- write_output("Metadata-Version: %s", dist.metadata_version)
- write_output("Installer: %s", dist.installer)
- write_output("Classifiers:")
- for classifier in dist.classifiers:
- write_output(" %s", classifier)
- write_output("Entry-points:")
- for entry in dist.entry_points:
- write_output(" %s", entry.strip())
- write_output("Project-URLs:")
- for project_url in dist.project_urls:
- write_output(" %s", project_url)
- if list_files:
- write_output("Files:")
- if dist.files is None:
- write_output("Cannot locate RECORD or installed-files.txt")
- else:
- for line in dist.files:
- write_output(" %s", line.strip())
- return results_printed
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/uninstall.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/uninstall.py
deleted file mode 100644
index f198fc3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/uninstall.py
+++ /dev/null
@@ -1,113 +0,0 @@
-import logging
-from optparse import Values
-from typing import List
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.base_command import Command
-from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import InstallationError
-from pip._internal.req import parse_requirements
-from pip._internal.req.constructors import (
- install_req_from_line,
- install_req_from_parsed_requirement,
-)
-from pip._internal.utils.misc import (
- check_externally_managed,
- protect_pip_from_modification_on_windows,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class UninstallCommand(Command, SessionCommandMixin):
- """
- Uninstall packages.
-
- pip is able to uninstall most installed packages. Known exceptions are:
-
- - Pure distutils packages installed with ``python setup.py install``, which
- leave behind no metadata to determine what files were installed.
- - Script wrappers installed by ``python setup.py develop``.
- """
-
- usage = """
- %prog [options] <package> ...
- %prog [options] -r <requirements file> ..."""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-r",
- "--requirement",
- dest="requirements",
- action="append",
- default=[],
- metavar="file",
- help=(
- "Uninstall all the packages listed in the given requirements "
- "file. This option can be used multiple times."
- ),
- )
- self.cmd_opts.add_option(
- "-y",
- "--yes",
- dest="yes",
- action="store_true",
- help="Don't ask for confirmation of uninstall deletions.",
- )
- self.cmd_opts.add_option(cmdoptions.root_user_action())
- self.cmd_opts.add_option(cmdoptions.override_externally_managed())
- self.parser.insert_option_group(0, self.cmd_opts)
-
- def run(self, options: Values, args: List[str]) -> int:
- session = self.get_default_session(options)
-
- reqs_to_uninstall = {}
- for name in args:
- req = install_req_from_line(
- name,
- isolated=options.isolated_mode,
- )
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- else:
- logger.warning(
- "Invalid requirement: %r ignored -"
- " the uninstall command expects named"
- " requirements.",
- name,
- )
- for filename in options.requirements:
- for parsed_req in parse_requirements(
- filename, options=options, session=session
- ):
- req = install_req_from_parsed_requirement(
- parsed_req, isolated=options.isolated_mode
- )
- if req.name:
- reqs_to_uninstall[canonicalize_name(req.name)] = req
- if not reqs_to_uninstall:
- raise InstallationError(
- f"You must give at least one requirement to {self.name} (see "
- f'"pip help {self.name}")'
- )
-
- if not options.override_externally_managed:
- check_externally_managed()
-
- protect_pip_from_modification_on_windows(
- modifying_pip="pip" in reqs_to_uninstall
- )
-
- for req in reqs_to_uninstall.values():
- uninstall_pathset = req.uninstall(
- auto_confirm=options.yes,
- verbose=self.verbosity > 0,
- )
- if uninstall_pathset:
- uninstall_pathset.commit()
- if options.root_user_action == "warn":
- warn_if_run_as_root()
- return SUCCESS
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/commands/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/commands/wheel.py
deleted file mode 100644
index ed578aa..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/commands/wheel.py
+++ /dev/null
@@ -1,183 +0,0 @@
-import logging
-import os
-import shutil
-from optparse import Values
-from typing import List
-
-from pip._internal.cache import WheelCache
-from pip._internal.cli import cmdoptions
-from pip._internal.cli.req_command import RequirementCommand, with_cleanup
-from pip._internal.cli.status_codes import SUCCESS
-from pip._internal.exceptions import CommandError
-from pip._internal.operations.build.build_tracker import get_build_tracker
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_legacy_setup_py_options,
-)
-from pip._internal.utils.misc import ensure_dir, normalize_path
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.wheel_builder import build, should_build_for_wheel_command
-
-logger = logging.getLogger(__name__)
-
-
-class WheelCommand(RequirementCommand):
- """
- Build Wheel archives for your requirements and dependencies.
-
- Wheel is a built-package format, and offers the advantage of not
- recompiling your software during every install. For more details, see the
- wheel docs: https://wheel.readthedocs.io/en/latest/
-
- 'pip wheel' uses the build system interface as described here:
- https://pip.pypa.io/en/stable/reference/build-system/
-
- """
-
- usage = """
- %prog [options] <requirement specifier> ...
- %prog [options] -r <requirements file> ...
- %prog [options] [-e] <vcs project url> ...
- %prog [options] [-e] <local project path> ...
- %prog [options] <archive url/path> ..."""
-
- def add_options(self) -> None:
- self.cmd_opts.add_option(
- "-w",
- "--wheel-dir",
- dest="wheel_dir",
- metavar="dir",
- default=os.curdir,
- help=(
- "Build wheels into <dir>, where the default is the "
- "current working directory."
- ),
- )
- self.cmd_opts.add_option(cmdoptions.no_binary())
- self.cmd_opts.add_option(cmdoptions.only_binary())
- self.cmd_opts.add_option(cmdoptions.prefer_binary())
- self.cmd_opts.add_option(cmdoptions.no_build_isolation())
- self.cmd_opts.add_option(cmdoptions.use_pep517())
- self.cmd_opts.add_option(cmdoptions.no_use_pep517())
- self.cmd_opts.add_option(cmdoptions.check_build_deps())
- self.cmd_opts.add_option(cmdoptions.constraints())
- self.cmd_opts.add_option(cmdoptions.editable())
- self.cmd_opts.add_option(cmdoptions.requirements())
- self.cmd_opts.add_option(cmdoptions.src())
- self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
- self.cmd_opts.add_option(cmdoptions.no_deps())
- self.cmd_opts.add_option(cmdoptions.progress_bar())
-
- self.cmd_opts.add_option(
- "--no-verify",
- dest="no_verify",
- action="store_true",
- default=False,
- help="Don't verify if built wheel is valid.",
- )
-
- self.cmd_opts.add_option(cmdoptions.config_settings())
- self.cmd_opts.add_option(cmdoptions.build_options())
- self.cmd_opts.add_option(cmdoptions.global_options())
-
- self.cmd_opts.add_option(
- "--pre",
- action="store_true",
- default=False,
- help=(
- "Include pre-release and development versions. By default, "
- "pip only finds stable versions."
- ),
- )
-
- self.cmd_opts.add_option(cmdoptions.require_hashes())
-
- index_opts = cmdoptions.make_option_group(
- cmdoptions.index_group,
- self.parser,
- )
-
- self.parser.insert_option_group(0, index_opts)
- self.parser.insert_option_group(0, self.cmd_opts)
-
- @with_cleanup
- def run(self, options: Values, args: List[str]) -> int:
- session = self.get_default_session(options)
-
- finder = self._build_package_finder(options, session)
-
- options.wheel_dir = normalize_path(options.wheel_dir)
- ensure_dir(options.wheel_dir)
-
- build_tracker = self.enter_context(get_build_tracker())
-
- directory = TempDirectory(
- delete=not options.no_clean,
- kind="wheel",
- globally_managed=True,
- )
-
- reqs = self.get_requirements(args, options, finder, session)
- check_legacy_setup_py_options(options, reqs)
-
- wheel_cache = WheelCache(options.cache_dir)
-
- preparer = self.make_requirement_preparer(
- temp_build_dir=directory,
- options=options,
- build_tracker=build_tracker,
- session=session,
- finder=finder,
- download_dir=options.wheel_dir,
- use_user_site=False,
- verbosity=self.verbosity,
- )
-
- resolver = self.make_resolver(
- preparer=preparer,
- finder=finder,
- options=options,
- wheel_cache=wheel_cache,
- ignore_requires_python=options.ignore_requires_python,
- use_pep517=options.use_pep517,
- )
-
- self.trace_basic_info(finder)
-
- requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
-
- reqs_to_build: List[InstallRequirement] = []
- for req in requirement_set.requirements.values():
- if req.is_wheel:
- preparer.save_linked_requirement(req)
- elif should_build_for_wheel_command(req):
- reqs_to_build.append(req)
-
- preparer.prepare_linked_requirements_more(requirement_set.requirements.values())
- requirement_set.warn_legacy_versions_and_specifiers()
-
- # build wheels
- build_successes, build_failures = build(
- reqs_to_build,
- wheel_cache=wheel_cache,
- verify=(not options.no_verify),
- build_options=options.build_options or [],
- global_options=options.global_options or [],
- )
- for req in build_successes:
- assert req.link and req.link.is_wheel
- assert req.local_file_path
- # copy from cache to target directory
- try:
- shutil.copy(req.local_file_path, options.wheel_dir)
- except OSError as e:
- logger.warning(
- "Building wheel for %s failed: %s",
- req.name,
- e,
- )
- build_failures.append(req)
- if len(build_failures) != 0:
- raise CommandError("Failed to build one or more wheels")
-
- return SUCCESS
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/configuration.py b/venv/lib/python3.11/site-packages/pip/_internal/configuration.py
deleted file mode 100644
index c25273d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/configuration.py
+++ /dev/null
@@ -1,383 +0,0 @@
-"""Configuration management setup
-
-Some terminology:
-- name
- As written in config files.
-- value
- Value associated with a name
-- key
- Name combined with its section (section.name)
-- variant
- A single word describing where the configuration key-value pair came from
-"""
-
-import configparser
-import locale
-import os
-import sys
-from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
-
-from pip._internal.exceptions import (
- ConfigurationError,
- ConfigurationFileCouldNotBeLoaded,
-)
-from pip._internal.utils import appdirs
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.logging import getLogger
-from pip._internal.utils.misc import ensure_dir, enum
-
-RawConfigParser = configparser.RawConfigParser # Shorthand
-Kind = NewType("Kind", str)
-
-CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
-ENV_NAMES_IGNORED = "version", "help"
-
-# The kinds of configurations there are.
-kinds = enum(
- USER="user", # User Specific
- GLOBAL="global", # System Wide
- SITE="site", # [Virtual] Environment Specific
- ENV="env", # from PIP_CONFIG_FILE
- ENV_VAR="env-var", # from Environment Variables
-)
-OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
-VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
-
-logger = getLogger(__name__)
-
-
-# NOTE: Maybe use the optionxform attribute to normalize key names.
-def _normalize_name(name: str) -> str:
- """Make a name consistent regardless of source (environment or file)"""
- name = name.lower().replace("_", "-")
- if name.startswith("--"):
- name = name[2:] # only prefer long opts
- return name
-
-
-def _disassemble_key(name: str) -> List[str]:
- if "." not in name:
- error_message = (
- "Key does not contain dot separated section and key. "
- f"Perhaps you wanted to use 'global.{name}' instead?"
- )
- raise ConfigurationError(error_message)
- return name.split(".", 1)
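Worked examples for the two helpers above, with the same steps inlined so the snippet runs standalone:

# _normalize_name("--GLOBAL_INDEX-url") -> "global-index-url"
name = "--GLOBAL_INDEX-url".lower().replace("_", "-")
if name.startswith("--"):
    name = name[2:]  # only prefer long opts
print(name)                              # global-index-url
# _disassemble_key("global.index-url") -> ["global", "index-url"]
print("global.index-url".split(".", 1))  # ['global', 'index-url']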
-
-
-def get_configuration_files() -> Dict[Kind, List[str]]:
- global_config_files = [
- os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
- ]
-
- site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
- legacy_config_file = os.path.join(
- os.path.expanduser("~"),
- "pip" if WINDOWS else ".pip",
- CONFIG_BASENAME,
- )
- new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
- return {
- kinds.GLOBAL: global_config_files,
- kinds.SITE: [site_config_file],
- kinds.USER: [legacy_config_file, new_config_file],
- }
-
-
-class Configuration:
- """Handles management of configuration.
-
- Provides an interface to accessing and managing configuration files.
-
- This class provides an API that takes "section.key-name" style
- keys and stores the value associated with it as "key-name" under the
- section "section".
-
- This allows for a clean interface wherein both the section and the
- key-name are preserved in an easy-to-manage form in the configuration
- files, while the stored data remains tidy.
- """
-
- def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
- super().__init__()
-
- if load_only is not None and load_only not in VALID_LOAD_ONLY:
- raise ConfigurationError(
- "Got invalid value for load_only - should be one of {}".format(
- ", ".join(map(repr, VALID_LOAD_ONLY))
- )
- )
- self.isolated = isolated
- self.load_only = load_only
-
- # Because we keep track of where we got the data from
- self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
- variant: [] for variant in OVERRIDE_ORDER
- }
- self._config: Dict[Kind, Dict[str, Any]] = {
- variant: {} for variant in OVERRIDE_ORDER
- }
- self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
-
- def load(self) -> None:
- """Loads configuration from configuration files and environment"""
- self._load_config_files()
- if not self.isolated:
- self._load_environment_vars()
-
- def get_file_to_edit(self) -> Optional[str]:
- """Returns the file with highest priority in configuration"""
- assert self.load_only is not None, "A file to edit needs to be specified"
-
- try:
- return self._get_parser_to_modify()[0]
- except IndexError:
- return None
-
- def items(self) -> Iterable[Tuple[str, Any]]:
- """Returns key-value pairs like dict.items() representing the loaded
- configuration
- """
- return self._dictionary.items()
-
- def get_value(self, key: str) -> Any:
- """Get a value from the configuration."""
- orig_key = key
- key = _normalize_name(key)
- try:
- return self._dictionary[key]
- except KeyError:
- # disassembling triggers a more useful error message than simply
- # "No such key" in the case that the key isn't in the form command.option
- _disassemble_key(key)
- raise ConfigurationError(f"No such key - {orig_key}")
-
- def set_value(self, key: str, value: Any) -> None:
- """Modify a value in the configuration."""
- key = _normalize_name(key)
- self._ensure_have_load_only()
-
- assert self.load_only
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
-
- # Modify the parser and the configuration
- if not parser.has_section(section):
- parser.add_section(section)
- parser.set(section, name, value)
-
- self._config[self.load_only][key] = value
- self._mark_as_modified(fname, parser)
-
- def unset_value(self, key: str) -> None:
- """Unset a value in the configuration."""
- orig_key = key
- key = _normalize_name(key)
- self._ensure_have_load_only()
-
- assert self.load_only
- if key not in self._config[self.load_only]:
- raise ConfigurationError(f"No such key - {orig_key}")
-
- fname, parser = self._get_parser_to_modify()
-
- if parser is not None:
- section, name = _disassemble_key(key)
- if not (
- parser.has_section(section) and parser.remove_option(section, name)
- ):
- # The option was not removed.
- raise ConfigurationError(
- "Fatal Internal error [id=1]. Please report as a bug."
- )
-
- # The section may be empty after the option was removed.
- if not parser.items(section):
- parser.remove_section(section)
- self._mark_as_modified(fname, parser)
-
- del self._config[self.load_only][key]
-
- def save(self) -> None:
- """Save the current in-memory state."""
- self._ensure_have_load_only()
-
- for fname, parser in self._modified_parsers:
- logger.info("Writing to %s", fname)
-
- # Ensure directory exists.
- ensure_dir(os.path.dirname(fname))
-
- # Report a clear error if the file cannot be written (e.g. permissions).
- try:
- with open(fname, "w") as f:
- parser.write(f)
- except OSError as error:
- raise ConfigurationError(
- f"An error occurred while writing to the configuration file "
- f"{fname}: {error}"
- )
-
- #
- # Private routines
- #
-
- def _ensure_have_load_only(self) -> None:
- if self.load_only is None:
- raise ConfigurationError("Needed a specific file to be modifying.")
- logger.debug("Will be working with %s variant only", self.load_only)
-
- @property
- def _dictionary(self) -> Dict[str, Any]:
- """A dictionary representing the loaded configuration."""
- # NOTE: Dictionaries are not populated if not loaded. So, conditionals
- # are not needed here.
- retval = {}
-
- for variant in OVERRIDE_ORDER:
- retval.update(self._config[variant])
-
- return retval
-
- def _load_config_files(self) -> None:
- """Loads configuration from configuration files"""
- config_files = dict(self.iter_config_files())
- if config_files[kinds.ENV][0:1] == [os.devnull]:
- logger.debug(
- "Skipping loading configuration files due to "
- "environment's PIP_CONFIG_FILE being os.devnull"
- )
- return
-
- for variant, files in config_files.items():
- for fname in files:
- # If there's specific variant set in `load_only`, load only
- # that variant, not the others.
- if self.load_only is not None and variant != self.load_only:
- logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
- continue
-
- parser = self._load_file(variant, fname)
-
- # Keeping track of the parsers used
- self._parsers[variant].append((fname, parser))
-
- def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
- logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
- parser = self._construct_parser(fname)
-
- for section in parser.sections():
- items = parser.items(section)
- self._config[variant].update(self._normalized_keys(section, items))
-
- return parser
-
- def _construct_parser(self, fname: str) -> RawConfigParser:
- parser = configparser.RawConfigParser()
- # If there is no such file, don't bother reading it, but create the
- # parser anyway to hold the data.
- # Doing this is useful when modifying and saving files: we always
- # have a parser to write into, even for files that don't exist yet.
- if os.path.exists(fname):
- locale_encoding = locale.getpreferredencoding(False)
- try:
- parser.read(fname, encoding=locale_encoding)
- except UnicodeDecodeError:
- # See https://github.com/pypa/pip/issues/4963
- raise ConfigurationFileCouldNotBeLoaded(
- reason=f"contains invalid {locale_encoding} characters",
- fname=fname,
- )
- except configparser.Error as error:
- # See https://github.com/pypa/pip/issues/4893
- raise ConfigurationFileCouldNotBeLoaded(error=error)
- return parser
-
- def _load_environment_vars(self) -> None:
- """Loads configuration from environment variables"""
- self._config[kinds.ENV_VAR].update(
- self._normalized_keys(":env:", self.get_environ_vars())
- )
-
- def _normalized_keys(
- self, section: str, items: Iterable[Tuple[str, Any]]
- ) -> Dict[str, Any]:
- """Normalizes items to construct a dictionary with normalized keys.
-
- This routine is where the names become keys and are made the same
- regardless of source - configuration files or environment.
- """
- normalized = {}
- for name, val in items:
- key = section + "." + _normalize_name(name)
- normalized[key] = val
- return normalized
-
- def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
- """Returns a generator with all environmental vars with prefix PIP_"""
- for key, val in os.environ.items():
- if key.startswith("PIP_"):
- name = key[4:].lower()
- if name not in ENV_NAMES_IGNORED:
- yield name, val
-
- # XXX: This is patched in the tests.
- def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
- """Yields variant and configuration files associated with it.
-
- This should be treated like items of a dictionary. The order
- here doesn't affect what gets overridden. That is controlled
- by OVERRIDE_ORDER. However, this does control the order they are
- displayed to the user. It's probably most ergonomic to display
- things in the same order as OVERRIDE_ORDER.
- """
- # SMELL: Move the conditions out of this function
-
- env_config_file = os.environ.get("PIP_CONFIG_FILE", None)
- config_files = get_configuration_files()
-
- yield kinds.GLOBAL, config_files[kinds.GLOBAL]
-
- # per-user config is not loaded when env_config_file exists
- should_load_user_config = not self.isolated and not (
- env_config_file and os.path.exists(env_config_file)
- )
- if should_load_user_config:
- # The legacy config file is overridden by the new config file
- yield kinds.USER, config_files[kinds.USER]
-
- # virtualenv config
- yield kinds.SITE, config_files[kinds.SITE]
-
- if env_config_file is not None:
- yield kinds.ENV, [env_config_file]
- else:
- yield kinds.ENV, []
-
- def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
- """Get values present in a config file"""
- return self._config[variant]
-
- def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
- # Determine which parser to modify
- assert self.load_only
- parsers = self._parsers[self.load_only]
- if not parsers:
- # This should not happen if everything works correctly.
- raise ConfigurationError(
- "Fatal Internal error [id=2]. Please report as a bug."
- )
-
- # Use the highest priority parser.
- return parsers[-1]
-
- # XXX: This is patched in the tests.
- def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
- file_parser_tuple = (fname, parser)
- if file_parser_tuple not in self._modified_parsers:
- self._modified_parsers.append(file_parser_tuple)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self._dictionary!r})"
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py
deleted file mode 100644
index 9a89a83..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__init__.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from pip._internal.distributions.base import AbstractDistribution
-from pip._internal.distributions.sdist import SourceDistribution
-from pip._internal.distributions.wheel import WheelDistribution
-from pip._internal.req.req_install import InstallRequirement
-
-
-def make_distribution_for_install_requirement(
- install_req: InstallRequirement,
-) -> AbstractDistribution:
- """Returns a Distribution for the given InstallRequirement"""
- # Editable requirements will always be source distributions. They use the
- # legacy logic until we create a modern standard for them.
- if install_req.editable:
- return SourceDistribution(install_req)
-
- # If it's a wheel, it's a WheelDistribution
- if install_req.is_wheel:
- return WheelDistribution(install_req)
-
- # Otherwise, a SourceDistribution
- return SourceDistribution(install_req)
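The dispatch above in miniature, with a dummy stand-in for pip's requirement type (FakeReq and distribution_kind are made up for illustration):

class FakeReq:
    def __init__(self, editable: bool, is_wheel: bool) -> None:
        self.editable, self.is_wheel = editable, is_wheel

def distribution_kind(req: FakeReq) -> str:
    if req.editable:
        return "source"  # editables always take the source path
    return "wheel" if req.is_wheel else "source"

print(distribution_kind(FakeReq(editable=True, is_wheel=True)))   # source
print(distribution_kind(FakeReq(editable=False, is_wheel=True)))  # wheel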
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index dbf4bb6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 34dbbec..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc
deleted file mode 100644
index 7ec21d0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc
deleted file mode 100644
index 839b060..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index c383e50..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py b/venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py
deleted file mode 100644
index 6fb0d7b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/base.py
+++ /dev/null
@@ -1,51 +0,0 @@
-import abc
-from typing import Optional
-
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata.base import BaseDistribution
-from pip._internal.req import InstallRequirement
-
-
-class AbstractDistribution(metaclass=abc.ABCMeta):
- """A base class for handling installable artifacts.
-
- The requirements for anything installable are as follows:
-
- - we must be able to determine the requirement name
- (or we can't correctly handle the non-upgrade case).
-
- - for packages with setup requirements, we must also be able
- to determine their requirements without installing additional
- packages (for the same reason as run-time dependencies)
-
- - we must be able to create a Distribution object exposing the
- above metadata.
-
- - if we need to do work in the build tracker, we must be able to generate a unique
- string to identify the requirement in the build tracker.
- """
-
- def __init__(self, req: InstallRequirement) -> None:
- super().__init__()
- self.req = req
-
- @property
- @abc.abstractmethod
- def build_tracker_id(self) -> Optional[str]:
- """A string that uniquely identifies this requirement to the build tracker.
-
- If None, then this dist has no work to do in the build tracker, and
- ``.prepare_distribution_metadata()`` will not be called."""
- raise NotImplementedError()
-
- @abc.abstractmethod
- def get_metadata_distribution(self) -> BaseDistribution:
- raise NotImplementedError()
-
- @abc.abstractmethod
- def prepare_distribution_metadata(
- self,
- finder: PackageFinder,
- build_isolation: bool,
- check_build_deps: bool,
- ) -> None:
- raise NotImplementedError()
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py b/venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py
deleted file mode 100644
index ab8d53b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/installed.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Optional
-
-from pip._internal.distributions.base import AbstractDistribution
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution
-
-
-class InstalledDistribution(AbstractDistribution):
- """Represents an installed package.
-
- This does not need any preparation as the required information has already
- been computed.
- """
-
- @property
- def build_tracker_id(self) -> Optional[str]:
- return None
-
- def get_metadata_distribution(self) -> BaseDistribution:
- assert self.req.satisfied_by is not None, "not actually installed"
- return self.req.satisfied_by
-
- def prepare_distribution_metadata(
- self,
- finder: PackageFinder,
- build_isolation: bool,
- check_build_deps: bool,
- ) -> None:
- pass
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py b/venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py
deleted file mode 100644
index 15ff42b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/sdist.py
+++ /dev/null
@@ -1,156 +0,0 @@
-import logging
-from typing import Iterable, Optional, Set, Tuple
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.distributions.base import AbstractDistribution
-from pip._internal.exceptions import InstallationError
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution
-from pip._internal.utils.subprocess import runner_with_spinner_message
-
-logger = logging.getLogger(__name__)
-
-
-class SourceDistribution(AbstractDistribution):
- """Represents a source distribution.
-
- The preparation step for these needs metadata for the packages to be
- generated, either using PEP 517 or using the legacy `setup.py egg_info`.
- """
-
- @property
- def build_tracker_id(self) -> Optional[str]:
- """Identify this requirement uniquely by its link."""
- assert self.req.link
- return self.req.link.url_without_fragment
-
- def get_metadata_distribution(self) -> BaseDistribution:
- return self.req.get_dist()
-
- def prepare_distribution_metadata(
- self,
- finder: PackageFinder,
- build_isolation: bool,
- check_build_deps: bool,
- ) -> None:
- # Load pyproject.toml, to determine whether PEP 517 is to be used
- self.req.load_pyproject_toml()
-
- # Set up the build isolation, if this requirement should be isolated
- should_isolate = self.req.use_pep517 and build_isolation
- if should_isolate:
- # Set up an isolated environment and install the build backend's static
- # requirements in it.
- self._prepare_build_backend(finder)
- # Check that if the requirement is editable, it either supports PEP 660 or
- # has a setup.py or a setup.cfg. This cannot be done earlier because we need
- # to set up the build backend to verify it supports build_editable, nor can
- # it be done later, because we want to avoid installing build requirements
- # needlessly. Doing it here also works around setuptools generating
- # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
- # without setup.py nor setup.cfg.
- self.req.isolated_editable_sanity_check()
- # Install the dynamic build requirements.
- self._install_build_reqs(finder)
- # Check if the current environment provides build dependencies
- should_check_deps = self.req.use_pep517 and check_build_deps
- if should_check_deps:
- pyproject_requires = self.req.pyproject_requires
- assert pyproject_requires is not None
- conflicting, missing = self.req.build_env.check_requirements(
- pyproject_requires
- )
- if conflicting:
- self._raise_conflicts("the backend dependencies", conflicting)
- if missing:
- self._raise_missing_reqs(missing)
- self.req.prepare_metadata()
-
- def _prepare_build_backend(self, finder: PackageFinder) -> None:
- # Isolate in a BuildEnvironment and install the build-time
- # requirements.
- pyproject_requires = self.req.pyproject_requires
- assert pyproject_requires is not None
-
- self.req.build_env = BuildEnvironment()
- self.req.build_env.install_requirements(
- finder, pyproject_requires, "overlay", kind="build dependencies"
- )
- conflicting, missing = self.req.build_env.check_requirements(
- self.req.requirements_to_check
- )
- if conflicting:
- self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
- if missing:
- logger.warning(
- "Missing build requirements in pyproject.toml for %s.",
- self.req,
- )
- logger.warning(
- "The project does not specify a build backend, and "
- "pip cannot fall back to setuptools without %s.",
- " and ".join(map(repr, sorted(missing))),
- )
-
- def _get_build_requires_wheel(self) -> Iterable[str]:
- with self.req.build_env:
- runner = runner_with_spinner_message("Getting requirements to build wheel")
- backend = self.req.pep517_backend
- assert backend is not None
- with backend.subprocess_runner(runner):
- return backend.get_requires_for_build_wheel()
-
- def _get_build_requires_editable(self) -> Iterable[str]:
- with self.req.build_env:
- runner = runner_with_spinner_message(
- "Getting requirements to build editable"
- )
- backend = self.req.pep517_backend
- assert backend is not None
- with backend.subprocess_runner(runner):
- return backend.get_requires_for_build_editable()
-
- def _install_build_reqs(self, finder: PackageFinder) -> None:
- # Install any extra build dependencies that the backend requests.
- # This must be done in a second pass, as the pyproject.toml
- # dependencies must be installed before we can call the backend.
- if (
- self.req.editable
- and self.req.permit_editable_wheels
- and self.req.supports_pyproject_editable()
- ):
- build_reqs = self._get_build_requires_editable()
- else:
- build_reqs = self._get_build_requires_wheel()
- conflicting, missing = self.req.build_env.check_requirements(build_reqs)
- if conflicting:
- self._raise_conflicts("the backend dependencies", conflicting)
- self.req.build_env.install_requirements(
- finder, missing, "normal", kind="backend dependencies"
- )
-
- def _raise_conflicts(
- self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
- ) -> None:
- format_string = (
- "Some build dependencies for {requirement} "
- "conflict with {conflicting_with}: {description}."
- )
- error_message = format_string.format(
- requirement=self.req,
- conflicting_with=conflicting_with,
- description=", ".join(
- f"{installed} is incompatible with {wanted}"
- for installed, wanted in sorted(conflicting_reqs)
- ),
- )
- raise InstallationError(error_message)
-
- def _raise_missing_reqs(self, missing: Set[str]) -> None:
- format_string = (
- "Some build dependencies for {requirement} are missing: {missing}."
- )
- error_message = format_string.format(
- requirement=self.req, missing=", ".join(map(repr, sorted(missing)))
- )
- raise InstallationError(error_message)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py
deleted file mode 100644
index eb16e25..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/distributions/wheel.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from typing import Optional
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.distributions.base import AbstractDistribution
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import (
- BaseDistribution,
- FilesystemWheel,
- get_wheel_distribution,
-)
-
-
-class WheelDistribution(AbstractDistribution):
- """Represents a wheel distribution.
-
- This does not need any preparation as wheels can be directly unpacked.
- """
-
- @property
- def build_tracker_id(self) -> Optional[str]:
- return None
-
- def get_metadata_distribution(self) -> BaseDistribution:
- """Loads the metadata from the wheel file into memory and returns a
- Distribution that uses it, not relying on the wheel file or
- requirement.
- """
- assert self.req.local_file_path, "Set as part of preparation during download"
- assert self.req.name, "Wheels are never unnamed"
- wheel = FilesystemWheel(self.req.local_file_path)
- return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
-
- def prepare_distribution_metadata(
- self,
- finder: PackageFinder,
- build_isolation: bool,
- check_build_deps: bool,
- ) -> None:
- pass
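What "loading the metadata from the wheel file" amounts to, in miniature: a wheel is a zip archive carrying a *.dist-info/METADATA file. A standalone sketch (the wheel path below is a placeholder):

import zipfile

with zipfile.ZipFile("example-1.0-py3-none-any.whl") as wheel:  # hypothetical file
    metadata_name = next(
        name for name in wheel.namelist() if name.endswith(".dist-info/METADATA")
    )
    print(wheel.read(metadata_name).decode("utf-8"))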
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/exceptions.py b/venv/lib/python3.11/site-packages/pip/_internal/exceptions.py
deleted file mode 100644
index 5007a62..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/exceptions.py
+++ /dev/null
@@ -1,728 +0,0 @@
-"""Exceptions used throughout package.
-
-This module MUST NOT try to import from anything within `pip._internal` to
-operate. This is expected to be importable from any/all files within the
-subpackage and, thus, should not depend on them.
-"""
-
-import configparser
-import contextlib
-import locale
-import logging
-import pathlib
-import re
-import sys
-from itertools import chain, groupby, repeat
-from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Union
-
-from pip._vendor.requests.models import Request, Response
-from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
-from pip._vendor.rich.markup import escape
-from pip._vendor.rich.text import Text
-
-if TYPE_CHECKING:
- from hashlib import _Hash
- from typing import Literal
-
- from pip._internal.metadata import BaseDistribution
- from pip._internal.req.req_install import InstallRequirement
-
-logger = logging.getLogger(__name__)
-
-
-#
-# Scaffolding
-#
-def _is_kebab_case(s: str) -> bool:
- return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
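_is_kebab_case accepts only lowercase ASCII words joined by single hyphens; a quick standalone check:

import re

for ref in ("subprocess-exited-with-error", "Subprocess-Error", "two--hyphens"):
    print(ref, bool(re.match(r"^[a-z]+(-[a-z]+)*$", ref)))
# subprocess-exited-with-error True
# Subprocess-Error False
# two--hyphens False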
-
-
-def _prefix_with_indent(
- s: Union[Text, str],
- console: Console,
- *,
- prefix: str,
- indent: str,
-) -> Text:
- if isinstance(s, Text):
- text = s
- else:
- text = console.render_str(s)
-
- return console.render_str(prefix, overflow="ignore") + console.render_str(
- f"\n{indent}", overflow="ignore"
- ).join(text.split(allow_blank=True))
-
-
-class PipError(Exception):
- """The base pip error."""
-
-
-class DiagnosticPipError(PipError):
- """An error, that presents diagnostic information to the user.
-
- This contains a bunch of logic, to enable pretty presentation of our error
- messages. Each error gets a unique reference. Each error can also include
- additional context, a hint and/or a note -- which are presented with the
- main error message in a consistent style.
-
- This is adapted from the error output styling in `sphinx-theme-builder`.
- """
-
- reference: str
-
- def __init__(
- self,
- *,
- kind: 'Literal["error", "warning"]' = "error",
- reference: Optional[str] = None,
- message: Union[str, Text],
- context: Optional[Union[str, Text]],
- hint_stmt: Optional[Union[str, Text]],
- note_stmt: Optional[Union[str, Text]] = None,
- link: Optional[str] = None,
- ) -> None:
- # Ensure a proper reference is provided.
- if reference is None:
- assert hasattr(self, "reference"), "error reference not provided!"
- reference = self.reference
- assert _is_kebab_case(reference), "error reference must be kebab-case!"
-
- self.kind = kind
- self.reference = reference
-
- self.message = message
- self.context = context
-
- self.note_stmt = note_stmt
- self.hint_stmt = hint_stmt
-
- self.link = link
-
- super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
-
- def __repr__(self) -> str:
- return (
- f"<{self.__class__.__name__}("
- f"reference={self.reference!r}, "
- f"message={self.message!r}, "
- f"context={self.context!r}, "
- f"note_stmt={self.note_stmt!r}, "
- f"hint_stmt={self.hint_stmt!r}"
- ")>"
- )
-
- def __rich_console__(
- self,
- console: Console,
- options: ConsoleOptions,
- ) -> RenderResult:
- colour = "red" if self.kind == "error" else "yellow"
-
- yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
- yield ""
-
- if not options.ascii_only:
- # Present the main message, with relevant context indented.
- if self.context is not None:
- yield _prefix_with_indent(
- self.message,
- console,
- prefix=f"[{colour}]×[/] ",
- indent=f"[{colour}]│[/] ",
- )
- yield _prefix_with_indent(
- self.context,
- console,
- prefix=f"[{colour}]╰─>[/] ",
- indent=f"[{colour}] [/] ",
- )
- else:
- yield _prefix_with_indent(
- self.message,
- console,
- prefix="[red]×[/] ",
- indent=" ",
- )
- else:
- yield self.message
- if self.context is not None:
- yield ""
- yield self.context
-
- if self.note_stmt is not None or self.hint_stmt is not None:
- yield ""
-
- if self.note_stmt is not None:
- yield _prefix_with_indent(
- self.note_stmt,
- console,
- prefix="[magenta bold]note[/]: ",
- indent=" ",
- )
- if self.hint_stmt is not None:
- yield _prefix_with_indent(
- self.hint_stmt,
- console,
- prefix="[cyan bold]hint[/]: ",
- indent=" ",
- )
-
- if self.link is not None:
- yield ""
- yield f"Link: {self.link}"
-
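A sketch of how the concrete errors below use this scaffolding (cf. MissingPyProjectBuildRequires further down); the subclass name here is made up, and the import assumes pip is installed:

from pip._internal.exceptions import DiagnosticPipError

class ExampleDiagnosticError(DiagnosticPipError):
    reference = "example-diagnostic-error"  # must be kebab-case

    def __init__(self) -> None:
        super().__init__(
            message="Something went wrong.",
            context="The offending detail, indented under the message.",
            hint_stmt="Try the obvious fix first.",
            note_stmt="Illustrative only; not a real pip error.",
        )

print(str(ExampleDiagnosticError()))
# <ExampleDiagnosticError: example-diagnostic-error>  (the styled output comes
# from __rich_console__ when the error is rendered to a console)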
-
-#
-# Actual Errors
-#
-class ConfigurationError(PipError):
- """General exception in configuration"""
-
-
-class InstallationError(PipError):
- """General exception during installation"""
-
-
-class UninstallationError(PipError):
- """General exception during uninstallation"""
-
-
-class MissingPyProjectBuildRequires(DiagnosticPipError):
- """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
-
- reference = "missing-pyproject-build-system-requires"
-
- def __init__(self, *, package: str) -> None:
- super().__init__(
- message=f"Can not process {escape(package)}",
- context=Text(
- "This package has an invalid pyproject.toml file.\n"
- "The [build-system] table is missing the mandatory `requires` key."
- ),
- note_stmt="This is an issue with the package mentioned above, not pip.",
- hint_stmt=Text("See PEP 518 for the detailed specification."),
- )
-
-
-class InvalidPyProjectBuildRequires(DiagnosticPipError):
- """Raised when pyproject.toml an invalid `build-system.requires`."""
-
- reference = "invalid-pyproject-build-system-requires"
-
- def __init__(self, *, package: str, reason: str) -> None:
- super().__init__(
- message=f"Can not process {escape(package)}",
- context=Text(
- "This package has an invalid `build-system.requires` key in "
- f"pyproject.toml.\n{reason}"
- ),
- note_stmt="This is an issue with the package mentioned above, not pip.",
- hint_stmt=Text("See PEP 518 for the detailed specification."),
- )
-
-
-class NoneMetadataError(PipError):
- """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
-
- This signifies an inconsistency: the Distribution claims to have
- the metadata file (if not, raise ``FileNotFoundError`` instead), but
- is not actually able to produce its content. This may be due to permission
- errors.
- """
-
- def __init__(
- self,
- dist: "BaseDistribution",
- metadata_name: str,
- ) -> None:
- """
- :param dist: A Distribution object.
- :param metadata_name: The name of the metadata being accessed
- (can be "METADATA" or "PKG-INFO").
- """
- self.dist = dist
- self.metadata_name = metadata_name
-
- def __str__(self) -> str:
- # Use `dist` in the error message because its stringification
- # includes more information, like the version and location.
- return f"None {self.metadata_name} metadata found for distribution: {self.dist}"
-
-
-class UserInstallationInvalid(InstallationError):
- """A --user install is requested on an environment without user site."""
-
- def __str__(self) -> str:
- return "User base directory is not specified"
-
-
-class InvalidSchemeCombination(InstallationError):
- def __str__(self) -> str:
- before = ", ".join(str(a) for a in self.args[:-1])
- return f"Cannot set {before} and {self.args[-1]} together"
-
-
-class DistributionNotFound(InstallationError):
- """Raised when a distribution cannot be found to satisfy a requirement"""
-
-
-class RequirementsFileParseError(InstallationError):
- """Raised when a general error occurs parsing a requirements file line."""
-
-
-class BestVersionAlreadyInstalled(PipError):
- """Raised when the most up-to-date version of a package is already
- installed."""
-
-
-class BadCommand(PipError):
- """Raised when virtualenv or a command is not found"""
-
-
-class CommandError(PipError):
- """Raised when there is an error in command-line arguments"""
-
-
-class PreviousBuildDirError(PipError):
- """Raised when there's a previous conflicting build directory"""
-
-
-class NetworkConnectionError(PipError):
- """HTTP connection error"""
-
- def __init__(
- self,
- error_msg: str,
- response: Optional[Response] = None,
- request: Optional[Request] = None,
- ) -> None:
- """
- Initialize NetworkConnectionError with `request` and `response`
- objects.
- """
- self.response = response
- self.request = request
- self.error_msg = error_msg
- if (
- self.response is not None
- and not self.request
- and hasattr(response, "request")
- ):
- self.request = self.response.request
- super().__init__(error_msg, response, request)
-
- def __str__(self) -> str:
- return str(self.error_msg)
-
-
-class InvalidWheelFilename(InstallationError):
- """Invalid wheel filename."""
-
-
-class UnsupportedWheel(InstallationError):
- """Unsupported wheel."""
-
-
-class InvalidWheel(InstallationError):
- """Invalid (e.g. corrupt) wheel."""
-
- def __init__(self, location: str, name: str):
- self.location = location
- self.name = name
-
- def __str__(self) -> str:
- return f"Wheel '{self.name}' located at {self.location} is invalid."
-
-
-class MetadataInconsistent(InstallationError):
- """Built metadata contains inconsistent information.
-
- This is raised when the metadata contains values (e.g. name and version)
- that do not match the information previously obtained from the sdist filename,
- user-supplied ``#egg=`` value, or an install requirement name.
- """
-
- def __init__(
- self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
- ) -> None:
- self.ireq = ireq
- self.field = field
- self.f_val = f_val
- self.m_val = m_val
-
- def __str__(self) -> str:
- return (
- f"Requested {self.ireq} has inconsistent {self.field}: "
- f"expected {self.f_val!r}, but metadata has {self.m_val!r}"
- )
-
-
-class InstallationSubprocessError(DiagnosticPipError, InstallationError):
- """A subprocess call failed."""
-
- reference = "subprocess-exited-with-error"
-
- def __init__(
- self,
- *,
- command_description: str,
- exit_code: int,
- output_lines: Optional[List[str]],
- ) -> None:
- if output_lines is None:
- output_prompt = Text("See above for output.")
- else:
- output_prompt = (
- Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
- + Text("".join(output_lines))
- + Text.from_markup(R"[red]\[end of output][/]")
- )
-
- super().__init__(
- message=(
- f"[green]{escape(command_description)}[/] did not run successfully.\n"
- f"exit code: {exit_code}"
- ),
- context=output_prompt,
- hint_stmt=None,
- note_stmt=(
- "This error originates from a subprocess, and is likely not a "
- "problem with pip."
- ),
- )
-
- self.command_description = command_description
- self.exit_code = exit_code
-
- def __str__(self) -> str:
- return f"{self.command_description} exited with {self.exit_code}"
-
-
-class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
- reference = "metadata-generation-failed"
-
- def __init__(
- self,
- *,
- package_details: str,
- ) -> None:
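- # Deliberately invokes DiagnosticPipError.__init__, skipping over
- # InstallationSubprocessError.__init__ and its required keyword
- # arguments.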
- super(InstallationSubprocessError, self).__init__(
- message="Encountered error while generating package metadata.",
- context=escape(package_details),
- hint_stmt="See above for details.",
- note_stmt="This is an issue with the package mentioned above, not pip.",
- )
-
- def __str__(self) -> str:
- return "metadata generation failed"
-
-
-class HashErrors(InstallationError):
- """Multiple HashError instances rolled into one for reporting"""
-
- def __init__(self) -> None:
- self.errors: List["HashError"] = []
-
- def append(self, error: "HashError") -> None:
- self.errors.append(error)
-
- def __str__(self) -> str:
- lines = []
- self.errors.sort(key=lambda e: e.order)
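- # Sorting by `order` makes same-class errors contiguous (each
- # subclass defines a distinct `order`), which groupby() relies on.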
- for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
- lines.append(cls.head)
- lines.extend(e.body() for e in errors_of_cls)
- if lines:
- return "\n".join(lines)
- return ""
-
- def __bool__(self) -> bool:
- return bool(self.errors)
-
-
-class HashError(InstallationError):
- """
- A failure to verify a package against known-good hashes
-
- :cvar order: An int sorting hash exception classes by difficulty of
- recovery (lower being harder), so the user doesn't bother fretting
- about unpinned packages when they have deeper issues, like VCS
- dependencies, to deal with. Also keeps error reports in a
- deterministic order.
- :cvar head: A section heading for display above potentially many
- exceptions of this kind
- :ivar req: The InstallRequirement that triggered this error. This is
- pasted on after the exception is instantiated, because it's not
- typically available earlier.
-
- """
-
- req: Optional["InstallRequirement"] = None
- head = ""
- order: int = -1
-
- def body(self) -> str:
- """Return a summary of me for display under the heading.
-
- This default implementation simply returns a description of the
- triggering requirement.
-
- It relies on the ``req`` instance attribute (see the class
- docstring), whose link has already been populated by the resolver's
- _populate_link().
-
- """
- return f" {self._requirement_name()}"
-
- def __str__(self) -> str:
- return f"{self.head}\n{self.body()}"
-
- def _requirement_name(self) -> str:
- """Return a description of the requirement that triggered me.
-
- This default implementation returns a long description of the req,
- including line numbers.
-
- """
- return str(self.req) if self.req else "unknown package"
-
-
-class VcsHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 0
- head = (
- "Can't verify hashes for these requirements because we don't "
- "have a way to hash version control repositories:"
- )
-
-
-class DirectoryUrlHashUnsupported(HashError):
- """A hash was provided for a version-control-system-based requirement, but
- we don't have a method for hashing those."""
-
- order = 1
- head = (
- "Can't verify hashes for these file:// requirements because they "
- "point to directories:"
- )
-
-
-class HashMissing(HashError):
- """A hash was needed for a requirement but is absent."""
-
- order = 2
- head = (
- "Hashes are required in --require-hashes mode, but they are "
- "missing from some requirements. Here is a list of those "
- "requirements along with the hashes their downloaded archives "
- "actually had. Add lines like these to your requirements files to "
- "prevent tampering. (If you did not enable --require-hashes "
- "manually, note that it turns on automatically when any package "
- "has a hash.)"
- )
-
- def __init__(self, gotten_hash: str) -> None:
- """
- :param gotten_hash: The hash of the (possibly malicious) archive we
- just downloaded
- """
- self.gotten_hash = gotten_hash
-
- def body(self) -> str:
- # Dodge circular import.
- from pip._internal.utils.hashes import FAVORITE_HASH
-
- package = None
- if self.req:
- # In the case of URL-based requirements, display the original URL
- # seen in the requirements file rather than the package name,
- # so the output can be directly copied into the requirements file.
- package = (
- self.req.original_link
- if self.req.is_direct
- # In case someone feeds something downright stupid
- # to InstallRequirement's constructor.
- else getattr(self.req, "req", None)
- )
- return " {} --hash={}:{}".format(
- package or "unknown package", FAVORITE_HASH, self.gotten_hash
- )
-
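- # Editor's note: the line body() emits is intended to be copied
- # straight into a requirements file, e.g. (illustrative name and
- # digest):
- #   pkg==1.0 --hash=sha256:0123abcd...
-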
-
-class HashUnpinned(HashError):
- """A requirement had a hash specified but was not pinned to a specific
- version."""
-
- order = 3
- head = (
- "In --require-hashes mode, all requirements must have their "
- "versions pinned with ==. These do not:"
- )
-
-
-class HashMismatch(HashError):
- """
- Distribution file hash values don't match.
-
- :ivar package_name: The name of the package that triggered the hash
- mismatch. Feel free to write to this after the exception is raised to
- improve its error message.
-
- """
-
- order = 4
- head = (
- "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
- "FILE. If you have updated the package versions, please update "
- "the hashes. Otherwise, examine the package contents carefully; "
- "someone may have tampered with them."
- )
-
- def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
- """
- :param allowed: A dict of algorithm names pointing to lists of allowed
- hex digests
- :param gots: A dict of algorithm names pointing to hashes we
- actually got from the files under suspicion
- """
- self.allowed = allowed
- self.gots = gots
-
- def body(self) -> str:
- return f" {self._requirement_name()}:\n{self._hash_comparison()}"
-
- def _hash_comparison(self) -> str:
- """
- Return a comparison of actual and expected hash values.
-
- Example::
-
- Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
- or 123451234512345123451234512345123451234512345
- Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
-
- """
-
- def hash_then_or(hash_name: str) -> "chain[str]":
- # For now, all the decent hashes have 6-char names, so we can get
- # away with hard-coding space literals.
- return chain([hash_name], repeat(" or"))
-
- lines: List[str] = []
- for hash_name, expecteds in self.allowed.items():
- prefix = hash_then_or(hash_name)
- lines.extend((f" Expected {next(prefix)} {e}") for e in expecteds)
- lines.append(
- f" Got {self.gots[hash_name].hexdigest()}\n"
- )
- return "\n".join(lines)
-
-
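-def _demo_hash_error_report() -> None:
- # Editor's sketch, not pip code: shows how callers are meant to
- # drive the collector. `req` is left as None here (it prints as
- # "unknown package"); pip normally attaches the triggering
- # InstallRequirement after instantiation.
- errors = HashErrors()
- for err in (VcsHashUnsupported(), HashUnpinned()):
- errors.append(err)
- if errors: # true once anything was appended
- print(errors) # one heading per class, sorted by `order`
-
-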
-class UnsupportedPythonVersion(InstallationError):
- """Unsupported python version according to Requires-Python package
- metadata."""
-
-
-class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
- """When there are errors while loading a configuration file"""
-
- def __init__(
- self,
- reason: str = "could not be loaded",
- fname: Optional[str] = None,
- error: Optional[configparser.Error] = None,
- ) -> None:
- super().__init__(error)
- self.reason = reason
- self.fname = fname
- self.error = error
-
- def __str__(self) -> str:
- if self.fname is not None:
- message_part = f" in {self.fname}."
- else:
- assert self.error is not None
- message_part = f".\n{self.error}\n"
- return f"Configuration file {self.reason}{message_part}"
-
-
-_DEFAULT_EXTERNALLY_MANAGED_ERROR = f"""\
-The Python environment under {sys.prefix} is managed externally, and may not be
-manipulated by the user. Please use specific tooling from the distributor of
-the Python installation to interact with this environment instead.
-"""
-
-
-class ExternallyManagedEnvironment(DiagnosticPipError):
- """The current environment is externally managed.
-
- This is raised when the current environment is externally managed, as
- defined by `PEP 668`_. The ``EXTERNALLY-MANAGED`` configuration is checked
- and displayed when the error is bubbled up to the user.
-
- :param error: The error message read from ``EXTERNALLY-MANAGED``.
- """
-
- reference = "externally-managed-environment"
-
- def __init__(self, error: Optional[str]) -> None:
- if error is None:
- context = Text(_DEFAULT_EXTERNALLY_MANAGED_ERROR)
- else:
- context = Text(error)
- super().__init__(
- message="This environment is externally managed",
- context=context,
- note_stmt=(
- "If you believe this is a mistake, please contact your "
- "Python installation or OS distribution provider. "
- "You can override this, at the risk of breaking your Python "
- "installation or OS, by passing --break-system-packages."
- ),
- hint_stmt=Text("See PEP 668 for the detailed specification."),
- )
-
- @staticmethod
- def _iter_externally_managed_error_keys() -> Iterator[str]:
- # LC_MESSAGES is in POSIX, but not the C standard. The most common
- # platform that does not implement this category is Windows, where
- # using other categories for console message localization is equally
- # unreliable, so we fall back to the locale-less vendor message. This
- # can always be re-evaluated when a vendor proposes a new alternative.
- try:
- category = locale.LC_MESSAGES
- except AttributeError:
- lang: Optional[str] = None
- else:
- lang, _ = locale.getlocale(category)
- if lang is not None:
- yield f"Error-{lang}"
- for sep in ("-", "_"):
- before, found, _ = lang.partition(sep)
- if not found:
- continue
- yield f"Error-{before}"
- yield "Error"
-
- @classmethod
- def from_config(
- cls,
- config: Union[pathlib.Path, str],
- ) -> "ExternallyManagedEnvironment":
- parser = configparser.ConfigParser(interpolation=None)
- try:
- parser.read(config, encoding="utf-8")
- section = parser["externally-managed"]
- for key in cls._iter_externally_managed_error_keys():
- with contextlib.suppress(KeyError):
- return cls(section[key])
- except KeyError:
- pass
- except (OSError, UnicodeDecodeError, configparser.ParsingError):
- from pip._internal.utils._log import VERBOSE
-
- exc_info = logger.isEnabledFor(VERBOSE)
- logger.warning("Failed to read %s", config, exc_info=exc_info)
- return cls(None)
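-
-
-def _demo_externally_managed_from_config() -> None:
- # Editor's sketch, not pip code: exercises from_config() against a
- # minimal EXTERNALLY-MANAGED file. The message and temp file are
- # made up for illustration.
- import tempfile
-
- body = "[externally-managed]\nError = Use your OS package manager.\n"
- with tempfile.NamedTemporaryFile("w", suffix=".cfg", delete=False) as f:
- f.write(body)
- exc = ExternallyManagedEnvironment.from_config(f.name)
- # The vendor-supplied message becomes the rich "context" shown to
- # the user; locale-specific "Error-<lang>" keys win when present.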
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py
deleted file mode 100644
index 7a17b7b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""Index interaction code
-"""
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 446ac36..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc
deleted file mode 100644
index a1dc4c1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/collector.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc
deleted file mode 100644
index 8379671..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc
deleted file mode 100644
index e7e9b99..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/__pycache__/sources.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/collector.py b/venv/lib/python3.11/site-packages/pip/_internal/index/collector.py
deleted file mode 100644
index 08c8bdd..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/collector.py
+++ /dev/null
@@ -1,507 +0,0 @@
-"""
-The main purpose of this module is to expose LinkCollector.collect_sources().
-"""
-
-import collections
-import email.message
-import functools
-import itertools
-import json
-import logging
-import os
-import urllib.parse
-import urllib.request
-from html.parser import HTMLParser
-from optparse import Values
-from typing import (
- TYPE_CHECKING,
- Callable,
- Dict,
- Iterable,
- List,
- MutableMapping,
- NamedTuple,
- Optional,
- Sequence,
- Tuple,
- Union,
-)
-
-from pip._vendor import requests
-from pip._vendor.requests import Response
-from pip._vendor.requests.exceptions import RetryError, SSLError
-
-from pip._internal.exceptions import NetworkConnectionError
-from pip._internal.models.link import Link
-from pip._internal.models.search_scope import SearchScope
-from pip._internal.network.session import PipSession
-from pip._internal.network.utils import raise_for_status
-from pip._internal.utils.filetypes import is_archive_file
-from pip._internal.utils.misc import redact_auth_from_url
-from pip._internal.vcs import vcs
-
-from .sources import CandidatesFromPage, LinkSource, build_source
-
-if TYPE_CHECKING:
- from typing import Protocol
-else:
- Protocol = object
-
-logger = logging.getLogger(__name__)
-
-ResponseHeaders = MutableMapping[str, str]
-
-
-def _match_vcs_scheme(url: str) -> Optional[str]:
- """Look for VCS schemes in the URL.
-
- Returns the matched VCS scheme, or None if there's no match.
- """
- for scheme in vcs.schemes:
- if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
- return scheme
- return None
-
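-
-def _demo_match_vcs_scheme() -> None:
- # Editor's sketch: the character right after the scheme name must be
- # "+" or ":", so "git+https://..." matches while a plain https index
- # URL does not. The URLs below are illustrative.
- assert _match_vcs_scheme("git+https://example.test/repo.git") == "git"
- assert _match_vcs_scheme("https://example.test/simple/") is None
-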
-
-class _NotAPIContent(Exception):
- def __init__(self, content_type: str, request_desc: str) -> None:
- super().__init__(content_type, request_desc)
- self.content_type = content_type
- self.request_desc = request_desc
-
-
-def _ensure_api_header(response: Response) -> None:
- """
- Check the Content-Type header to ensure the response contains a Simple
- API response.
-
- Raises `_NotAPIContent` if the Content-Type is not a supported Simple
- API content type.
- """
- content_type = response.headers.get("Content-Type", "Unknown")
-
- content_type_l = content_type.lower()
- if content_type_l.startswith(
- (
- "text/html",
- "application/vnd.pypi.simple.v1+html",
- "application/vnd.pypi.simple.v1+json",
- )
- ):
- return
-
- raise _NotAPIContent(content_type, response.request.method)
-
-
-class _NotHTTP(Exception):
- pass
-
-
-def _ensure_api_response(url: str, session: PipSession) -> None:
- """
- Send a HEAD request to the URL, and ensure the response contains a
- Simple API response.
-
- Raises `_NotHTTP` if the URL is not available for a HEAD request, or
- `_NotAPIContent` if the content type is not a valid content type.
- """
- scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
- if scheme not in {"http", "https"}:
- raise _NotHTTP()
-
- resp = session.head(url, allow_redirects=True)
- raise_for_status(resp)
-
- _ensure_api_header(resp)
-
-
-def _get_simple_response(url: str, session: PipSession) -> Response:
- """Access an Simple API response with GET, and return the response.
-
- This consists of three parts:
-
- 1. If the URL looks suspiciously like an archive, send a HEAD first to
- check the Content-Type is HTML or Simple API, to avoid downloading a
- large file. Raise `_NotHTTP` if the URL is not available for a HEAD request, or
- `_NotAPIContent` if it is not HTML or a Simple API.
- 2. Actually perform the request. Raise HTTP exceptions on network failures.
- 3. Check the Content-Type header to make sure we got a Simple API response,
- and raise `_NotAPIContent` otherwise.
- """
- if is_archive_file(Link(url).filename):
- _ensure_api_response(url, session=session)
-
- logger.debug("Getting page %s", redact_auth_from_url(url))
-
- resp = session.get(
- url,
- headers={
- "Accept": ", ".join(
- [
- "application/vnd.pypi.simple.v1+json",
- "application/vnd.pypi.simple.v1+html; q=0.1",
- "text/html; q=0.01",
- ]
- ),
- # We don't want to blindly return cached data for
- # /simple/, because authors generally expect that
- # twine upload && pip install will function, but if
- # they've done a pip install in the last ~10 minutes
- # it won't. By setting this to zero we will not
- # blindly use any cached data. The benefit of using
- # max-age=0 instead of no-cache is that we still
- # support conditional requests, so we still minimize
- # traffic sent in cases where the page hasn't changed
- # at all; we just always incur the round trip for the
- # conditional GET now instead of only once per 10
- # minutes.
- # For more information, please see pypa/pip#5670.
- "Cache-Control": "max-age=0",
- },
- )
- raise_for_status(resp)
-
- # The check for archives above only works if the URL ends with
- # something that looks like an archive. However, that is not a
- # requirement of a URL. Unless we issue a HEAD request on every
- # URL, we cannot know ahead of time for sure if something is a
- # Simple API response or not. However, we can check after we've
- # downloaded it.
- _ensure_api_header(resp)
-
- logger.debug(
- "Fetched page %s as %s",
- redact_auth_from_url(url),
- resp.headers.get("Content-Type", "Unknown"),
- )
-
- return resp
-
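-
-# Editor's note on the Accept header above: PEP 691 JSON carries an
-# implicit q=1.0 and is preferred over v1+html (q=0.1) and legacy
-# text/html (q=0.01); an index that honors content negotiation will
-# therefore serve JSON whenever it can.
-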
-
-def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
- """Determine if we have any encoding information in our headers."""
- if headers and "Content-Type" in headers:
- m = email.message.Message()
- m["content-type"] = headers["Content-Type"]
- charset = m.get_param("charset")
- if charset:
- return str(charset)
- return None
-
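-
-def _demo_encoding_from_headers() -> None:
- # Editor's sketch, not pip code: the email.message machinery parses
- # the charset parameter, handling quoting and casing for us.
- headers = {"Content-Type": 'text/html; charset="ISO-8859-1"'}
- assert _get_encoding_from_headers(headers) == "ISO-8859-1"
- assert _get_encoding_from_headers({}) is None
-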
-
-class CacheablePageContent:
- def __init__(self, page: "IndexContent") -> None:
- assert page.cache_link_parsing
- self.page = page
-
- def __eq__(self, other: object) -> bool:
- return isinstance(other, type(self)) and self.page.url == other.page.url
-
- def __hash__(self) -> int:
- return hash(self.page.url)
-
-
-class ParseLinks(Protocol):
- def __call__(self, page: "IndexContent") -> Iterable[Link]:
- ...
-
-
-def with_cached_index_content(fn: ParseLinks) -> ParseLinks:
- """
- Given a function that parses an Iterable[Link] from an IndexContent, cache the
- function's result (keyed by CacheablePageContent), unless the IndexContent
- `page` has `page.cache_link_parsing == False`.
- """
-
- @functools.lru_cache(maxsize=None)
- def wrapper(cacheable_page: CacheablePageContent) -> List[Link]:
- return list(fn(cacheable_page.page))
-
- @functools.wraps(fn)
- def wrapper_wrapper(page: "IndexContent") -> List[Link]:
- if page.cache_link_parsing:
- return wrapper(CacheablePageContent(page))
- return list(fn(page))
-
- return wrapper_wrapper
-
-
-@with_cached_index_content
-def parse_links(page: "IndexContent") -> Iterable[Link]:
- """
- Parse a Simple API's Index Content, and yield its anchor elements as Link objects.
- """
-
- content_type_l = page.content_type.lower()
- if content_type_l.startswith("application/vnd.pypi.simple.v1+json"):
- data = json.loads(page.content)
- for file in data.get("files", []):
- link = Link.from_json(file, page.url)
- if link is None:
- continue
- yield link
- return
-
- parser = HTMLLinkParser(page.url)
- encoding = page.encoding or "utf-8"
- parser.feed(page.content.decode(encoding))
-
- url = page.url
- base_url = parser.base_url or url
- for anchor in parser.anchors:
- link = Link.from_element(anchor, page_url=url, base_url=base_url)
- if link is None:
- continue
- yield link
-
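-
-def _demo_parse_links_cache() -> None:
- # Editor's sketch, not pip code: two IndexContent objects for the
- # same URL (with cache_link_parsing=True, the default) share one
- # lru_cache entry via CacheablePageContent, so the HTML is parsed
- # only once. The URL and page body are illustrative.
- html = b'<a href="pkg-1.0.tar.gz">pkg</a>'
- url = "https://example.test/simple/pkg/"
- first = list(parse_links(IndexContent(html, "text/html", None, url)))
- second = list(parse_links(IndexContent(html, "text/html", None, url)))
- assert first == second
-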
-
-class IndexContent:
- """Represents one response (or page), along with its URL"""
-
- def __init__(
- self,
- content: bytes,
- content_type: str,
- encoding: Optional[str],
- url: str,
- cache_link_parsing: bool = True,
- ) -> None:
- """
- :param encoding: the encoding to decode the given content.
- :param url: the URL from which the HTML was downloaded.
- :param cache_link_parsing: whether links parsed from this page's url
- should be cached. PyPI index urls should
- have this set to False, for example.
- """
- self.content = content
- self.content_type = content_type
- self.encoding = encoding
- self.url = url
- self.cache_link_parsing = cache_link_parsing
-
- def __str__(self) -> str:
- return redact_auth_from_url(self.url)
-
-
-class HTMLLinkParser(HTMLParser):
- """
- HTMLParser that keeps the first base HREF and a list of all anchor
- elements' attributes.
- """
-
- def __init__(self, url: str) -> None:
- super().__init__(convert_charrefs=True)
-
- self.url: str = url
- self.base_url: Optional[str] = None
- self.anchors: List[Dict[str, Optional[str]]] = []
-
- def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
- if tag == "base" and self.base_url is None:
- href = self.get_href(attrs)
- if href is not None:
- self.base_url = href
- elif tag == "a":
- self.anchors.append(dict(attrs))
-
- def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
- for name, value in attrs:
- if name == "href":
- return value
- return None
-
-
-def _handle_get_simple_fail(
- link: Link,
- reason: Union[str, Exception],
- meth: Optional[Callable[..., None]] = None,
-) -> None:
- if meth is None:
- meth = logger.debug
- meth("Could not fetch URL %s: %s - skipping", link, reason)
-
-
-def _make_index_content(
- response: Response, cache_link_parsing: bool = True
-) -> IndexContent:
- encoding = _get_encoding_from_headers(response.headers)
- return IndexContent(
- response.content,
- response.headers["Content-Type"],
- encoding=encoding,
- url=response.url,
- cache_link_parsing=cache_link_parsing,
- )
-
-
-def _get_index_content(link: Link, *, session: PipSession) -> Optional["IndexContent"]:
- url = link.url.split("#", 1)[0]
-
- # Check for VCS schemes that do not support lookup as web pages.
- vcs_scheme = _match_vcs_scheme(url)
- if vcs_scheme:
- logger.warning(
- "Cannot look at %s URL %s because it does not support lookup as web pages.",
- vcs_scheme,
- link,
- )
- return None
-
- # Tack index.html onto file:// URLs that point to directories
- scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
- if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
- # add trailing slash if not present so urljoin doesn't trim
- # final segment
- if not url.endswith("/"):
- url += "/"
- # TODO: In the future, it would be nice if pip supported PEP 691
- # style responses in the file:// URLs, however there's no
- # standard file extension for application/vnd.pypi.simple.v1+json
- # so we'll need to come up with something on our own.
- url = urllib.parse.urljoin(url, "index.html")
- logger.debug(" file: URL is directory, getting %s", url)
-
- try:
- resp = _get_simple_response(url, session=session)
- except _NotHTTP:
- logger.warning(
- "Skipping page %s because it looks like an archive, and cannot "
- "be checked by a HTTP HEAD request.",
- link,
- )
- except _NotAPIContent as exc:
- logger.warning(
- "Skipping page %s because the %s request got Content-Type: %s. "
- "The only supported Content-Types are application/vnd.pypi.simple.v1+json, "
- "application/vnd.pypi.simple.v1+html, and text/html",
- link,
- exc.request_desc,
- exc.content_type,
- )
- except NetworkConnectionError as exc:
- _handle_get_simple_fail(link, exc)
- except RetryError as exc:
- _handle_get_simple_fail(link, exc)
- except SSLError as exc:
- reason = "There was a problem confirming the ssl certificate: "
- reason += str(exc)
- _handle_get_simple_fail(link, reason, meth=logger.info)
- except requests.ConnectionError as exc:
- _handle_get_simple_fail(link, f"connection error: {exc}")
- except requests.Timeout:
- _handle_get_simple_fail(link, "timed out")
- else:
- return _make_index_content(resp, cache_link_parsing=link.cache_link_parsing)
- return None
-
-
-class CollectedSources(NamedTuple):
- find_links: Sequence[Optional[LinkSource]]
- index_urls: Sequence[Optional[LinkSource]]
-
-
-class LinkCollector:
-
- """
- Responsible for collecting Link objects from all configured locations,
- making network requests as needed.
-
- The class's main method is its collect_sources() method.
- """
-
- def __init__(
- self,
- session: PipSession,
- search_scope: SearchScope,
- ) -> None:
- self.search_scope = search_scope
- self.session = session
-
- @classmethod
- def create(
- cls,
- session: PipSession,
- options: Values,
- suppress_no_index: bool = False,
- ) -> "LinkCollector":
- """
- :param session: The Session to use to make requests.
- :param suppress_no_index: Whether to ignore the --no-index option
- when constructing the SearchScope object.
- """
- index_urls = [options.index_url] + options.extra_index_urls
- if options.no_index and not suppress_no_index:
- logger.debug(
- "Ignoring indexes: %s",
- ",".join(redact_auth_from_url(url) for url in index_urls),
- )
- index_urls = []
-
- # Make sure find_links is a list before passing to create().
- find_links = options.find_links or []
-
- search_scope = SearchScope.create(
- find_links=find_links,
- index_urls=index_urls,
- no_index=options.no_index,
- )
- link_collector = LinkCollector(
- session=session,
- search_scope=search_scope,
- )
- return link_collector
-
- @property
- def find_links(self) -> List[str]:
- return self.search_scope.find_links
-
- def fetch_response(self, location: Link) -> Optional[IndexContent]:
- """
- Fetch an index response (HTML or JSON) containing package links.
- """
- return _get_index_content(location, session=self.session)
-
- def collect_sources(
- self,
- project_name: str,
- candidates_from_page: CandidatesFromPage,
- ) -> CollectedSources:
- # The OrderedDict calls deduplicate sources by URL.
- index_url_sources = collections.OrderedDict(
- build_source(
- loc,
- candidates_from_page=candidates_from_page,
- page_validator=self.session.is_secure_origin,
- expand_dir=False,
- cache_link_parsing=False,
- project_name=project_name,
- )
- for loc in self.search_scope.get_index_urls_locations(project_name)
- ).values()
- find_links_sources = collections.OrderedDict(
- build_source(
- loc,
- candidates_from_page=candidates_from_page,
- page_validator=self.session.is_secure_origin,
- expand_dir=True,
- cache_link_parsing=True,
- project_name=project_name,
- )
- for loc in self.find_links
- ).values()
-
- if logger.isEnabledFor(logging.DEBUG):
- lines = [
- f"* {s.link}"
- for s in itertools.chain(find_links_sources, index_url_sources)
- if s is not None and s.link is not None
- ]
- lines = [
- f"{len(lines)} location(s) to search "
- f"for versions of {project_name}:"
- ] + lines
- logger.debug("\n".join(lines))
-
- return CollectedSources(
- find_links=list(find_links_sources),
- index_urls=list(index_url_sources),
- )
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py b/venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py
deleted file mode 100644
index ec9ebc3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/package_finder.py
+++ /dev/null
@@ -1,1027 +0,0 @@
-"""Routines related to PyPI, indexes"""
-
-import enum
-import functools
-import itertools
-import logging
-import re
-from typing import TYPE_CHECKING, FrozenSet, Iterable, List, Optional, Set, Tuple, Union
-
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.tags import Tag
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import _BaseVersion
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled,
- DistributionNotFound,
- InvalidWheelFilename,
- UnsupportedWheel,
-)
-from pip._internal.index.collector import LinkCollector, parse_links
-from pip._internal.models.candidate import InstallationCandidate
-from pip._internal.models.format_control import FormatControl
-from pip._internal.models.link import Link
-from pip._internal.models.search_scope import SearchScope
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.models.target_python import TargetPython
-from pip._internal.models.wheel import Wheel
-from pip._internal.req import InstallRequirement
-from pip._internal.utils._log import getLogger
-from pip._internal.utils.filetypes import WHEEL_EXTENSION
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import build_netloc
-from pip._internal.utils.packaging import check_requires_python
-from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
-
-if TYPE_CHECKING:
- from pip._vendor.typing_extensions import TypeGuard
-
-__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
-
-
-logger = getLogger(__name__)
-
-BuildTag = Union[Tuple[()], Tuple[int, str]]
-CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
-
-
-def _check_link_requires_python(
- link: Link,
- version_info: Tuple[int, int, int],
- ignore_requires_python: bool = False,
-) -> bool:
- """
- Return whether the given Python version is compatible with a link's
- "Requires-Python" value.
-
- :param version_info: A 3-tuple of ints representing the Python
- major-minor-micro version to check.
- :param ignore_requires_python: Whether to ignore the "Requires-Python"
- value if the given Python version isn't compatible.
- """
- try:
- is_compatible = check_requires_python(
- link.requires_python,
- version_info=version_info,
- )
- except specifiers.InvalidSpecifier:
- logger.debug(
- "Ignoring invalid Requires-Python (%r) for link: %s",
- link.requires_python,
- link,
- )
- else:
- if not is_compatible:
- version = ".".join(map(str, version_info))
- if not ignore_requires_python:
- logger.verbose(
- "Link requires a different Python (%s not in: %r): %s",
- version,
- link.requires_python,
- link,
- )
- return False
-
- logger.debug(
- "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
- version,
- link.requires_python,
- link,
- )
-
- return True
-
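-
-# Editor's note: e.g. a link carrying Requires-Python ">=3.8" checked
-# with version_info=(3, 7, 10) returns False; with
-# ignore_requires_python=True the mismatch is only logged and the link
-# is still accepted (True).
-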
-
-class LinkType(enum.Enum):
- candidate = enum.auto()
- different_project = enum.auto()
- yanked = enum.auto()
- format_unsupported = enum.auto()
- format_invalid = enum.auto()
- platform_mismatch = enum.auto()
- requires_python_mismatch = enum.auto()
-
-
-class LinkEvaluator:
-
- """
- Responsible for evaluating links for a particular project.
- """
-
- _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")
-
- # Don't include an allow_yanked default value to make sure each call
- # site considers whether yanked releases are allowed. This also causes
- # that decision to be made explicit in the calling code, which helps
- # people when reading the code.
- def __init__(
- self,
- project_name: str,
- canonical_name: str,
- formats: FrozenSet[str],
- target_python: TargetPython,
- allow_yanked: bool,
- ignore_requires_python: Optional[bool] = None,
- ) -> None:
- """
- :param project_name: The user supplied package name.
- :param canonical_name: The canonical package name.
- :param formats: The formats allowed for this package. Should be a set
- with 'binary' or 'source' or both in it.
- :param target_python: The target Python interpreter to use when
- evaluating link compatibility. This is used, for example, to
- check wheel compatibility, as well as when checking the Python
- version, e.g. the Python version embedded in a link filename
- (or egg fragment) and against an HTML link's optional PEP 503
- "data-requires-python" attribute.
- :param allow_yanked: Whether files marked as yanked (in the sense
- of PEP 592) are permitted to be candidates for install.
- :param ignore_requires_python: Whether to ignore incompatible
- PEP 503 "data-requires-python" values in HTML links. Defaults
- to False.
- """
- if ignore_requires_python is None:
- ignore_requires_python = False
-
- self._allow_yanked = allow_yanked
- self._canonical_name = canonical_name
- self._ignore_requires_python = ignore_requires_python
- self._formats = formats
- self._target_python = target_python
-
- self.project_name = project_name
-
- def evaluate_link(self, link: Link) -> Tuple[LinkType, str]:
- """
- Determine whether a link is a candidate for installation.
-
- :return: A tuple (result, detail), where *result* is an enum
- representing whether the evaluation found a candidate, or the reason
- why one is not found. If a candidate is found, *detail* will be the
- candidate's version string; if one is not found, it contains the
- reason the link fails to qualify.
- """
- version = None
- if link.is_yanked and not self._allow_yanked:
- reason = link.yanked_reason or "<none given>"
- return (LinkType.yanked, f"yanked for reason: {reason}")
-
- if link.egg_fragment:
- egg_info = link.egg_fragment
- ext = link.ext
- else:
- egg_info, ext = link.splitext()
- if not ext:
- return (LinkType.format_unsupported, "not a file")
- if ext not in SUPPORTED_EXTENSIONS:
- return (
- LinkType.format_unsupported,
- f"unsupported archive format: {ext}",
- )
- if "binary" not in self._formats and ext == WHEEL_EXTENSION:
- reason = f"No binaries permitted for {self.project_name}"
- return (LinkType.format_unsupported, reason)
- if "macosx10" in link.path and ext == ".zip":
- return (LinkType.format_unsupported, "macosx10 one")
- if ext == WHEEL_EXTENSION:
- try:
- wheel = Wheel(link.filename)
- except InvalidWheelFilename:
- return (
- LinkType.format_invalid,
- "invalid wheel filename",
- )
- if canonicalize_name(wheel.name) != self._canonical_name:
- reason = f"wrong project name (not {self.project_name})"
- return (LinkType.different_project, reason)
-
- supported_tags = self._target_python.get_unsorted_tags()
- if not wheel.supported(supported_tags):
- # Include the wheel's tags in the reason string to
- # simplify troubleshooting compatibility issues.
- file_tags = ", ".join(wheel.get_formatted_file_tags())
- reason = (
- f"none of the wheel's tags ({file_tags}) are compatible "
- f"(run pip debug --verbose to show compatible tags)"
- )
- return (LinkType.platform_mismatch, reason)
-
- version = wheel.version
-
- # This should be up by the self.ok_binary check, but see issue 2700.
- if "source" not in self._formats and ext != WHEEL_EXTENSION:
- reason = f"No sources permitted for {self.project_name}"
- return (LinkType.format_unsupported, reason)
-
- if not version:
- version = _extract_version_from_fragment(
- egg_info,
- self._canonical_name,
- )
- if not version:
- reason = f"Missing project version for {self.project_name}"
- return (LinkType.format_invalid, reason)
-
- match = self._py_version_re.search(version)
- if match:
- version = version[: match.start()]
- py_version = match.group(1)
- if py_version != self._target_python.py_version:
- return (
- LinkType.platform_mismatch,
- "Python version is incorrect",
- )
-
- supports_python = _check_link_requires_python(
- link,
- version_info=self._target_python.py_version_info,
- ignore_requires_python=self._ignore_requires_python,
- )
- if not supports_python:
- reason = f"{version} Requires-Python {link.requires_python}"
- return (LinkType.requires_python_mismatch, reason)
-
- logger.debug("Found link %s, version: %s", link, version)
-
- return (LinkType.candidate, version)
-
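-
- # Editor's example (illustrative link): evaluating
- # "pip-23.0-py3-none-any.whl" for project "pip" yields
- # (LinkType.candidate, "23.0") when binaries are allowed and the
- # wheel's tags are supported; otherwise one of the non-candidate
- # LinkTypes above explains why the file was skipped.
-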
-
-def filter_unallowed_hashes(
- candidates: List[InstallationCandidate],
- hashes: Optional[Hashes],
- project_name: str,
-) -> List[InstallationCandidate]:
- """
- Filter out candidates whose hashes aren't allowed, and return a new
- list of candidates.
-
- If at least one candidate has an allowed hash, then all candidates with
- either an allowed hash or no hash specified are returned. Otherwise,
- the given candidates are returned.
-
- Including the candidates with no hash specified when there is a match
- allows a warning to be logged if there is a more preferred candidate
- with no hash specified. Returning all candidates in the case of no
- matches lets pip report the hash of the candidate that would otherwise
- have been installed (e.g. permitting the user to more easily update
- their requirements file with the desired hash).
- """
- if not hashes:
- logger.debug(
- "Given no hashes to check %s links for project %r: "
- "discarding no candidates",
- len(candidates),
- project_name,
- )
- # Make sure we're not returning the given list object itself.
- return list(candidates)
-
- matches_or_no_digest = []
- # Collect the non-matches for logging purposes.
- non_matches = []
- match_count = 0
- for candidate in candidates:
- link = candidate.link
- if not link.has_hash:
- pass
- elif link.is_hash_allowed(hashes=hashes):
- match_count += 1
- else:
- non_matches.append(candidate)
- continue
-
- matches_or_no_digest.append(candidate)
-
- if match_count:
- filtered = matches_or_no_digest
- else:
- # Make sure we're not returning the given list object itself.
- filtered = list(candidates)
-
- if len(filtered) == len(candidates):
- discard_message = "discarding no candidates"
- else:
- discard_message = "discarding {} non-matches:\n {}".format(
- len(non_matches),
- "\n ".join(str(candidate.link) for candidate in non_matches),
- )
-
- logger.debug(
- "Checked %s links for project %r against %s hashes "
- "(%s matches, %s no digest): %s",
- len(candidates),
- project_name,
- hashes.digest_count,
- match_count,
- len(matches_or_no_digest) - match_count,
- discard_message,
- )
-
- return filtered
-
-
-class CandidatePreferences:
-
- """
- Encapsulates some of the preferences for filtering and sorting
- InstallationCandidate objects.
- """
-
- def __init__(
- self,
- prefer_binary: bool = False,
- allow_all_prereleases: bool = False,
- ) -> None:
- """
- :param allow_all_prereleases: Whether to allow all pre-releases.
- """
- self.allow_all_prereleases = allow_all_prereleases
- self.prefer_binary = prefer_binary
-
-
-class BestCandidateResult:
- """A collection of candidates, returned by `PackageFinder.find_best_candidate`.
-
- This class is only intended to be instantiated by CandidateEvaluator's
- `compute_best_candidate()` method.
- """
-
- def __init__(
- self,
- candidates: List[InstallationCandidate],
- applicable_candidates: List[InstallationCandidate],
- best_candidate: Optional[InstallationCandidate],
- ) -> None:
- """
- :param candidates: A sequence of all available candidates found.
- :param applicable_candidates: The applicable candidates.
- :param best_candidate: The most preferred candidate found, or None
- if no applicable candidates were found.
- """
- assert set(applicable_candidates) <= set(candidates)
-
- if best_candidate is None:
- assert not applicable_candidates
- else:
- assert best_candidate in applicable_candidates
-
- self._applicable_candidates = applicable_candidates
- self._candidates = candidates
-
- self.best_candidate = best_candidate
-
- def iter_all(self) -> Iterable[InstallationCandidate]:
- """Iterate through all candidates."""
- return iter(self._candidates)
-
- def iter_applicable(self) -> Iterable[InstallationCandidate]:
- """Iterate through the applicable candidates."""
- return iter(self._applicable_candidates)
-
-
-class CandidateEvaluator:
-
- """
- Responsible for filtering and sorting candidates for installation based
- on what tags are valid.
- """
-
- @classmethod
- def create(
- cls,
- project_name: str,
- target_python: Optional[TargetPython] = None,
- prefer_binary: bool = False,
- allow_all_prereleases: bool = False,
- specifier: Optional[specifiers.BaseSpecifier] = None,
- hashes: Optional[Hashes] = None,
- ) -> "CandidateEvaluator":
- """Create a CandidateEvaluator object.
-
- :param target_python: The target Python interpreter to use when
- checking compatibility. If None (the default), a TargetPython
- object will be constructed from the running Python.
- :param specifier: An optional object implementing `filter`
- (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
- versions.
- :param hashes: An optional collection of allowed hashes.
- """
- if target_python is None:
- target_python = TargetPython()
- if specifier is None:
- specifier = specifiers.SpecifierSet()
-
- supported_tags = target_python.get_sorted_tags()
-
- return cls(
- project_name=project_name,
- supported_tags=supported_tags,
- specifier=specifier,
- prefer_binary=prefer_binary,
- allow_all_prereleases=allow_all_prereleases,
- hashes=hashes,
- )
-
- def __init__(
- self,
- project_name: str,
- supported_tags: List[Tag],
- specifier: specifiers.BaseSpecifier,
- prefer_binary: bool = False,
- allow_all_prereleases: bool = False,
- hashes: Optional[Hashes] = None,
- ) -> None:
- """
- :param supported_tags: The PEP 425 tags supported by the target
- Python in order of preference (most preferred first).
- """
- self._allow_all_prereleases = allow_all_prereleases
- self._hashes = hashes
- self._prefer_binary = prefer_binary
- self._project_name = project_name
- self._specifier = specifier
- self._supported_tags = supported_tags
- # Since the index of the tag in the _supported_tags list is used
- # as a priority, precompute a map from tag to index/priority to be
- # used in wheel.find_most_preferred_tag.
- self._wheel_tag_preferences = {
- tag: idx for idx, tag in enumerate(supported_tags)
- }
-
- def get_applicable_candidates(
- self,
- candidates: List[InstallationCandidate],
- ) -> List[InstallationCandidate]:
- """
- Return the applicable candidates from a list of candidates.
- """
- # Passing None lets the specifier decide whether prereleases are allowed.
- allow_prereleases = self._allow_all_prereleases or None
- specifier = self._specifier
- versions = {
- str(v)
- for v in specifier.filter(
- # We turn the version object into a str here because otherwise
- # when we're debundled but setuptools isn't, Python will see
- # packaging.version.Version and
- # pkg_resources._vendor.packaging.version.Version as different
- # types. This way we'll use a str as a common data interchange
- # format. If we stop using the pkg_resources provided specifier
- # and start using our own, we can drop the cast to str().
- (str(c.version) for c in candidates),
- prereleases=allow_prereleases,
- )
- }
-
- # Again, converting version to str to deal with debundling.
- applicable_candidates = [c for c in candidates if str(c.version) in versions]
-
- filtered_applicable_candidates = filter_unallowed_hashes(
- candidates=applicable_candidates,
- hashes=self._hashes,
- project_name=self._project_name,
- )
-
- return sorted(filtered_applicable_candidates, key=self._sort_key)
-
- def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
- """
- Function to pass as the `key` argument to a call to sorted() to sort
- InstallationCandidates by preference.
-
- Returns a tuple such that tuples sorting as greater using Python's
- default comparison operator are more preferred.
-
- The preference is as follows:
-
- First and foremost, candidates with allowed (matching) hashes are
- always preferred over candidates without matching hashes. This is
- because e.g. if the only candidate with an allowed hash is yanked,
- we still want to use that candidate.
-
- Second, excepting hash considerations, candidates that have been
- yanked (in the sense of PEP 592) are always less preferred than
- candidates that haven't been yanked. Then:
-
- If not finding wheels, they are sorted by version only.
- If finding wheels, then the sort order is by version, then:
- 1. existing installs
- 2. wheels ordered via Wheel.support_index_min(self._supported_tags)
- 3. source archives
- If prefer_binary was set, then all wheels are sorted above sources.
-
- Note: it was considered to embed this logic into the Link
- comparison operators, but then different sdist links
- with the same version would have to be considered equal.
- """
- valid_tags = self._supported_tags
- support_num = len(valid_tags)
- build_tag: BuildTag = ()
- binary_preference = 0
- link = candidate.link
- if link.is_wheel:
- # can raise InvalidWheelFilename
- wheel = Wheel(link.filename)
- try:
- pri = -(
- wheel.find_most_preferred_tag(
- valid_tags, self._wheel_tag_preferences
- )
- )
- except ValueError:
- raise UnsupportedWheel(
- f"{wheel.filename} is not a supported wheel for this platform. It "
- "can't be sorted."
- )
- if self._prefer_binary:
- binary_preference = 1
- if wheel.build_tag is not None:
- match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
- assert match is not None, "guaranteed by filename validation"
- build_tag_groups = match.groups()
- build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
- else: # sdist
- pri = -support_num
- has_allowed_hash = int(link.is_hash_allowed(self._hashes))
- yank_value = -1 * int(link.is_yanked) # -1 for yanked.
- return (
- has_allowed_hash,
- yank_value,
- binary_preference,
- candidate.version,
- pri,
- build_tag,
- )
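- # Editor's sketch of two resulting sort keys (illustrative values):
- # a hash-matched, non-yanked wheel beats a newer unhashed sdist
- # because tuples compare element-wise from the left:
- #   wheel 1.0: (1, 0, 0, Version("1.0"), -3, ())
- #   sdist 1.1: (0, 0, 0, Version("1.1"), -10, ())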
-
- def sort_best_candidate(
- self,
- candidates: List[InstallationCandidate],
- ) -> Optional[InstallationCandidate]:
- """
- Return the best candidate per the instance's sort order, or None if
- no candidate is acceptable.
- """
- if not candidates:
- return None
- best_candidate = max(candidates, key=self._sort_key)
- return best_candidate
-
- def compute_best_candidate(
- self,
- candidates: List[InstallationCandidate],
- ) -> BestCandidateResult:
- """
- Compute and return a `BestCandidateResult` instance.
- """
- applicable_candidates = self.get_applicable_candidates(candidates)
-
- best_candidate = self.sort_best_candidate(applicable_candidates)
-
- return BestCandidateResult(
- candidates,
- applicable_candidates=applicable_candidates,
- best_candidate=best_candidate,
- )
-
-
-class PackageFinder:
- """This finds packages.
-
- This is meant to match easy_install's technique for looking for
- packages, by reading pages and looking for appropriate links.
- """
-
- def __init__(
- self,
- link_collector: LinkCollector,
- target_python: TargetPython,
- allow_yanked: bool,
- format_control: Optional[FormatControl] = None,
- candidate_prefs: Optional[CandidatePreferences] = None,
- ignore_requires_python: Optional[bool] = None,
- ) -> None:
- """
- This constructor is primarily meant to be used by the create() class
- method and from tests.
-
- :param format_control: A FormatControl object, used to control
- the selection of source packages / binary packages when consulting
- the index and links.
- :param candidate_prefs: Options to use when creating a
- CandidateEvaluator object.
- """
- if candidate_prefs is None:
- candidate_prefs = CandidatePreferences()
-
- format_control = format_control or FormatControl(set(), set())
-
- self._allow_yanked = allow_yanked
- self._candidate_prefs = candidate_prefs
- self._ignore_requires_python = ignore_requires_python
- self._link_collector = link_collector
- self._target_python = target_python
-
- self.format_control = format_control
-
- # These are boring links that have already been logged somehow.
- self._logged_links: Set[Tuple[Link, LinkType, str]] = set()
-
- # Don't include an allow_yanked default value to make sure each call
- # site considers whether yanked releases are allowed. This also causes
- # that decision to be made explicit in the calling code, which helps
- # people when reading the code.
- @classmethod
- def create(
- cls,
- link_collector: LinkCollector,
- selection_prefs: SelectionPreferences,
- target_python: Optional[TargetPython] = None,
- ) -> "PackageFinder":
- """Create a PackageFinder.
-
- :param selection_prefs: The candidate selection preferences, as a
- SelectionPreferences object.
- :param target_python: The target Python interpreter to use when
- checking compatibility. If None (the default), a TargetPython
- object will be constructed from the running Python.
- """
- if target_python is None:
- target_python = TargetPython()
-
- candidate_prefs = CandidatePreferences(
- prefer_binary=selection_prefs.prefer_binary,
- allow_all_prereleases=selection_prefs.allow_all_prereleases,
- )
-
- return cls(
- candidate_prefs=candidate_prefs,
- link_collector=link_collector,
- target_python=target_python,
- allow_yanked=selection_prefs.allow_yanked,
- format_control=selection_prefs.format_control,
- ignore_requires_python=selection_prefs.ignore_requires_python,
- )
-
- @property
- def target_python(self) -> TargetPython:
- return self._target_python
-
- @property
- def search_scope(self) -> SearchScope:
- return self._link_collector.search_scope
-
- @search_scope.setter
- def search_scope(self, search_scope: SearchScope) -> None:
- self._link_collector.search_scope = search_scope
-
- @property
- def find_links(self) -> List[str]:
- return self._link_collector.find_links
-
- @property
- def index_urls(self) -> List[str]:
- return self.search_scope.index_urls
-
- @property
- def trusted_hosts(self) -> Iterable[str]:
- for host_port in self._link_collector.session.pip_trusted_origins:
- yield build_netloc(*host_port)
-
- @property
- def allow_all_prereleases(self) -> bool:
- return self._candidate_prefs.allow_all_prereleases
-
- def set_allow_all_prereleases(self) -> None:
- self._candidate_prefs.allow_all_prereleases = True
-
- @property
- def prefer_binary(self) -> bool:
- return self._candidate_prefs.prefer_binary
-
- def set_prefer_binary(self) -> None:
- self._candidate_prefs.prefer_binary = True
-
- def requires_python_skipped_reasons(self) -> List[str]:
- reasons = {
- detail
- for _, result, detail in self._logged_links
- if result == LinkType.requires_python_mismatch
- }
- return sorted(reasons)
-
- def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
- canonical_name = canonicalize_name(project_name)
- formats = self.format_control.get_allowed_formats(canonical_name)
-
- return LinkEvaluator(
- project_name=project_name,
- canonical_name=canonical_name,
- formats=formats,
- target_python=self._target_python,
- allow_yanked=self._allow_yanked,
- ignore_requires_python=self._ignore_requires_python,
- )
-
- def _sort_links(self, links: Iterable[Link]) -> List[Link]:
- """
- Returns elements of links in order, non-egg links first, egg links
- second, while eliminating duplicates
- """
- eggs, no_eggs = [], []
- seen: Set[Link] = set()
- for link in links:
- if link not in seen:
- seen.add(link)
- if link.egg_fragment:
- eggs.append(link)
- else:
- no_eggs.append(link)
- return no_eggs + eggs
-
- def _log_skipped_link(self, link: Link, result: LinkType, detail: str) -> None:
- entry = (link, result, detail)
- if entry not in self._logged_links:
- # Put the link at the end so the reason is more visible and because
- # the link string is usually very long.
- logger.debug("Skipping link: %s: %s", detail, link)
- self._logged_links.add(entry)
-
- def get_install_candidate(
- self, link_evaluator: LinkEvaluator, link: Link
- ) -> Optional[InstallationCandidate]:
- """
- If the link is a candidate for install, convert it to an
- InstallationCandidate and return it. Otherwise, return None.
- """
- result, detail = link_evaluator.evaluate_link(link)
- if result != LinkType.candidate:
- self._log_skipped_link(link, result, detail)
- return None
-
- return InstallationCandidate(
- name=link_evaluator.project_name,
- link=link,
- version=detail,
- )
-
- def evaluate_links(
- self, link_evaluator: LinkEvaluator, links: Iterable[Link]
- ) -> List[InstallationCandidate]:
- """
- Convert links that are candidates to InstallationCandidate objects.
- """
- candidates = []
- for link in self._sort_links(links):
- candidate = self.get_install_candidate(link_evaluator, link)
- if candidate is not None:
- candidates.append(candidate)
-
- return candidates
-
- def process_project_url(
- self, project_url: Link, link_evaluator: LinkEvaluator
- ) -> List[InstallationCandidate]:
- logger.debug(
- "Fetching project page and analyzing links: %s",
- project_url,
- )
- index_response = self._link_collector.fetch_response(project_url)
- if index_response is None:
- return []
-
- page_links = list(parse_links(index_response))
-
- with indent_log():
- package_links = self.evaluate_links(
- link_evaluator,
- links=page_links,
- )
-
- return package_links
-
- @functools.lru_cache(maxsize=None)
- def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
- """Find all available InstallationCandidate for project_name
-
- This checks index_urls and find_links.
- All versions found are returned as an InstallationCandidate list.
-
- See LinkEvaluator.evaluate_link() for details on which files
- are accepted.
- """
- link_evaluator = self.make_link_evaluator(project_name)
-
- collected_sources = self._link_collector.collect_sources(
- project_name=project_name,
- candidates_from_page=functools.partial(
- self.process_project_url,
- link_evaluator=link_evaluator,
- ),
- )
-
- page_candidates_it = itertools.chain.from_iterable(
- source.page_candidates()
- for sources in collected_sources
- for source in sources
- if source is not None
- )
- page_candidates = list(page_candidates_it)
-
- file_links_it = itertools.chain.from_iterable(
- source.file_links()
- for sources in collected_sources
- for source in sources
- if source is not None
- )
- file_candidates = self.evaluate_links(
- link_evaluator,
- sorted(file_links_it, reverse=True),
- )
-
- if logger.isEnabledFor(logging.DEBUG) and file_candidates:
- paths = []
- for candidate in file_candidates:
- assert candidate.link.url # we need to have a URL
- try:
- paths.append(candidate.link.file_path)
- except Exception:
- paths.append(candidate.link.url) # it's not a local file
-
- logger.debug("Local files found: %s", ", ".join(paths))
-
- # This is an intentional priority ordering
- return file_candidates + page_candidates
-
- def make_candidate_evaluator(
- self,
- project_name: str,
- specifier: Optional[specifiers.BaseSpecifier] = None,
- hashes: Optional[Hashes] = None,
- ) -> CandidateEvaluator:
- """Create a CandidateEvaluator object to use."""
- candidate_prefs = self._candidate_prefs
- return CandidateEvaluator.create(
- project_name=project_name,
- target_python=self._target_python,
- prefer_binary=candidate_prefs.prefer_binary,
- allow_all_prereleases=candidate_prefs.allow_all_prereleases,
- specifier=specifier,
- hashes=hashes,
- )
-
- @functools.lru_cache(maxsize=None)
- def find_best_candidate(
- self,
- project_name: str,
- specifier: Optional[specifiers.BaseSpecifier] = None,
- hashes: Optional[Hashes] = None,
- ) -> BestCandidateResult:
- """Find matches for the given project and specifier.
-
- :param specifier: An optional object implementing `filter`
- (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
- versions.
-
- :return: A `BestCandidateResult` instance.
- """
- candidates = self.find_all_candidates(project_name)
- candidate_evaluator = self.make_candidate_evaluator(
- project_name=project_name,
- specifier=specifier,
- hashes=hashes,
- )
- return candidate_evaluator.compute_best_candidate(candidates)
-
- def find_requirement(
- self, req: InstallRequirement, upgrade: bool
- ) -> Optional[InstallationCandidate]:
- """Try to find a Link matching req
-
- Expects req, an InstallRequirement, and upgrade, a boolean.
- Returns an InstallationCandidate if found;
- raises DistributionNotFound or BestVersionAlreadyInstalled otherwise.
- """
- hashes = req.hashes(trust_internet=False)
- best_candidate_result = self.find_best_candidate(
- req.name,
- specifier=req.specifier,
- hashes=hashes,
- )
- best_candidate = best_candidate_result.best_candidate
-
- installed_version: Optional[_BaseVersion] = None
- if req.satisfied_by is not None:
- installed_version = req.satisfied_by.version
-
- def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
- # This repeated parse_version and str() conversion is needed to
- # handle different vendoring sources from pip and pkg_resources.
- # If we stop using the pkg_resources provided specifier and start
- # using our own, we can drop the cast to str().
- return (
- ", ".join(
- sorted(
- {str(c.version) for c in cand_iter},
- key=parse_version,
- )
- )
- or "none"
- )
-
- if installed_version is None and best_candidate is None:
- logger.critical(
- "Could not find a version that satisfies the requirement %s "
- "(from versions: %s)",
- req,
- _format_versions(best_candidate_result.iter_all()),
- )
-
- raise DistributionNotFound(f"No matching distribution found for {req}")
-
- def _should_install_candidate(
- candidate: Optional[InstallationCandidate],
- ) -> "TypeGuard[InstallationCandidate]":
- if installed_version is None:
- return True
- if best_candidate is None:
- return False
- return best_candidate.version > installed_version
-
- if not upgrade and installed_version is not None:
- if _should_install_candidate(best_candidate):
- logger.debug(
- "Existing installed version (%s) satisfies requirement "
- "(most up-to-date version is %s)",
- installed_version,
- best_candidate.version,
- )
- else:
- logger.debug(
- "Existing installed version (%s) is most up-to-date and "
- "satisfies requirement",
- installed_version,
- )
- return None
-
- if _should_install_candidate(best_candidate):
- logger.debug(
- "Using version %s (newest of versions: %s)",
- best_candidate.version,
- _format_versions(best_candidate_result.iter_applicable()),
- )
- return best_candidate
-
- # We have an existing version, and it's the best version
- logger.debug(
- "Installed version (%s) is most up-to-date (past versions: %s)",
- installed_version,
- _format_versions(best_candidate_result.iter_applicable()),
- )
- raise BestVersionAlreadyInstalled
-
-
-def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
- """Find the separator's index based on the package's canonical name.
-
- :param fragment: A <package>+<version> filename "fragment" (stem) or
- egg fragment.
- :param canonical_name: The package's canonical name.
-
- This function is needed since the canonicalized name does not necessarily
- have the same length as the egg info's name part. An example::
-
- >>> fragment = 'foo__bar-1.0'
- >>> canonical_name = 'foo-bar'
- >>> _find_name_version_sep(fragment, canonical_name)
- 8
- """
- # Project name and version must be separated by one single dash. Find all
- # occurrences of dashes; if the string in front of it matches the canonical
- # name, this is the one separating the name and version parts.
- for i, c in enumerate(fragment):
- if c != "-":
- continue
- if canonicalize_name(fragment[:i]) == canonical_name:
- return i
- raise ValueError(f"{fragment} does not match {canonical_name}")
-
-
-def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
- """Parse the version string from a <package>+<version> filename
- "fragment" (stem) or egg fragment.
-
- :param fragment: The string to parse. E.g. foo-2.1
- :param canonical_name: The canonicalized name of the package this
- belongs to.
- """
- try:
- version_start = _find_name_version_sep(fragment, canonical_name) + 1
- except ValueError:
- return None
- version = fragment[version_start:]
- if not version:
- return None
- return version
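-
-
-# A minimal sketch (not part of the original module) of how the two fragment
-# helpers above compose, assuming the canonical name "foo-bar". The separator
-# search canonicalizes each dash-delimited prefix until it matches, and the
-# version is whatever follows that dash:
-#
-#   >>> _find_name_version_sep("foo__bar-1.0", "foo-bar")
-#   8
-#   >>> _extract_version_from_fragment("foo__bar-1.0", "foo-bar")
-#   '1.0'
-#   >>> _extract_version_from_fragment("unrelated-1.0", "foo-bar") is None
-#   True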
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/index/sources.py b/venv/lib/python3.11/site-packages/pip/_internal/index/sources.py
deleted file mode 100644
index f4626d7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/index/sources.py
+++ /dev/null
@@ -1,285 +0,0 @@
-import logging
-import mimetypes
-import os
-from collections import defaultdict
-from typing import Callable, Dict, Iterable, List, Optional, Tuple
-
-from pip._vendor.packaging.utils import (
- InvalidSdistFilename,
- InvalidVersion,
- InvalidWheelFilename,
- canonicalize_name,
- parse_sdist_filename,
- parse_wheel_filename,
-)
-
-from pip._internal.models.candidate import InstallationCandidate
-from pip._internal.models.link import Link
-from pip._internal.utils.urls import path_to_url, url_to_path
-from pip._internal.vcs import is_url
-
-logger = logging.getLogger(__name__)
-
-FoundCandidates = Iterable[InstallationCandidate]
-FoundLinks = Iterable[Link]
-CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
-PageValidator = Callable[[Link], bool]
-
-
-class LinkSource:
- @property
- def link(self) -> Optional[Link]:
- """Returns the underlying link, if there's one."""
- raise NotImplementedError()
-
- def page_candidates(self) -> FoundCandidates:
- """Candidates found by parsing an archive listing HTML file."""
- raise NotImplementedError()
-
- def file_links(self) -> FoundLinks:
- """Links found by specifying archives directly."""
- raise NotImplementedError()
-
-
-def _is_html_file(file_url: str) -> bool:
- return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
-
-
-class _FlatDirectoryToUrls:
- """Scans directory and caches results"""
-
- def __init__(self, path: str) -> None:
- self._path = path
- self._page_candidates: List[str] = []
- self._project_name_to_urls: Dict[str, List[str]] = defaultdict(list)
- self._scanned_directory = False
-
- def _scan_directory(self) -> None:
- """Scans directory once and populates both page_candidates
- and project_name_to_urls at the same time
- """
- for entry in os.scandir(self._path):
- url = path_to_url(entry.path)
- if _is_html_file(url):
- self._page_candidates.append(url)
- continue
-
- # File must have a valid wheel or sdist name,
- # otherwise not worth considering as a package
- try:
- project_filename = parse_wheel_filename(entry.name)[0]
- except (InvalidWheelFilename, InvalidVersion):
- try:
- project_filename = parse_sdist_filename(entry.name)[0]
- except (InvalidSdistFilename, InvalidVersion):
- continue
-
- self._project_name_to_urls[project_filename].append(url)
- self._scanned_directory = True
-
- @property
- def page_candidates(self) -> List[str]:
- if not self._scanned_directory:
- self._scan_directory()
-
- return self._page_candidates
-
- @property
- def project_name_to_urls(self) -> Dict[str, List[str]]:
- if not self._scanned_directory:
- self._scan_directory()
-
- return self._project_name_to_urls
-
-
-class _FlatDirectorySource(LinkSource):
- """Link source specified by ``--find-links=<path-to-dir>``.
-
- This looks at the content of the directory and returns:
-
- * ``page_candidates``: Links listed on each HTML file in the directory.
- * ``file_candidates``: Archives in the directory.
- """
-
- _paths_to_urls: Dict[str, _FlatDirectoryToUrls] = {}
-
- def __init__(
- self,
- candidates_from_page: CandidatesFromPage,
- path: str,
- project_name: str,
- ) -> None:
- self._candidates_from_page = candidates_from_page
- self._project_name = canonicalize_name(project_name)
-
- # Get existing instance of _FlatDirectoryToUrls if it exists
- if path in self._paths_to_urls:
- self._path_to_urls = self._paths_to_urls[path]
- else:
- self._path_to_urls = _FlatDirectoryToUrls(path=path)
- self._paths_to_urls[path] = self._path_to_urls
-
- @property
- def link(self) -> Optional[Link]:
- return None
-
- def page_candidates(self) -> FoundCandidates:
- for url in self._path_to_urls.page_candidates:
- yield from self._candidates_from_page(Link(url))
-
- def file_links(self) -> FoundLinks:
- for url in self._path_to_urls.project_name_to_urls[self._project_name]:
- yield Link(url)
-
-
-class _LocalFileSource(LinkSource):
- """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.
-
- If a URL is supplied, it must be a ``file:`` URL. If a path is supplied to
- the option, it is converted to a URL first. This returns:
-
- * ``page_candidates``: Links listed on an HTML file.
- * ``file_candidates``: The non-HTML file.
- """
-
- def __init__(
- self,
- candidates_from_page: CandidatesFromPage,
- link: Link,
- ) -> None:
- self._candidates_from_page = candidates_from_page
- self._link = link
-
- @property
- def link(self) -> Optional[Link]:
- return self._link
-
- def page_candidates(self) -> FoundCandidates:
- if not _is_html_file(self._link.url):
- return
- yield from self._candidates_from_page(self._link)
-
- def file_links(self) -> FoundLinks:
- if _is_html_file(self._link.url):
- return
- yield self._link
-
-
-class _RemoteFileSource(LinkSource):
- """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.
-
- This returns:
-
- * ``page_candidates``: Links listed on an HTML file.
- * ``file_candidates``: The non-HTML file.
- """
-
- def __init__(
- self,
- candidates_from_page: CandidatesFromPage,
- page_validator: PageValidator,
- link: Link,
- ) -> None:
- self._candidates_from_page = candidates_from_page
- self._page_validator = page_validator
- self._link = link
-
- @property
- def link(self) -> Optional[Link]:
- return self._link
-
- def page_candidates(self) -> FoundCandidates:
- if not self._page_validator(self._link):
- return
- yield from self._candidates_from_page(self._link)
-
- def file_links(self) -> FoundLinks:
- yield self._link
-
-
-class _IndexDirectorySource(LinkSource):
- """``--[extra-]index-url=<path-to-directory>``.
-
- This is treated like a remote URL; ``candidates_from_page`` contains logic
- for this by appending ``index.html`` to the link.
- """
-
- def __init__(
- self,
- candidates_from_page: CandidatesFromPage,
- link: Link,
- ) -> None:
- self._candidates_from_page = candidates_from_page
- self._link = link
-
- @property
- def link(self) -> Optional[Link]:
- return self._link
-
- def page_candidates(self) -> FoundCandidates:
- yield from self._candidates_from_page(self._link)
-
- def file_links(self) -> FoundLinks:
- return ()
-
-
-def build_source(
- location: str,
- *,
- candidates_from_page: CandidatesFromPage,
- page_validator: PageValidator,
- expand_dir: bool,
- cache_link_parsing: bool,
- project_name: str,
-) -> Tuple[Optional[str], Optional[LinkSource]]:
- path: Optional[str] = None
- url: Optional[str] = None
- if os.path.exists(location): # Is a local path.
- url = path_to_url(location)
- path = location
- elif location.startswith("file:"): # A file: URL.
- url = location
- path = url_to_path(location)
- elif is_url(location):
- url = location
-
- if url is None:
- msg = (
- "Location '%s' is ignored: "
- "it is either a non-existing path or lacks a specific scheme."
- )
- logger.warning(msg, location)
- return (None, None)
-
- if path is None:
- source: LinkSource = _RemoteFileSource(
- candidates_from_page=candidates_from_page,
- page_validator=page_validator,
- link=Link(url, cache_link_parsing=cache_link_parsing),
- )
- return (url, source)
-
- if os.path.isdir(path):
- if expand_dir:
- source = _FlatDirectorySource(
- candidates_from_page=candidates_from_page,
- path=path,
- project_name=project_name,
- )
- else:
- source = _IndexDirectorySource(
- candidates_from_page=candidates_from_page,
- link=Link(url, cache_link_parsing=cache_link_parsing),
- )
- return (url, source)
- elif os.path.isfile(path):
- source = _LocalFileSource(
- candidates_from_page=candidates_from_page,
- link=Link(url, cache_link_parsing=cache_link_parsing),
- )
- return (url, source)
- logger.warning(
- "Location '%s' is ignored: it is neither a file nor a directory.",
- location,
- )
- return (url, None)
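-
-
-# A minimal usage sketch (not part of the original module). The callbacks and
-# the directory path below are placeholders; build_source() merely dispatches
-# on the location type:
-#
-#   url, source = build_source(
-#       "/tmp/wheelhouse",                      # hypothetical local directory
-#       candidates_from_page=lambda link: [],   # placeholder callback
-#       page_validator=lambda link: True,       # placeholder validator
-#       expand_dir=True,                        # --find-links semantics
-#       cache_link_parsing=False,
-#       project_name="example",
-#   )
-#   # For an existing directory with expand_dir=True, `source` would be a
-#   # _FlatDirectorySource; for a remote URL, a _RemoteFileSource.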
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/locations/__init__.py
deleted file mode 100644
index d54bc63..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/__init__.py
+++ /dev/null
@@ -1,467 +0,0 @@
-import functools
-import logging
-import os
-import pathlib
-import sys
-import sysconfig
-from typing import Any, Dict, Generator, Optional, Tuple
-
-from pip._internal.models.scheme import SCHEME_KEYS, Scheme
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-from . import _sysconfig
-from .base import (
- USER_CACHE_DIR,
- get_major_minor_version,
- get_src_prefix,
- is_osx_framework,
- site_packages,
- user_site,
-)
-
-__all__ = [
- "USER_CACHE_DIR",
- "get_bin_prefix",
- "get_bin_user",
- "get_major_minor_version",
- "get_platlib",
- "get_purelib",
- "get_scheme",
- "get_src_prefix",
- "site_packages",
- "user_site",
-]
-
-
-logger = logging.getLogger(__name__)
-
-
-_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
-
-_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
-
-
-def _should_use_sysconfig() -> bool:
- """This function determines the value of _USE_SYSCONFIG.
-
- By default, pip uses sysconfig on Python 3.10+.
- But Python distributors can override this decision by setting:
- sysconfig._PIP_USE_SYSCONFIG = True / False
- Rationale in https://github.com/pypa/pip/issues/10647
-
- This is a function for testability, but should be constant during any one
- run.
- """
- return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
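-
-
-# Illustrative only (not in the original module): a distributor patch could
-# force the legacy backend like this, which the check above would pick up:
-#
-#   import sysconfig
-#   sysconfig._PIP_USE_SYSCONFIG = False  # pip then falls back to distutils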
-
-
-_USE_SYSCONFIG = _should_use_sysconfig()
-
-if not _USE_SYSCONFIG:
- # Import distutils lazily to avoid deprecation warnings,
- # but import it soon enough that it is in memory and available during
- # a pip reinstall.
- from . import _distutils
-
-# Be noisy about incompatibilities if this platform "should" be using
-# sysconfig, but is explicitly opting out and using distutils instead.
-if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
- _MISMATCH_LEVEL = logging.WARNING
-else:
- _MISMATCH_LEVEL = logging.DEBUG
-
-
-def _looks_like_bpo_44860() -> bool:
- """The resolution to bpo-44860 will change this incorrect platlib.
-
- See <https://bugs.python.org/issue44860>.
- """
- from distutils.command.install import INSTALL_SCHEMES
-
- try:
- unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
- except KeyError:
- return False
- return unix_user_platlib == "$usersite"
-
-
-def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
- platlib = scheme["platlib"]
- if "/$platlibdir/" in platlib:
- platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
- if "/lib64/" not in platlib:
- return False
- unpatched = platlib.replace("/lib64/", "/lib/")
- return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
-
-
-@functools.lru_cache(maxsize=None)
-def _looks_like_red_hat_lib() -> bool:
- """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
-
- This is the only way I can see to tell a Red Hat-patched Python.
- """
- from distutils.command.install import INSTALL_SCHEMES
-
- return all(
- k in INSTALL_SCHEMES
- and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
- for k in ("unix_prefix", "unix_home")
- )
-
-
-@functools.lru_cache(maxsize=None)
-def _looks_like_debian_scheme() -> bool:
- """Debian adds two additional schemes."""
- from distutils.command.install import INSTALL_SCHEMES
-
- return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
-
-
-@functools.lru_cache(maxsize=None)
-def _looks_like_red_hat_scheme() -> bool:
- """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
-
- Red Hat's ``00251-change-user-install-location.patch`` changes the install
- command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
- (fortunately?) done quite unconditionally, so we create a default command
- object without any configuration to detect this.
- """
- from distutils.command.install import install
- from distutils.dist import Distribution
-
- cmd: Any = install(Distribution())
- cmd.finalize_options()
- return (
- cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
- and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
- )
-
-
-@functools.lru_cache(maxsize=None)
-def _looks_like_slackware_scheme() -> bool:
- """Slackware patches sysconfig but fails to patch distutils and site.
-
- Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
- path, but does not do the same to the site module.
- """
- if user_site is None: # User-site not available.
- return False
- try:
- paths = sysconfig.get_paths(scheme="posix_user", expand=False)
- except KeyError: # User-site not available.
- return False
- return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
-
-
-@functools.lru_cache(maxsize=None)
-def _looks_like_msys2_mingw_scheme() -> bool:
- """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
-
- However, MSYS2 incorrectly patches the sysconfig ``nt`` scheme. The fix is
- likely going to be included in their 3.10 release, so we ignore the warning.
- See msys2/MINGW-packages#9319.
-
- MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
- and is missing the final ``"site-packages"``.
- """
- paths = sysconfig.get_paths("nt", expand=False)
- return all(
- "Lib" not in p and "lib" in p and not p.endswith("site-packages")
- for p in (paths[key] for key in ("platlib", "purelib"))
- )
-
-
-def _fix_abiflags(parts: Tuple[str]) -> Generator[str, None, None]:
- ldversion = sysconfig.get_config_var("LDVERSION")
- abiflags = getattr(sys, "abiflags", None)
-
- # LDVERSION does not end with sys.abiflags. Just return the path unchanged.
- if not ldversion or not abiflags or not ldversion.endswith(abiflags):
- yield from parts
- return
-
- # Strip sys.abiflags from LDVERSION-based path components.
- for part in parts:
- if part.endswith(ldversion):
- part = part[: (0 - len(abiflags))]
- yield part
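-
-
-# Worked example (illustrative, not in the original module): with
-# LDVERSION == "3.7dm" and sys.abiflags == "dm", a path part such as
-# "python3.7dm" is rewritten to "python3.7"; parts not ending in LDVERSION
-# pass through unchanged:
-#
-#   >>> list(_fix_abiflags(("include", "python3.7dm")))
-#   ['include', 'python3.7']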
-
-
-@functools.lru_cache(maxsize=None)
-def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
- issue_url = "https://github.com/pypa/pip/issues/10151"
- message = (
- "Value for %s does not match. Please report this to <%s>"
- "\ndistutils: %s"
- "\nsysconfig: %s"
- )
- logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
-
-
-def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
- if old == new:
- return False
- _warn_mismatched(old, new, key=key)
- return True
-
-
-@functools.lru_cache(maxsize=None)
-def _log_context(
- *,
- user: bool = False,
- home: Optional[str] = None,
- root: Optional[str] = None,
- prefix: Optional[str] = None,
-) -> None:
- parts = [
- "Additional context:",
- "user = %r",
- "home = %r",
- "root = %r",
- "prefix = %r",
- ]
-
- logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
-
-
-def get_scheme(
- dist_name: str,
- user: bool = False,
- home: Optional[str] = None,
- root: Optional[str] = None,
- isolated: bool = False,
- prefix: Optional[str] = None,
-) -> Scheme:
- new = _sysconfig.get_scheme(
- dist_name,
- user=user,
- home=home,
- root=root,
- isolated=isolated,
- prefix=prefix,
- )
- if _USE_SYSCONFIG:
- return new
-
- old = _distutils.get_scheme(
- dist_name,
- user=user,
- home=home,
- root=root,
- isolated=isolated,
- prefix=prefix,
- )
-
- warning_contexts = []
- for k in SCHEME_KEYS:
- old_v = pathlib.Path(getattr(old, k))
- new_v = pathlib.Path(getattr(new, k))
-
- if old_v == new_v:
- continue
-
- # distutils incorrectly put PyPy packages under ``site-packages/python``
- # in the ``posix_home`` scheme, but PyPy devs said they expect the
- # directory name to be ``pypy`` instead. So we treat this as a bug fix
- # and not warn about it. See bpo-43307 and python/cpython#24628.
- skip_pypy_special_case = (
- sys.implementation.name == "pypy"
- and home is not None
- and k in ("platlib", "purelib")
- and old_v.parent == new_v.parent
- and old_v.name.startswith("python")
- and new_v.name.startswith("pypy")
- )
- if skip_pypy_special_case:
- continue
-
- # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
- # the ``include`` value, but distutils's ``headers`` does. We'll let
- # CPython decide whether this is a bug or feature. See bpo-43948.
- skip_osx_framework_user_special_case = (
- user
- and is_osx_framework()
- and k == "headers"
- and old_v.parent.parent == new_v.parent
- and old_v.parent.name.startswith("python")
- )
- if skip_osx_framework_user_special_case:
- continue
-
- # On Red Hat and derived Linux distributions, distutils is patched to
- # use "lib64" instead of "lib" for platlib.
- if k == "platlib" and _looks_like_red_hat_lib():
- continue
-
- # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
- # sys.platlibdir, but distutils's unix_user incorrectly continues
- # using the same $usersite for both platlib and purelib. This creates a
- # mismatch when sys.platlibdir is not "lib".
- skip_bpo_44860 = (
- user
- and k == "platlib"
- and not WINDOWS
- and sys.version_info >= (3, 9)
- and _PLATLIBDIR != "lib"
- and _looks_like_bpo_44860()
- )
- if skip_bpo_44860:
- continue
-
- # Slackware incorrectly patches posix_user to use lib64 instead of lib,
- # but not usersite to match the location.
- skip_slackware_user_scheme = (
- user
- and k in ("platlib", "purelib")
- and not WINDOWS
- and _looks_like_slackware_scheme()
- )
- if skip_slackware_user_scheme:
- continue
-
- # Both Debian and Red Hat patch Python to place the system site under
- # /usr/local instead of /usr. Debian also places lib in dist-packages
- # instead of site-packages, but the /usr/local check should cover it.
- skip_linux_system_special_case = (
- not (user or home or prefix or running_under_virtualenv())
- and old_v.parts[1:3] == ("usr", "local")
- and len(new_v.parts) > 1
- and new_v.parts[1] == "usr"
- and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
- and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
- )
- if skip_linux_system_special_case:
- continue
-
- # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
- # the "pythonX.Y" part of the path, but distutils does.
- skip_sysconfig_abiflag_bug = (
- sys.version_info < (3, 8)
- and not WINDOWS
- and k in ("headers", "platlib", "purelib")
- and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
- )
- if skip_sysconfig_abiflag_bug:
- continue
-
- # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
- # part of the path. This is incorrect and will be fixed in MSYS.
- skip_msys2_mingw_bug = (
- WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
- )
- if skip_msys2_mingw_bug:
- continue
-
- # CPython's POSIX install script invokes pip (via ensurepip) against the
- # interpreter located in the source tree, not the install site. This
- # triggers special logic in sysconfig that's not present in distutils.
- # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
- skip_cpython_build = (
- sysconfig.is_python_build(check_home=True)
- and not WINDOWS
- and k in ("headers", "include", "platinclude")
- )
- if skip_cpython_build:
- continue
-
- warning_contexts.append((old_v, new_v, f"scheme.{k}"))
-
- if not warning_contexts:
- return old
-
- # Check if this path mismatch is caused by distutils config files. Those
- # files will no longer work once we switch to sysconfig, so this emits a
- # deprecation warning for them.
- default_old = _distutils.distutils_scheme(
- dist_name,
- user,
- home,
- root,
- isolated,
- prefix,
- ignore_config_files=True,
- )
- if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
- deprecated(
- reason=(
- "Configuring installation scheme with distutils config files "
- "is deprecated and will no longer work in the near future. If you "
- "are using a Homebrew or Linuxbrew Python, please see discussion "
- "at https://github.com/Homebrew/homebrew-core/issues/76621"
- ),
- replacement=None,
- gone_in=None,
- )
- return old
-
- # Post warnings about this mismatch so the user can report them back.
- for old_v, new_v, key in warning_contexts:
- _warn_mismatched(old_v, new_v, key=key)
- _log_context(user=user, home=home, root=root, prefix=prefix)
-
- return old
-
-
-def get_bin_prefix() -> str:
- new = _sysconfig.get_bin_prefix()
- if _USE_SYSCONFIG:
- return new
-
- old = _distutils.get_bin_prefix()
- if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
- _log_context()
- return old
-
-
-def get_bin_user() -> str:
- return _sysconfig.get_scheme("", user=True).scripts
-
-
-def _looks_like_deb_system_dist_packages(value: str) -> bool:
- """Check if the value is Debian's APT-controlled dist-packages.
-
- Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
- default package path controlled by APT, but does not patch ``sysconfig`` to
- do the same. This is similar to the bug worked around in ``get_scheme()``,
- but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
- we can't do anything about this Debian bug, and this detection allows us to
- skip the warning when needed.
- """
- if not _looks_like_debian_scheme():
- return False
- if value == "/usr/lib/python3/dist-packages":
- return True
- return False
-
-
-def get_purelib() -> str:
- """Return the default pure-Python lib location."""
- new = _sysconfig.get_purelib()
- if _USE_SYSCONFIG:
- return new
-
- old = _distutils.get_purelib()
- if _looks_like_deb_system_dist_packages(old):
- return old
- if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
- _log_context()
- return old
-
-
-def get_platlib() -> str:
- """Return the default platform-shared lib location."""
- new = _sysconfig.get_platlib()
- if _USE_SYSCONFIG:
- return new
-
- from . import _distutils
-
- old = _distutils.get_platlib()
- if _looks_like_deb_system_dist_packages(old):
- return old
- if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
- _log_context()
- return old
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index ebf551f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc
deleted file mode 100644
index 6d5fb10..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc
deleted file mode 100644
index ef53c99..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index f80c04f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/_distutils.py b/venv/lib/python3.11/site-packages/pip/_internal/locations/_distutils.py
deleted file mode 100644
index 0e18c6e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/_distutils.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""Locations where we look for configs, install stuff, etc"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-# If pip's going to use distutils, it should not be using the copy that setuptools
-# might have injected into the environment. This is done by removing the injected
-# shim, if it's injected.
-#
-# See https://github.com/pypa/pip/issues/8761 for the original discussion and
-# rationale for why this is done within pip.
-try:
- __import__("_distutils_hack").remove_shim()
-except (ImportError, AttributeError):
- pass
-
-import logging
-import os
-import sys
-from distutils.cmd import Command as DistutilsCommand
-from distutils.command.install import SCHEME_KEYS
-from distutils.command.install import install as distutils_install_command
-from distutils.sysconfig import get_python_lib
-from typing import Dict, List, Optional, Union, cast
-
-from pip._internal.models.scheme import Scheme
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-from .base import get_major_minor_version
-
-logger = logging.getLogger(__name__)
-
-
-def distutils_scheme(
- dist_name: str,
- user: bool = False,
- home: Optional[str] = None,
- root: Optional[str] = None,
- isolated: bool = False,
- prefix: Optional[str] = None,
- *,
- ignore_config_files: bool = False,
-) -> Dict[str, str]:
- """
- Return a distutils install scheme
- """
- from distutils.dist import Distribution
-
- dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
- if isolated:
- dist_args["script_args"] = ["--no-user-cfg"]
-
- d = Distribution(dist_args)
- if not ignore_config_files:
- try:
- d.parse_config_files()
- except UnicodeDecodeError:
- paths = d.find_config_files()
- logger.warning(
- "Ignore distutils configs in %s due to encoding errors.",
- ", ".join(os.path.basename(p) for p in paths),
- )
- obj: Optional[DistutilsCommand] = None
- obj = d.get_command_obj("install", create=True)
- assert obj is not None
- i = cast(distutils_install_command, obj)
- # NOTE: setting user or home has the side-effect of creating the home dir
- # or user base for installations during finalize_options().
- # Ideally, we'd prefer a scheme class that has no side-effects.
- assert not (user and prefix), f"user={user} prefix={prefix}"
- assert not (home and prefix), f"home={home} prefix={prefix}"
- i.user = user or i.user
- if user or home:
- i.prefix = ""
- i.prefix = prefix or i.prefix
- i.home = home or i.home
- i.root = root or i.root
- i.finalize_options()
-
- scheme = {}
- for key in SCHEME_KEYS:
- scheme[key] = getattr(i, "install_" + key)
-
- # An install_lib specified in setup.cfg should install *everything*
- # there (i.e. it takes precedence over both purelib and
- # platlib). Note, i.install_lib is *always* set after
- # finalize_options(); we only want to override here if the user
- # has explicitly requested it, hence going back to the config.
- if "install_lib" in d.get_option_dict("install"):
- scheme.update({"purelib": i.install_lib, "platlib": i.install_lib})
-
- if running_under_virtualenv():
- if home:
- prefix = home
- elif user:
- prefix = i.install_userbase
- else:
- prefix = i.prefix
- scheme["headers"] = os.path.join(
- prefix,
- "include",
- "site",
- f"python{get_major_minor_version()}",
- dist_name,
- )
-
- if root is not None:
- path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
- scheme["headers"] = os.path.join(root, path_no_drive[1:])
-
- return scheme
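-
-
-# A minimal usage sketch (not part of the original module); the package name
-# and prefix are hypothetical. The returned dict is keyed by SCHEME_KEYS:
-#
-#   scheme = distutils_scheme("example", prefix="/opt/app")
-#   sorted(scheme)  # -> ['data', 'headers', 'platlib', 'purelib', 'scripts']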
-
-
-def get_scheme(
- dist_name: str,
- user: bool = False,
- home: Optional[str] = None,
- root: Optional[str] = None,
- isolated: bool = False,
- prefix: Optional[str] = None,
-) -> Scheme:
- """
- Get the "scheme" corresponding to the input parameters. The distutils
- documentation provides the context for the available schemes:
- https://docs.python.org/3/install/index.html#alternate-installation
-
- :param dist_name: the name of the package to retrieve the scheme for, used
- in the headers scheme path
- :param user: indicates to use the "user" scheme
- :param home: indicates to use the "home" scheme and provides the base
- directory for the same
- :param root: root under which other directories are re-based
- :param isolated: equivalent to --no-user-cfg, i.e. do not consider
- ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
- scheme paths
- :param prefix: indicates to use the "prefix" scheme and provides the
- base directory for the same
- """
- scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
- return Scheme(
- platlib=scheme["platlib"],
- purelib=scheme["purelib"],
- headers=scheme["headers"],
- scripts=scheme["scripts"],
- data=scheme["data"],
- )
-
-
-def get_bin_prefix() -> str:
- # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
- # so we need to call normpath to eliminate them.
- prefix = os.path.normpath(sys.prefix)
- if WINDOWS:
- bin_py = os.path.join(prefix, "Scripts")
- # buildout uses 'bin' on Windows too?
- if not os.path.exists(bin_py):
- bin_py = os.path.join(prefix, "bin")
- return bin_py
- # Forcing to use /usr/local/bin for standard macOS framework installs
- # Also log to ~/Library/Logs/ for use with the Console.app log viewer
- if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
- return "/usr/local/bin"
- return os.path.join(prefix, "bin")
-
-
-def get_purelib() -> str:
- return get_python_lib(plat_specific=False)
-
-
-def get_platlib() -> str:
- return get_python_lib(plat_specific=True)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/_sysconfig.py b/venv/lib/python3.11/site-packages/pip/_internal/locations/_sysconfig.py
deleted file mode 100644
index 97aef1f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/_sysconfig.py
+++ /dev/null
@@ -1,213 +0,0 @@
-import logging
-import os
-import sys
-import sysconfig
-import typing
-
-from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
-from pip._internal.models.scheme import SCHEME_KEYS, Scheme
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-from .base import change_root, get_major_minor_version, is_osx_framework
-
-logger = logging.getLogger(__name__)
-
-
-# Notes on _infer_* functions.
-# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
-# way to ask things like "what is the '_prefix' scheme on this platform". These
-# functions try to answer that with some heuristics while accounting for ad-hoc
-# platforms not covered by CPython's default sysconfig implementation. If the
-# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
-# a POSIX scheme.
-
-_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
-
-_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
-
-
-def _should_use_osx_framework_prefix() -> bool:
- """Check for Apple's ``osx_framework_library`` scheme.
-
- Python distributed by Apple's Command Line Tools has this special scheme
- that's used when:
-
- * This is a framework build.
- * We are installing into the system prefix.
-
- This does not account for ``pip install --prefix`` (which also means we're
- not installing to the system prefix); that case should use ``posix_prefix``,
- but the logic here means ``_infer_prefix()`` outputs
- ``osx_framework_library``. But
- since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
- which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
- wouldn't be able to magically switch between ``osx_framework_library`` and
- ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
- means its behavior is consistent whether we use the stdlib implementation
- or our own, and we deal with this special case in ``get_scheme()`` instead.
- """
- return (
- "osx_framework_library" in _AVAILABLE_SCHEMES
- and not running_under_virtualenv()
- and is_osx_framework()
- )
-
-
-def _infer_prefix() -> str:
- """Try to find a prefix scheme for the current platform.
-
- This tries:
-
- * A special ``osx_framework_library`` for Python distributed by Apple's
- Command Line Tools, when not running in a virtual environment.
- * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
- * Implementation without OS, used by PyPy on POSIX (``pypy``).
- * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
- * Just the OS name, used by CPython on Windows (``nt``).
-
- If none of the above works, fall back to ``posix_prefix``.
- """
- if _PREFERRED_SCHEME_API:
- return _PREFERRED_SCHEME_API("prefix")
- if _should_use_osx_framework_prefix():
- return "osx_framework_library"
- implementation_suffixed = f"{sys.implementation.name}_{os.name}"
- if implementation_suffixed in _AVAILABLE_SCHEMES:
- return implementation_suffixed
- if sys.implementation.name in _AVAILABLE_SCHEMES:
- return sys.implementation.name
- suffixed = f"{os.name}_prefix"
- if suffixed in _AVAILABLE_SCHEMES:
- return suffixed
- if os.name in _AVAILABLE_SCHEMES: # On Windows, prefix is just called "nt".
- return os.name
- return "posix_prefix"
-
-
-def _infer_user() -> str:
- """Try to find a user scheme for the current platform."""
- if _PREFERRED_SCHEME_API:
- return _PREFERRED_SCHEME_API("user")
- if is_osx_framework() and not running_under_virtualenv():
- suffixed = "osx_framework_user"
- else:
- suffixed = f"{os.name}_user"
- if suffixed in _AVAILABLE_SCHEMES:
- return suffixed
- if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable.
- raise UserInstallationInvalid()
- return "posix_user"
-
-
-def _infer_home() -> str:
- """Try to find a home for the current platform."""
- if _PREFERRED_SCHEME_API:
- return _PREFERRED_SCHEME_API("home")
- suffixed = f"{os.name}_home"
- if suffixed in _AVAILABLE_SCHEMES:
- return suffixed
- return "posix_home"
-
-
-# Update these keys if the user sets a custom home.
-_HOME_KEYS = [
- "installed_base",
- "base",
- "installed_platbase",
- "platbase",
- "prefix",
- "exec_prefix",
-]
-if sysconfig.get_config_var("userbase") is not None:
- _HOME_KEYS.append("userbase")
-
-
-def get_scheme(
- dist_name: str,
- user: bool = False,
- home: typing.Optional[str] = None,
- root: typing.Optional[str] = None,
- isolated: bool = False,
- prefix: typing.Optional[str] = None,
-) -> Scheme:
- """
- Get the "scheme" corresponding to the input parameters.
-
- :param dist_name: the name of the package to retrieve the scheme for, used
- in the headers scheme path
- :param user: indicates to use the "user" scheme
- :param home: indicates to use the "home" scheme
- :param root: root under which other directories are re-based
- :param isolated: ignored, but kept for distutils compatibility (where
- this controls whether the user-site pydistutils.cfg is honored)
- :param prefix: indicates to use the "prefix" scheme and provides the
- base directory for the same
- """
- if user and prefix:
- raise InvalidSchemeCombination("--user", "--prefix")
- if home and prefix:
- raise InvalidSchemeCombination("--home", "--prefix")
-
- if home is not None:
- scheme_name = _infer_home()
- elif user:
- scheme_name = _infer_user()
- else:
- scheme_name = _infer_prefix()
-
- # Special case: When installing into a custom prefix, use posix_prefix
- # instead of osx_framework_library. See _should_use_osx_framework_prefix()
- # docstring for details.
- if prefix is not None and scheme_name == "osx_framework_library":
- scheme_name = "posix_prefix"
-
- if home is not None:
- variables = {k: home for k in _HOME_KEYS}
- elif prefix is not None:
- variables = {k: prefix for k in _HOME_KEYS}
- else:
- variables = {}
-
- paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
-
- # Logic here is very arbitrary; we're doing it for compatibility, don't ask.
- # 1. Pip historically uses a special header path in virtual environments.
- # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
- # only do the same when not running in a virtual environment because
- # pip's historical header path logic (see point 1) did not do this.
- if running_under_virtualenv():
- if user:
- base = variables.get("userbase", sys.prefix)
- else:
- base = variables.get("base", sys.prefix)
- python_xy = f"python{get_major_minor_version()}"
- paths["include"] = os.path.join(base, "include", "site", python_xy)
- elif not dist_name:
- dist_name = "UNKNOWN"
-
- scheme = Scheme(
- platlib=paths["platlib"],
- purelib=paths["purelib"],
- headers=os.path.join(paths["include"], dist_name),
- scripts=paths["scripts"],
- data=paths["data"],
- )
- if root is not None:
- for key in SCHEME_KEYS:
- value = change_root(root, getattr(scheme, key))
- setattr(scheme, key, value)
- return scheme
-
-
-def get_bin_prefix() -> str:
- # Forcing to use /usr/local/bin for standard macOS framework installs.
- if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
- return "/usr/local/bin"
- return sysconfig.get_paths()["scripts"]
-
-
-def get_purelib() -> str:
- return sysconfig.get_paths()["purelib"]
-
-
-def get_platlib() -> str:
- return sysconfig.get_paths()["platlib"]
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/locations/base.py b/venv/lib/python3.11/site-packages/pip/_internal/locations/base.py
deleted file mode 100644
index 3f9f896..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/locations/base.py
+++ /dev/null
@@ -1,81 +0,0 @@
-import functools
-import os
-import site
-import sys
-import sysconfig
-import typing
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils import appdirs
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
-
-# FIXME doesn't account for venv linked to global site-packages
-site_packages: str = sysconfig.get_path("purelib")
-
-
-def get_major_minor_version() -> str:
- """
- Return the major-minor version of the current Python as a string, e.g.
- "3.7" or "3.10".
- """
- return "{}.{}".format(*sys.version_info)
-
-
-def change_root(new_root: str, pathname: str) -> str:
- """Return 'pathname' with 'new_root' prepended.
-
- If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
- Otherwise, it requires making 'pathname' relative and then joining the
- two, which is tricky on DOS/Windows and Mac OS.
-
- This is borrowed from Python's standard library's distutils module.
- """
- if os.name == "posix":
- if not os.path.isabs(pathname):
- return os.path.join(new_root, pathname)
- else:
- return os.path.join(new_root, pathname[1:])
-
- elif os.name == "nt":
- (drive, path) = os.path.splitdrive(pathname)
- if path[0] == "\\":
- path = path[1:]
- return os.path.join(new_root, path)
-
- else:
- raise InstallationError(
- f"Unknown platform: {os.name}\n"
- "Can not change root path prefix on unknown platform."
- )
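-
-
-# Worked example (illustrative, not in the original module), on POSIX:
-#
-#   >>> change_root("/staging", "/usr/lib/python3")
-#   '/staging/usr/lib/python3'
-#   >>> change_root("/staging", "relative/path")
-#   '/staging/relative/path'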
-
-
-def get_src_prefix() -> str:
- if running_under_virtualenv():
- src_prefix = os.path.join(sys.prefix, "src")
- else:
- # FIXME: keep src in cwd for now (it is not a temporary folder)
- try:
- src_prefix = os.path.join(os.getcwd(), "src")
- except OSError:
- # In case the current working directory has been renamed or deleted
- sys.exit("The folder you are executing pip from can no longer be found.")
-
- # under macOS + virtualenv sys.prefix is not properly resolved
- # it is something like /path/to/python/bin/..
- return os.path.abspath(src_prefix)
-
-
-try:
- # Use getusersitepackages if this is present, as it ensures that the
- # value is initialised properly.
- user_site: typing.Optional[str] = site.getusersitepackages()
-except AttributeError:
- user_site = site.USER_SITE
-
-
-@functools.lru_cache(maxsize=None)
-def is_osx_framework() -> bool:
- return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/main.py b/venv/lib/python3.11/site-packages/pip/_internal/main.py
deleted file mode 100644
index 33c6d24..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/main.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from typing import List, Optional
-
-
-def main(args: Optional[List[str]] = None) -> int:
- """This is preserved for old console scripts that may still be referencing
- it.
-
- For additional details, see https://github.com/pypa/pip/issues/7498.
- """
- from pip._internal.utils.entrypoints import _wrapper
-
- return _wrapper(args)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/__init__.py
deleted file mode 100644
index aa232b6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__init__.py
+++ /dev/null
@@ -1,128 +0,0 @@
-import contextlib
-import functools
-import os
-import sys
-from typing import TYPE_CHECKING, List, Optional, Type, cast
-
-from pip._internal.utils.misc import strtobool
-
-from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel
-
-if TYPE_CHECKING:
- from typing import Literal, Protocol
-else:
- Protocol = object
-
-__all__ = [
- "BaseDistribution",
- "BaseEnvironment",
- "FilesystemWheel",
- "MemoryWheel",
- "Wheel",
- "get_default_environment",
- "get_environment",
- "get_wheel_distribution",
- "select_backend",
-]
-
-
-def _should_use_importlib_metadata() -> bool:
- """Whether to use the ``importlib.metadata`` or ``pkg_resources`` backend.
-
- By default, pip uses ``importlib.metadata`` on Python 3.11+, and
- ``pkg_resources`` otherwise. This can be overridden in a couple of ways:
-
- * If environment variable ``_PIP_USE_IMPORTLIB_METADATA`` is set, it
- dictates whether ``importlib.metadata`` is used, regardless of Python
- version.
- * On Python 3.11+, Python distributors can patch ``importlib.metadata``
- to add a global constant ``_PIP_USE_IMPORTLIB_METADATA = False``. This
- makes pip use ``pkg_resources`` (unless the user set the aforementioned
- environment variable to *True*).
- """
- with contextlib.suppress(KeyError, ValueError):
- return bool(strtobool(os.environ["_PIP_USE_IMPORTLIB_METADATA"]))
- if sys.version_info < (3, 11):
- return False
- import importlib.metadata
-
- return bool(getattr(importlib.metadata, "_PIP_USE_IMPORTLIB_METADATA", True))
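-
-
-# Illustrative only (not in the original module): the environment variable
-# documented above overrides the version-based default, e.g.
-#
-#   _PIP_USE_IMPORTLIB_METADATA=0 pip list   # force the pkg_resources backend
-#   _PIP_USE_IMPORTLIB_METADATA=1 pip list   # force importlib.metadata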
-
-
-class Backend(Protocol):
- NAME: 'Literal["importlib", "pkg_resources"]'
- Distribution: Type[BaseDistribution]
- Environment: Type[BaseEnvironment]
-
-
-@functools.lru_cache(maxsize=None)
-def select_backend() -> Backend:
- if _should_use_importlib_metadata():
- from . import importlib
-
- return cast(Backend, importlib)
- from . import pkg_resources
-
- return cast(Backend, pkg_resources)
-
-
-def get_default_environment() -> BaseEnvironment:
- """Get the default representation for the current environment.
-
- This returns an Environment instance from the chosen backend. The default
- Environment instance should be built from ``sys.path`` and may use caching
- to share instance state across calls.
- """
- return select_backend().Environment.default()
-
-
-def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
- """Get a representation of the environment specified by ``paths``.
-
- This returns an Environment instance from the chosen backend based on the
- given import paths. The backend must build a fresh instance representing
- the state of installed distributions when this function is called.
- """
- return select_backend().Environment.from_paths(paths)
-
-
-def get_directory_distribution(directory: str) -> BaseDistribution:
- """Get the distribution metadata representation in the specified directory.
-
- This returns a Distribution instance from the chosen backend based on
- the given on-disk ``.dist-info`` directory.
- """
- return select_backend().Distribution.from_directory(directory)
-
-
-def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
- """Get the representation of the specified wheel's distribution metadata.
-
- This returns a Distribution instance from the chosen backend based on
- the given wheel's ``.dist-info`` directory.
-
- :param canonical_name: Normalized project name of the given wheel.
- """
- return select_backend().Distribution.from_wheel(wheel, canonical_name)
-
-
-def get_metadata_distribution(
- metadata_contents: bytes,
- filename: str,
- canonical_name: str,
-) -> BaseDistribution:
- """Get the dist representation of the specified METADATA file contents.
-
- This returns a Distribution instance from the chosen backend sourced from the data
- in `metadata_contents`.
-
- :param metadata_contents: Contents of a METADATA file within a dist, or one served
- via PEP 658.
- :param filename: Filename for the dist this metadata represents.
- :param canonical_name: Normalized project name of the given dist.
- """
- return select_backend().Distribution.from_metadata_file_contents(
- metadata_contents,
- filename,
- canonical_name,
- )
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 71df88c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc
deleted file mode 100644
index 5da3c66..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/_json.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 771ed1d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc
deleted file mode 100644
index 131a75e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/_json.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/_json.py
deleted file mode 100644
index 27362fc..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/_json.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# Extracted from https://github.com/pfmoore/pkg_metadata
-
-from email.header import Header, decode_header, make_header
-from email.message import Message
-from typing import Any, Dict, List, Union
-
-METADATA_FIELDS = [
- # Name, Multiple-Use
- ("Metadata-Version", False),
- ("Name", False),
- ("Version", False),
- ("Dynamic", True),
- ("Platform", True),
- ("Supported-Platform", True),
- ("Summary", False),
- ("Description", False),
- ("Description-Content-Type", False),
- ("Keywords", False),
- ("Home-page", False),
- ("Download-URL", False),
- ("Author", False),
- ("Author-email", False),
- ("Maintainer", False),
- ("Maintainer-email", False),
- ("License", False),
- ("Classifier", True),
- ("Requires-Dist", True),
- ("Requires-Python", False),
- ("Requires-External", True),
- ("Project-URL", True),
- ("Provides-Extra", True),
- ("Provides-Dist", True),
- ("Obsoletes-Dist", True),
-]
-
-
-def json_name(field: str) -> str:
- return field.lower().replace("-", "_")
-
-
-def msg_to_json(msg: Message) -> Dict[str, Any]:
- """Convert a Message object into a JSON-compatible dictionary."""
-
- def sanitise_header(h: Union[Header, str]) -> str:
- if isinstance(h, Header):
- chunks = []
- for bytes, encoding in decode_header(h):
- if encoding == "unknown-8bit":
- try:
- # See if UTF-8 works
- bytes.decode("utf-8")
- encoding = "utf-8"
- except UnicodeDecodeError:
- # If not, latin1 at least won't fail
- encoding = "latin1"
- chunks.append((bytes, encoding))
- return str(make_header(chunks))
- return str(h)
-
- result = {}
- for field, multi in METADATA_FIELDS:
- if field not in msg:
- continue
- key = json_name(field)
- if multi:
- value: Union[str, List[str]] = [
- sanitise_header(v) for v in msg.get_all(field) # type: ignore
- ]
- else:
- value = sanitise_header(msg.get(field)) # type: ignore
- if key == "keywords":
- # Accept both comma-separated and space-separated
- # forms, for better compatibility with old data.
- if "," in value:
- value = [v.strip() for v in value.split(",")]
- else:
- value = value.split()
- result[key] = value
-
- payload = msg.get_payload()
- if payload:
- result["description"] = payload
-
- return result
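-
-
-# A minimal sketch (not part of the original module) of the conversion,
-# assuming a hypothetical METADATA payload:
-#
-#   >>> import email
-#   >>> msg = email.message_from_string(
-#   ...     "Metadata-Version: 2.1\nName: example\nVersion: 1.0\n"
-#   ...     "Keywords: a,b\n\nLong description."
-#   ... )
-#   >>> msg_to_json(msg)
-#   {'metadata_version': '2.1', 'name': 'example', 'version': '1.0',
-#    'keywords': ['a', 'b'], 'description': 'Long description.'}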
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py
deleted file mode 100644
index 9249124..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/base.py
+++ /dev/null
@@ -1,702 +0,0 @@
-import csv
-import email.message
-import functools
-import json
-import logging
-import pathlib
-import re
-import zipfile
-from typing import (
- IO,
- TYPE_CHECKING,
- Any,
- Collection,
- Container,
- Dict,
- Iterable,
- Iterator,
- List,
- NamedTuple,
- Optional,
- Tuple,
- Union,
-)
-
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import LegacyVersion, Version
-
-from pip._internal.exceptions import NoneMetadataError
-from pip._internal.locations import site_packages, user_site
-from pip._internal.models.direct_url import (
- DIRECT_URL_METADATA_NAME,
- DirectUrl,
- DirectUrlValidationError,
-)
-from pip._internal.utils.compat import stdlib_pkgs # TODO: Move definition here.
-from pip._internal.utils.egg_link import egg_link_path_from_sys_path
-from pip._internal.utils.misc import is_local, normalize_path
-from pip._internal.utils.urls import url_to_path
-
-from ._json import msg_to_json
-
-if TYPE_CHECKING:
- from typing import Protocol
-else:
- Protocol = object
-
-DistributionVersion = Union[LegacyVersion, Version]
-
-InfoPath = Union[str, pathlib.PurePath]
-
-logger = logging.getLogger(__name__)
-
-
-class BaseEntryPoint(Protocol):
- @property
- def name(self) -> str:
- raise NotImplementedError()
-
- @property
- def value(self) -> str:
- raise NotImplementedError()
-
- @property
- def group(self) -> str:
- raise NotImplementedError()
-
-
-def _convert_installed_files_path(
- entry: Tuple[str, ...],
- info: Tuple[str, ...],
-) -> str:
- """Convert a legacy installed-files.txt path into modern RECORD path.
-
- The legacy format stores paths relative to the info directory, while the
- modern format stores paths relative to the package root, e.g. the
- site-packages directory.
-
- :param entry: Path parts of the installed-files.txt entry.
- :param info: Path parts of the egg-info directory relative to package root.
- :returns: The converted entry.
-
- For best compatibility with symlinks, this does not use ``abspath()`` or
- ``Path.resolve()``, but tries to work with path parts:
-
- 1. While ``entry`` starts with ``..``, remove the equal amounts of parts
- from ``info``; if ``info`` is empty, start appending ``..`` instead.
- 2. Join the two directly.
- """
- while entry and entry[0] == "..":
- if not info or info[-1] == "..":
- info += ("..",)
- else:
- info = info[:-1]
- entry = entry[1:]
- return str(pathlib.Path(*info, *entry))
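-
-
-# Worked example (illustrative, not in the original module): with
-# entry == ("..", "..", "lib", "mod.py") and
-# info == ("site-packages", "pkg.egg-info"), the two ".." parts consume the
-# info parts from the right, yielding "lib/mod.py" relative to the package
-# root (on POSIX).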
-
-
-class RequiresEntry(NamedTuple):
- requirement: str
- extra: str
- marker: str
-
-
-class BaseDistribution(Protocol):
- @classmethod
- def from_directory(cls, directory: str) -> "BaseDistribution":
- """Load the distribution from a metadata directory.
-
- :param directory: Path to a metadata directory, e.g. ``.dist-info``.
- """
- raise NotImplementedError()
-
- @classmethod
- def from_metadata_file_contents(
- cls,
- metadata_contents: bytes,
- filename: str,
- project_name: str,
- ) -> "BaseDistribution":
- """Load the distribution from the contents of a METADATA file.
-
- This is used to implement PEP 658 by generating a "shallow" dist object that can
- be used for resolution without downloading or building the actual dist yet.
-
- :param metadata_contents: The contents of a METADATA file.
- :param filename: File name for the dist with this metadata.
- :param project_name: Name of the project this dist represents.
- """
- raise NotImplementedError()
-
- @classmethod
- def from_wheel(cls, wheel: "Wheel", name: str) -> "BaseDistribution":
- """Load the distribution from a given wheel.
-
- :param wheel: A concrete wheel definition.
- :param name: File name of the wheel.
-
- :raises InvalidWheel: Whenever loading of the wheel causes a
- :py:exc:`zipfile.BadZipFile` exception to be thrown.
- :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
- internally.
- """
- raise NotImplementedError()
-
- def __repr__(self) -> str:
- return f"{self.raw_name} {self.version} ({self.location})"
-
- def __str__(self) -> str:
- return f"{self.raw_name} {self.version}"
-
- @property
- def location(self) -> Optional[str]:
- """Where the distribution is loaded from.
-
- A string value is not necessarily a filesystem path, since distributions
- can be loaded from other sources, e.g. arbitrary zip archives. ``None``
- means the distribution is created in-memory.
-
- Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
- this is a symbolic link, we want to preserve the relative path between
- it and files in the distribution.
- """
- raise NotImplementedError()
-
- @property
- def editable_project_location(self) -> Optional[str]:
- """The project location for editable distributions.
-
- This is the directory where pyproject.toml or setup.py is located.
- None if the distribution is not installed in editable mode.
- """
- # TODO: this property is relatively costly to compute; memoize it?
- direct_url = self.direct_url
- if direct_url:
- if direct_url.is_local_editable():
- return url_to_path(direct_url.url)
- else:
- # Search for an .egg-link file by walking sys.path, as it was
- # done before by dist_is_editable().
- egg_link_path = egg_link_path_from_sys_path(self.raw_name)
- if egg_link_path:
- # TODO: get project location from second line of egg_link file
- # (https://github.com/pypa/pip/issues/10243)
- return self.location
- return None
-
- @property
- def installed_location(self) -> Optional[str]:
- """The distribution's "installed" location.
-
- This should generally be a ``site-packages`` directory. This is
- usually ``dist.location``, except for legacy develop-installed packages,
- where ``dist.location`` is the source code location, and this is where
- the ``.egg-link`` file is.
-
- The returned location is normalized (in particular, with symlinks removed).
- """
- raise NotImplementedError()
-
- @property
- def info_location(self) -> Optional[str]:
- """Location of the .[egg|dist]-info directory or file.
-
- Similarly to ``location``, a string value is not necessarily a
- filesystem path. ``None`` means the distribution is created in-memory.
-
- For a modern .dist-info installation on disk, this should be something
- like ``{location}/{raw_name}-{version}.dist-info``.
-
- Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
- this is a symbolic link, we want to preserve the relative path between
- it and other files in the distribution.
- """
- raise NotImplementedError()
-
- @property
- def installed_by_distutils(self) -> bool:
- """Whether this distribution is installed with legacy distutils format.
-
- A distribution installed with "raw" distutils not patched by setuptools
- uses a single file at ``info_location`` to store metadata. We need to
- treat this specially during uninstallation.
- """
- info_location = self.info_location
- if not info_location:
- return False
- return pathlib.Path(info_location).is_file()
-
- @property
- def installed_as_egg(self) -> bool:
- """Whether this distribution is installed as an egg.
-
- This usually indicates the distribution was installed by (older versions
- of) easy_install.
- """
- location = self.location
- if not location:
- return False
- return location.endswith(".egg")
-
- @property
- def installed_with_setuptools_egg_info(self) -> bool:
- """Whether this distribution is installed with the ``.egg-info`` format.
-
- This usually indicates the distribution was installed with setuptools
- with an old pip version or with ``single-version-externally-managed``.
-
- Note that this ensures the metadata store is a directory. distutils can
- also install an ``.egg-info``, but as a file, not a directory. This
- property is *False* for that case. Also see ``installed_by_distutils``.
- """
- info_location = self.info_location
- if not info_location:
- return False
- if not info_location.endswith(".egg-info"):
- return False
- return pathlib.Path(info_location).is_dir()
-
- @property
- def installed_with_dist_info(self) -> bool:
- """Whether this distribution is installed with the "modern format".
-
- This indicates a "modern" installation, e.g. storing metadata in the
- ``.dist-info`` directory. This applies to installations made by
- setuptools (but through pip, not directly), or anything using the
- standardized build backend interface (PEP 517).
- """
- info_location = self.info_location
- if not info_location:
- return False
- if not info_location.endswith(".dist-info"):
- return False
- return pathlib.Path(info_location).is_dir()
-
- @property
- def canonical_name(self) -> NormalizedName:
- raise NotImplementedError()
-
- @property
- def version(self) -> DistributionVersion:
- raise NotImplementedError()
-
- @property
- def setuptools_filename(self) -> str:
- """Convert a project name to its setuptools-compatible filename.
-
- This is a copy of ``pkg_resources.to_filename()`` for compatibility.
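-
- For example, ``my-project`` becomes ``my_project``.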
- """
- return self.raw_name.replace("-", "_")
-
- @property
- def direct_url(self) -> Optional[DirectUrl]:
- """Obtain a DirectUrl from this distribution.
-
- Returns None if the distribution has no `direct_url.json` metadata,
- or if `direct_url.json` is invalid.
- """
- try:
- content = self.read_text(DIRECT_URL_METADATA_NAME)
- except FileNotFoundError:
- return None
- try:
- return DirectUrl.from_json(content)
- except (
- UnicodeDecodeError,
- json.JSONDecodeError,
- DirectUrlValidationError,
- ) as e:
- logger.warning(
- "Error parsing %s for %s: %s",
- DIRECT_URL_METADATA_NAME,
- self.canonical_name,
- e,
- )
- return None
-
- @property
- def installer(self) -> str:
- try:
- installer_text = self.read_text("INSTALLER")
- except (OSError, ValueError, NoneMetadataError):
- return "" # Fail silently if the installer file cannot be read.
- for line in installer_text.splitlines():
- cleaned_line = line.strip()
- if cleaned_line:
- return cleaned_line
- return ""
-
- @property
- def requested(self) -> bool:
- return self.is_file("REQUESTED")
-
- @property
- def editable(self) -> bool:
- return bool(self.editable_project_location)
-
- @property
- def local(self) -> bool:
- """If distribution is installed in the current virtual environment.
-
- Always True if we're not in a virtualenv.
- """
- if self.installed_location is None:
- return False
- return is_local(self.installed_location)
-
- @property
- def in_usersite(self) -> bool:
- if self.installed_location is None or user_site is None:
- return False
- return self.installed_location.startswith(normalize_path(user_site))
-
- @property
- def in_site_packages(self) -> bool:
- if self.installed_location is None or site_packages is None:
- return False
- return self.installed_location.startswith(normalize_path(site_packages))
-
- def is_file(self, path: InfoPath) -> bool:
- """Check whether an entry in the info directory is a file."""
- raise NotImplementedError()
-
- def iter_distutils_script_names(self) -> Iterator[str]:
- """Find distutils 'scripts' entries metadata.
-
- If 'scripts' is supplied in ``setup.py``, distutils records those in the
- installed distribution's ``scripts`` directory, a file for each script.
- """
- raise NotImplementedError()
-
- def read_text(self, path: InfoPath) -> str:
- """Read a file in the info directory.
-
- :raise FileNotFoundError: If ``path`` does not exist in the directory.
- :raise NoneMetadataError: If ``path`` exists in the info directory, but
- cannot be read.
- """
- raise NotImplementedError()
-
- def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
- raise NotImplementedError()
-
- def _metadata_impl(self) -> email.message.Message:
- raise NotImplementedError()
-
- @functools.lru_cache(maxsize=1)
- def _metadata_cached(self) -> email.message.Message:
- # When we drop Python 3.7 support, move this to the metadata property and use
- # functools.cached_property instead of lru_cache.
- metadata = self._metadata_impl()
- self._add_egg_info_requires(metadata)
- return metadata
-
- @property
- def metadata(self) -> email.message.Message:
- """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.
-
- This should return an empty message if the metadata file is unavailable.
-
- :raises NoneMetadataError: If the metadata file is available, but does
- not contain valid metadata.
- """
- return self._metadata_cached()
-
- @property
- def metadata_dict(self) -> Dict[str, Any]:
- """PEP 566 compliant JSON-serializable representation of METADATA or PKG-INFO.
-
- This should return an empty dict if the metadata file is unavailable.
-
- :raises NoneMetadataError: If the metadata file is available, but does
- not contain valid metadata.
- """
- return msg_to_json(self.metadata)
-
- @property
- def metadata_version(self) -> Optional[str]:
- """Value of "Metadata-Version:" in distribution metadata, if available."""
- return self.metadata.get("Metadata-Version")
-
- @property
- def raw_name(self) -> str:
- """Value of "Name:" in distribution metadata."""
- # The metadata should NEVER be missing the Name: key, but if it somehow
- # does, fall back to the known canonical name.
- return self.metadata.get("Name", self.canonical_name)
-
- @property
- def requires_python(self) -> SpecifierSet:
- """Value of "Requires-Python:" in distribution metadata.
-
- If the key does not exist or contains an invalid value, an empty
- SpecifierSet should be returned.
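-
- For example, ``Requires-Python: >=3.8`` (illustrative) is parsed into
- ``SpecifierSet(">=3.8")``.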
- """
- value = self.metadata.get("Requires-Python")
- if value is None:
- return SpecifierSet()
- try:
- # Convert to str to satisfy the type checker; this can be a Header object.
- spec = SpecifierSet(str(value))
- except InvalidSpecifier as e:
- message = "Package %r has an invalid Requires-Python: %s"
- logger.warning(message, self.raw_name, e)
- return SpecifierSet()
- return spec
-
- def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
- """Dependencies of this distribution.
-
- For modern .dist-info distributions, this is the collection of
- "Requires-Dist:" entries in distribution metadata.
- """
- raise NotImplementedError()
-
- def iter_provided_extras(self) -> Iterable[str]:
- """Extras provided by this distribution.
-
- For modern .dist-info distributions, this is the collection of
- "Provides-Extra:" entries in distribution metadata.
-
- The return value of this function is not particularly useful for anything
- other than display purposes, due to backward compatibility issues and the
- extra names being poorly normalized prior to PEP 685. If you want to
- perform logic operations on extras, use :func:`is_extra_provided` instead.
- """
- raise NotImplementedError()
-
- def is_extra_provided(self, extra: str) -> bool:
- """Check whether an extra is provided by this distribution.
-
- This is needed mostly for compatibility issues with pkg_resources not
- following the extra normalization rules defined in PEP 685.
- """
- raise NotImplementedError()
-
- def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
- try:
- text = self.read_text("RECORD")
- except FileNotFoundError:
- return None
- # This extra Path-str cast normalizes entries.
- return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))
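-
- # Illustrative RECORD row (hypothetical values):
- #   "pkg/__init__.py,sha256=abc123,42"
- # Only the first CSV column is used, yielding the path "pkg/__init__.py".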
-
- def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
- try:
- text = self.read_text("installed-files.txt")
- except FileNotFoundError:
- return None
- paths = (p for p in text.splitlines(keepends=False) if p)
- root = self.location
- info = self.info_location
- if root is None or info is None:
- return paths
- try:
- info_rel = pathlib.Path(info).relative_to(root)
- except ValueError: # info is not relative to root.
- return paths
- if not info_rel.parts: # info *is* root.
- return paths
- return (
- _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
- for p in paths
- )
-
- def iter_declared_entries(self) -> Optional[Iterator[str]]:
- """Iterate through file entries declared in this distribution.
-
- For modern .dist-info distributions, this is the files listed in the
- ``RECORD`` metadata file. For legacy setuptools distributions, this
- comes from ``installed-files.txt``, with entries normalized to be
- compatible with the format used by ``RECORD``.
-
- :return: An iterator for listed entries, or None if the distribution
- contains neither ``RECORD`` nor ``installed-files.txt``.
- """
- return (
- self._iter_declared_entries_from_record()
- or self._iter_declared_entries_from_legacy()
- )
-
- def _iter_requires_txt_entries(self) -> Iterator[RequiresEntry]:
- """Parse a ``requires.txt`` in an egg-info directory.
-
- This is an INI-ish format where an egg-info stores dependencies. A
- section name describes an extra and/or environment markers, while each
- entry is an arbitrary string (not a key-value pair) representing a
- dependency as a requirement string (no markers).
-
- There is a construct in ``importlib.metadata`` called ``Sectioned`` that
- does mostly the same, but the format is currently considered private.
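-
- An illustrative ``requires.txt`` (hypothetical project)::
-
- requests
-
- [socks]
- PySocks>=1.5.6
-
- [:python_version < "3.8"]
- importlib-metadata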
- """
- try:
- content = self.read_text("requires.txt")
- except FileNotFoundError:
- return
- extra = marker = "" # Section-less entries don't have markers.
- for line in content.splitlines():
- line = line.strip()
- if not line or line.startswith("#"): # Comment; ignored.
- continue
- if line.startswith("[") and line.endswith("]"): # A section header.
- extra, _, marker = line.strip("[]").partition(":")
- continue
- yield RequiresEntry(requirement=line, extra=extra, marker=marker)
-
- def _iter_egg_info_extras(self) -> Iterable[str]:
- """Get extras from the egg-info directory."""
- known_extras = {""}
- for entry in self._iter_requires_txt_entries():
- extra = canonicalize_name(entry.extra)
- if extra in known_extras:
- continue
- known_extras.add(extra)
- yield extra
-
- def _iter_egg_info_dependencies(self) -> Iterable[str]:
- """Get distribution dependencies from the egg-info directory.
-
- To ease parsing, this converts a legacy dependency entry into a PEP 508
- requirement string. Like ``_iter_requires_txt_entries()``, there is code
- in ``importlib.metadata`` that does mostly the same, but it does not do
- exactly what we need.
-
- Namely, ``importlib.metadata`` does not normalize the extra name before
- putting it into the requirement string, which causes marker comparison
- to fail because the dist-info format does normalize. This behavior is
- consistent across all currently available PEP 517 backends, although it
- is not standardized.
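-
- For example, an entry ``PySocks>=1.5.6`` in a ``[socks]`` section is
- emitted as ``PySocks>=1.5.6 ; extra == "socks"``.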
- """
- for entry in self._iter_requires_txt_entries():
- extra = canonicalize_name(entry.extra)
- if extra and entry.marker:
- marker = f'({entry.marker}) and extra == "{extra}"'
- elif extra:
- marker = f'extra == "{extra}"'
- elif entry.marker:
- marker = entry.marker
- else:
- marker = ""
- if marker:
- yield f"{entry.requirement} ; {marker}"
- else:
- yield entry.requirement
-
- def _add_egg_info_requires(self, metadata: email.message.Message) -> None:
- """Add egg-info requires.txt information to the metadata."""
- if not metadata.get_all("Requires-Dist"):
- for dep in self._iter_egg_info_dependencies():
- metadata["Requires-Dist"] = dep
- if not metadata.get_all("Provides-Extra"):
- for extra in self._iter_egg_info_extras():
- metadata["Provides-Extra"] = extra
-
-
-class BaseEnvironment:
- """An environment containing distributions to introspect."""
-
- @classmethod
- def default(cls) -> "BaseEnvironment":
- raise NotImplementedError()
-
- @classmethod
- def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
- raise NotImplementedError()
-
- def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
- """Given a requirement name, return the installed distributions.
-
- The name may not be normalized. The implementation must canonicalize
- it for lookup.
- """
- raise NotImplementedError()
-
- def _iter_distributions(self) -> Iterator["BaseDistribution"]:
- """Iterate through installed distributions.
-
- This function should be implemented by subclasses, but never called
- directly. Use the public ``iter_all_distributions()`` instead, which
- implements additional logic to make sure the distributions are valid.
- """
- raise NotImplementedError()
-
- def iter_all_distributions(self) -> Iterator[BaseDistribution]:
- """Iterate through all installed distributions without any filtering."""
- for dist in self._iter_distributions():
- # Make sure the distribution actually comes from a valid Python
- # packaging distribution. Pip's AdjacentTempDirectory leaves folders
- # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
- # valid project name pattern is taken from PEP 508.
- project_name_valid = re.match(
- r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
- dist.canonical_name,
- flags=re.IGNORECASE,
- )
- if not project_name_valid:
- logger.warning(
- "Ignoring invalid distribution %s (%s)",
- dist.canonical_name,
- dist.location,
- )
- continue
- yield dist
-
- def iter_installed_distributions(
- self,
- local_only: bool = True,
- skip: Container[str] = stdlib_pkgs,
- include_editables: bool = True,
- editables_only: bool = False,
- user_only: bool = False,
- ) -> Iterator[BaseDistribution]:
- """Return a list of installed distributions.
-
- This is based on ``iter_all_distributions()`` with additional filtering
- options. Note that ``iter_installed_distributions()`` without arguments
- is *not* equal to ``iter_all_distributions()``, since some of the
- configurations exclude packages by default.
-
- :param local_only: If True (default), only return installations
- local to the current virtualenv, if in a virtualenv.
- :param skip: An iterable of canonicalized project names to ignore;
- defaults to ``stdlib_pkgs``.
- :param include_editables: If False, don't report editables.
- :param editables_only: If True, only report editables.
- :param user_only: If True, only report installations in the user
- site directory.
- """
- it = self.iter_all_distributions()
- if local_only:
- it = (d for d in it if d.local)
- if not include_editables:
- it = (d for d in it if not d.editable)
- if editables_only:
- it = (d for d in it if d.editable)
- if user_only:
- it = (d for d in it if d.in_usersite)
- return (d for d in it if d.canonical_name not in skip)
-
-
-class Wheel(Protocol):
- location: str
-
- def as_zipfile(self) -> zipfile.ZipFile:
- raise NotImplementedError()
-
-
-class FilesystemWheel(Wheel):
- def __init__(self, location: str) -> None:
- self.location = location
-
- def as_zipfile(self) -> zipfile.ZipFile:
- return zipfile.ZipFile(self.location, allowZip64=True)
-
-
-class MemoryWheel(Wheel):
- def __init__(self, location: str, stream: IO[bytes]) -> None:
- self.location = location
- self.stream = stream
-
- def as_zipfile(self) -> zipfile.ZipFile:
- return zipfile.ZipFile(self.stream, allowZip64=True)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py
deleted file mode 100644
index a779138..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from ._dists import Distribution
-from ._envs import Environment
-
-__all__ = ["NAME", "Distribution", "Environment"]
-
-NAME = "importlib"
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index a901050..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc
deleted file mode 100644
index e7efa21..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc
deleted file mode 100644
index 61bb231..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_dists.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc
deleted file mode 100644
index bfc48a8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py
deleted file mode 100644
index 593bff2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_compat.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import importlib.metadata
-from typing import Any, Optional, Protocol, cast
-
-
-class BadMetadata(ValueError):
- def __init__(self, dist: importlib.metadata.Distribution, *, reason: str) -> None:
- self.dist = dist
- self.reason = reason
-
- def __str__(self) -> str:
- return f"Bad metadata in {self.dist} ({self.reason})"
-
-
-class BasePath(Protocol):
- """A protocol that various path objects conform.
-
- This exists because importlib.metadata uses both ``pathlib.Path`` and
- ``zipfile.Path``, and we need a common base for type hints (Union does not
- work well since ``zipfile.Path`` is too new for our linter setup).
-
- This is not meant to be exhaustive; it only contains things present in
- both classes *that we need*.
- """
-
- @property
- def name(self) -> str:
- raise NotImplementedError()
-
- @property
- def parent(self) -> "BasePath":
- raise NotImplementedError()
-
-
-def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]:
- """Find the path to the distribution's metadata directory.
-
- HACK: This relies on importlib.metadata's private ``_path`` attribute. Not
- all distributions exist on disk, so importlib.metadata is correct to not
- expose the attribute as public. But pip's code base is old and not as clean,
- so we do this to avoid having to rewrite too many things. Hopefully we can
- eliminate this some day.
- """
- return getattr(d, "_path", None)
-
-
-def get_dist_name(dist: importlib.metadata.Distribution) -> str:
- """Get the distribution's project name.
-
- The ``name`` attribute is only available in Python 3.10 or later. We are
- targeting exactly that, but Mypy does not know this.
- """
- name = cast(Any, dist).name
- if not isinstance(name, str):
- raise BadMetadata(dist, reason="invalid metadata entry 'name'")
- return name
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_dists.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_dists.py
deleted file mode 100644
index 26370fa..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_dists.py
+++ /dev/null
@@ -1,227 +0,0 @@
-import email.message
-import importlib.metadata
-import os
-import pathlib
-import zipfile
-from typing import (
- Collection,
- Dict,
- Iterable,
- Iterator,
- Mapping,
- Optional,
- Sequence,
- cast,
-)
-
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.exceptions import InvalidWheel, UnsupportedWheel
-from pip._internal.metadata.base import (
- BaseDistribution,
- BaseEntryPoint,
- DistributionVersion,
- InfoPath,
- Wheel,
-)
-from pip._internal.utils.misc import normalize_path
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
-
-from ._compat import BasePath, get_dist_name
-
-
-class WheelDistribution(importlib.metadata.Distribution):
- """An ``importlib.metadata.Distribution`` read from a wheel.
-
- Although ``importlib.metadata.PathDistribution`` accepts ``zipfile.Path``,
- its implementation is too "lazy" for pip's needs (we can't keep the ZipFile
- handle open for the entire lifetime of the distribution object).
-
- This implementation instead eagerly reads the entire metadata directory
- into memory, and operates from that.
- """
-
- def __init__(
- self,
- files: Mapping[pathlib.PurePosixPath, bytes],
- info_location: pathlib.PurePosixPath,
- ) -> None:
- self._files = files
- self.info_location = info_location
-
- @classmethod
- def from_zipfile(
- cls,
- zf: zipfile.ZipFile,
- name: str,
- location: str,
- ) -> "WheelDistribution":
- info_dir, _ = parse_wheel(zf, name)
- paths = (
- (name, pathlib.PurePosixPath(name.split("/", 1)[-1]))
- for name in zf.namelist()
- if name.startswith(f"{info_dir}/")
- )
- files = {
- relpath: read_wheel_metadata_file(zf, fullpath)
- for fullpath, relpath in paths
- }
- info_location = pathlib.PurePosixPath(location, info_dir)
- return cls(files, info_location)
-
- def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
- # Only allow iterating through the metadata directory.
- if pathlib.PurePosixPath(str(path)) in self._files:
- return iter(self._files)
- raise FileNotFoundError(path)
-
- def read_text(self, filename: str) -> Optional[str]:
- try:
- data = self._files[pathlib.PurePosixPath(filename)]
- except KeyError:
- return None
- try:
- text = data.decode("utf-8")
- except UnicodeDecodeError as e:
- wheel = self.info_location.parent
- error = f"Error decoding metadata for {wheel}: {e} in {filename} file"
- raise UnsupportedWheel(error)
- return text
-
-
-class Distribution(BaseDistribution):
- def __init__(
- self,
- dist: importlib.metadata.Distribution,
- info_location: Optional[BasePath],
- installed_location: Optional[BasePath],
- ) -> None:
- self._dist = dist
- self._info_location = info_location
- self._installed_location = installed_location
-
- @classmethod
- def from_directory(cls, directory: str) -> BaseDistribution:
- info_location = pathlib.Path(directory)
- dist = importlib.metadata.Distribution.at(info_location)
- return cls(dist, info_location, info_location.parent)
-
- @classmethod
- def from_metadata_file_contents(
- cls,
- metadata_contents: bytes,
- filename: str,
- project_name: str,
- ) -> BaseDistribution:
- # Generate temp dir to contain the metadata file, and write the file contents.
- temp_dir = pathlib.Path(
- TempDirectory(kind="metadata", globally_managed=True).path
- )
- metadata_path = temp_dir / "METADATA"
- metadata_path.write_bytes(metadata_contents)
- # Construct dist pointing to the newly created directory.
- dist = importlib.metadata.Distribution.at(metadata_path.parent)
- return cls(dist, metadata_path.parent, None)
-
- @classmethod
- def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
- try:
- with wheel.as_zipfile() as zf:
- dist = WheelDistribution.from_zipfile(zf, name, wheel.location)
- except zipfile.BadZipFile as e:
- raise InvalidWheel(wheel.location, name) from e
- except UnsupportedWheel as e:
- raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
- return cls(dist, dist.info_location, pathlib.PurePosixPath(wheel.location))
-
- @property
- def location(self) -> Optional[str]:
- if self._info_location is None:
- return None
- return str(self._info_location.parent)
-
- @property
- def info_location(self) -> Optional[str]:
- if self._info_location is None:
- return None
- return str(self._info_location)
-
- @property
- def installed_location(self) -> Optional[str]:
- if self._installed_location is None:
- return None
- return normalize_path(str(self._installed_location))
-
- def _get_dist_name_from_location(self) -> Optional[str]:
- """Try to get the name from the metadata directory name.
-
- This is much faster than reading metadata.
- """
- if self._info_location is None:
- return None
- stem, suffix = os.path.splitext(self._info_location.name)
- if suffix not in (".dist-info", ".egg-info"):
- return None
- return stem.split("-", 1)[0]
-
- @property
- def canonical_name(self) -> NormalizedName:
- name = self._get_dist_name_from_location() or get_dist_name(self._dist)
- return canonicalize_name(name)
-
- @property
- def version(self) -> DistributionVersion:
- return parse_version(self._dist.version)
-
- def is_file(self, path: InfoPath) -> bool:
- return self._dist.read_text(str(path)) is not None
-
- def iter_distutils_script_names(self) -> Iterator[str]:
- # A distutils installation is always "flat" (not in e.g. egg form), so
- # if this distribution's info location is NOT a pathlib.Path (but e.g.
- # zipfile.Path), it can never contain any distutils scripts.
- if not isinstance(self._info_location, pathlib.Path):
- return
- for child in self._info_location.joinpath("scripts").iterdir():
- yield child.name
-
- def read_text(self, path: InfoPath) -> str:
- content = self._dist.read_text(str(path))
- if content is None:
- raise FileNotFoundError(path)
- return content
-
- def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
- # importlib.metadata's EntryPoint structure satisfies BaseEntryPoint.
- return self._dist.entry_points
-
- def _metadata_impl(self) -> email.message.Message:
- # From Python 3.10+, importlib.metadata declares PackageMetadata as the
- # return type. This protocol is unfortunately a disaster now and misses
- a ton of fields that we need, including get() and get_payload(). We
- rely on the implementation detail that the object is actually a Message,
- until upstream can improve the protocol. (python/cpython#94952)
- return cast(email.message.Message, self._dist.metadata)
-
- def iter_provided_extras(self) -> Iterable[str]:
- return self.metadata.get_all("Provides-Extra", [])
-
- def is_extra_provided(self, extra: str) -> bool:
- return any(
- canonicalize_name(provided_extra) == canonicalize_name(extra)
- for provided_extra in self.metadata.get_all("Provides-Extra", [])
- )
-
- def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
- contexts: Sequence[Dict[str, str]] = [{"extra": e} for e in extras]
- for req_string in self.metadata.get_all("Requires-Dist", []):
- req = Requirement(req_string)
- if not req.marker:
- yield req
- elif not extras and req.marker.evaluate({"extra": ""}):
- yield req
- elif any(req.marker.evaluate(context) for context in contexts):
- yield req
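-
- # Illustrative: a requirement 'PySocks ; extra == "socks"' (hypothetical)
- # is yielded only when "socks" is among the requested extras, since its
- # marker evaluates to true only in that context.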
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py
deleted file mode 100644
index 048dc55..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/_envs.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import functools
-import importlib.metadata
-import logging
-import os
-import pathlib
-import sys
-import zipfile
-import zipimport
-from typing import Iterator, List, Optional, Sequence, Set, Tuple
-
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-
-from pip._internal.metadata.base import BaseDistribution, BaseEnvironment
-from pip._internal.models.wheel import Wheel
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.filetypes import WHEEL_EXTENSION
-
-from ._compat import BadMetadata, BasePath, get_dist_name, get_info_location
-from ._dists import Distribution
-
-logger = logging.getLogger(__name__)
-
-
-def _looks_like_wheel(location: str) -> bool:
- if not location.endswith(WHEEL_EXTENSION):
- return False
- if not os.path.isfile(location):
- return False
- if not Wheel.wheel_file_re.match(os.path.basename(location)):
- return False
- return zipfile.is_zipfile(location)
-
-
-class _DistributionFinder:
- """Finder to locate distributions.
-
- The main purpose of this class is to memoize found distributions' names, so
- only one distribution is returned for each package name. A lot of pip code
- assumes this (because it is setuptools's behavior), and not doing the same
- can potentially cause a distribution in a lower-precedence path to override
- a higher-precedence one if the caller is not careful.
-
- Eventually we probably want to make it possible to see lower precedence
- installations as well. It's a useful feature, after all.
- """
-
- FoundResult = Tuple[importlib.metadata.Distribution, Optional[BasePath]]
-
- def __init__(self) -> None:
- self._found_names: Set[NormalizedName] = set()
-
- def _find_impl(self, location: str) -> Iterator[FoundResult]:
- """Find distributions in a location."""
- # Skip looking inside a wheel. Since a package inside a wheel is not
- # always valid (due to .data directories etc.), its .dist-info entry
- # should not be considered an installed distribution.
- if _looks_like_wheel(location):
- return
- # To know exactly where we find a distribution, we have to feed in the
- # paths one by one, instead of dumping the list to importlib.metadata.
- for dist in importlib.metadata.distributions(path=[location]):
- info_location = get_info_location(dist)
- try:
- raw_name = get_dist_name(dist)
- except BadMetadata as e:
- logger.warning("Skipping %s due to %s", info_location, e.reason)
- continue
- normalized_name = canonicalize_name(raw_name)
- if normalized_name in self._found_names:
- continue
- self._found_names.add(normalized_name)
- yield dist, info_location
-
- def find(self, location: str) -> Iterator[BaseDistribution]:
- """Find distributions in a location.
-
- The path can be either a directory, or a ZIP archive.
- """
- for dist, info_location in self._find_impl(location):
- if info_location is None:
- installed_location: Optional[BasePath] = None
- else:
- installed_location = info_location.parent
- yield Distribution(dist, info_location, installed_location)
-
- def find_linked(self, location: str) -> Iterator[BaseDistribution]:
- """Read location in egg-link files and return distributions in there.
-
- The path should be a directory; otherwise this returns nothing. This
- follows how setuptools does this for compatibility. The first non-empty
- line in the egg-link is read as a path (resolved against the egg-link's
- containing directory if relative). Distributions found at that linked
- location are returned.
- """
- path = pathlib.Path(location)
- if not path.is_dir():
- return
- for child in path.iterdir():
- if child.suffix != ".egg-link":
- continue
- with child.open() as f:
- lines = (line.strip() for line in f)
- target_rel = next((line for line in lines if line), "")
- if not target_rel:
- continue
- target_location = str(path.joinpath(target_rel))
- for dist, info_location in self._find_impl(target_location):
- yield Distribution(dist, info_location, path)
-
- def _find_eggs_in_dir(self, location: str) -> Iterator[BaseDistribution]:
- from pip._vendor.pkg_resources import find_distributions
-
- from pip._internal.metadata import pkg_resources as legacy
-
- with os.scandir(location) as it:
- for entry in it:
- if not entry.name.endswith(".egg"):
- continue
- for dist in find_distributions(entry.path):
- yield legacy.Distribution(dist)
-
- def _find_eggs_in_zip(self, location: str) -> Iterator[BaseDistribution]:
- from pip._vendor.pkg_resources import find_eggs_in_zip
-
- from pip._internal.metadata import pkg_resources as legacy
-
- try:
- importer = zipimport.zipimporter(location)
- except zipimport.ZipImportError:
- return
- for dist in find_eggs_in_zip(importer, location):
- yield legacy.Distribution(dist)
-
- def find_eggs(self, location: str) -> Iterator[BaseDistribution]:
- """Find eggs in a location.
-
- This actually uses the old *pkg_resources* backend. We likely want to
- deprecate this so we can eventually remove the *pkg_resources*
- dependency entirely. Before that, this should first emit a deprecation
- warning for some versions when the fallback is used, since importing
- *pkg_resources* is slow for those who don't need it.
- """
- if os.path.isdir(location):
- yield from self._find_eggs_in_dir(location)
- if zipfile.is_zipfile(location):
- yield from self._find_eggs_in_zip(location)
-
-
-@functools.lru_cache(maxsize=None) # Warn a distribution exactly once.
-def _emit_egg_deprecation(location: Optional[str]) -> None:
- deprecated(
- reason=f"Loading egg at {location} is deprecated.",
- replacement="to use pip for package installation.",
- gone_in="24.3",
- issue=12330,
- )
-
-
-class Environment(BaseEnvironment):
- def __init__(self, paths: Sequence[str]) -> None:
- self._paths = paths
-
- @classmethod
- def default(cls) -> BaseEnvironment:
- return cls(sys.path)
-
- @classmethod
- def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
- if paths is None:
- return cls(sys.path)
- return cls(paths)
-
- def _iter_distributions(self) -> Iterator[BaseDistribution]:
- finder = _DistributionFinder()
- for location in self._paths:
- yield from finder.find(location)
- for dist in finder.find_eggs(location):
- _emit_egg_deprecation(dist.location)
- yield dist
- # This must go last because that's how pkg_resources tie-breaks.
- yield from finder.find_linked(location)
-
- def get_distribution(self, name: str) -> Optional[BaseDistribution]:
- matches = (
- distribution
- for distribution in self.iter_all_distributions()
- if distribution.canonical_name == canonicalize_name(name)
- )
- return next(matches, None)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/metadata/pkg_resources.py b/venv/lib/python3.11/site-packages/pip/_internal/metadata/pkg_resources.py
deleted file mode 100644
index bb11e5b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/metadata/pkg_resources.py
+++ /dev/null
@@ -1,278 +0,0 @@
-import email.message
-import email.parser
-import logging
-import os
-import zipfile
-from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional
-
-from pip._vendor import pkg_resources
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
-from pip._internal.utils.egg_link import egg_link_path_from_location
-from pip._internal.utils.misc import display_path, normalize_path
-from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file
-
-from .base import (
- BaseDistribution,
- BaseEntryPoint,
- BaseEnvironment,
- DistributionVersion,
- InfoPath,
- Wheel,
-)
-
-__all__ = ["NAME", "Distribution", "Environment"]
-
-logger = logging.getLogger(__name__)
-
-NAME = "pkg_resources"
-
-
-class EntryPoint(NamedTuple):
- name: str
- value: str
- group: str
-
-
-class InMemoryMetadata:
- """IMetadataProvider that reads metadata files from a dictionary.
-
- This also maps metadata decoding exceptions to our internal exception type.
- """
-
- def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
- self._metadata = metadata
- self._wheel_name = wheel_name
-
- def has_metadata(self, name: str) -> bool:
- return name in self._metadata
-
- def get_metadata(self, name: str) -> str:
- try:
- return self._metadata[name].decode()
- except UnicodeDecodeError as e:
- # Augment the default error with the origin of the file.
- raise UnsupportedWheel(
- f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
- )
-
- def get_metadata_lines(self, name: str) -> Iterable[str]:
- return pkg_resources.yield_lines(self.get_metadata(name))
-
- def metadata_isdir(self, name: str) -> bool:
- return False
-
- def metadata_listdir(self, name: str) -> List[str]:
- return []
-
- def run_script(self, script_name: str, namespace: str) -> None:
- pass
-
-
-class Distribution(BaseDistribution):
- def __init__(self, dist: pkg_resources.Distribution) -> None:
- self._dist = dist
-
- @classmethod
- def from_directory(cls, directory: str) -> BaseDistribution:
- dist_dir = directory.rstrip(os.sep)
-
- # Build a PathMetadata object, from path to metadata. :wink:
- base_dir, dist_dir_name = os.path.split(dist_dir)
- metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
-
- # Determine the correct Distribution object type.
- if dist_dir.endswith(".egg-info"):
- dist_cls = pkg_resources.Distribution
- dist_name = os.path.splitext(dist_dir_name)[0]
- else:
- assert dist_dir.endswith(".dist-info")
- dist_cls = pkg_resources.DistInfoDistribution
- dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
-
- dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
- return cls(dist)
-
- @classmethod
- def from_metadata_file_contents(
- cls,
- metadata_contents: bytes,
- filename: str,
- project_name: str,
- ) -> BaseDistribution:
- metadata_dict = {
- "METADATA": metadata_contents,
- }
- dist = pkg_resources.DistInfoDistribution(
- location=filename,
- metadata=InMemoryMetadata(metadata_dict, filename),
- project_name=project_name,
- )
- return cls(dist)
-
- @classmethod
- def from_wheel(cls, wheel: Wheel, name: str) -> BaseDistribution:
- try:
- with wheel.as_zipfile() as zf:
- info_dir, _ = parse_wheel(zf, name)
- metadata_dict = {
- path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
- for path in zf.namelist()
- if path.startswith(f"{info_dir}/")
- }
- except zipfile.BadZipFile as e:
- raise InvalidWheel(wheel.location, name) from e
- except UnsupportedWheel as e:
- raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
- dist = pkg_resources.DistInfoDistribution(
- location=wheel.location,
- metadata=InMemoryMetadata(metadata_dict, wheel.location),
- project_name=name,
- )
- return cls(dist)
-
- @property
- def location(self) -> Optional[str]:
- return self._dist.location
-
- @property
- def installed_location(self) -> Optional[str]:
- egg_link = egg_link_path_from_location(self.raw_name)
- if egg_link:
- location = egg_link
- elif self.location:
- location = self.location
- else:
- return None
- return normalize_path(location)
-
- @property
- def info_location(self) -> Optional[str]:
- return self._dist.egg_info
-
- @property
- def installed_by_distutils(self) -> bool:
- # A distutils-installed distribution is provided by FileMetadata. This
- # provider has a "path" attribute not present anywhere else. Not the
- # best introspection logic, but pip has been doing this for a long time.
- try:
- return bool(self._dist._provider.path)
- except AttributeError:
- return False
-
- @property
- def canonical_name(self) -> NormalizedName:
- return canonicalize_name(self._dist.project_name)
-
- @property
- def version(self) -> DistributionVersion:
- return parse_version(self._dist.version)
-
- def is_file(self, path: InfoPath) -> bool:
- return self._dist.has_metadata(str(path))
-
- def iter_distutils_script_names(self) -> Iterator[str]:
- yield from self._dist.metadata_listdir("scripts")
-
- def read_text(self, path: InfoPath) -> str:
- name = str(path)
- if not self._dist.has_metadata(name):
- raise FileNotFoundError(name)
- content = self._dist.get_metadata(name)
- if content is None:
- raise NoneMetadataError(self, name)
- return content
-
- def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
- for group, entries in self._dist.get_entry_map().items():
- for name, entry_point in entries.items():
- name, _, value = str(entry_point).partition("=")
- yield EntryPoint(name=name.strip(), value=value.strip(), group=group)
-
- def _metadata_impl(self) -> email.message.Message:
- """
- :raises NoneMetadataError: if the distribution reports `has_metadata()`
- True but `get_metadata()` returns None.
- """
- if isinstance(self._dist, pkg_resources.DistInfoDistribution):
- metadata_name = "METADATA"
- else:
- metadata_name = "PKG-INFO"
- try:
- metadata = self.read_text(metadata_name)
- except FileNotFoundError:
- if self.location:
- displaying_path = display_path(self.location)
- else:
- displaying_path = repr(self.location)
- logger.warning("No metadata found in %s", displaying_path)
- metadata = ""
- feed_parser = email.parser.FeedParser()
- feed_parser.feed(metadata)
- return feed_parser.close()
-
- def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
- if extras: # pkg_resources raises on invalid extras, so we sanitize.
- extras = frozenset(pkg_resources.safe_extra(e) for e in extras)
- extras = extras.intersection(self._dist.extras)
- return self._dist.requires(extras)
-
- def iter_provided_extras(self) -> Iterable[str]:
- return self._dist.extras
-
- def is_extra_provided(self, extra: str) -> bool:
- return pkg_resources.safe_extra(extra) in self._dist.extras
-
-
-class Environment(BaseEnvironment):
- def __init__(self, ws: pkg_resources.WorkingSet) -> None:
- self._ws = ws
-
- @classmethod
- def default(cls) -> BaseEnvironment:
- return cls(pkg_resources.working_set)
-
- @classmethod
- def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
- return cls(pkg_resources.WorkingSet(paths))
-
- def _iter_distributions(self) -> Iterator[BaseDistribution]:
- for dist in self._ws:
- yield Distribution(dist)
-
- def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
- """Find a distribution matching the ``name`` in the environment.
-
- This searches across *all* distributions available in the environment, to
- match the behavior of ``pkg_resources.get_distribution()``.
- """
- canonical_name = canonicalize_name(name)
- for dist in self.iter_all_distributions():
- if dist.canonical_name == canonical_name:
- return dist
- return None
-
- def get_distribution(self, name: str) -> Optional[BaseDistribution]:
- # Search the distribution by looking through the working set.
- dist = self._search_distribution(name)
- if dist:
- return dist
-
- # If distribution could not be found, call working_set.require to
- # update the working set, and try to find the distribution again.
- # This might happen, e.g., when you install a package twice, once
- # using setup.py develop and again using setup.py install. Now when
- # running pip uninstall twice, the package gets removed from the
- # working set in the first uninstall, so we have to populate the
- # working set again so that pip knows about it and the package gets
- # picked up and is successfully uninstalled the second time too.
- try:
- # We didn't pass in any version specifiers, so this can never
- # raise pkg_resources.VersionConflict.
- self._ws.require(name)
- except pkg_resources.DistributionNotFound:
- return None
- return self._search_distribution(name)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py
deleted file mode 100644
index 7855226..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""A package that contains models that represent entities.
-"""
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 16cd14b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc
deleted file mode 100644
index d8d1894..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/candidate.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc
deleted file mode 100644
index a7c8860..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc
deleted file mode 100644
index f742757..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/format_control.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc
deleted file mode 100644
index 89da93e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/index.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc
deleted file mode 100644
index 8c7f2b5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/installation_report.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc
deleted file mode 100644
index af58b32..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/link.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc
deleted file mode 100644
index 1aec65a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/scheme.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc
deleted file mode 100644
index 69fbc27..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc
deleted file mode 100644
index 1cdd16f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc
deleted file mode 100644
index f12ec4d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/target_python.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index 7010c85..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py b/venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py
deleted file mode 100644
index 9184a90..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/candidate.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from pip._vendor.packaging.version import parse as parse_version
-
-from pip._internal.models.link import Link
-from pip._internal.utils.models import KeyBasedCompareMixin
-
-
-class InstallationCandidate(KeyBasedCompareMixin):
- """Represents a potential "candidate" for installation."""
-
- __slots__ = ["name", "version", "link"]
-
- def __init__(self, name: str, version: str, link: Link) -> None:
- self.name = name
- self.version = parse_version(version)
- self.link = link
-
- super().__init__(
- key=(self.name, self.version, self.link),
- defining_class=InstallationCandidate,
- )
-
- def __repr__(self) -> str:
- return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
- self.name,
- self.version,
- self.link,
- )
-
- def __str__(self) -> str:
- return f"{self.name!r} candidate (version {self.version} at {self.link})"
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py b/venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py
deleted file mode 100644
index 0af884b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/direct_url.py
+++ /dev/null
@@ -1,235 +0,0 @@
-""" PEP 610 """
-import json
-import re
-import urllib.parse
-from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
-
-__all__ = [
- "DirectUrl",
- "DirectUrlValidationError",
- "DirInfo",
- "ArchiveInfo",
- "VcsInfo",
-]
-
-T = TypeVar("T")
-
-DIRECT_URL_METADATA_NAME = "direct_url.json"
-ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
-
-
-class DirectUrlValidationError(Exception):
- pass
-
-
-def _get(
- d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
-) -> Optional[T]:
- """Get value from dictionary and verify expected type."""
- if key not in d:
- return default
- value = d[key]
- if not isinstance(value, expected_type):
- raise DirectUrlValidationError(
- f"{value!r} has unexpected type for {key} (expected {expected_type})"
- )
- return value
-
-
-def _get_required(
- d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
-) -> T:
- value = _get(d, expected_type, key, default)
- if value is None:
- raise DirectUrlValidationError(f"{key} must have a value")
- return value
-
-
-def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
- infos = [info for info in infos if info is not None]
- if not infos:
- raise DirectUrlValidationError(
- "missing one of archive_info, dir_info, vcs_info"
- )
- if len(infos) > 1:
- raise DirectUrlValidationError(
- "more than one of archive_info, dir_info, vcs_info"
- )
- assert infos[0] is not None
- return infos[0]
-
-
-def _filter_none(**kwargs: Any) -> Dict[str, Any]:
- """Make dict excluding None values."""
- return {k: v for k, v in kwargs.items() if v is not None}
-
-
-class VcsInfo:
- name = "vcs_info"
-
- def __init__(
- self,
- vcs: str,
- commit_id: str,
- requested_revision: Optional[str] = None,
- ) -> None:
- self.vcs = vcs
- self.requested_revision = requested_revision
- self.commit_id = commit_id
-
- @classmethod
- def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
- if d is None:
- return None
- return cls(
- vcs=_get_required(d, str, "vcs"),
- commit_id=_get_required(d, str, "commit_id"),
- requested_revision=_get(d, str, "requested_revision"),
- )
-
- def _to_dict(self) -> Dict[str, Any]:
- return _filter_none(
- vcs=self.vcs,
- requested_revision=self.requested_revision,
- commit_id=self.commit_id,
- )
-
-
-class ArchiveInfo:
- name = "archive_info"
-
- def __init__(
- self,
- hash: Optional[str] = None,
- hashes: Optional[Dict[str, str]] = None,
- ) -> None:
- # set hashes before hash, since the hash setter will further populate hashes
- self.hashes = hashes
- self.hash = hash
-
- @property
- def hash(self) -> Optional[str]:
- return self._hash
-
- @hash.setter
- def hash(self, value: Optional[str]) -> None:
- if value is not None:
- # Auto-populate the hashes key to upgrade to the new format automatically.
- # We don't back-populate the legacy hash key from hashes.
- try:
- hash_name, hash_value = value.split("=", 1)
- except ValueError:
- raise DirectUrlValidationError(
- f"invalid archive_info.hash format: {value!r}"
- )
- if self.hashes is None:
- self.hashes = {hash_name: hash_value}
- elif hash_name not in self.hashes:
- self.hashes = self.hashes.copy()
- self.hashes[hash_name] = hash_value
- self._hash = value
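-
- # Illustrative: assigning ``hash = "sha256=abc123"`` (hypothetical digest)
- # auto-populates ``hashes`` with {"sha256": "abc123"}, while the legacy
- # ``hash`` value is kept as-is.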
-
- @classmethod
- def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
- if d is None:
- return None
- return cls(hash=_get(d, str, "hash"), hashes=_get(d, dict, "hashes"))
-
- def _to_dict(self) -> Dict[str, Any]:
- return _filter_none(hash=self.hash, hashes=self.hashes)
-
-
-class DirInfo:
- name = "dir_info"
-
- def __init__(
- self,
- editable: bool = False,
- ) -> None:
- self.editable = editable
-
- @classmethod
- def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
- if d is None:
- return None
- return cls(editable=_get_required(d, bool, "editable", default=False))
-
- def _to_dict(self) -> Dict[str, Any]:
- return _filter_none(editable=self.editable or None)
-
-
-InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
-
-
-class DirectUrl:
- def __init__(
- self,
- url: str,
- info: InfoType,
- subdirectory: Optional[str] = None,
- ) -> None:
- self.url = url
- self.info = info
- self.subdirectory = subdirectory
-
- def _remove_auth_from_netloc(self, netloc: str) -> str:
- if "@" not in netloc:
- return netloc
- user_pass, netloc_no_user_pass = netloc.split("@", 1)
- if (
- isinstance(self.info, VcsInfo)
- and self.info.vcs == "git"
- and user_pass == "git"
- ):
- return netloc
- if ENV_VAR_RE.match(user_pass):
- return netloc
- return netloc_no_user_pass
-
- @property
- def redacted_url(self) -> str:
- """url with user:password part removed unless it is formed with
- environment variables as specified in PEP 610, or it is ``git``
- in the case of a git URL.
- """
- purl = urllib.parse.urlsplit(self.url)
- netloc = self._remove_auth_from_netloc(purl.netloc)
- surl = urllib.parse.urlunsplit(
- (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
- )
- return surl
-
- def validate(self) -> None:
- self.from_dict(self.to_dict())
-
- @classmethod
- def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
- return DirectUrl(
- url=_get_required(d, str, "url"),
- subdirectory=_get(d, str, "subdirectory"),
- info=_exactly_one_of(
- [
- ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
- DirInfo._from_dict(_get(d, dict, "dir_info")),
- VcsInfo._from_dict(_get(d, dict, "vcs_info")),
- ]
- ),
- )
-
- def to_dict(self) -> Dict[str, Any]:
- res = _filter_none(
- url=self.redacted_url,
- subdirectory=self.subdirectory,
- )
- res[self.info.name] = self.info._to_dict()
- return res
-
- @classmethod
- def from_json(cls, s: str) -> "DirectUrl":
- return cls.from_dict(json.loads(s))
-
- def to_json(self) -> str:
- return json.dumps(self.to_dict(), sort_keys=True)
-
- def is_local_editable(self) -> bool:
- return isinstance(self.info, DirInfo) and self.info.editable
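A minimal usage sketch of the PEP 610 round-trip above, assuming a matching pip checkout where this internal module is importable (pip._internal is not a stable public API); the URL and hash are placeholders:

from pip._internal.models.direct_url import ArchiveInfo, DirectUrl

# The legacy "hash" field auto-populates the newer "hashes" mapping
# via the property setter above.
info = ArchiveInfo(hash="sha256=0123abcd")
direct_url = DirectUrl(
    url="https://user:secret@example.com/pkg-1.0.tar.gz",  # placeholder URL
    info=info,
)

# to_dict() serializes through redacted_url, so credentials in the
# netloc never reach direct_url.json.
assert "secret" not in direct_url.to_json()

# validate() is simply from_dict(to_dict()) and raises
# DirectUrlValidationError on malformed data.
direct_url.validate()
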
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py b/venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py
deleted file mode 100644
index ccd1127..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/format_control.py
+++ /dev/null
@@ -1,78 +0,0 @@
-from typing import FrozenSet, Optional, Set
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.exceptions import CommandError
-
-
-class FormatControl:
- """Helper for managing formats from which a package can be installed."""
-
- __slots__ = ["no_binary", "only_binary"]
-
- def __init__(
- self,
- no_binary: Optional[Set[str]] = None,
- only_binary: Optional[Set[str]] = None,
- ) -> None:
- if no_binary is None:
- no_binary = set()
- if only_binary is None:
- only_binary = set()
-
- self.no_binary = no_binary
- self.only_binary = only_binary
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, self.__class__):
- return NotImplemented
-
- if self.__slots__ != other.__slots__:
- return False
-
- return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self.no_binary}, {self.only_binary})"
-
- @staticmethod
- def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
- if value.startswith("-"):
- raise CommandError(
- "--no-binary / --only-binary option requires 1 argument."
- )
- new = value.split(",")
- while ":all:" in new:
- other.clear()
- target.clear()
- target.add(":all:")
- del new[: new.index(":all:") + 1]
- # Without a :none: in the remainder, we want to discard everything else, as :all: covers it
- if ":none:" not in new:
- return
- for name in new:
- if name == ":none:":
- target.clear()
- continue
- name = canonicalize_name(name)
- other.discard(name)
- target.add(name)
-
- def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
- result = {"binary", "source"}
- if canonical_name in self.only_binary:
- result.discard("source")
- elif canonical_name in self.no_binary:
- result.discard("binary")
- elif ":all:" in self.only_binary:
- result.discard("source")
- elif ":all:" in self.no_binary:
- result.discard("binary")
- return frozenset(result)
-
- def disallow_binaries(self) -> None:
- self.handle_mutual_excludes(
- ":all:",
- self.no_binary,
- self.only_binary,
- )
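The interplay of ":all:" and ":none:" in handle_mutual_excludes is easiest to see in a small sketch (the project names are arbitrary):

from pip._internal.models.format_control import FormatControl

fc = FormatControl()
# Equivalent to --no-binary=":all:,:none:,simplejson": ":all:" first blocks
# binaries for everything, ":none:" then re-allows them, and "simplejson"
# re-blocks binaries for that single project.
FormatControl.handle_mutual_excludes(
    ":all:,:none:,simplejson", fc.no_binary, fc.only_binary
)

print(fc.get_allowed_formats("simplejson"))  # frozenset({'source'})
print(fc.get_allowed_formats("requests"))    # frozenset({'binary', 'source'})
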
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/index.py b/venv/lib/python3.11/site-packages/pip/_internal/models/index.py
deleted file mode 100644
index b94c325..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/index.py
+++ /dev/null
@@ -1,28 +0,0 @@
-import urllib.parse
-
-
-class PackageIndex:
- """Represents a Package Index and provides easier access to endpoints"""
-
- __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
-
- def __init__(self, url: str, file_storage_domain: str) -> None:
- super().__init__()
- self.url = url
- self.netloc = urllib.parse.urlsplit(url).netloc
- self.simple_url = self._url_for_path("simple")
- self.pypi_url = self._url_for_path("pypi")
-
-        # This is part of a temporary hack used to block installs of PyPI
-        # packages which depend on external urls; it is only necessary until
-        # PyPI can block such packages itself.
- self.file_storage_domain = file_storage_domain
-
- def _url_for_path(self, path: str) -> str:
- return urllib.parse.urljoin(self.url, path)
-
-
-PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
-TestPyPI = PackageIndex(
- "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
-)
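The module-level PyPI and TestPyPI constants are how the rest of pip refers to these endpoints; a custom index works the same way (the internal host below is a placeholder):

from pip._internal.models.index import PackageIndex, PyPI

print(PyPI.simple_url)  # https://pypi.org/simple
print(PyPI.pypi_url)    # https://pypi.org/pypi

# A private index wrapper; host names are placeholders.
internal = PackageIndex(
    "https://pypi.internal.example/",
    file_storage_domain="files.internal.example",
)
print(internal.simple_url)  # https://pypi.internal.example/simple
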
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py b/venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py
deleted file mode 100644
index b9c6330..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/installation_report.py
+++ /dev/null
@@ -1,56 +0,0 @@
-from typing import Any, Dict, Sequence
-
-from pip._vendor.packaging.markers import default_environment
-
-from pip import __version__
-from pip._internal.req.req_install import InstallRequirement
-
-
-class InstallationReport:
- def __init__(self, install_requirements: Sequence[InstallRequirement]):
- self._install_requirements = install_requirements
-
- @classmethod
- def _install_req_to_dict(cls, ireq: InstallRequirement) -> Dict[str, Any]:
- assert ireq.download_info, f"No download_info for {ireq}"
- res = {
- # PEP 610 json for the download URL. download_info.archive_info.hashes may
- # be absent when the requirement was installed from the wheel cache
- # and the cache entry was populated by an older pip version that did not
- # record origin.json.
- "download_info": ireq.download_info.to_dict(),
- # is_direct is true if the requirement was a direct URL reference (which
- # includes editable requirements), and false if the requirement was
- # downloaded from a PEP 503 index or --find-links.
- "is_direct": ireq.is_direct,
- # is_yanked is true if the requirement was yanked from the index, but
- # was still selected by pip to conform to PEP 592.
- "is_yanked": ireq.link.is_yanked if ireq.link else False,
- # requested is true if the requirement was specified by the user (aka
- # top level requirement), and false if it was installed as a dependency of a
- # requirement. https://peps.python.org/pep-0376/#requested
- "requested": ireq.user_supplied,
- # PEP 566 json encoding for metadata
- # https://www.python.org/dev/peps/pep-0566/#json-compatible-metadata
- "metadata": ireq.get_dist().metadata_dict,
- }
- if ireq.user_supplied and ireq.extras:
- # For top level requirements, the list of requested extras, if any.
- res["requested_extras"] = sorted(ireq.extras)
- return res
-
- def to_dict(self) -> Dict[str, Any]:
- return {
- "version": "1",
- "pip_version": __version__,
- "install": [
- self._install_req_to_dict(ireq) for ireq in self._install_requirements
- ],
- # https://peps.python.org/pep-0508/#environment-markers
- # TODO: currently, the resolver uses the default environment to evaluate
- # environment markers, so that is what we report here. In the future, it
- # should also take into account options such as --python-version or
- # --platform, perhaps under the form of an environment_override field?
- # https://github.com/pypa/pip/issues/11198
- "environment": default_environment(),
- }
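This class backs the public `pip install --report` flag; a sketch of producing and consuming such a report, assuming a recent pip (the report format version "1" shipped as stable in pip 23.0) and network access:

import json
import subprocess
import sys

# --dry-run resolves without installing; --report - writes the JSON
# document produced by InstallationReport.to_dict() to stdout.
out = subprocess.run(
    [sys.executable, "-m", "pip", "install", "--quiet", "--dry-run",
     "--report", "-", "requests"],
    check=True,
    stdout=subprocess.PIPE,
)
report = json.loads(out.stdout)

assert report["version"] == "1"
for item in report["install"]:
    meta = item["metadata"]
    print(meta["name"], meta["version"],
          "direct" if item["is_direct"] else "from index")
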
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/link.py b/venv/lib/python3.11/site-packages/pip/_internal/models/link.py
deleted file mode 100644
index 73041b8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/link.py
+++ /dev/null
@@ -1,579 +0,0 @@
-import functools
-import itertools
-import logging
-import os
-import posixpath
-import re
-import urllib.parse
-from dataclasses import dataclass
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- List,
- Mapping,
- NamedTuple,
- Optional,
- Tuple,
- Union,
-)
-
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.filetypes import WHEEL_EXTENSION
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.misc import (
- pairwise,
- redact_auth_from_url,
- split_auth_from_netloc,
- splitext,
-)
-from pip._internal.utils.models import KeyBasedCompareMixin
-from pip._internal.utils.urls import path_to_url, url_to_path
-
-if TYPE_CHECKING:
- from pip._internal.index.collector import IndexContent
-
-logger = logging.getLogger(__name__)
-
-
-# Order matters: earlier hashes take precedence over later hashes when
-# choosing which one to use.
-_SUPPORTED_HASHES = ("sha512", "sha384", "sha256", "sha224", "sha1", "md5")
-
-
-@dataclass(frozen=True)
-class LinkHash:
- """Links to content may have embedded hash values. This class parses those.
-
- `name` must be any member of `_SUPPORTED_HASHES`.
-
- This class can be converted to and from `ArchiveInfo`. While ArchiveInfo intends to
- be JSON-serializable to conform to PEP 610, this class contains the logic for
- parsing a hash name and value for correctness, and then checking whether that hash
- conforms to a schema with `.is_hash_allowed()`."""
-
- name: str
- value: str
-
- _hash_url_fragment_re = re.compile(
- # NB: we do not validate that the second group (.*) is a valid hex
- # digest. Instead, we simply keep that string in this class, and then check it
- # against Hashes when hash-checking is needed. This is easier to debug than
- # proactively discarding an invalid hex digest, as we handle incorrect hashes
- # and malformed hashes in the same place.
- r"[#&]({choices})=([^&]*)".format(
- choices="|".join(re.escape(hash_name) for hash_name in _SUPPORTED_HASHES)
- ),
- )
-
- def __post_init__(self) -> None:
- assert self.name in _SUPPORTED_HASHES
-
- @classmethod
- @functools.lru_cache(maxsize=None)
- def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
- """Search a string for a checksum algorithm name and encoded output value."""
- match = cls._hash_url_fragment_re.search(url)
- if match is None:
- return None
- name, value = match.groups()
- return cls(name=name, value=value)
-
- def as_dict(self) -> Dict[str, str]:
- return {self.name: self.value}
-
- def as_hashes(self) -> Hashes:
- """Return a Hashes instance which checks only for the current hash."""
- return Hashes({self.name: [self.value]})
-
- def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
- """
- Return True if the current hash is allowed by `hashes`.
- """
- if hashes is None:
- return False
- return hashes.is_hash_allowed(self.name, hex_digest=self.value)
-
-
-@dataclass(frozen=True)
-class MetadataFile:
- """Information about a core metadata file associated with a distribution."""
-
- hashes: Optional[Dict[str, str]]
-
- def __post_init__(self) -> None:
- if self.hashes is not None:
- assert all(name in _SUPPORTED_HASHES for name in self.hashes)
-
-
-def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
- # Remove any unsupported hash types from the mapping. If this leaves no
- # supported hashes, return None
- if hashes is None:
- return None
- hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
- if not hashes:
- return None
- return hashes
-
-
-def _clean_url_path_part(part: str) -> str:
- """
- Clean a "part" of a URL path (i.e. after splitting on "@" characters).
- """
- # We unquote prior to quoting to make sure nothing is double quoted.
- return urllib.parse.quote(urllib.parse.unquote(part))
-
-
-def _clean_file_url_path(part: str) -> str:
- """
- Clean the first part of a URL path that corresponds to a local
- filesystem path (i.e. the first part after splitting on "@" characters).
- """
- # We unquote prior to quoting to make sure nothing is double quoted.
- # Also, on Windows the path part might contain a drive letter which
- # should not be quoted. On Linux where drive letters do not
- # exist, the colon should be quoted. We rely on urllib.request
- # to do the right thing here.
- return urllib.request.pathname2url(urllib.request.url2pathname(part))
-
-
-# Characters that split path parts: "@" and percent-encoded "/" ("%2F").
-_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
-
-
-def _clean_url_path(path: str, is_local_path: bool) -> str:
- """
- Clean the path portion of a URL.
- """
- if is_local_path:
- clean_func = _clean_file_url_path
- else:
- clean_func = _clean_url_path_part
-
- # Split on the reserved characters prior to cleaning so that
- # revision strings in VCS URLs are properly preserved.
- parts = _reserved_chars_re.split(path)
-
- cleaned_parts = []
- for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
- cleaned_parts.append(clean_func(to_clean))
- # Normalize %xx escapes (e.g. %2f -> %2F)
- cleaned_parts.append(reserved.upper())
-
- return "".join(cleaned_parts)
-
-
-def _ensure_quoted_url(url: str) -> str:
- """
- Make sure a link is fully quoted.
- For example, if ' ' occurs in the URL, it will be replaced with "%20",
- and without double-quoting other characters.
- """
- # Split the URL into parts according to the general structure
- # `scheme://netloc/path;parameters?query#fragment`.
- result = urllib.parse.urlparse(url)
- # If the netloc is empty, then the URL refers to a local filesystem path.
- is_local_path = not result.netloc
- path = _clean_url_path(result.path, is_local_path=is_local_path)
- return urllib.parse.urlunparse(result._replace(path=path))
-
-
-class Link(KeyBasedCompareMixin):
- """Represents a parsed link from a Package Index's simple URL"""
-
- __slots__ = [
- "_parsed_url",
- "_url",
- "_hashes",
- "comes_from",
- "requires_python",
- "yanked_reason",
- "metadata_file_data",
- "cache_link_parsing",
- "egg_fragment",
- ]
-
- def __init__(
- self,
- url: str,
- comes_from: Optional[Union[str, "IndexContent"]] = None,
- requires_python: Optional[str] = None,
- yanked_reason: Optional[str] = None,
- metadata_file_data: Optional[MetadataFile] = None,
- cache_link_parsing: bool = True,
- hashes: Optional[Mapping[str, str]] = None,
- ) -> None:
- """
- :param url: url of the resource pointed to (href of the link)
- :param comes_from: instance of IndexContent where the link was found,
- or string.
- :param requires_python: String containing the `Requires-Python`
- metadata field, specified in PEP 345. This may be specified by
- a data-requires-python attribute in the HTML link tag, as
- described in PEP 503.
- :param yanked_reason: the reason the file has been yanked, if the
- file has been yanked, or None if the file hasn't been yanked.
- This is the value of the "data-yanked" attribute, if present, in
- a simple repository HTML link. If the file has been yanked but
- no reason was provided, this should be the empty string. See
- PEP 592 for more information and the specification.
- :param metadata_file_data: the metadata attached to the file, or None if
- no such metadata is provided. This argument, if not None, indicates
- that a separate metadata file exists, and also optionally supplies
- hashes for that file.
- :param cache_link_parsing: A flag that is used elsewhere to determine
- whether resources retrieved from this link should be cached. PyPI
- URLs should generally have this set to False, for example.
- :param hashes: A mapping of hash names to digests to allow us to
- determine the validity of a download.
- """
-
- # The comes_from, requires_python, and metadata_file_data arguments are
- # only used by classmethods of this class, and are not used in client
- # code directly.
-
- # url can be a UNC windows share
- if url.startswith("\\\\"):
- url = path_to_url(url)
-
- self._parsed_url = urllib.parse.urlsplit(url)
- # Store the url as a private attribute to prevent accidentally
- # trying to set a new value.
- self._url = url
-
- link_hash = LinkHash.find_hash_url_fragment(url)
- hashes_from_link = {} if link_hash is None else link_hash.as_dict()
- if hashes is None:
- self._hashes = hashes_from_link
- else:
- self._hashes = {**hashes, **hashes_from_link}
-
- self.comes_from = comes_from
- self.requires_python = requires_python if requires_python else None
- self.yanked_reason = yanked_reason
- self.metadata_file_data = metadata_file_data
-
- super().__init__(key=url, defining_class=Link)
-
- self.cache_link_parsing = cache_link_parsing
- self.egg_fragment = self._egg_fragment()
-
- @classmethod
- def from_json(
- cls,
- file_data: Dict[str, Any],
- page_url: str,
- ) -> Optional["Link"]:
- """
-        Convert a PyPI JSON file entry from a simple repository page into a Link.
- """
- file_url = file_data.get("url")
- if file_url is None:
- return None
-
- url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
- pyrequire = file_data.get("requires-python")
- yanked_reason = file_data.get("yanked")
- hashes = file_data.get("hashes", {})
-
- # PEP 714: Indexes must use the name core-metadata, but
- # clients should support the old name as a fallback for compatibility.
- metadata_info = file_data.get("core-metadata")
- if metadata_info is None:
- metadata_info = file_data.get("dist-info-metadata")
-
- # The metadata info value may be a boolean, or a dict of hashes.
- if isinstance(metadata_info, dict):
- # The file exists, and hashes have been supplied
- metadata_file_data = MetadataFile(supported_hashes(metadata_info))
- elif metadata_info:
- # The file exists, but there are no hashes
- metadata_file_data = MetadataFile(None)
- else:
- # False or not present: the file does not exist
- metadata_file_data = None
-
-        # Link.yanked_reason expects an empty string instead of a boolean.
-        if yanked_reason and not isinstance(yanked_reason, str):
-            yanked_reason = ""
-        # Link.yanked_reason expects None instead of False.
-        elif not yanked_reason:
-            yanked_reason = None
-
- return cls(
- url,
- comes_from=page_url,
- requires_python=pyrequire,
- yanked_reason=yanked_reason,
- hashes=hashes,
- metadata_file_data=metadata_file_data,
- )
-
- @classmethod
- def from_element(
- cls,
- anchor_attribs: Dict[str, Optional[str]],
- page_url: str,
- base_url: str,
- ) -> Optional["Link"]:
- """
- Convert an anchor element's attributes in a simple repository page to a Link.
- """
- href = anchor_attribs.get("href")
- if not href:
- return None
-
- url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
- pyrequire = anchor_attribs.get("data-requires-python")
- yanked_reason = anchor_attribs.get("data-yanked")
-
- # PEP 714: Indexes must use the name data-core-metadata, but
- # clients should support the old name as a fallback for compatibility.
- metadata_info = anchor_attribs.get("data-core-metadata")
- if metadata_info is None:
- metadata_info = anchor_attribs.get("data-dist-info-metadata")
- # The metadata info value may be the string "true", or a string of
- # the form "hashname=hashval"
- if metadata_info == "true":
- # The file exists, but there are no hashes
- metadata_file_data = MetadataFile(None)
- elif metadata_info is None:
- # The file does not exist
- metadata_file_data = None
- else:
- # The file exists, and hashes have been supplied
- hashname, sep, hashval = metadata_info.partition("=")
- if sep == "=":
- metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
- else:
- # Error - data is wrong. Treat as no hashes supplied.
- logger.debug(
- "Index returned invalid data-dist-info-metadata value: %s",
- metadata_info,
- )
- metadata_file_data = MetadataFile(None)
-
- return cls(
- url,
- comes_from=page_url,
- requires_python=pyrequire,
- yanked_reason=yanked_reason,
- metadata_file_data=metadata_file_data,
- )
-
- def __str__(self) -> str:
- if self.requires_python:
- rp = f" (requires-python:{self.requires_python})"
- else:
- rp = ""
- if self.comes_from:
- return f"{redact_auth_from_url(self._url)} (from {self.comes_from}){rp}"
- else:
- return redact_auth_from_url(str(self._url))
-
- def __repr__(self) -> str:
- return f"<Link {self}>"
-
- @property
- def url(self) -> str:
- return self._url
-
- @property
- def filename(self) -> str:
- path = self.path.rstrip("/")
- name = posixpath.basename(path)
- if not name:
- # Make sure we don't leak auth information if the netloc
- # includes a username and password.
- netloc, user_pass = split_auth_from_netloc(self.netloc)
- return netloc
-
- name = urllib.parse.unquote(name)
- assert name, f"URL {self._url!r} produced no filename"
- return name
-
- @property
- def file_path(self) -> str:
- return url_to_path(self.url)
-
- @property
- def scheme(self) -> str:
- return self._parsed_url.scheme
-
- @property
- def netloc(self) -> str:
- """
- This can contain auth information.
- """
- return self._parsed_url.netloc
-
- @property
- def path(self) -> str:
- return urllib.parse.unquote(self._parsed_url.path)
-
- def splitext(self) -> Tuple[str, str]:
- return splitext(posixpath.basename(self.path.rstrip("/")))
-
- @property
- def ext(self) -> str:
- return self.splitext()[1]
-
- @property
- def url_without_fragment(self) -> str:
- scheme, netloc, path, query, fragment = self._parsed_url
- return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
-
- _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
-
- # Per PEP 508.
- _project_name_re = re.compile(
- r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
- )
-
- def _egg_fragment(self) -> Optional[str]:
- match = self._egg_fragment_re.search(self._url)
- if not match:
- return None
-
- # An egg fragment looks like a PEP 508 project name, along with
- # an optional extras specifier. Anything else is invalid.
- project_name = match.group(1)
- if not self._project_name_re.match(project_name):
- deprecated(
- reason=f"{self} contains an egg fragment with a non-PEP 508 name",
- replacement="to use the req @ url syntax, and remove the egg fragment",
- gone_in="25.0",
- issue=11617,
- )
-
- return project_name
-
- _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
-
- @property
- def subdirectory_fragment(self) -> Optional[str]:
- match = self._subdirectory_fragment_re.search(self._url)
- if not match:
- return None
- return match.group(1)
-
- def metadata_link(self) -> Optional["Link"]:
- """Return a link to the associated core metadata file (if any)."""
- if self.metadata_file_data is None:
- return None
- metadata_url = f"{self.url_without_fragment}.metadata"
- if self.metadata_file_data.hashes is None:
- return Link(metadata_url)
- return Link(metadata_url, hashes=self.metadata_file_data.hashes)
-
- def as_hashes(self) -> Hashes:
- return Hashes({k: [v] for k, v in self._hashes.items()})
-
- @property
- def hash(self) -> Optional[str]:
- return next(iter(self._hashes.values()), None)
-
- @property
- def hash_name(self) -> Optional[str]:
- return next(iter(self._hashes), None)
-
- @property
- def show_url(self) -> str:
- return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
-
- @property
- def is_file(self) -> bool:
- return self.scheme == "file"
-
- def is_existing_dir(self) -> bool:
- return self.is_file and os.path.isdir(self.file_path)
-
- @property
- def is_wheel(self) -> bool:
- return self.ext == WHEEL_EXTENSION
-
- @property
- def is_vcs(self) -> bool:
- from pip._internal.vcs import vcs
-
- return self.scheme in vcs.all_schemes
-
- @property
- def is_yanked(self) -> bool:
- return self.yanked_reason is not None
-
- @property
- def has_hash(self) -> bool:
- return bool(self._hashes)
-
- def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
- """
- Return True if the link has a hash and it is allowed by `hashes`.
- """
- if hashes is None:
- return False
- return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
-
-
-class _CleanResult(NamedTuple):
- """Convert link for equivalency check.
-
- This is used in the resolver to check whether two URL-specified requirements
- likely point to the same distribution and can be considered equivalent. This
- equivalency logic avoids comparing URLs literally, which can be too strict
- (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
-
- Currently this does three things:
-
- 1. Drop the basic auth part. This is technically wrong since a server can
- serve different content based on auth, but if it does that, it is even
- impossible to guarantee two URLs without auth are equivalent, since
- the user can input different auth information when prompted. So the
- practical solution is to assume the auth doesn't affect the response.
- 2. Parse the query to avoid the ordering issue. Note that ordering under the
- same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
- still considered different.
- 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
-       hash values, since it should have no impact on the downloaded content. Note
- that this drops the "egg=" part historically used to denote the requested
-       project (and extras), which is wrong in the strictest sense, but too many
-       people supply it inconsistently, causing superfluous resolution
-       conflicts, so we choose to ignore it as well.
- """
-
- parsed: urllib.parse.SplitResult
- query: Dict[str, List[str]]
- subdirectory: str
- hashes: Dict[str, str]
-
-
-def _clean_link(link: Link) -> _CleanResult:
- parsed = link._parsed_url
- netloc = parsed.netloc.rsplit("@", 1)[-1]
- # According to RFC 8089, an empty host in file: means localhost.
- if parsed.scheme == "file" and not netloc:
- netloc = "localhost"
- fragment = urllib.parse.parse_qs(parsed.fragment)
- if "egg" in fragment:
- logger.debug("Ignoring egg= fragment in %s", link)
- try:
- # If there are multiple subdirectory values, use the first one.
- # This matches the behavior of Link.subdirectory_fragment.
- subdirectory = fragment["subdirectory"][0]
- except (IndexError, KeyError):
- subdirectory = ""
- # If there are multiple hash values under the same algorithm, use the
-    # first one. This matches the behavior of Link.hash.
- hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
- return _CleanResult(
- parsed=parsed._replace(netloc=netloc, query="", fragment=""),
- query=urllib.parse.parse_qs(parsed.query),
- subdirectory=subdirectory,
- hashes=hashes,
- )
-
-
-@functools.lru_cache(maxsize=None)
-def links_equivalent(link1: Link, link2: Link) -> bool:
- return _clean_link(link1) == _clean_link(link2)
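A sketch of the fragment parsing and equivalency rules above (the host and hash value are placeholders):

from pip._internal.models.link import Link, links_equivalent

link = Link(
    "https://files.example/demo-1.0-py3-none-any.whl"
    "#sha256=0123abcd&subdirectory=src"
)
print(link.filename)               # demo-1.0-py3-none-any.whl
print(link.is_wheel)               # True
print(link.hash_name, link.hash)   # sha256 0123abcd
print(link.subdirectory_fragment)  # src

# Equivalency ignores query ordering and the auth part of the netloc.
a = Link("https://user:pw@files.example/pkg.tar.gz?a=1&b=2")
b = Link("https://files.example/pkg.tar.gz?b=2&a=1")
print(links_equivalent(a, b))  # True
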
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/scheme.py b/venv/lib/python3.11/site-packages/pip/_internal/models/scheme.py
deleted file mode 100644
index f51190a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/scheme.py
+++ /dev/null
@@ -1,31 +0,0 @@
-"""
-For types associated with installation schemes.
-
-For a general overview of available schemes and their context, see
-https://docs.python.org/3/install/index.html#alternate-installation.
-"""
-
-
-SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
-
-
-class Scheme:
- """A Scheme holds paths which are used as the base directories for
- artifacts associated with a Python package.
- """
-
- __slots__ = SCHEME_KEYS
-
- def __init__(
- self,
- platlib: str,
- purelib: str,
- headers: str,
- scripts: str,
- data: str,
- ) -> None:
- self.platlib = platlib
- self.purelib = purelib
- self.headers = headers
- self.scripts = scripts
- self.data = data
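A sketch of filling a Scheme from the running interpreter via the standard library's sysconfig; pip's own locations module does this with more care (notably, pip's headers directory is per-project, unlike sysconfig's "include"):

import sysconfig

from pip._internal.models.scheme import Scheme

paths = sysconfig.get_paths()
scheme = Scheme(
    platlib=paths["platlib"],
    purelib=paths["purelib"],
    headers=paths["include"],  # simplification; see the note above
    scripts=paths["scripts"],
    data=paths["data"],
)
print(scheme.purelib)
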
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/search_scope.py b/venv/lib/python3.11/site-packages/pip/_internal/models/search_scope.py
deleted file mode 100644
index fe61e81..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/search_scope.py
+++ /dev/null
@@ -1,132 +0,0 @@
-import itertools
-import logging
-import os
-import posixpath
-import urllib.parse
-from typing import List
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.models.index import PyPI
-from pip._internal.utils.compat import has_tls
-from pip._internal.utils.misc import normalize_path, redact_auth_from_url
-
-logger = logging.getLogger(__name__)
-
-
-class SearchScope:
-
- """
- Encapsulates the locations that pip is configured to search.
- """
-
- __slots__ = ["find_links", "index_urls", "no_index"]
-
- @classmethod
- def create(
- cls,
- find_links: List[str],
- index_urls: List[str],
- no_index: bool,
- ) -> "SearchScope":
- """
- Create a SearchScope object after normalizing the `find_links`.
- """
- # Build find_links. If an argument starts with ~, it may be
- # a local file relative to a home directory. So try normalizing
- # it and if it exists, use the normalized version.
- # This is deliberately conservative - it might be fine just to
- # blindly normalize anything starting with a ~...
- built_find_links: List[str] = []
- for link in find_links:
- if link.startswith("~"):
- new_link = normalize_path(link)
- if os.path.exists(new_link):
- link = new_link
- built_find_links.append(link)
-
- # If we don't have TLS enabled, then WARN if anyplace we're looking
- # relies on TLS.
- if not has_tls():
- for link in itertools.chain(index_urls, built_find_links):
- parsed = urllib.parse.urlparse(link)
- if parsed.scheme == "https":
- logger.warning(
- "pip is configured with locations that require "
- "TLS/SSL, however the ssl module in Python is not "
- "available."
- )
- break
-
- return cls(
- find_links=built_find_links,
- index_urls=index_urls,
- no_index=no_index,
- )
-
- def __init__(
- self,
- find_links: List[str],
- index_urls: List[str],
- no_index: bool,
- ) -> None:
- self.find_links = find_links
- self.index_urls = index_urls
- self.no_index = no_index
-
- def get_formatted_locations(self) -> str:
- lines = []
- redacted_index_urls = []
- if self.index_urls and self.index_urls != [PyPI.simple_url]:
- for url in self.index_urls:
- redacted_index_url = redact_auth_from_url(url)
-
- # Parse the URL
- purl = urllib.parse.urlsplit(redacted_index_url)
-
-                # A URL is generally invalid if both the scheme and the netloc
-                # are missing. There are issues with Python's URL parsing, so
-                # this test is a bit crude; see bpo-20271 and bpo-23505. Python
-                # doesn't always parse invalid URLs correctly - ideally it would
-                # raise exceptions for malformed URLs.
- if not purl.scheme and not purl.netloc:
- logger.warning(
- 'The index url "%s" seems invalid, please provide a scheme.',
- redacted_index_url,
- )
-
- redacted_index_urls.append(redacted_index_url)
-
- lines.append(
- "Looking in indexes: {}".format(", ".join(redacted_index_urls))
- )
-
- if self.find_links:
- lines.append(
- "Looking in links: {}".format(
- ", ".join(redact_auth_from_url(url) for url in self.find_links)
- )
- )
- return "\n".join(lines)
-
- def get_index_urls_locations(self, project_name: str) -> List[str]:
- """Returns the locations found via self.index_urls
-
- Checks the url_name on the main (first in the list) index and
- use this url_name to produce all locations
- """
-
- def mkurl_pypi_url(url: str) -> str:
- loc = posixpath.join(
- url, urllib.parse.quote(canonicalize_name(project_name))
- )
- # For maximum compatibility with easy_install, ensure the path
- # ends in a trailing slash. Although this isn't in the spec
- # (and PyPI can handle it without the slash) some other index
- # implementations might break if they relied on easy_install's
- # behavior.
- if not loc.endswith("/"):
- loc = loc + "/"
- return loc
-
- return [mkurl_pypi_url(url) for url in self.index_urls]
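A sketch of the normalization and redaction above (the mirror URL is a placeholder):

from pip._internal.models.search_scope import SearchScope

scope = SearchScope.create(
    find_links=["~/wheelhouse"],  # normalized only if the path exists
    index_urls=[
        "https://pypi.org/simple",
        "https://user:pw@mirror.example/simple",
    ],
    no_index=False,
)
# Credentials are redacted in the human-readable summary.
print(scope.get_formatted_locations())

# Per-project URLs are canonicalized and always end with a slash.
print(scope.get_index_urls_locations("My_Package"))
# ['https://pypi.org/simple/my-package/', ...]
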
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/selection_prefs.py b/venv/lib/python3.11/site-packages/pip/_internal/models/selection_prefs.py
deleted file mode 100644
index 977bc4c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/selection_prefs.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from typing import Optional
-
-from pip._internal.models.format_control import FormatControl
-
-
-class SelectionPreferences:
- """
- Encapsulates the candidate selection preferences for downloading
- and installing files.
- """
-
- __slots__ = [
- "allow_yanked",
- "allow_all_prereleases",
- "format_control",
- "prefer_binary",
- "ignore_requires_python",
- ]
-
- # Don't include an allow_yanked default value to make sure each call
- # site considers whether yanked releases are allowed. This also causes
- # that decision to be made explicit in the calling code, which helps
- # people when reading the code.
- def __init__(
- self,
- allow_yanked: bool,
- allow_all_prereleases: bool = False,
- format_control: Optional[FormatControl] = None,
- prefer_binary: bool = False,
- ignore_requires_python: Optional[bool] = None,
- ) -> None:
- """Create a SelectionPreferences object.
-
- :param allow_yanked: Whether files marked as yanked (in the sense
- of PEP 592) are permitted to be candidates for install.
- :param format_control: A FormatControl object or None. Used to control
- the selection of source packages / binary packages when consulting
- the index and links.
- :param prefer_binary: Whether to prefer an old, but valid, binary
- dist over a new source dist.
- :param ignore_requires_python: Whether to ignore incompatible
- "Requires-Python" values in links. Defaults to False.
- """
- if ignore_requires_python is None:
- ignore_requires_python = False
-
- self.allow_yanked = allow_yanked
- self.allow_all_prereleases = allow_all_prereleases
- self.format_control = format_control
- self.prefer_binary = prefer_binary
- self.ignore_requires_python = ignore_requires_python
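A small sketch; note that allow_yanked deliberately has no default, so every call site must state it explicitly:

from pip._internal.models.format_control import FormatControl
from pip._internal.models.selection_prefs import SelectionPreferences

prefs = SelectionPreferences(
    allow_yanked=False,
    prefer_binary=True,
    format_control=FormatControl(no_binary={"simplejson"}),
)
print(prefs.prefer_binary, prefs.ignore_requires_python)  # True False
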
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/target_python.py b/venv/lib/python3.11/site-packages/pip/_internal/models/target_python.py
deleted file mode 100644
index 67ea5da..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/target_python.py
+++ /dev/null
@@ -1,122 +0,0 @@
-import sys
-from typing import List, Optional, Set, Tuple
-
-from pip._vendor.packaging.tags import Tag
-
-from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
-from pip._internal.utils.misc import normalize_version_info
-
-
-class TargetPython:
-
- """
- Encapsulates the properties of a Python interpreter one is targeting
- for a package install, download, etc.
- """
-
- __slots__ = [
- "_given_py_version_info",
- "abis",
- "implementation",
- "platforms",
- "py_version",
- "py_version_info",
- "_valid_tags",
- "_valid_tags_set",
- ]
-
- def __init__(
- self,
- platforms: Optional[List[str]] = None,
- py_version_info: Optional[Tuple[int, ...]] = None,
- abis: Optional[List[str]] = None,
- implementation: Optional[str] = None,
- ) -> None:
- """
- :param platforms: A list of strings or None. If None, searches for
- packages that are supported by the current system. Otherwise, will
- find packages that can be built on the platforms passed in. These
- packages will only be downloaded for distribution: they will
- not be built locally.
- :param py_version_info: An optional tuple of ints representing the
- Python version information to use (e.g. `sys.version_info[:3]`).
- This can have length 1, 2, or 3 when provided.
- :param abis: A list of strings or None. This is passed to
- compatibility_tags.py's get_supported() function as is.
- :param implementation: A string or None. This is passed to
- compatibility_tags.py's get_supported() function as is.
- """
- # Store the given py_version_info for when we call get_supported().
- self._given_py_version_info = py_version_info
-
- if py_version_info is None:
- py_version_info = sys.version_info[:3]
- else:
- py_version_info = normalize_version_info(py_version_info)
-
- py_version = ".".join(map(str, py_version_info[:2]))
-
- self.abis = abis
- self.implementation = implementation
- self.platforms = platforms
- self.py_version = py_version
- self.py_version_info = py_version_info
-
- # This is used to cache the return value of get_(un)sorted_tags.
- self._valid_tags: Optional[List[Tag]] = None
- self._valid_tags_set: Optional[Set[Tag]] = None
-
- def format_given(self) -> str:
- """
- Format the given, non-None attributes for display.
- """
- display_version = None
- if self._given_py_version_info is not None:
- display_version = ".".join(
- str(part) for part in self._given_py_version_info
- )
-
- key_values = [
- ("platforms", self.platforms),
- ("version_info", display_version),
- ("abis", self.abis),
- ("implementation", self.implementation),
- ]
- return " ".join(
- f"{key}={value!r}" for key, value in key_values if value is not None
- )
-
- def get_sorted_tags(self) -> List[Tag]:
- """
- Return the supported PEP 425 tags to check wheel candidates against.
-
- The tags are returned in order of preference (most preferred first).
- """
- if self._valid_tags is None:
- # Pass versions=None if no py_version_info was given since
- # versions=None uses special default logic.
- py_version_info = self._given_py_version_info
- if py_version_info is None:
- version = None
- else:
- version = version_info_to_nodot(py_version_info)
-
- tags = get_supported(
- version=version,
- platforms=self.platforms,
- abis=self.abis,
- impl=self.implementation,
- )
- self._valid_tags = tags
-
- return self._valid_tags
-
- def get_unsorted_tags(self) -> Set[Tag]:
- """Exactly the same as get_sorted_tags, but returns a set.
-
- This is important for performance.
- """
- if self._valid_tags_set is None:
- self._valid_tags_set = set(self.get_sorted_tags())
-
- return self._valid_tags_set
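A sketch of both modes: the implicit current interpreter, and an explicit cross-target of the kind `pip download --platform/--python-version/--implementation` sets up:

from pip._internal.models.target_python import TargetPython

# Defaults describe the running interpreter.
current = TargetPython()
print(current.py_version, len(current.get_sorted_tags()))

# Cross-targeting a different platform/version/implementation.
target = TargetPython(
    platforms=["manylinux2014_x86_64"],
    py_version_info=(3, 11),
    implementation="cp",
)
print(target.format_given())
print(target.get_sorted_tags()[0])  # most preferred tag comes first
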
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/models/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/models/wheel.py
deleted file mode 100644
index a5dc12b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/models/wheel.py
+++ /dev/null
@@ -1,92 +0,0 @@
-"""Represents a wheel file and provides access to the various parts of the
-name that have meaning.
-"""
-import re
-from typing import Dict, Iterable, List
-
-from pip._vendor.packaging.tags import Tag
-
-from pip._internal.exceptions import InvalidWheelFilename
-
-
-class Wheel:
- """A wheel file"""
-
- wheel_file_re = re.compile(
- r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
- ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
- \.whl|\.dist-info)$""",
- re.VERBOSE,
- )
-
- def __init__(self, filename: str) -> None:
- """
- :raises InvalidWheelFilename: when the filename is invalid for a wheel
- """
- wheel_info = self.wheel_file_re.match(filename)
- if not wheel_info:
- raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
- self.filename = filename
- self.name = wheel_info.group("name").replace("_", "-")
- # we'll assume "_" means "-" due to wheel naming scheme
- # (https://github.com/pypa/pip/issues/1150)
- self.version = wheel_info.group("ver").replace("_", "-")
- self.build_tag = wheel_info.group("build")
- self.pyversions = wheel_info.group("pyver").split(".")
- self.abis = wheel_info.group("abi").split(".")
- self.plats = wheel_info.group("plat").split(".")
-
- # All the tag combinations from this file
- self.file_tags = {
- Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
- }
-
- def get_formatted_file_tags(self) -> List[str]:
- """Return the wheel's tags as a sorted list of strings."""
- return sorted(str(tag) for tag in self.file_tags)
-
- def support_index_min(self, tags: List[Tag]) -> int:
- """Return the lowest index that one of the wheel's file_tag combinations
- achieves in the given list of supported tags.
-
- For example, if there are 8 supported tags and one of the file tags
- is first in the list, then return 0.
-
- :param tags: the PEP 425 tags to check the wheel against, in order
- with most preferred first.
-
- :raises ValueError: If none of the wheel's file tags match one of
- the supported tags.
- """
- try:
- return next(i for i, t in enumerate(tags) if t in self.file_tags)
- except StopIteration:
- raise ValueError()
-
- def find_most_preferred_tag(
- self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
- ) -> int:
- """Return the priority of the most preferred tag that one of the wheel's file
- tag combinations achieves in the given list of supported tags using the given
- tag_to_priority mapping, where lower priorities are more-preferred.
-
- This is used in place of support_index_min in some cases in order to avoid
- an expensive linear scan of a large list of tags.
-
- :param tags: the PEP 425 tags to check the wheel against.
- :param tag_to_priority: a mapping from tag to priority of that tag, where
- lower is more preferred.
-
- :raises ValueError: If none of the wheel's file tags match one of
- the supported tags.
- """
- return min(
- tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
- )
-
- def supported(self, tags: Iterable[Tag]) -> bool:
- """Return whether the wheel is compatible with one of the given tags.
-
- :param tags: the PEP 425 tags to check the wheel against.
- """
- return not self.file_tags.isdisjoint(tags)
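A sketch of filename parsing and tag matching (the filename is arbitrary but valid):

from pip._vendor.packaging.tags import Tag

from pip._internal.models.wheel import Wheel

w = Wheel("simplejson-3.19.1-cp311-cp311-manylinux2014_x86_64.whl")
print(w.name, w.version)            # simplejson 3.19.1
print(w.get_formatted_file_tags())  # ['cp311-cp311-manylinux2014_x86_64']

supported = [
    Tag("cp311", "cp311", "manylinux2014_x86_64"),  # most preferred
    Tag("py3", "none", "any"),
]
print(w.supported(supported))          # True
print(w.support_index_min(supported))  # 0
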
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/network/__init__.py
deleted file mode 100644
index b51bde9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""Contains purely network-related utilities.
-"""
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index ec24904..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc
deleted file mode 100644
index 133e9b5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/auth.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc
deleted file mode 100644
index 81014c1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/cache.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc
deleted file mode 100644
index 3123483..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/download.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc
deleted file mode 100644
index 17052b7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc
deleted file mode 100644
index e5dda57..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/session.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc
deleted file mode 100644
index 4ede2f9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/utils.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc
deleted file mode 100644
index 2bc7f9e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/auth.py b/venv/lib/python3.11/site-packages/pip/_internal/network/auth.py
deleted file mode 100644
index 94a82fa..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/auth.py
+++ /dev/null
@@ -1,561 +0,0 @@
-"""Network Authentication Helpers
-
-Contains interface (MultiDomainBasicAuth) and associated glue code for
-providing credentials in the context of network requests.
-"""
-import logging
-import os
-import shutil
-import subprocess
-import sysconfig
-import typing
-import urllib.parse
-from abc import ABC, abstractmethod
-from functools import lru_cache
-from os.path import commonprefix
-from pathlib import Path
-from typing import Any, Dict, List, NamedTuple, Optional, Tuple
-
-from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
-from pip._vendor.requests.models import Request, Response
-from pip._vendor.requests.utils import get_netrc_auth
-
-from pip._internal.utils.logging import getLogger
-from pip._internal.utils.misc import (
- ask,
- ask_input,
- ask_password,
- remove_auth_from_url,
- split_auth_netloc_from_url,
-)
-from pip._internal.vcs.versioncontrol import AuthInfo
-
-logger = getLogger(__name__)
-
-KEYRING_DISABLED = False
-
-
-class Credentials(NamedTuple):
- url: str
- username: str
- password: str
-
-
-class KeyRingBaseProvider(ABC):
- """Keyring base provider interface"""
-
- has_keyring: bool
-
- @abstractmethod
- def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
- ...
-
- @abstractmethod
- def save_auth_info(self, url: str, username: str, password: str) -> None:
- ...
-
-
-class KeyRingNullProvider(KeyRingBaseProvider):
- """Keyring null provider"""
-
- has_keyring = False
-
- def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
- return None
-
- def save_auth_info(self, url: str, username: str, password: str) -> None:
- return None
-
-
-class KeyRingPythonProvider(KeyRingBaseProvider):
- """Keyring interface which uses locally imported `keyring`"""
-
- has_keyring = True
-
- def __init__(self) -> None:
- import keyring
-
- self.keyring = keyring
-
- def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
- # Support keyring's get_credential interface which supports getting
- # credentials without a username. This is only available for
- # keyring>=15.2.0.
- if hasattr(self.keyring, "get_credential"):
- logger.debug("Getting credentials from keyring for %s", url)
- cred = self.keyring.get_credential(url, username)
- if cred is not None:
- return cred.username, cred.password
- return None
-
- if username is not None:
- logger.debug("Getting password from keyring for %s", url)
- password = self.keyring.get_password(url, username)
- if password:
- return username, password
- return None
-
- def save_auth_info(self, url: str, username: str, password: str) -> None:
- self.keyring.set_password(url, username, password)
-
-
-class KeyRingCliProvider(KeyRingBaseProvider):
- """Provider which uses `keyring` cli
-
-    Instead of calling the keyring package installed alongside pip,
-    we call keyring on the command line, which enables pip to use
-    whichever installation of keyring is found first on PATH.
- """
-
- has_keyring = True
-
- def __init__(self, cmd: str) -> None:
- self.keyring = cmd
-
- def get_auth_info(self, url: str, username: Optional[str]) -> Optional[AuthInfo]:
- # This is the default implementation of keyring.get_credential
- # https://github.com/jaraco/keyring/blob/97689324abcf01bd1793d49063e7ca01e03d7d07/keyring/backend.py#L134-L139
- if username is not None:
- password = self._get_password(url, username)
- if password is not None:
- return username, password
- return None
-
- def save_auth_info(self, url: str, username: str, password: str) -> None:
- return self._set_password(url, username, password)
-
- def _get_password(self, service_name: str, username: str) -> Optional[str]:
- """Mirror the implementation of keyring.get_password using cli"""
- if self.keyring is None:
- return None
-
- cmd = [self.keyring, "get", service_name, username]
- env = os.environ.copy()
- env["PYTHONIOENCODING"] = "utf-8"
- res = subprocess.run(
- cmd,
- stdin=subprocess.DEVNULL,
- stdout=subprocess.PIPE,
- env=env,
- )
- if res.returncode:
- return None
- return res.stdout.decode("utf-8").strip(os.linesep)
-
- def _set_password(self, service_name: str, username: str, password: str) -> None:
- """Mirror the implementation of keyring.set_password using cli"""
- if self.keyring is None:
- return None
- env = os.environ.copy()
- env["PYTHONIOENCODING"] = "utf-8"
- subprocess.run(
- [self.keyring, "set", service_name, username],
- input=f"{password}{os.linesep}".encode("utf-8"),
- env=env,
- check=True,
- )
- return None
-
-
-@lru_cache(maxsize=None)
-def get_keyring_provider(provider: str) -> KeyRingBaseProvider:
- logger.verbose("Keyring provider requested: %s", provider)
-
- # keyring has previously failed and been disabled
- if KEYRING_DISABLED:
- provider = "disabled"
- if provider in ["import", "auto"]:
- try:
- impl = KeyRingPythonProvider()
- logger.verbose("Keyring provider set: import")
- return impl
- except ImportError:
- pass
- except Exception as exc:
- # In the event of an unexpected exception
- # we should warn the user
- msg = "Installed copy of keyring fails with exception %s"
- if provider == "auto":
- msg = msg + ", trying to find a keyring executable as a fallback"
- logger.warning(msg, exc, exc_info=logger.isEnabledFor(logging.DEBUG))
- if provider in ["subprocess", "auto"]:
- cli = shutil.which("keyring")
- if cli and cli.startswith(sysconfig.get_path("scripts")):
- # all code within this function is stolen from shutil.which implementation
- @typing.no_type_check
- def PATH_as_shutil_which_determines_it() -> str:
- path = os.environ.get("PATH", None)
- if path is None:
- try:
- path = os.confstr("CS_PATH")
- except (AttributeError, ValueError):
- # os.confstr() or CS_PATH is not available
- path = os.defpath
- # bpo-35755: Don't use os.defpath if the PATH environment variable is
- # set to an empty string
-
- return path
-
- scripts = Path(sysconfig.get_path("scripts"))
-
- paths = []
- for path in PATH_as_shutil_which_determines_it().split(os.pathsep):
- p = Path(path)
- try:
- if not p.samefile(scripts):
- paths.append(path)
- except FileNotFoundError:
- pass
-
- path = os.pathsep.join(paths)
-
- cli = shutil.which("keyring", path=path)
-
- if cli:
- logger.verbose("Keyring provider set: subprocess with executable %s", cli)
- return KeyRingCliProvider(cli)
-
- logger.verbose("Keyring provider set: disabled")
- return KeyRingNullProvider()
-
-
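A sketch of the provider selection above: "auto" prefers an importable keyring module, then falls back to a `keyring` executable on PATH, and finally to the null provider:

from pip._internal.network.auth import get_keyring_provider

provider = get_keyring_provider("auto")
print(type(provider).__name__, provider.has_keyring)

# "disabled" always yields the null provider.
assert get_keyring_provider("disabled").has_keyring is False
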
-class MultiDomainBasicAuth(AuthBase):
- def __init__(
- self,
- prompting: bool = True,
- index_urls: Optional[List[str]] = None,
- keyring_provider: str = "auto",
- ) -> None:
- self.prompting = prompting
- self.index_urls = index_urls
- self.keyring_provider = keyring_provider # type: ignore[assignment]
- self.passwords: Dict[str, AuthInfo] = {}
- # When the user is prompted to enter credentials and keyring is
- # available, we will offer to save them. If the user accepts,
- # this value is set to the credentials they entered. After the
- # request authenticates, the caller should call
- # ``save_credentials`` to save these.
- self._credentials_to_save: Optional[Credentials] = None
-
- @property
- def keyring_provider(self) -> KeyRingBaseProvider:
- return get_keyring_provider(self._keyring_provider)
-
- @keyring_provider.setter
- def keyring_provider(self, provider: str) -> None:
-        # The free function get_keyring_provider has been decorated with
-        # functools.lru_cache. If an exception occurs in _get_keyring_auth,
-        # that cache will be cleared and keyring disabled; take that into
-        # account if you want to remove this indirection.
- self._keyring_provider = provider
-
- @property
- def use_keyring(self) -> bool:
- # We won't use keyring when --no-input is passed unless
- # a specific provider is requested because it might require
- # user interaction
- return self.prompting or self._keyring_provider not in ["auto", "disabled"]
-
- def _get_keyring_auth(
- self,
- url: Optional[str],
- username: Optional[str],
- ) -> Optional[AuthInfo]:
- """Return the tuple auth for a given url from keyring."""
- # Do nothing if no url was provided
- if not url:
- return None
-
- try:
- return self.keyring_provider.get_auth_info(url, username)
- except Exception as exc:
- logger.warning(
- "Keyring is skipped due to an exception: %s",
- str(exc),
- )
- global KEYRING_DISABLED
- KEYRING_DISABLED = True
- get_keyring_provider.cache_clear()
- return None
-
- def _get_index_url(self, url: str) -> Optional[str]:
- """Return the original index URL matching the requested URL.
-
- Cached or dynamically generated credentials may work against
- the original index URL rather than just the netloc.
-
- The provided url should have had its username and password
- removed already. If the original index url had credentials then
- they will be included in the return value.
-
- Returns None if no matching index was found, or if --no-index
- was specified by the user.
- """
- if not url or not self.index_urls:
- return None
-
- url = remove_auth_from_url(url).rstrip("/") + "/"
- parsed_url = urllib.parse.urlsplit(url)
-
- candidates = []
-
- for index in self.index_urls:
- index = index.rstrip("/") + "/"
- parsed_index = urllib.parse.urlsplit(remove_auth_from_url(index))
- if parsed_url == parsed_index:
- return index
-
- if parsed_url.netloc != parsed_index.netloc:
- continue
-
- candidate = urllib.parse.urlsplit(index)
- candidates.append(candidate)
-
- if not candidates:
- return None
-
- candidates.sort(
- reverse=True,
- key=lambda candidate: commonprefix(
- [
- parsed_url.path,
- candidate.path,
- ]
- ).rfind("/"),
- )
-
- return urllib.parse.urlunsplit(candidates[0])
-
- def _get_new_credentials(
- self,
- original_url: str,
- *,
- allow_netrc: bool = True,
- allow_keyring: bool = False,
- ) -> AuthInfo:
- """Find and return credentials for the specified URL."""
- # Split the credentials and netloc from the url.
- url, netloc, url_user_password = split_auth_netloc_from_url(
- original_url,
- )
-
- # Start with the credentials embedded in the url
- username, password = url_user_password
- if username is not None and password is not None:
- logger.debug("Found credentials in url for %s", netloc)
- return url_user_password
-
- # Find a matching index url for this request
- index_url = self._get_index_url(url)
- if index_url:
- # Split the credentials from the url.
- index_info = split_auth_netloc_from_url(index_url)
- if index_info:
- index_url, _, index_url_user_password = index_info
- logger.debug("Found index url %s", index_url)
-
- # If an index URL was found, try its embedded credentials
- if index_url and index_url_user_password[0] is not None:
- username, password = index_url_user_password
- if username is not None and password is not None:
- logger.debug("Found credentials in index url for %s", netloc)
- return index_url_user_password
-
- # Get creds from netrc if we still don't have them
- if allow_netrc:
- netrc_auth = get_netrc_auth(original_url)
- if netrc_auth:
- logger.debug("Found credentials in netrc for %s", netloc)
- return netrc_auth
-
- # If we don't have a password and keyring is available, use it.
- if allow_keyring:
- # The index url is more specific than the netloc, so try it first
- # fmt: off
- kr_auth = (
- self._get_keyring_auth(index_url, username) or
- self._get_keyring_auth(netloc, username)
- )
- # fmt: on
- if kr_auth:
- logger.debug("Found credentials in keyring for %s", netloc)
- return kr_auth
-
- return username, password
-
- def _get_url_and_credentials(
- self, original_url: str
- ) -> Tuple[str, Optional[str], Optional[str]]:
- """Return the credentials to use for the provided URL.
-
- If allowed, netrc and keyring may be used to obtain the
- correct credentials.
-
- Returns (url_without_credentials, username, password). Note
- that even if the original URL contains credentials, this
- function may return a different username and password.
- """
- url, netloc, _ = split_auth_netloc_from_url(original_url)
-
- # Try to get credentials from original url
- username, password = self._get_new_credentials(original_url)
-
- # If credentials not found, use any stored credentials for this netloc.
- # Do this if either the username or the password is missing.
- # This accounts for the situation in which the user has specified
- # the username in the index url, but the password comes from keyring.
- if (username is None or password is None) and netloc in self.passwords:
- un, pw = self.passwords[netloc]
- # It is possible that the cached credentials are for a different username,
- # in which case the cache should be ignored.
- if username is None or username == un:
- username, password = un, pw
-
- if username is not None or password is not None:
- # Convert the username and password if they're None, so that
- # this netloc will show up as "cached" in the conditional above.
- # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
- # cache the value that is going to be used.
- username = username or ""
- password = password or ""
-
- # Store any acquired credentials.
- self.passwords[netloc] = (username, password)
-
- assert (
- # Credentials were found
- (username is not None and password is not None)
- # Credentials were not found
- or (username is None and password is None)
- ), f"Could not load credentials from url: {original_url}"
-
- return url, username, password
-
- def __call__(self, req: Request) -> Request:
- # Get credentials for this request
- url, username, password = self._get_url_and_credentials(req.url)
-
- # Set the url of the request to the url without any credentials
- req.url = url
-
- if username is not None and password is not None:
- # Send the basic auth with this request
- req = HTTPBasicAuth(username, password)(req)
-
- # Attach a hook to handle 401 responses
- req.register_hook("response", self.handle_401)
-
- return req
-
- # Factored out to allow for easy patching in tests
- def _prompt_for_password(
- self, netloc: str
- ) -> Tuple[Optional[str], Optional[str], bool]:
- username = ask_input(f"User for {netloc}: ") if self.prompting else None
- if not username:
- return None, None, False
- if self.use_keyring:
- auth = self._get_keyring_auth(netloc, username)
- if auth and auth[0] is not None and auth[1] is not None:
- return auth[0], auth[1], False
- password = ask_password("Password: ")
- return username, password, True
-
- # Factored out to allow for easy patching in tests
- def _should_save_password_to_keyring(self) -> bool:
- if (
- not self.prompting
- or not self.use_keyring
- or not self.keyring_provider.has_keyring
- ):
- return False
- return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
-
- def handle_401(self, resp: Response, **kwargs: Any) -> Response:
-        # We only care about 401 responses; anything else we simply pass
-        # through unchanged.
- if resp.status_code != 401:
- return resp
-
- username, password = None, None
-
- # Query the keyring for credentials:
- if self.use_keyring:
- username, password = self._get_new_credentials(
- resp.url,
- allow_netrc=False,
- allow_keyring=True,
- )
-
- # We are not able to prompt the user so simply return the response
- if not self.prompting and not username and not password:
- return resp
-
- parsed = urllib.parse.urlparse(resp.url)
-
- # Prompt the user for a new username and password
- save = False
- if not username and not password:
- username, password, save = self._prompt_for_password(parsed.netloc)
-
- # Store the new username and password to use for future requests
- self._credentials_to_save = None
- if username is not None and password is not None:
- self.passwords[parsed.netloc] = (username, password)
-
- # Prompt to save the password to keyring
- if save and self._should_save_password_to_keyring():
- self._credentials_to_save = Credentials(
- url=parsed.netloc,
- username=username,
- password=password,
- )
-
- # Consume content and release the original connection to allow our new
- # request to reuse the same one.
-        # The result of the assignment isn't used; it's just needed to
-        # consume the content.
- _ = resp.content
- resp.raw.release_conn()
-
- # Add our new username and password to the request
- req = HTTPBasicAuth(username or "", password or "")(resp.request)
- req.register_hook("response", self.warn_on_401)
-
- # On successful request, save the credentials that were used to
- # keyring. (Note that if the user responded "no" above, this member
- # is not set and nothing will be saved.)
- if self._credentials_to_save:
- req.register_hook("response", self.save_credentials)
-
- # Send our new request
- new_resp = resp.connection.send(req, **kwargs)
- new_resp.history.append(resp)
-
- return new_resp
-
- def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
- """Response callback to warn about incorrect credentials."""
- if resp.status_code == 401:
- logger.warning(
- "401 Error, Credentials not correct for %s",
- resp.request.url,
- )
-
- def save_credentials(self, resp: Response, **kwargs: Any) -> None:
- """Response callback to save credentials on success."""
- assert (
- self.keyring_provider.has_keyring
- ), "should never reach here without keyring"
-
- creds = self._credentials_to_save
- self._credentials_to_save = None
- if creds and resp.status_code < 400:
- try:
- logger.info("Saving credentials to keyring")
- self.keyring_provider.save_auth_info(
- creds.url, creds.username, creds.password
- )
- except Exception:
- logger.exception("Failed to save credentials")
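The index-matching logic above picks, among the configured index URLs on the same host, the one whose path shares the longest directory prefix with the requested URL, so credentials attached to the right index get reused. A minimal standalone sketch of that idea (the function name and example URLs are hypothetical, not pip API):

import urllib.parse
from os.path import commonprefix

def best_matching_index(url: str, index_urls: list[str]) -> str | None:
    """Pick the index sharing the longest path prefix with `url`."""
    parsed_url = urllib.parse.urlsplit(url.rstrip("/") + "/")
    candidates = [
        urllib.parse.urlsplit(index.rstrip("/") + "/") for index in index_urls
    ]
    # Only indexes on the same host are eligible at all.
    candidates = [c for c in candidates if c.netloc == parsed_url.netloc]
    if not candidates:
        return None
    # Longest common path prefix (measured up to the last "/") wins.
    candidates.sort(
        reverse=True,
        key=lambda c: commonprefix([parsed_url.path, c.path]).rfind("/"),
    )
    return urllib.parse.urlunsplit(candidates[0])

print(best_matching_index(
    "https://pypi.example.com/team-a/simple/requests/",
    ["https://pypi.example.com/team-a/simple/",
     "https://pypi.example.com/team-b/simple/"],
))
# -> https://pypi.example.com/team-a/simple/
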
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/cache.py b/venv/lib/python3.11/site-packages/pip/_internal/network/cache.py
deleted file mode 100644
index 4d0fb54..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/cache.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""HTTP cache implementation.
-"""
-
-import os
-from contextlib import contextmanager
-from datetime import datetime
-from typing import BinaryIO, Generator, Optional, Union
-
-from pip._vendor.cachecontrol.cache import SeparateBodyBaseCache
-from pip._vendor.cachecontrol.caches import SeparateBodyFileCache
-from pip._vendor.requests.models import Response
-
-from pip._internal.utils.filesystem import adjacent_tmp_file, replace
-from pip._internal.utils.misc import ensure_dir
-
-
-def is_from_cache(response: Response) -> bool:
- return getattr(response, "from_cache", False)
-
-
-@contextmanager
-def suppressed_cache_errors() -> Generator[None, None, None]:
- """If we can't access the cache then we can just skip caching and process
- requests as if caching wasn't enabled.
- """
- try:
- yield
- except OSError:
- pass
-
-
-class SafeFileCache(SeparateBodyBaseCache):
- """
- A file based cache which is safe to use even when the target directory may
- not be accessible or writable.
-
- There is a race condition when two processes try to write and/or read the
- same entry at the same time, since each entry consists of two separate
- files (https://github.com/psf/cachecontrol/issues/324). We therefore have
-    additional logic that makes sure that both files are present before
- returning an entry; this fixes the read side of the race condition.
-
- For the write side, we assume that the server will only ever return the
- same data for the same URL, which ought to be the case for files pip is
- downloading. PyPI does not have a mechanism to swap out a wheel for
- another wheel, for example. If this assumption is not true, the
- CacheControl issue will need to be fixed.
- """
-
- def __init__(self, directory: str) -> None:
- assert directory is not None, "Cache directory must not be None."
- super().__init__()
- self.directory = directory
-
- def _get_cache_path(self, name: str) -> str:
- # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
- # class for backwards-compatibility and to avoid using a non-public
- # method.
- hashed = SeparateBodyFileCache.encode(name)
- parts = list(hashed[:5]) + [hashed]
- return os.path.join(self.directory, *parts)
-
- def get(self, key: str) -> Optional[bytes]:
- # The cache entry is only valid if both metadata and body exist.
- metadata_path = self._get_cache_path(key)
- body_path = metadata_path + ".body"
- if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
- return None
- with suppressed_cache_errors():
- with open(metadata_path, "rb") as f:
- return f.read()
-
- def _write(self, path: str, data: bytes) -> None:
- with suppressed_cache_errors():
- ensure_dir(os.path.dirname(path))
-
- with adjacent_tmp_file(path) as f:
- f.write(data)
-
- replace(f.name, path)
-
- def set(
- self, key: str, value: bytes, expires: Union[int, datetime, None] = None
- ) -> None:
- path = self._get_cache_path(key)
- self._write(path, value)
-
- def delete(self, key: str) -> None:
- path = self._get_cache_path(key)
- with suppressed_cache_errors():
- os.remove(path)
- with suppressed_cache_errors():
- os.remove(path + ".body")
-
- def get_body(self, key: str) -> Optional[BinaryIO]:
- # The cache entry is only valid if both metadata and body exist.
- metadata_path = self._get_cache_path(key)
- body_path = metadata_path + ".body"
- if not (os.path.exists(metadata_path) and os.path.exists(body_path)):
- return None
- with suppressed_cache_errors():
- return open(body_path, "rb")
-
- def set_body(self, key: str, body: bytes) -> None:
- path = self._get_cache_path(key) + ".body"
- self._write(path, body)
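SafeFileCache stores each entry as two sharded files (metadata plus a ".body" sibling), each written atomically. A rough standalone sketch of the path scheme and the write pattern; it assumes cachecontrol's FileCache.encode is a sha224 hex digest, and uses tempfile/os.replace in place of pip's adjacent_tmp_file/replace helpers:

import hashlib
import os
import tempfile

def cache_paths(directory: str, name: str) -> tuple[str, str]:
    # Assumption: SeparateBodyFileCache.encode is sha224 hex of the key.
    # The first five hex characters become nested shard directories.
    hashed = hashlib.sha224(name.encode()).hexdigest()
    metadata = os.path.join(directory, *hashed[:5], hashed)
    return metadata, metadata + ".body"

def atomic_write(path: str, data: bytes) -> None:
    # Write next to the target, then rename over it, so a concurrent
    # reader never observes a half-written file.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path))
    with os.fdopen(fd, "wb") as f:
        f.write(data)
    os.replace(tmp, path)

meta, body = cache_paths("/tmp/pip-http-cache", "https://example.com/simple/")
atomic_write(meta, b"cached headers")
atomic_write(body, b"cached body")

Atomic writes fix each individual file, but not the two-file race the docstring describes; that is why get()/get_body() insist on both files existing.
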
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/download.py b/venv/lib/python3.11/site-packages/pip/_internal/network/download.py
deleted file mode 100644
index d1d4354..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/download.py
+++ /dev/null
@@ -1,186 +0,0 @@
-"""Download files with progress indicators.
-"""
-import email.message
-import logging
-import mimetypes
-import os
-from typing import Iterable, Optional, Tuple
-
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-
-from pip._internal.cli.progress_bars import get_download_progress_renderer
-from pip._internal.exceptions import NetworkConnectionError
-from pip._internal.models.index import PyPI
-from pip._internal.models.link import Link
-from pip._internal.network.cache import is_from_cache
-from pip._internal.network.session import PipSession
-from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
-from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext
-
-logger = logging.getLogger(__name__)
-
-
-def _get_http_response_size(resp: Response) -> Optional[int]:
- try:
- return int(resp.headers["content-length"])
- except (ValueError, KeyError, TypeError):
- return None
-
-
-def _prepare_download(
- resp: Response,
- link: Link,
- progress_bar: str,
-) -> Iterable[bytes]:
- total_length = _get_http_response_size(resp)
-
- if link.netloc == PyPI.file_storage_domain:
- url = link.show_url
- else:
- url = link.url_without_fragment
-
- logged_url = redact_auth_from_url(url)
-
- if total_length:
- logged_url = f"{logged_url} ({format_size(total_length)})"
-
- if is_from_cache(resp):
- logger.info("Using cached %s", logged_url)
- else:
- logger.info("Downloading %s", logged_url)
-
- if logger.getEffectiveLevel() > logging.INFO:
- show_progress = False
- elif is_from_cache(resp):
- show_progress = False
- elif not total_length:
- show_progress = True
- elif total_length > (40 * 1000):
- show_progress = True
- else:
- show_progress = False
-
- chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)
-
- if not show_progress:
- return chunks
-
- renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
- return renderer(chunks)
-
-
-def sanitize_content_filename(filename: str) -> str:
- """
- Sanitize the "filename" value from a Content-Disposition header.
- """
- return os.path.basename(filename)
-
-
-def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
- """
- Parse the "filename" value from a Content-Disposition header, and
- return the default filename if the result is empty.
- """
- m = email.message.Message()
- m["content-type"] = content_disposition
- filename = m.get_param("filename")
- if filename:
- # We need to sanitize the filename to prevent directory traversal
- # in case the filename contains ".." path parts.
- filename = sanitize_content_filename(str(filename))
- return filename or default_filename
-
-
-def _get_http_response_filename(resp: Response, link: Link) -> str:
- """Get an ideal filename from the given HTTP response, falling back to
- the link filename if not provided.
- """
- filename = link.filename # fallback
- # Have a look at the Content-Disposition header for a better guess
- content_disposition = resp.headers.get("content-disposition")
- if content_disposition:
- filename = parse_content_disposition(content_disposition, filename)
- ext: Optional[str] = splitext(filename)[1]
- if not ext:
- ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
- if ext:
- filename += ext
- if not ext and link.url != resp.url:
- ext = os.path.splitext(resp.url)[1]
- if ext:
- filename += ext
- return filename
-
-
-def _http_get_download(session: PipSession, link: Link) -> Response:
- target_url = link.url.split("#", 1)[0]
- resp = session.get(target_url, headers=HEADERS, stream=True)
- raise_for_status(resp)
- return resp
-
-
-class Downloader:
- def __init__(
- self,
- session: PipSession,
- progress_bar: str,
- ) -> None:
- self._session = session
- self._progress_bar = progress_bar
-
- def __call__(self, link: Link, location: str) -> Tuple[str, str]:
- """Download the file given by link into location."""
- try:
- resp = _http_get_download(self._session, link)
- except NetworkConnectionError as e:
- assert e.response is not None
- logger.critical(
- "HTTP error %s while getting %s", e.response.status_code, link
- )
- raise
-
- filename = _get_http_response_filename(resp, link)
- filepath = os.path.join(location, filename)
-
- chunks = _prepare_download(resp, link, self._progress_bar)
- with open(filepath, "wb") as content_file:
- for chunk in chunks:
- content_file.write(chunk)
- content_type = resp.headers.get("Content-Type", "")
- return filepath, content_type
-
-
-class BatchDownloader:
- def __init__(
- self,
- session: PipSession,
- progress_bar: str,
- ) -> None:
- self._session = session
- self._progress_bar = progress_bar
-
- def __call__(
- self, links: Iterable[Link], location: str
- ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
- """Download the files given by links into location."""
- for link in links:
- try:
- resp = _http_get_download(self._session, link)
- except NetworkConnectionError as e:
- assert e.response is not None
- logger.critical(
- "HTTP error %s while getting %s",
- e.response.status_code,
- link,
- )
- raise
-
- filename = _get_http_response_filename(resp, link)
- filepath = os.path.join(location, filename)
-
- chunks = _prepare_download(resp, link, self._progress_bar)
- with open(filepath, "wb") as content_file:
- for chunk in chunks:
- content_file.write(chunk)
- content_type = resp.headers.get("Content-Type", "")
- yield link, (filepath, content_type)
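parse_content_disposition leans on the email machinery to parse the header, then strips any path components so a hostile "filename" value cannot escape the download directory. A self-contained sketch of the same idea:

import email.message
import os

def filename_from_content_disposition(value: str, default: str) -> str:
    # Mirrors parse_content_disposition/sanitize_content_filename above:
    # parse the header with the email machinery, then drop path parts
    # to block directory traversal.
    m = email.message.Message()
    m["content-type"] = value
    filename = m.get_param("filename")
    if filename:
        filename = os.path.basename(str(filename))
    return filename or default

print(filename_from_content_disposition(
    'attachment; filename="../../evil/pkg-1.0.tar.gz"', "pkg-1.0.tar.gz"))
# -> pkg-1.0.tar.gz (the "../../evil/" prefix is discarded)
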
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/lazy_wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/network/lazy_wheel.py
deleted file mode 100644
index 82ec50d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/lazy_wheel.py
+++ /dev/null
@@ -1,210 +0,0 @@
-"""Lazy ZIP over HTTP"""
-
-__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
-
-from bisect import bisect_left, bisect_right
-from contextlib import contextmanager
-from tempfile import NamedTemporaryFile
-from typing import Any, Dict, Generator, List, Optional, Tuple
-from zipfile import BadZipFile, ZipFile
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-
-from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
-from pip._internal.network.session import PipSession
-from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
-
-
-class HTTPRangeRequestUnsupported(Exception):
- pass
-
-
-def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
- """Return a distribution object from the given wheel URL.
-
- This uses HTTP range requests to only fetch the portion of the wheel
- containing metadata, just enough for the object to be constructed.
- If such requests are not supported, HTTPRangeRequestUnsupported
- is raised.
- """
- with LazyZipOverHTTP(url, session) as zf:
- # For read-only ZIP files, ZipFile only needs methods read,
- # seek, seekable and tell, not the whole IO protocol.
- wheel = MemoryWheel(zf.name, zf) # type: ignore
- # After context manager exit, wheel.name
- # is an invalid file by intention.
- return get_wheel_distribution(wheel, canonicalize_name(name))
-
-
-class LazyZipOverHTTP:
- """File-like object mapped to a ZIP file over HTTP.
-
- This uses HTTP range requests to lazily fetch the file's content,
- which is supposed to be fed to ZipFile. If such requests are not
- supported by the server, raise HTTPRangeRequestUnsupported
- during initialization.
- """
-
- def __init__(
- self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
- ) -> None:
- head = session.head(url, headers=HEADERS)
- raise_for_status(head)
- assert head.status_code == 200
- self._session, self._url, self._chunk_size = session, url, chunk_size
- self._length = int(head.headers["Content-Length"])
- self._file = NamedTemporaryFile()
- self.truncate(self._length)
- self._left: List[int] = []
- self._right: List[int] = []
- if "bytes" not in head.headers.get("Accept-Ranges", "none"):
- raise HTTPRangeRequestUnsupported("range request is not supported")
- self._check_zip()
-
- @property
- def mode(self) -> str:
- """Opening mode, which is always rb."""
- return "rb"
-
- @property
- def name(self) -> str:
- """Path to the underlying file."""
- return self._file.name
-
- def seekable(self) -> bool:
- """Return whether random access is supported, which is True."""
- return True
-
- def close(self) -> None:
- """Close the file."""
- self._file.close()
-
- @property
- def closed(self) -> bool:
- """Whether the file is closed."""
- return self._file.closed
-
- def read(self, size: int = -1) -> bytes:
- """Read up to size bytes from the object and return them.
-
- As a convenience, if size is unspecified or -1,
- all bytes until EOF are returned. Fewer than
- size bytes may be returned if EOF is reached.
- """
- download_size = max(size, self._chunk_size)
- start, length = self.tell(), self._length
- stop = length if size < 0 else min(start + download_size, length)
- start = max(0, stop - download_size)
- self._download(start, stop - 1)
- return self._file.read(size)
-
- def readable(self) -> bool:
- """Return whether the file is readable, which is True."""
- return True
-
- def seek(self, offset: int, whence: int = 0) -> int:
- """Change stream position and return the new absolute position.
-
-        Seek to offset relative to the position indicated by whence:
-        * 0: Start of stream (the default); offset should be >= 0.
-        * 1: Current position; offset may be negative.
-        * 2: End of stream; offset is usually negative.
- """
- return self._file.seek(offset, whence)
-
- def tell(self) -> int:
- """Return the current position."""
- return self._file.tell()
-
- def truncate(self, size: Optional[int] = None) -> int:
- """Resize the stream to the given size in bytes.
-
- If size is unspecified resize to the current position.
- The current stream position isn't changed.
-
- Return the new file size.
- """
- return self._file.truncate(size)
-
- def writable(self) -> bool:
- """Return False."""
- return False
-
- def __enter__(self) -> "LazyZipOverHTTP":
- self._file.__enter__()
- return self
-
- def __exit__(self, *exc: Any) -> None:
- self._file.__exit__(*exc)
-
- @contextmanager
- def _stay(self) -> Generator[None, None, None]:
- """Return a context manager keeping the position.
-
- At the end of the block, seek back to original position.
- """
- pos = self.tell()
- try:
- yield
- finally:
- self.seek(pos)
-
- def _check_zip(self) -> None:
- """Check and download until the file is a valid ZIP."""
- end = self._length - 1
- for start in reversed(range(0, end, self._chunk_size)):
- self._download(start, end)
- with self._stay():
- try:
- # For read-only ZIP files, ZipFile only needs
- # methods read, seek, seekable and tell.
- ZipFile(self) # type: ignore
- except BadZipFile:
- pass
- else:
- break
-
- def _stream_response(
- self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
- ) -> Response:
- """Return HTTP response to a range request from start to end."""
- headers = base_headers.copy()
- headers["Range"] = f"bytes={start}-{end}"
- # TODO: Get range requests to be correctly cached
- headers["Cache-Control"] = "no-cache"
- return self._session.get(self._url, headers=headers, stream=True)
-
- def _merge(
- self, start: int, end: int, left: int, right: int
- ) -> Generator[Tuple[int, int], None, None]:
- """Return a generator of intervals to be fetched.
-
- Args:
- start (int): Start of needed interval
- end (int): End of needed interval
- left (int): Index of first overlapping downloaded data
- right (int): Index after last overlapping downloaded data
- """
- lslice, rslice = self._left[left:right], self._right[left:right]
- i = start = min([start] + lslice[:1])
- end = max([end] + rslice[-1:])
- for j, k in zip(lslice, rslice):
- if j > i:
- yield i, j - 1
- i = k + 1
- if i <= end:
- yield i, end
- self._left[left:right], self._right[left:right] = [start], [end]
-
- def _download(self, start: int, end: int) -> None:
- """Download bytes from start to end inclusively."""
- with self._stay():
- left = bisect_left(self._right, start)
- right = bisect_right(self._left, end)
- for start, end in self._merge(start, end, left, right):
- response = self._stream_response(start, end)
- response.raise_for_status()
- self.seek(start)
- for chunk in response_chunks(response, self._chunk_size):
- self._file.write(chunk)
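_merge is the heart of the lazy download: _left/_right record the sorted, disjoint byte ranges already fetched, and each read requests only the gaps before coalescing the bookkeeping into a single interval. A standalone toy (same arithmetic as _merge; the class name is hypothetical) showing which ranges successive reads would actually download:

from bisect import bisect_left, bisect_right

class Intervals:
    """Toy version of LazyZipOverHTTP's bookkeeping of fetched ranges."""

    def __init__(self) -> None:
        self._left: list[int] = []
        self._right: list[int] = []

    def missing(self, start: int, end: int) -> list[tuple[int, int]]:
        # Same logic as _merge(): collect only the gaps inside
        # [start, end], then record one coalesced interval.
        left = bisect_left(self._right, start)
        right = bisect_right(self._left, end)
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        gaps = []
        for j, k in zip(lslice, rslice):
            if j > i:
                gaps.append((i, j - 1))
            i = k + 1
        if i <= end:
            gaps.append((i, end))
        self._left[left:right], self._right[left:right] = [start], [end]
        return gaps

iv = Intervals()
print(iv.missing(100, 199))  # [(100, 199)] -- nothing cached yet
print(iv.missing(150, 299))  # [(200, 299)] -- 150-199 already fetched
print(iv.missing(0, 399))    # [(0, 99), (300, 399)]
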
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/session.py b/venv/lib/python3.11/site-packages/pip/_internal/network/session.py
deleted file mode 100644
index f17efc5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/session.py
+++ /dev/null
@@ -1,520 +0,0 @@
-"""PipSession and supporting code, containing all pip-specific
-network request configuration and behavior.
-"""
-
-import email.utils
-import io
-import ipaddress
-import json
-import logging
-import mimetypes
-import os
-import platform
-import shutil
-import subprocess
-import sys
-import urllib.parse
-import warnings
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Generator,
- List,
- Mapping,
- Optional,
- Sequence,
- Tuple,
- Union,
-)
-
-from pip._vendor import requests, urllib3
-from pip._vendor.cachecontrol import CacheControlAdapter as _BaseCacheControlAdapter
-from pip._vendor.requests.adapters import DEFAULT_POOLBLOCK, BaseAdapter
-from pip._vendor.requests.adapters import HTTPAdapter as _BaseHTTPAdapter
-from pip._vendor.requests.models import PreparedRequest, Response
-from pip._vendor.requests.structures import CaseInsensitiveDict
-from pip._vendor.urllib3.connectionpool import ConnectionPool
-from pip._vendor.urllib3.exceptions import InsecureRequestWarning
-
-from pip import __version__
-from pip._internal.metadata import get_default_environment
-from pip._internal.models.link import Link
-from pip._internal.network.auth import MultiDomainBasicAuth
-from pip._internal.network.cache import SafeFileCache
-
-# Import ssl from compat so the initial import occurs in only one place.
-from pip._internal.utils.compat import has_tls
-from pip._internal.utils.glibc import libc_ver
-from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
-from pip._internal.utils.urls import url_to_path
-
-if TYPE_CHECKING:
- from ssl import SSLContext
-
- from pip._vendor.urllib3.poolmanager import PoolManager
-
-
-logger = logging.getLogger(__name__)
-
-SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
-
-
-# Ignore warning raised when using --trusted-host.
-warnings.filterwarnings("ignore", category=InsecureRequestWarning)
-
-
-SECURE_ORIGINS: List[SecureOrigin] = [
- # protocol, hostname, port
- # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
- ("https", "*", "*"),
- ("*", "localhost", "*"),
- ("*", "127.0.0.0/8", "*"),
- ("*", "::1/128", "*"),
- ("file", "*", None),
- # ssh is always secure.
- ("ssh", "*", "*"),
-]
-
-
-# These are environment variables present when running under various
-# CI systems. For each variable, some CI systems that use the variable
-# are indicated. The collection was chosen so that for each of a number
-# of popular systems, at least one of the environment variables is used.
-# This list is used to provide some indication of and lower bound for
-# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
-# For more background, see: https://github.com/pypa/pip/issues/5499
-CI_ENVIRONMENT_VARIABLES = (
- # Azure Pipelines
- "BUILD_BUILDID",
- # Jenkins
- "BUILD_ID",
- # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
- "CI",
- # Explicit environment variable.
- "PIP_IS_CI",
-)
-
-
-def looks_like_ci() -> bool:
- """
- Return whether it looks like pip is running under CI.
- """
- # We don't use the method of checking for a tty (e.g. using isatty())
- # because some CI systems mimic a tty (e.g. Travis CI). Thus that
- # method doesn't provide definitive information in either direction.
- return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
-
-
-def user_agent() -> str:
- """
- Return a string representing the user agent.
- """
- data: Dict[str, Any] = {
- "installer": {"name": "pip", "version": __version__},
- "python": platform.python_version(),
- "implementation": {
- "name": platform.python_implementation(),
- },
- }
-
- if data["implementation"]["name"] == "CPython":
- data["implementation"]["version"] = platform.python_version()
- elif data["implementation"]["name"] == "PyPy":
- pypy_version_info = sys.pypy_version_info # type: ignore
- if pypy_version_info.releaselevel == "final":
- pypy_version_info = pypy_version_info[:3]
- data["implementation"]["version"] = ".".join(
- [str(x) for x in pypy_version_info]
- )
- elif data["implementation"]["name"] == "Jython":
- # Complete Guess
- data["implementation"]["version"] = platform.python_version()
- elif data["implementation"]["name"] == "IronPython":
- # Complete Guess
- data["implementation"]["version"] = platform.python_version()
-
- if sys.platform.startswith("linux"):
- from pip._vendor import distro
-
- linux_distribution = distro.name(), distro.version(), distro.codename()
- distro_infos: Dict[str, Any] = dict(
- filter(
- lambda x: x[1],
- zip(["name", "version", "id"], linux_distribution),
- )
- )
- libc = dict(
- filter(
- lambda x: x[1],
- zip(["lib", "version"], libc_ver()),
- )
- )
- if libc:
- distro_infos["libc"] = libc
- if distro_infos:
- data["distro"] = distro_infos
-
- if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
- data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
-
- if platform.system():
- data.setdefault("system", {})["name"] = platform.system()
-
- if platform.release():
- data.setdefault("system", {})["release"] = platform.release()
-
- if platform.machine():
- data["cpu"] = platform.machine()
-
- if has_tls():
- import _ssl as ssl
-
- data["openssl_version"] = ssl.OPENSSL_VERSION
-
- setuptools_dist = get_default_environment().get_distribution("setuptools")
- if setuptools_dist is not None:
- data["setuptools_version"] = str(setuptools_dist.version)
-
- if shutil.which("rustc") is not None:
- # If for any reason `rustc --version` fails, silently ignore it
- try:
- rustc_output = subprocess.check_output(
- ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
- )
- except Exception:
- pass
- else:
- if rustc_output.startswith(b"rustc "):
- # The format of `rustc --version` is:
- # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
- # We extract just the middle (1.52.1) part
- data["rustc_version"] = rustc_output.split(b" ")[1].decode()
-
- # Use None rather than False so as not to give the impression that
- # pip knows it is not being run under CI. Rather, it is a null or
- # inconclusive result. Also, we include some value rather than no
- # value to make it easier to know that the check has been run.
- data["ci"] = True if looks_like_ci() else None
-
- user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
- if user_data is not None:
- data["user_data"] = user_data
-
- return "{data[installer][name]}/{data[installer][version]} {json}".format(
- data=data,
- json=json.dumps(data, separators=(",", ":"), sort_keys=True),
- )
-
-
-class LocalFSAdapter(BaseAdapter):
- def send(
- self,
- request: PreparedRequest,
- stream: bool = False,
- timeout: Optional[Union[float, Tuple[float, float]]] = None,
- verify: Union[bool, str] = True,
- cert: Optional[Union[str, Tuple[str, str]]] = None,
- proxies: Optional[Mapping[str, str]] = None,
- ) -> Response:
- pathname = url_to_path(request.url)
-
- resp = Response()
- resp.status_code = 200
- resp.url = request.url
-
- try:
- stats = os.stat(pathname)
- except OSError as exc:
- # format the exception raised as a io.BytesIO object,
- # to return a better error message:
- resp.status_code = 404
- resp.reason = type(exc).__name__
- resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
- else:
- modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
- content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
- resp.headers = CaseInsensitiveDict(
- {
- "Content-Type": content_type,
- "Content-Length": stats.st_size,
- "Last-Modified": modified,
- }
- )
-
- resp.raw = open(pathname, "rb")
- resp.close = resp.raw.close
-
- return resp
-
- def close(self) -> None:
- pass
-
-
-class _SSLContextAdapterMixin:
- """Mixin to add the ``ssl_context`` constructor argument to HTTP adapters.
-
- The additional argument is forwarded directly to the pool manager. This allows us
- to dynamically decide what SSL store to use at runtime, which is used to implement
- the optional ``truststore`` backend.
- """
-
- def __init__(
- self,
- *,
- ssl_context: Optional["SSLContext"] = None,
- **kwargs: Any,
- ) -> None:
- self._ssl_context = ssl_context
- super().__init__(**kwargs)
-
- def init_poolmanager(
- self,
- connections: int,
- maxsize: int,
- block: bool = DEFAULT_POOLBLOCK,
- **pool_kwargs: Any,
- ) -> "PoolManager":
- if self._ssl_context is not None:
- pool_kwargs.setdefault("ssl_context", self._ssl_context)
- return super().init_poolmanager( # type: ignore[misc]
- connections=connections,
- maxsize=maxsize,
- block=block,
- **pool_kwargs,
- )
-
-
-class HTTPAdapter(_SSLContextAdapterMixin, _BaseHTTPAdapter):
- pass
-
-
-class CacheControlAdapter(_SSLContextAdapterMixin, _BaseCacheControlAdapter):
- pass
-
-
-class InsecureHTTPAdapter(HTTPAdapter):
- def cert_verify(
- self,
- conn: ConnectionPool,
- url: str,
- verify: Union[bool, str],
- cert: Optional[Union[str, Tuple[str, str]]],
- ) -> None:
- super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
-
-
-class InsecureCacheControlAdapter(CacheControlAdapter):
- def cert_verify(
- self,
- conn: ConnectionPool,
- url: str,
- verify: Union[bool, str],
- cert: Optional[Union[str, Tuple[str, str]]],
- ) -> None:
- super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
-
-
-class PipSession(requests.Session):
- timeout: Optional[int] = None
-
- def __init__(
- self,
- *args: Any,
- retries: int = 0,
- cache: Optional[str] = None,
- trusted_hosts: Sequence[str] = (),
- index_urls: Optional[List[str]] = None,
- ssl_context: Optional["SSLContext"] = None,
- **kwargs: Any,
- ) -> None:
- """
- :param trusted_hosts: Domains not to emit warnings for when not using
- HTTPS.
- """
- super().__init__(*args, **kwargs)
-
- # Namespace the attribute with "pip_" just in case to prevent
- # possible conflicts with the base class.
- self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []
-
- # Attach our User Agent to the request
- self.headers["User-Agent"] = user_agent()
-
- # Attach our Authentication handler to the session
- self.auth = MultiDomainBasicAuth(index_urls=index_urls)
-
- # Create our urllib3.Retry instance which will allow us to customize
- # how we handle retries.
- retries = urllib3.Retry(
- # Set the total number of retries that a particular request can
- # have.
- total=retries,
- # A 503 error from PyPI typically means that the Fastly -> Origin
- # connection got interrupted in some way. A 503 error in general
- # is typically considered a transient error so we'll go ahead and
- # retry it.
- # A 500 may indicate transient error in Amazon S3
- # A 502 may be a transient error from a CDN like CloudFlare or CloudFront
- # A 520 or 527 - may indicate transient error in CloudFlare
- status_forcelist=[500, 502, 503, 520, 527],
- # Add a small amount of back off between failed requests in
- # order to prevent hammering the service.
- backoff_factor=0.25,
- ) # type: ignore
-
- # Our Insecure HTTPAdapter disables HTTPS validation. It does not
- # support caching so we'll use it for all http:// URLs.
- # If caching is disabled, we will also use it for
- # https:// hosts that we've marked as ignoring
- # TLS errors for (trusted-hosts).
- insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
-
- # We want to _only_ cache responses on securely fetched origins or when
- # the host is specified as trusted. We do this because
- # we can't validate the response of an insecurely/untrusted fetched
- # origin, and we don't want someone to be able to poison the cache and
- # require manual eviction from the cache to fix it.
- if cache:
- secure_adapter = CacheControlAdapter(
- cache=SafeFileCache(cache),
- max_retries=retries,
- ssl_context=ssl_context,
- )
- self._trusted_host_adapter = InsecureCacheControlAdapter(
- cache=SafeFileCache(cache),
- max_retries=retries,
- )
- else:
- secure_adapter = HTTPAdapter(max_retries=retries, ssl_context=ssl_context)
- self._trusted_host_adapter = insecure_adapter
-
- self.mount("https://", secure_adapter)
- self.mount("http://", insecure_adapter)
-
- # Enable file:// urls
- self.mount("file://", LocalFSAdapter())
-
- for host in trusted_hosts:
- self.add_trusted_host(host, suppress_logging=True)
-
- def update_index_urls(self, new_index_urls: List[str]) -> None:
- """
- :param new_index_urls: New index urls to update the authentication
- handler with.
- """
- self.auth.index_urls = new_index_urls
-
- def add_trusted_host(
- self, host: str, source: Optional[str] = None, suppress_logging: bool = False
- ) -> None:
- """
- :param host: It is okay to provide a host that has previously been
- added.
- :param source: An optional source string, for logging where the host
- string came from.
- """
- if not suppress_logging:
- msg = f"adding trusted host: {host!r}"
- if source is not None:
- msg += f" (from {source})"
- logger.info(msg)
-
- parsed_host, parsed_port = parse_netloc(host)
- if parsed_host is None:
- raise ValueError(f"Trusted host URL must include a host part: {host!r}")
- if (parsed_host, parsed_port) not in self.pip_trusted_origins:
- self.pip_trusted_origins.append((parsed_host, parsed_port))
-
- self.mount(
- build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
- )
- self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
- if not parsed_port:
- self.mount(
- build_url_from_netloc(host, scheme="http") + ":",
- self._trusted_host_adapter,
- )
- # Mount wildcard ports for the same host.
- self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)
-
- def iter_secure_origins(self) -> Generator[SecureOrigin, None, None]:
- yield from SECURE_ORIGINS
- for host, port in self.pip_trusted_origins:
- yield ("*", host, "*" if port is None else port)
-
- def is_secure_origin(self, location: Link) -> bool:
- # Determine if this url used a secure transport mechanism
- parsed = urllib.parse.urlparse(str(location))
- origin_protocol, origin_host, origin_port = (
- parsed.scheme,
- parsed.hostname,
- parsed.port,
- )
-
-        # Determine the protocol to check against the secure origins.
-        # Don't count the repository type as part of the protocol: in
-        # cases such as "git+ssh", only use "ssh". (I.e., only verify against
-        # the last scheme.)
- origin_protocol = origin_protocol.rsplit("+", 1)[-1]
-
- # Determine if our origin is a secure origin by looking through our
- # hardcoded list of secure origins, as well as any additional ones
- # configured on this PackageFinder instance.
- for secure_origin in self.iter_secure_origins():
- secure_protocol, secure_host, secure_port = secure_origin
- if origin_protocol != secure_protocol and secure_protocol != "*":
- continue
-
- try:
- addr = ipaddress.ip_address(origin_host or "")
- network = ipaddress.ip_network(secure_host)
- except ValueError:
-                # We don't have both a valid address and a valid network, so
- # we'll check this origin against hostnames.
- if (
- origin_host
- and origin_host.lower() != secure_host.lower()
- and secure_host != "*"
- ):
- continue
- else:
- # We have a valid address and network, so see if the address
- # is contained within the network.
- if addr not in network:
- continue
-
- # Check to see if the port matches.
- if (
- origin_port != secure_port
- and secure_port != "*"
- and secure_port is not None
- ):
- continue
-
- # If we've gotten here, then this origin matches the current
- # secure origin and we should return True
- return True
-
- # If we've gotten to this point, then the origin isn't secure and we
- # will not accept it as a valid location to search. We will however
- # log a warning that we are ignoring it.
- logger.warning(
- "The repository located at %s is not a trusted or secure host and "
- "is being ignored. If this repository is available via HTTPS we "
- "recommend you use HTTPS instead, otherwise you may silence "
- "this warning and allow it anyway with '--trusted-host %s'.",
- origin_host,
- origin_host,
- )
-
- return False
-
- def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
- # Allow setting a default timeout on a session
- kwargs.setdefault("timeout", self.timeout)
- # Allow setting a default proxies on a session
- kwargs.setdefault("proxies", self.proxies)
-
- # Dispatch the actual request
- return super().request(method, url, *args, **kwargs)
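A usage sketch of the constructor shown above. PipSession is pip-internal API, so treat the exact signature as subject to change; the cache path and trusted host here are hypothetical:

from pip._internal.network.session import PipSession

session = PipSession(
    retries=3,
    cache="/tmp/pip-http-cache",           # hypothetical cache directory
    trusted_hosts=["pypi.internal:8080"],  # hypothetical plain-HTTP mirror
    index_urls=["https://pypi.org/simple/"],
)
session.timeout = 15  # picked up by request() via kwargs.setdefault

resp = session.get("https://pypi.org/simple/pip/")
print(resp.status_code, resp.headers.get("Content-Type"))
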
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/utils.py b/venv/lib/python3.11/site-packages/pip/_internal/network/utils.py
deleted file mode 100644
index 134848a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/utils.py
+++ /dev/null
@@ -1,96 +0,0 @@
-from typing import Dict, Generator
-
-from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
-
-from pip._internal.exceptions import NetworkConnectionError
-
-# The following comments and HTTP headers were originally added by
-# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
-#
-# We use Accept-Encoding: identity here because requests defaults to
-# accepting compressed responses. This breaks in a variety of ways
-# depending on how the server is configured.
-# - Some servers will notice that the file isn't a compressible file
-# and will leave the file alone and with an empty Content-Encoding
-# - Some servers will notice that the file is already compressed and
-# will leave the file alone, adding a Content-Encoding: gzip header
-# - Some servers won't notice anything at all and will take a file
-# that's already been compressed and compress it again, and set
-# the Content-Encoding: gzip header
-# By setting this to request only the identity encoding we're hoping
-# to eliminate the third case. Hopefully there does not exist a server
-# which when given a file will notice it is already compressed and that
-# you're not asking for a compressed file and will then decompress it
-# before sending because if that's the case I don't think it'll ever be
-# possible to make this work.
-HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}
-
-
-def raise_for_status(resp: Response) -> None:
- http_error_msg = ""
- if isinstance(resp.reason, bytes):
- # We attempt to decode utf-8 first because some servers
- # choose to localize their reason strings. If the string
- # isn't utf-8, we fall back to iso-8859-1 for all other
- # encodings.
- try:
- reason = resp.reason.decode("utf-8")
- except UnicodeDecodeError:
- reason = resp.reason.decode("iso-8859-1")
- else:
- reason = resp.reason
-
- if 400 <= resp.status_code < 500:
- http_error_msg = (
- f"{resp.status_code} Client Error: {reason} for url: {resp.url}"
- )
-
- elif 500 <= resp.status_code < 600:
- http_error_msg = (
- f"{resp.status_code} Server Error: {reason} for url: {resp.url}"
- )
-
- if http_error_msg:
- raise NetworkConnectionError(http_error_msg, response=resp)
-
-
-def response_chunks(
- response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
-) -> Generator[bytes, None, None]:
- """Given a requests Response, provide the data chunks."""
- try:
- # Special case for urllib3.
- for chunk in response.raw.stream(
- chunk_size,
- # We use decode_content=False here because we don't
- # want urllib3 to mess with the raw bytes we get
- # from the server. If we decompress inside of
- # urllib3 then we cannot verify the checksum
- # because the checksum will be of the compressed
- # file. This breakage will only occur if the
- # server adds a Content-Encoding header, which
- # depends on how the server was configured:
- # - Some servers will notice that the file isn't a
- # compressible file and will leave the file alone
- # and with an empty Content-Encoding
- # - Some servers will notice that the file is
- # already compressed and will leave the file
- # alone and will add a Content-Encoding: gzip
- # header
- # - Some servers won't notice anything at all and
- # will take a file that's already been compressed
- # and compress it again and set the
- # Content-Encoding: gzip header
- #
- # By setting this not to decode automatically we
- # hope to eliminate problems with the second case.
- decode_content=False,
- ):
- yield chunk
- except AttributeError:
- # Standard file-like object.
- while True:
- chunk = response.raw.read(chunk_size)
- if not chunk:
- break
- yield chunk
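The same identity-encoding and decode_content=False discipline can be reproduced with plain requests/urllib3, and is what keeps downloaded bytes hash-stable against servers that re-encode responses. A sketch with a hypothetical URL:

import hashlib

import requests  # stand-in for pip._vendor.requests

url = "https://example.com/packages/pkg-1.0-py3-none-any.whl"  # hypothetical
resp = requests.get(url, headers={"Accept-Encoding": "identity"}, stream=True)
resp.raise_for_status()

digest = hashlib.sha256()
# decode_content=False keeps the exact on-the-wire bytes, so the digest
# matches the published checksum even if a Content-Encoding sneaks in.
for chunk in resp.raw.stream(10 * 1024, decode_content=False):
    digest.update(chunk)
print(digest.hexdigest())
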
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/network/xmlrpc.py b/venv/lib/python3.11/site-packages/pip/_internal/network/xmlrpc.py
deleted file mode 100644
index 22ec8d2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/network/xmlrpc.py
+++ /dev/null
@@ -1,62 +0,0 @@
-"""xmlrpclib.Transport implementation
-"""
-
-import logging
-import urllib.parse
-import xmlrpc.client
-from typing import TYPE_CHECKING, Tuple
-
-from pip._internal.exceptions import NetworkConnectionError
-from pip._internal.network.session import PipSession
-from pip._internal.network.utils import raise_for_status
-
-if TYPE_CHECKING:
- from xmlrpc.client import _HostType, _Marshallable
-
- from _typeshed import SizedBuffer
-
-logger = logging.getLogger(__name__)
-
-
-class PipXmlrpcTransport(xmlrpc.client.Transport):
- """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
- object.
- """
-
- def __init__(
- self, index_url: str, session: PipSession, use_datetime: bool = False
- ) -> None:
- super().__init__(use_datetime)
- index_parts = urllib.parse.urlparse(index_url)
- self._scheme = index_parts.scheme
- self._session = session
-
- def request(
- self,
- host: "_HostType",
- handler: str,
- request_body: "SizedBuffer",
- verbose: bool = False,
- ) -> Tuple["_Marshallable", ...]:
- assert isinstance(host, str)
- parts = (self._scheme, host, handler, None, None, None)
- url = urllib.parse.urlunparse(parts)
- try:
- headers = {"Content-Type": "text/xml"}
- response = self._session.post(
- url,
- data=request_body,
- headers=headers,
- stream=True,
- )
- raise_for_status(response)
- self.verbose = verbose
- return self.parse_response(response.raw)
- except NetworkConnectionError as exc:
- assert exc.response
- logger.critical(
- "HTTP error %s while getting %s",
- exc.response.status_code,
- url,
- )
- raise
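A sketch of how this transport would be wired into the standard library's ServerProxy. PyPI has retired most of its XML-RPC API (search in particular), so whether any given method still answers is an assumption here:

import xmlrpc.client

from pip._internal.network.session import PipSession
from pip._internal.network.xmlrpc import PipXmlrpcTransport

index_url = "https://pypi.org/pypi"  # PyPI's (largely retired) XML-RPC endpoint
session = PipSession()
transport = PipXmlrpcTransport(index_url, session)
pypi = xmlrpc.client.ServerProxy(index_url, transport=transport)

# Assumption: changelog_last_serial is still served at the time of writing.
print(pypi.changelog_last_serial())
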
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 4439be7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc
deleted file mode 100644
index 5678f8f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/check.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc
deleted file mode 100644
index 2c850cd..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc
deleted file mode 100644
index 07536cf..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index aa8b9b4..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc
deleted file mode 100644
index 9fa988b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/build_tracker.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc
deleted file mode 100644
index 4f895a1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc
deleted file mode 100644
index 064dcf5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc
deleted file mode 100644
index 6843880..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index dfe9cb2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc
deleted file mode 100644
index 8c85fb0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc
deleted file mode 100644
index 918e730..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py
deleted file mode 100644
index 3791932..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/build_tracker.py
+++ /dev/null
@@ -1,139 +0,0 @@
-import contextlib
-import hashlib
-import logging
-import os
-from types import TracebackType
-from typing import Dict, Generator, Optional, Set, Type, Union
-
-from pip._internal.models.link import Link
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-@contextlib.contextmanager
-def update_env_context_manager(**changes: str) -> Generator[None, None, None]:
- target = os.environ
-
- # Save values from the target and change them.
- non_existent_marker = object()
- saved_values: Dict[str, Union[object, str]] = {}
- for name, new_value in changes.items():
- try:
- saved_values[name] = target[name]
- except KeyError:
- saved_values[name] = non_existent_marker
- target[name] = new_value
-
- try:
- yield
- finally:
- # Restore original values in the target.
- for name, original_value in saved_values.items():
- if original_value is non_existent_marker:
- del target[name]
- else:
- assert isinstance(original_value, str) # for mypy
- target[name] = original_value
-
-
-@contextlib.contextmanager
-def get_build_tracker() -> Generator["BuildTracker", None, None]:
- root = os.environ.get("PIP_BUILD_TRACKER")
- with contextlib.ExitStack() as ctx:
- if root is None:
- root = ctx.enter_context(TempDirectory(kind="build-tracker")).path
- ctx.enter_context(update_env_context_manager(PIP_BUILD_TRACKER=root))
- logger.debug("Initialized build tracking at %s", root)
-
- with BuildTracker(root) as tracker:
- yield tracker
-
-
-class TrackerId(str):
- """Uniquely identifying string provided to the build tracker."""
-
-
-class BuildTracker:
- """Ensure that an sdist cannot request itself as a setup requirement.
-
- When an sdist is prepared, it identifies its setup requirements in the
- context of ``BuildTracker.track()``. If a requirement shows up recursively, this
- raises an exception.
-
- This stops fork bombs embedded in malicious packages."""
-
- def __init__(self, root: str) -> None:
- self._root = root
- self._entries: Dict[TrackerId, InstallRequirement] = {}
- logger.debug("Created build tracker: %s", self._root)
-
- def __enter__(self) -> "BuildTracker":
- logger.debug("Entered build tracker: %s", self._root)
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> None:
- self.cleanup()
-
- def _entry_path(self, key: TrackerId) -> str:
- hashed = hashlib.sha224(key.encode()).hexdigest()
- return os.path.join(self._root, hashed)
-
- def add(self, req: InstallRequirement, key: TrackerId) -> None:
- """Add an InstallRequirement to build tracking."""
-
- # Get the file to write information about this requirement.
- entry_path = self._entry_path(key)
-
- # Try reading from the file. If it exists and can be read from, a build
- # is already in progress, so a LookupError is raised.
- try:
- with open(entry_path) as fp:
- contents = fp.read()
- except FileNotFoundError:
- pass
- else:
- message = "{} is already being built: {}".format(req.link, contents)
- raise LookupError(message)
-
- # If we're here, req should really not be building already.
- assert key not in self._entries
-
- # Start tracking this requirement.
- with open(entry_path, "w", encoding="utf-8") as fp:
- fp.write(str(req))
- self._entries[key] = req
-
- logger.debug("Added %s to build tracker %r", req, self._root)
-
- def remove(self, req: InstallRequirement, key: TrackerId) -> None:
- """Remove an InstallRequirement from build tracking."""
-
- # Delete the created file and the corresponding entry.
- os.unlink(self._entry_path(key))
- del self._entries[key]
-
- logger.debug("Removed %s from build tracker %r", req, self._root)
-
- def cleanup(self) -> None:
- for key, req in list(self._entries.items()):
- self.remove(req, key)
-
- logger.debug("Removed build tracker: %r", self._root)
-
- @contextlib.contextmanager
- def track(self, req: InstallRequirement, key: str) -> Generator[None, None, None]:
- """Ensure that `key` cannot install itself as a setup requirement.
-
- :raises LookupError: If `key` was already provided in a parent invocation of
- the context introduced by this method."""
- tracker_id = TrackerId(key)
- self.add(req, tracker_id)
- yield
- self.remove(req, tracker_id)
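update_env_context_manager is what lets PIP_BUILD_TRACKER propagate to child builds and then be cleanly restored. A small self-contained demonstration of its save-and-restore behavior:

import os

from pip._internal.operations.build.build_tracker import update_env_context_manager

os.environ.pop("DEMO_FLAG", None)
with update_env_context_manager(DEMO_FLAG="1"):
    # The variable is live here, and inherited by any subprocess spawned.
    assert os.environ["DEMO_FLAG"] == "1"
assert "DEMO_FLAG" not in os.environ  # restored (here: removed) on exit

Because the tracker root travels through the environment, a recursively spawned pip inside a build sees the same BuildTracker directory, which is exactly what lets track() detect a requirement building itself.
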
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py
deleted file mode 100644
index c66ac35..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""Metadata generation logic for source distributions.
-"""
-
-import os
-
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.exceptions import (
- InstallationSubprocessError,
- MetadataGenerationFailed,
-)
-from pip._internal.utils.subprocess import runner_with_spinner_message
-from pip._internal.utils.temp_dir import TempDirectory
-
-
-def generate_metadata(
- build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
-) -> str:
- """Generate metadata using mechanisms described in PEP 517.
-
- Returns the generated metadata directory.
- """
- metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
-
- metadata_dir = metadata_tmpdir.path
-
- with build_env:
- # Note that BuildBackendHookCaller implements a fallback for
- # prepare_metadata_for_build_wheel, so we don't have to
- # consider the possibility that this hook doesn't exist.
- runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
- with backend.subprocess_runner(runner):
- try:
- distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
- except InstallationSubprocessError as error:
- raise MetadataGenerationFailed(package_details=details) from error
-
- return os.path.join(metadata_dir, distinfo_dir)
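The same hook can be driven directly with pyproject_hooks (vendored here; installable separately as pyproject-hooks). A hedged sketch with a hypothetical project path, assuming the default subprocess runner and the setuptools PEP 517 backend; the PEP 660 editable variant below is the analogous prepare_metadata_for_build_editable call:

import tempfile

from pip._vendor.pyproject_hooks import BuildBackendHookCaller

source_dir = "/path/to/project"  # hypothetical project with a pyproject.toml
backend = BuildBackendHookCaller(source_dir, "setuptools.build_meta")

metadata_dir = tempfile.mkdtemp(prefix="demo-metadata-")
# Runs the backend hook in a subprocess; returns the .dist-info dir name.
distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
print("metadata in", f"{metadata_dir}/{distinfo_dir}")
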
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py
deleted file mode 100644
index 27c69f0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_editable.py
+++ /dev/null
@@ -1,41 +0,0 @@
-"""Metadata generation logic for source distributions.
-"""
-
-import os
-
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.exceptions import (
- InstallationSubprocessError,
- MetadataGenerationFailed,
-)
-from pip._internal.utils.subprocess import runner_with_spinner_message
-from pip._internal.utils.temp_dir import TempDirectory
-
-
-def generate_editable_metadata(
- build_env: BuildEnvironment, backend: BuildBackendHookCaller, details: str
-) -> str:
- """Generate metadata using mechanisms described in PEP 660.
-
- Returns the generated metadata directory.
- """
- metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
-
- metadata_dir = metadata_tmpdir.path
-
- with build_env:
- # Note that BuildBackendHookCaller implements a fallback for
- # prepare_metadata_for_build_wheel/editable, so we don't have to
- # consider the possibility that this hook doesn't exist.
- runner = runner_with_spinner_message(
- "Preparing editable metadata (pyproject.toml)"
- )
- with backend.subprocess_runner(runner):
- try:
- distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
- except InstallationSubprocessError as error:
- raise MetadataGenerationFailed(package_details=details) from error
-
- return os.path.join(metadata_dir, distinfo_dir)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py
deleted file mode 100644
index e60988d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/metadata_legacy.py
+++ /dev/null
@@ -1,74 +0,0 @@
-"""Metadata generation logic for legacy source distributions.
-"""
-
-import logging
-import os
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.cli.spinners import open_spinner
-from pip._internal.exceptions import (
- InstallationError,
- InstallationSubprocessError,
- MetadataGenerationFailed,
-)
-from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
-from pip._internal.utils.subprocess import call_subprocess
-from pip._internal.utils.temp_dir import TempDirectory
-
-logger = logging.getLogger(__name__)
-
-
-def _find_egg_info(directory: str) -> str:
- """Find an .egg-info subdirectory in `directory`."""
- filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
-
- if not filenames:
- raise InstallationError(f"No .egg-info directory found in {directory}")
-
- if len(filenames) > 1:
-        raise InstallationError(
-            f"More than one .egg-info directory found in {directory}"
-        )
-
- return os.path.join(directory, filenames[0])
-
-
-def generate_metadata(
- build_env: BuildEnvironment,
- setup_py_path: str,
- source_dir: str,
- isolated: bool,
- details: str,
-) -> str:
-    """Generate metadata using setup.py-based de facto mechanisms.
-
- Returns the generated metadata directory.
- """
- logger.debug(
- "Running setup.py (path:%s) egg_info for package %s",
- setup_py_path,
- details,
- )
-
- egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
-
- args = make_setuptools_egg_info_args(
- setup_py_path,
- egg_info_dir=egg_info_dir,
- no_user_config=isolated,
- )
-
- with build_env:
- with open_spinner("Preparing metadata (setup.py)") as spinner:
- try:
- call_subprocess(
- args,
- cwd=source_dir,
- command_desc="python setup.py egg_info",
- spinner=spinner,
- )
- except InstallationSubprocessError as error:
- raise MetadataGenerationFailed(package_details=details) from error
-
- # Return the .egg-info directory.
- return _find_egg_info(egg_info_dir)
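
Stripped of pip's build-environment and spinner plumbing, the legacy path is a `setup.py egg_info` subprocess followed by the .egg-info lookup above. A rough standalone sketch, assuming a setuptools-based project (--egg-base is the flag make_setuptools_egg_info_args passes for egg_info_dir):

    import os
    import subprocess
    import sys
    import tempfile

    def run_egg_info(setup_py_path: str, source_dir: str) -> str:
        """Run `setup.py egg_info` and return the generated .egg-info dir."""
        egg_base = tempfile.mkdtemp(prefix="demo-egg-info-")
        subprocess.run(
            [sys.executable, setup_py_path, "egg_info", "--egg-base", egg_base],
            cwd=source_dir,
            check=True,
        )
        found = [f for f in os.listdir(egg_base) if f.endswith(".egg-info")]
        if len(found) != 1:
            raise RuntimeError(f"expected exactly one .egg-info dir, got {found}")
        return os.path.join(egg_base, found[0])
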
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py
deleted file mode 100644
index 064811a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel.py
+++ /dev/null
@@ -1,37 +0,0 @@
-import logging
-import os
-from typing import Optional
-
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-
-from pip._internal.utils.subprocess import runner_with_spinner_message
-
-logger = logging.getLogger(__name__)
-
-
-def build_wheel_pep517(
- name: str,
- backend: BuildBackendHookCaller,
- metadata_directory: str,
- tempd: str,
-) -> Optional[str]:
- """Build one InstallRequirement using the PEP 517 build process.
-
- Returns path to wheel if successfully built. Otherwise, returns None.
- """
- assert metadata_directory is not None
- try:
- logger.debug("Destination directory: %s", tempd)
-
- runner = runner_with_spinner_message(
- f"Building wheel for {name} (pyproject.toml)"
- )
- with backend.subprocess_runner(runner):
- wheel_name = backend.build_wheel(
- tempd,
- metadata_directory=metadata_directory,
- )
- except Exception:
- logger.error("Failed building wheel for %s", name)
- return None
- return os.path.join(tempd, wheel_name)
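
The corresponding hook call can likewise be made directly through pyproject_hooks; a minimal sketch of build_wheel, reusing a metadata directory produced by a prior prepare_metadata_for_build_wheel call (paths hypothetical, no build isolation or error handling):

    import os
    import tempfile

    from pyproject_hooks import BuildBackendHookCaller

    def build_wheel(source_dir: str, backend: str, metadata_directory: str) -> str:
        """Return the path of the freshly built wheel."""
        caller = BuildBackendHookCaller(source_dir, backend)
        out_dir = tempfile.mkdtemp(prefix="demo-wheel-")
        # Passing the previously generated .dist-info directory lets the
        # backend reuse it instead of regenerating the metadata.
        wheel_name = caller.build_wheel(out_dir, metadata_directory=metadata_directory)
        return os.path.join(out_dir, wheel_name)
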
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py
deleted file mode 100644
index 719d69d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_editable.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import logging
-import os
-from typing import Optional
-
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller, HookMissing
-
-from pip._internal.utils.subprocess import runner_with_spinner_message
-
-logger = logging.getLogger(__name__)
-
-
-def build_wheel_editable(
- name: str,
- backend: BuildBackendHookCaller,
- metadata_directory: str,
- tempd: str,
-) -> Optional[str]:
- """Build one InstallRequirement using the PEP 660 build process.
-
- Returns path to wheel if successfully built. Otherwise, returns None.
- """
- assert metadata_directory is not None
- try:
- logger.debug("Destination directory: %s", tempd)
-
- runner = runner_with_spinner_message(
- f"Building editable for {name} (pyproject.toml)"
- )
- with backend.subprocess_runner(runner):
- try:
- wheel_name = backend.build_editable(
- tempd,
- metadata_directory=metadata_directory,
- )
- except HookMissing as e:
- logger.error(
- "Cannot build editable %s because the build "
- "backend does not have the %s hook",
- name,
- e,
- )
- return None
- except Exception:
- logger.error("Failed building editable for %s", name)
- return None
- return os.path.join(tempd, wheel_name)
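
The HookMissing branch above matters because build_editable is an optional hook: a PEP 660-unaware backend simply does not define it. A standalone sketch of probing for editable support (backend name and paths hypothetical):

    import os
    import tempfile
    from typing import Optional

    from pyproject_hooks import BuildBackendHookCaller, HookMissing

    def try_build_editable(source_dir: str, backend: str) -> Optional[str]:
        """Return the wheel path, or None if the backend lacks PEP 660 support."""
        caller = BuildBackendHookCaller(source_dir, backend)
        out_dir = tempfile.mkdtemp(prefix="demo-editable-")
        try:
            wheel_name = caller.build_editable(out_dir)
        except HookMissing as e:
            print(f"backend {backend} does not provide the {e} hook")
            return None
        return os.path.join(out_dir, wheel_name)
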
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py
deleted file mode 100644
index c5f0492..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/build/wheel_legacy.py
+++ /dev/null
@@ -1,102 +0,0 @@
-import logging
-import os.path
-from typing import List, Optional
-
-from pip._internal.cli.spinners import open_spinner
-from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
-from pip._internal.utils.subprocess import call_subprocess, format_command_args
-
-logger = logging.getLogger(__name__)
-
-
-def format_command_result(
- command_args: List[str],
- command_output: str,
-) -> str:
- """Format command information for logging."""
- command_desc = format_command_args(command_args)
- text = f"Command arguments: {command_desc}\n"
-
- if not command_output:
- text += "Command output: None"
- elif logger.getEffectiveLevel() > logging.DEBUG:
- text += "Command output: [use --verbose to show]"
- else:
- if not command_output.endswith("\n"):
- command_output += "\n"
- text += f"Command output:\n{command_output}"
-
- return text
-
-
-def get_legacy_build_wheel_path(
- names: List[str],
- temp_dir: str,
- name: str,
- command_args: List[str],
- command_output: str,
-) -> Optional[str]:
- """Return the path to the wheel in the temporary build directory."""
- # Sort for determinism.
- names = sorted(names)
- if not names:
-        msg = f"Legacy build of wheel for {name!r} created no files.\n"
- msg += format_command_result(command_args, command_output)
- logger.warning(msg)
- return None
-
- if len(names) > 1:
-        msg = (
-            f"Legacy build of wheel for {name!r} created more than one file.\n"
-            f"Filenames (choosing first): {names}\n"
-        )
- msg += format_command_result(command_args, command_output)
- logger.warning(msg)
-
- return os.path.join(temp_dir, names[0])
-
-
-def build_wheel_legacy(
- name: str,
- setup_py_path: str,
- source_dir: str,
- global_options: List[str],
- build_options: List[str],
- tempd: str,
-) -> Optional[str]:
- """Build one unpacked package using the "legacy" build process.
-
- Returns path to wheel if successfully built. Otherwise, returns None.
- """
- wheel_args = make_setuptools_bdist_wheel_args(
- setup_py_path,
- global_options=global_options,
- build_options=build_options,
- destination_dir=tempd,
- )
-
- spin_message = f"Building wheel for {name} (setup.py)"
- with open_spinner(spin_message) as spinner:
- logger.debug("Destination directory: %s", tempd)
-
- try:
- output = call_subprocess(
- wheel_args,
- command_desc="python setup.py bdist_wheel",
- cwd=source_dir,
- spinner=spinner,
- )
- except Exception:
- spinner.finish("error")
- logger.error("Failed building wheel for %s", name)
- return None
-
- names = os.listdir(tempd)
- wheel_path = get_legacy_build_wheel_path(
- names=names,
- temp_dir=tempd,
- name=name,
- command_args=wheel_args,
- command_output=output,
- )
- return wheel_path
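
End to end, the legacy build amounts to running `setup.py bdist_wheel` into a temporary directory and picking the file it produced, as get_legacy_build_wheel_path does above. A simplified sketch without pip's option threading or output capture:

    import os
    import subprocess
    import sys
    import tempfile
    from typing import Optional

    def legacy_build_wheel(setup_py_path: str, source_dir: str) -> Optional[str]:
        """Return the path of the built wheel, or None on failure."""
        tempd = tempfile.mkdtemp(prefix="demo-bdist-")
        try:
            subprocess.run(
                [sys.executable, setup_py_path, "bdist_wheel", "-d", tempd],
                cwd=source_dir,
                check=True,
            )
        except subprocess.CalledProcessError:
            return None
        names = sorted(os.listdir(tempd))  # sorted for determinism
        return os.path.join(tempd, names[0]) if names else None
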
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/check.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/check.py
deleted file mode 100644
index 90c6a58..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/check.py
+++ /dev/null
@@ -1,187 +0,0 @@
-"""Validation of the dependencies of installed packages.
-"""
-
-import logging
-from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
-
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.specifiers import LegacySpecifier
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import LegacyVersion
-
-from pip._internal.distributions import make_distribution_for_install_requirement
-from pip._internal.metadata import get_default_environment
-from pip._internal.metadata.base import DistributionVersion
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.deprecation import deprecated
-
-logger = logging.getLogger(__name__)
-
-
-class PackageDetails(NamedTuple):
- version: DistributionVersion
- dependencies: List[Requirement]
-
-
-# Shorthands
-PackageSet = Dict[NormalizedName, PackageDetails]
-Missing = Tuple[NormalizedName, Requirement]
-Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
-
-MissingDict = Dict[NormalizedName, List[Missing]]
-ConflictingDict = Dict[NormalizedName, List[Conflicting]]
-CheckResult = Tuple[MissingDict, ConflictingDict]
-ConflictDetails = Tuple[PackageSet, CheckResult]
-
-
-def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
-    """Convert the installed distributions into a PackageSet."""
- package_set = {}
- problems = False
- env = get_default_environment()
- for dist in env.iter_installed_distributions(local_only=False, skip=()):
- name = dist.canonical_name
- try:
- dependencies = list(dist.iter_dependencies())
- package_set[name] = PackageDetails(dist.version, dependencies)
- except (OSError, ValueError) as e:
- # Don't crash on unreadable or broken metadata.
- logger.warning("Error parsing requirements for %s: %s", name, e)
- problems = True
- return package_set, problems
-
-
-def check_package_set(
- package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
-) -> CheckResult:
-    """Check whether a package set is consistent.
-
- If should_ignore is passed, it should be a callable that takes a
- package name and returns a boolean.
- """
-
- warn_legacy_versions_and_specifiers(package_set)
-
- missing = {}
- conflicting = {}
-
- for package_name, package_detail in package_set.items():
- # Info about dependencies of package_name
- missing_deps: Set[Missing] = set()
- conflicting_deps: Set[Conflicting] = set()
-
- if should_ignore and should_ignore(package_name):
- continue
-
- for req in package_detail.dependencies:
- name = canonicalize_name(req.name)
-
- # Check if it's missing
- if name not in package_set:
- missed = True
- if req.marker is not None:
- missed = req.marker.evaluate({"extra": ""})
- if missed:
- missing_deps.add((name, req))
- continue
-
- # Check if there's a conflict
- version = package_set[name].version
- if not req.specifier.contains(version, prereleases=True):
- conflicting_deps.add((name, version, req))
-
- if missing_deps:
- missing[package_name] = sorted(missing_deps, key=str)
- if conflicting_deps:
- conflicting[package_name] = sorted(conflicting_deps, key=str)
-
- return missing, conflicting
-
-
-def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
-    """Check whether the dependency graph would remain consistent after
-    installing the given requirements.
-    """
- # Start from the current state
- package_set, _ = create_package_set_from_installed()
- # Install packages
- would_be_installed = _simulate_installation_of(to_install, package_set)
-
- # Only warn about directly-dependent packages; create a whitelist of them
- whitelist = _create_whitelist(would_be_installed, package_set)
-
- return (
- package_set,
- check_package_set(
- package_set, should_ignore=lambda name: name not in whitelist
- ),
- )
-
-
-def _simulate_installation_of(
- to_install: List[InstallRequirement], package_set: PackageSet
-) -> Set[NormalizedName]:
- """Computes the version of packages after installing to_install."""
- # Keep track of packages that were installed
- installed = set()
-
- # Modify it as installing requirement_set would (assuming no errors)
- for inst_req in to_install:
- abstract_dist = make_distribution_for_install_requirement(inst_req)
- dist = abstract_dist.get_metadata_distribution()
- name = dist.canonical_name
- package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
-
- installed.add(name)
-
- return installed
-
-
-def _create_whitelist(
- would_be_installed: Set[NormalizedName], package_set: PackageSet
-) -> Set[NormalizedName]:
- packages_affected = set(would_be_installed)
-
- for package_name in package_set:
- if package_name in packages_affected:
- continue
-
- for req in package_set[package_name].dependencies:
- if canonicalize_name(req.name) in packages_affected:
- packages_affected.add(package_name)
- break
-
- return packages_affected
-
-
-def warn_legacy_versions_and_specifiers(package_set: PackageSet) -> None:
- for project_name, package_details in package_set.items():
- if isinstance(package_details.version, LegacyVersion):
- deprecated(
- reason=(
- f"{project_name} {package_details.version} "
- f"has a non-standard version number."
- ),
- replacement=(
- f"to upgrade to a newer version of {project_name} "
- f"or contact the author to suggest that they "
- f"release a version with a conforming version number"
- ),
- issue=12063,
- gone_in="24.1",
- )
- for dep in package_details.dependencies:
- if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
- deprecated(
- reason=(
- f"{project_name} {package_details.version} "
- f"has a non-standard dependency specifier {dep}."
- ),
- replacement=(
- f"to upgrade to a newer version of {project_name} "
- f"or contact the author to suggest that they "
-                    f"release a version with conforming dependency specifiers"
- ),
- issue=12063,
- gone_in="24.1",
- )
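
The heart of check_package_set needs nothing beyond the packaging library: resolve each declared dependency against the installed set and classify it as missing or conflicting. A minimal sketch over a hand-built package set (names and versions made up):

    from packaging.requirements import Requirement
    from packaging.utils import canonicalize_name
    from packaging.version import Version

    # Hypothetical installed set: name -> (version, declared dependencies).
    package_set = {
        "requests": (Version("2.31.0"), [Requirement("urllib3<3,>=1.21.1")]),
        "urllib3": (Version("1.10"), []),
        "flask": (Version("3.0.0"), [Requirement("werkzeug>=3.0")]),
    }

    for name, (_, deps) in package_set.items():
        for req in deps:
            dep = canonicalize_name(req.name)
            if dep not in package_set:
                print(f"{name}: missing dependency {req}")
            elif not req.specifier.contains(package_set[dep][0], prereleases=True):
                print(f"{name}: has {dep} {package_set[dep][0]}, requires {req}")
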
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py
deleted file mode 100644
index 3544568..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/freeze.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import collections
-import logging
-import os
-from typing import Container, Dict, Generator, Iterable, List, NamedTuple, Optional, Set
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.metadata import BaseDistribution, get_environment
-from pip._internal.req.constructors import (
- install_req_from_editable,
- install_req_from_line,
-)
-from pip._internal.req.req_file import COMMENT_RE
-from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
-
-logger = logging.getLogger(__name__)
-
-
-class _EditableInfo(NamedTuple):
- requirement: str
- comments: List[str]
-
-
-def freeze(
- requirement: Optional[List[str]] = None,
- local_only: bool = False,
- user_only: bool = False,
- paths: Optional[List[str]] = None,
- isolated: bool = False,
- exclude_editable: bool = False,
- skip: Container[str] = (),
-) -> Generator[str, None, None]:
- installations: Dict[str, FrozenRequirement] = {}
-
- dists = get_environment(paths).iter_installed_distributions(
- local_only=local_only,
- skip=(),
- user_only=user_only,
- )
- for dist in dists:
- req = FrozenRequirement.from_dist(dist)
- if exclude_editable and req.editable:
- continue
- installations[req.canonical_name] = req
-
- if requirement:
-        # Options that don't get turned into an InstallRequirement
-        # should only be emitted once, even if the same option appears in
-        # multiple requirements files, so keep track of what has already
-        # been emitted.
- emitted_options: Set[str] = set()
- # keep track of which files a requirement is in so that we can
- # give an accurate warning if a requirement appears multiple times.
- req_files: Dict[str, List[str]] = collections.defaultdict(list)
- for req_file_path in requirement:
- with open(req_file_path) as req_file:
- for line in req_file:
- if (
- not line.strip()
- or line.strip().startswith("#")
- or line.startswith(
- (
- "-r",
- "--requirement",
- "-f",
- "--find-links",
- "-i",
- "--index-url",
- "--pre",
- "--trusted-host",
- "--process-dependency-links",
- "--extra-index-url",
- "--use-feature",
- )
- )
- ):
- line = line.rstrip()
- if line not in emitted_options:
- emitted_options.add(line)
- yield line
- continue
-
- if line.startswith("-e") or line.startswith("--editable"):
- if line.startswith("-e"):
- line = line[2:].strip()
- else:
- line = line[len("--editable") :].strip().lstrip("=")
- line_req = install_req_from_editable(
- line,
- isolated=isolated,
- )
- else:
- line_req = install_req_from_line(
- COMMENT_RE.sub("", line).strip(),
- isolated=isolated,
- )
-
- if not line_req.name:
- logger.info(
- "Skipping line in requirement file [%s] because "
- "it's not clear what it would install: %s",
- req_file_path,
- line.strip(),
- )
- logger.info(
- " (add #egg=PackageName to the URL to avoid"
- " this warning)"
- )
- else:
- line_req_canonical_name = canonicalize_name(line_req.name)
- if line_req_canonical_name not in installations:
- # either it's not installed, or it is installed
- # but has been processed already
- if not req_files[line_req.name]:
- logger.warning(
- "Requirement file [%s] contains %s, but "
- "package %r is not installed",
- req_file_path,
- COMMENT_RE.sub("", line).strip(),
- line_req.name,
- )
- else:
- req_files[line_req.name].append(req_file_path)
- else:
- yield str(installations[line_req_canonical_name]).rstrip()
- del installations[line_req_canonical_name]
- req_files[line_req.name].append(req_file_path)
-
- # Warn about requirements that were included multiple times (in a
- # single requirements file or in different requirements files).
- for name, files in req_files.items():
- if len(files) > 1:
- logger.warning(
- "Requirement %s included multiple times [%s]",
- name,
- ", ".join(sorted(set(files))),
- )
-
- yield ("## The following requirements were added by pip freeze:")
- for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
- if installation.canonical_name not in skip:
- yield str(installation).rstrip()
-
-
-def _format_as_name_version(dist: BaseDistribution) -> str:
- dist_version = dist.version
- if isinstance(dist_version, Version):
- return f"{dist.raw_name}=={dist_version}"
- return f"{dist.raw_name}==={dist_version}"
-
-
-def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
- """
- Compute and return values (req, comments) for use in
- FrozenRequirement.from_dist().
- """
- editable_project_location = dist.editable_project_location
- assert editable_project_location
- location = os.path.normcase(os.path.abspath(editable_project_location))
-
- from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
-
- vcs_backend = vcs.get_backend_for_dir(location)
-
- if vcs_backend is None:
- display = _format_as_name_version(dist)
- logger.debug(
- 'No VCS found for editable requirement "%s" in: %r',
- display,
- location,
- )
- return _EditableInfo(
- requirement=location,
- comments=[f"# Editable install with no version control ({display})"],
- )
-
- vcs_name = type(vcs_backend).__name__
-
- try:
- req = vcs_backend.get_src_requirement(location, dist.raw_name)
- except RemoteNotFoundError:
- display = _format_as_name_version(dist)
- return _EditableInfo(
- requirement=location,
- comments=[f"# Editable {vcs_name} install with no remote ({display})"],
- )
- except RemoteNotValidError as ex:
- display = _format_as_name_version(dist)
- return _EditableInfo(
- requirement=location,
- comments=[
- f"# Editable {vcs_name} install ({display}) with either a deleted "
- f"local remote or invalid URI:",
- f"# '{ex.url}'",
- ],
- )
- except BadCommand:
- logger.warning(
- "cannot determine version of editable source in %s "
- "(%s command not found in path)",
- location,
- vcs_backend.name,
- )
- return _EditableInfo(requirement=location, comments=[])
- except InstallationError as exc:
-        logger.warning("Error when trying to get requirement for VCS system: %s", exc)
- else:
- return _EditableInfo(requirement=req, comments=[])
-
- logger.warning("Could not determine repository location of %s", location)
-
- return _EditableInfo(
- requirement=location,
- comments=["## !! Could not determine repository location"],
- )
-
-
-class FrozenRequirement:
- def __init__(
- self,
- name: str,
- req: str,
- editable: bool,
- comments: Iterable[str] = (),
- ) -> None:
- self.name = name
- self.canonical_name = canonicalize_name(name)
- self.req = req
- self.editable = editable
- self.comments = comments
-
- @classmethod
- def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
- editable = dist.editable
- if editable:
- req, comments = _get_editable_info(dist)
- else:
- comments = []
- direct_url = dist.direct_url
- if direct_url:
- # if PEP 610 metadata is present, use it
- req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
- else:
- # name==version requirement
- req = _format_as_name_version(dist)
-
- return cls(dist.raw_name, req, editable, comments=comments)
-
- def __str__(self) -> str:
- req = self.req
- if self.editable:
- req = f"-e {req}"
- return "\n".join(list(self.comments) + [str(req)]) + "\n"
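
For contrast with the full implementation, the non-editable core of freeze fits in a few lines of standard library; a sketch that deliberately ignores editables, PEP 610 direct URLs, and requirement files:

    from importlib.metadata import distributions

    def tiny_freeze():
        """Yield a name==version line per installed distribution."""
        seen = set()
        for dist in distributions():
            name = dist.metadata["Name"]
            if not name or name.lower() in seen:
                continue  # distributions() can repeat across sys.path entries
            seen.add(name.lower())
            yield f"{name}=={dist.version}"

    for line in sorted(tiny_freeze(), key=str.lower):
        print(line)
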
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py
deleted file mode 100644
index 24d6a5d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-"""For modules related to installing packages.
-"""
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index b8a5965..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc
deleted file mode 100644
index f62cb29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index 88b5111..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/editable_legacy.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/editable_legacy.py
deleted file mode 100644
index bebe24e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/editable_legacy.py
+++ /dev/null
@@ -1,46 +0,0 @@
-"""Legacy editable installation process, i.e. `setup.py develop`.
-"""
-import logging
-from typing import Optional, Sequence
-
-from pip._internal.build_env import BuildEnvironment
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.setuptools_build import make_setuptools_develop_args
-from pip._internal.utils.subprocess import call_subprocess
-
-logger = logging.getLogger(__name__)
-
-
-def install_editable(
- *,
- global_options: Sequence[str],
- prefix: Optional[str],
- home: Optional[str],
- use_user_site: bool,
- name: str,
- setup_py_path: str,
- isolated: bool,
- build_env: BuildEnvironment,
- unpacked_source_directory: str,
-) -> None:
-    """Install a package in editable mode. Most arguments are passed
-    through to setuptools.
- """
- logger.info("Running setup.py develop for %s", name)
-
- args = make_setuptools_develop_args(
- setup_py_path,
- global_options=global_options,
- no_user_config=isolated,
- prefix=prefix,
- home=home,
- use_user_site=use_user_site,
- )
-
- with indent_log():
- with build_env:
- call_subprocess(
- args,
- command_desc="python setup.py develop",
- cwd=unpacked_source_directory,
- )
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/install/wheel.py
deleted file mode 100644
index f67180c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/install/wheel.py
+++ /dev/null
@@ -1,734 +0,0 @@
-"""Support for installing and building the "wheel" binary package format.
-"""
-
-import collections
-import compileall
-import contextlib
-import csv
-import importlib
-import logging
-import os.path
-import re
-import shutil
-import sys
-import warnings
-from base64 import urlsafe_b64encode
-from email.message import Message
-from itertools import chain, filterfalse, starmap
-from typing import (
- IO,
- TYPE_CHECKING,
- Any,
- BinaryIO,
- Callable,
- Dict,
- Generator,
- Iterable,
- Iterator,
- List,
- NewType,
- Optional,
- Sequence,
- Set,
- Tuple,
- Union,
- cast,
-)
-from zipfile import ZipFile, ZipInfo
-
-from pip._vendor.distlib.scripts import ScriptMaker
-from pip._vendor.distlib.util import get_export_entry
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.locations import get_major_minor_version
-from pip._internal.metadata import (
- BaseDistribution,
- FilesystemWheel,
- get_wheel_distribution,
-)
-from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
-from pip._internal.models.scheme import SCHEME_KEYS, Scheme
-from pip._internal.utils.filesystem import adjacent_tmp_file, replace
-from pip._internal.utils.misc import captured_stdout, ensure_dir, hash_file, partition
-from pip._internal.utils.unpacking import (
- current_umask,
- is_within_directory,
- set_extracted_file_to_default_mode_plus_executable,
- zip_item_is_executable,
-)
-from pip._internal.utils.wheel import parse_wheel
-
-if TYPE_CHECKING:
- from typing import Protocol
-
- class File(Protocol):
- src_record_path: "RecordPath"
- dest_path: str
- changed: bool
-
- def save(self) -> None:
- pass
-
-
-logger = logging.getLogger(__name__)
-
-RecordPath = NewType("RecordPath", str)
-InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
-
-
-def rehash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
- """Return (encoded_digest, length) for path using hashlib.sha256()"""
- h, length = hash_file(path, blocksize)
- digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
- return (digest, str(length))
-
-
-def csv_io_kwargs(mode: str) -> Dict[str, Any]:
- """Return keyword arguments to properly open a CSV file
- in the given mode.
- """
- return {"mode": mode, "newline": "", "encoding": "utf-8"}
-
-
-def fix_script(path: str) -> bool:
-    """Replace "#!python" with "#!/path/to/python".
-    Return True if the file was changed.
- """
- # XXX RECORD hashes will need to be updated
- assert os.path.isfile(path)
-
- with open(path, "rb") as script:
- firstline = script.readline()
- if not firstline.startswith(b"#!python"):
- return False
- exename = sys.executable.encode(sys.getfilesystemencoding())
- firstline = b"#!" + exename + os.linesep.encode("ascii")
- rest = script.read()
- with open(path, "wb") as script:
- script.write(firstline)
- script.write(rest)
- return True
-
-
-def wheel_root_is_purelib(metadata: Message) -> bool:
- return metadata.get("Root-Is-Purelib", "").lower() == "true"
-
-
-def get_entrypoints(dist: BaseDistribution) -> Tuple[Dict[str, str], Dict[str, str]]:
- console_scripts = {}
- gui_scripts = {}
- for entry_point in dist.iter_entry_points():
- if entry_point.group == "console_scripts":
- console_scripts[entry_point.name] = entry_point.value
- elif entry_point.group == "gui_scripts":
- gui_scripts[entry_point.name] = entry_point.value
- return console_scripts, gui_scripts
-
-
-def message_about_scripts_not_on_PATH(scripts: Sequence[str]) -> Optional[str]:
- """Determine if any scripts are not on PATH and format a warning.
- Returns a warning message if one or more scripts are not on PATH,
- otherwise None.
- """
- if not scripts:
- return None
-
- # Group scripts by the path they were installed in
- grouped_by_dir: Dict[str, Set[str]] = collections.defaultdict(set)
- for destfile in scripts:
- parent_dir = os.path.dirname(destfile)
- script_name = os.path.basename(destfile)
- grouped_by_dir[parent_dir].add(script_name)
-
- # We don't want to warn for directories that are on PATH.
- not_warn_dirs = [
- os.path.normcase(os.path.normpath(i)).rstrip(os.sep)
- for i in os.environ.get("PATH", "").split(os.pathsep)
- ]
- # If an executable sits with sys.executable, we don't warn for it.
- # This covers the case of venv invocations without activating the venv.
- not_warn_dirs.append(
- os.path.normcase(os.path.normpath(os.path.dirname(sys.executable)))
- )
- warn_for: Dict[str, Set[str]] = {
- parent_dir: scripts
- for parent_dir, scripts in grouped_by_dir.items()
- if os.path.normcase(os.path.normpath(parent_dir)) not in not_warn_dirs
- }
- if not warn_for:
- return None
-
- # Format a message
- msg_lines = []
- for parent_dir, dir_scripts in warn_for.items():
- sorted_scripts: List[str] = sorted(dir_scripts)
- if len(sorted_scripts) == 1:
- start_text = f"script {sorted_scripts[0]} is"
- else:
- start_text = "scripts {} are".format(
- ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
- )
-
- msg_lines.append(
- f"The {start_text} installed in '{parent_dir}' which is not on PATH."
- )
-
- last_line_fmt = (
- "Consider adding {} to PATH or, if you prefer "
- "to suppress this warning, use --no-warn-script-location."
- )
- if len(msg_lines) == 1:
- msg_lines.append(last_line_fmt.format("this directory"))
- else:
- msg_lines.append(last_line_fmt.format("these directories"))
-
- # Add a note if any directory starts with ~
- warn_for_tilde = any(
- i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
- )
- if warn_for_tilde:
- tilde_warning_msg = (
- "NOTE: The current PATH contains path(s) starting with `~`, "
- "which may not be expanded by all applications."
- )
- msg_lines.append(tilde_warning_msg)
-
- # Returns the formatted multiline message
- return "\n".join(msg_lines)
-
-
-def _normalized_outrows(
- outrows: Iterable[InstalledCSVRow],
-) -> List[Tuple[str, str, str]]:
- """Normalize the given rows of a RECORD file.
-
- Items in each row are converted into str. Rows are then sorted to make
- the value more predictable for tests.
-
- Each row is a 3-tuple (path, hash, size) and corresponds to a record of
- a RECORD file (see PEP 376 and PEP 427 for details). For the rows
- passed to this function, the size can be an integer as an int or string,
- or the empty string.
- """
- # Normally, there should only be one row per path, in which case the
- # second and third elements don't come into play when sorting.
- # However, in cases in the wild where a path might happen to occur twice,
- # we don't want the sort operation to trigger an error (but still want
- # determinism). Since the third element can be an int or string, we
- # coerce each element to a string to avoid a TypeError in this case.
- # For additional background, see--
- # https://github.com/pypa/pip/issues/5868
- return sorted(
- (record_path, hash_, str(size)) for record_path, hash_, size in outrows
- )
-
-
-def _record_to_fs_path(record_path: RecordPath, lib_dir: str) -> str:
- return os.path.join(lib_dir, record_path)
-
-
-def _fs_to_record_path(path: str, lib_dir: str) -> RecordPath:
- # On Windows, do not handle relative paths if they belong to different
- # logical disks
- if os.path.splitdrive(path)[0].lower() == os.path.splitdrive(lib_dir)[0].lower():
- path = os.path.relpath(path, lib_dir)
-
- path = path.replace(os.path.sep, "/")
- return cast("RecordPath", path)
-
-
-def get_csv_rows_for_installed(
- old_csv_rows: List[List[str]],
- installed: Dict[RecordPath, RecordPath],
- changed: Set[RecordPath],
- generated: List[str],
- lib_dir: str,
-) -> List[InstalledCSVRow]:
- """
- :param installed: A map from archive RECORD path to installation RECORD
- path.
- """
- installed_rows: List[InstalledCSVRow] = []
- for row in old_csv_rows:
- if len(row) > 3:
- logger.warning("RECORD line has more than three elements: %s", row)
- old_record_path = cast("RecordPath", row[0])
- new_record_path = installed.pop(old_record_path, old_record_path)
- if new_record_path in changed:
- digest, length = rehash(_record_to_fs_path(new_record_path, lib_dir))
- else:
- digest = row[1] if len(row) > 1 else ""
- length = row[2] if len(row) > 2 else ""
- installed_rows.append((new_record_path, digest, length))
- for f in generated:
- path = _fs_to_record_path(f, lib_dir)
- digest, length = rehash(f)
- installed_rows.append((path, digest, length))
- return installed_rows + [
- (installed_record_path, "", "") for installed_record_path in installed.values()
- ]
-
-
-def get_console_script_specs(console: Dict[str, str]) -> List[str]:
- """
- Given the mapping from entrypoint name to callable, return the relevant
- console script specs.
- """
- # Don't mutate caller's version
- console = console.copy()
-
- scripts_to_generate = []
-
- # Special case pip and setuptools to generate versioned wrappers
- #
- # The issue is that some projects (specifically, pip and setuptools) use
- # code in setup.py to create "versioned" entry points - pip2.7 on Python
- # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
- # the wheel metadata at build time, and so if the wheel is installed with
- # a *different* version of Python the entry points will be wrong. The
- # correct fix for this is to enhance the metadata to be able to describe
- # such versioned entry points, but that won't happen till Metadata 2.0 is
- # available.
- # In the meantime, projects using versioned entry points will either have
- # incorrect versioned entry points, or they will not be able to distribute
- # "universal" wheels (i.e., they will need a wheel per Python version).
- #
- # Because setuptools and pip are bundled with _ensurepip and virtualenv,
- # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
- # override the versioned entry points in the wheel and generate the
- # correct ones. This code is purely a short-term measure until Metadata 2.0
- # is available.
- #
-    # To add to the level of hack in this section of code: to support
-    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
-    # variable which controls which versioned scripts get installed.
- #
- # ENSUREPIP_OPTIONS=altinstall
- # - Only pipX.Y and easy_install-X.Y will be generated and installed
- # ENSUREPIP_OPTIONS=install
-    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
-    #     that this is the behavior whenever ENSUREPIP_OPTIONS is set to
-    #     anything other than altinstall
- # DEFAULT
- # - The default behavior is to install pip, pipX, pipX.Y, easy_install
- # and easy_install-X.Y.
- pip_script = console.pop("pip", None)
- if pip_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- scripts_to_generate.append("pip = " + pip_script)
-
- if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
- scripts_to_generate.append(f"pip{sys.version_info[0]} = {pip_script}")
-
- scripts_to_generate.append(f"pip{get_major_minor_version()} = {pip_script}")
- # Delete any other versioned pip entry points
- pip_ep = [k for k in console if re.match(r"pip(\d+(\.\d+)?)?$", k)]
- for k in pip_ep:
- del console[k]
- easy_install_script = console.pop("easy_install", None)
- if easy_install_script:
- if "ENSUREPIP_OPTIONS" not in os.environ:
- scripts_to_generate.append("easy_install = " + easy_install_script)
-
- scripts_to_generate.append(
- f"easy_install-{get_major_minor_version()} = {easy_install_script}"
- )
- # Delete any other versioned easy_install entry points
- easy_install_ep = [
- k for k in console if re.match(r"easy_install(-\d+\.\d+)?$", k)
- ]
- for k in easy_install_ep:
- del console[k]
-
- # Generate the console entry points specified in the wheel
- scripts_to_generate.extend(starmap("{} = {}".format, console.items()))
-
- return scripts_to_generate
-
-
-class ZipBackedFile:
- def __init__(
- self, src_record_path: RecordPath, dest_path: str, zip_file: ZipFile
- ) -> None:
- self.src_record_path = src_record_path
- self.dest_path = dest_path
- self._zip_file = zip_file
- self.changed = False
-
- def _getinfo(self) -> ZipInfo:
- return self._zip_file.getinfo(self.src_record_path)
-
- def save(self) -> None:
- # directory creation is lazy and after file filtering
- # to ensure we don't install empty dirs; empty dirs can't be
- # uninstalled.
- parent_dir = os.path.dirname(self.dest_path)
- ensure_dir(parent_dir)
-
- # When we open the output file below, any existing file is truncated
- # before we start writing the new contents. This is fine in most
- # cases, but can cause a segfault if pip has loaded a shared
- # object (e.g. from pyopenssl through its vendored urllib3)
- # Since the shared object is mmap'd an attempt to call a
- # symbol in it will then cause a segfault. Unlinking the file
- # allows writing of new contents while allowing the process to
- # continue to use the old copy.
- if os.path.exists(self.dest_path):
- os.unlink(self.dest_path)
-
- zipinfo = self._getinfo()
-
- with self._zip_file.open(zipinfo) as f:
- with open(self.dest_path, "wb") as dest:
- shutil.copyfileobj(f, dest)
-
- if zip_item_is_executable(zipinfo):
- set_extracted_file_to_default_mode_plus_executable(self.dest_path)
-
-
-class ScriptFile:
- def __init__(self, file: "File") -> None:
- self._file = file
- self.src_record_path = self._file.src_record_path
- self.dest_path = self._file.dest_path
- self.changed = False
-
- def save(self) -> None:
- self._file.save()
- self.changed = fix_script(self.dest_path)
-
-
-class MissingCallableSuffix(InstallationError):
- def __init__(self, entry_point: str) -> None:
- super().__init__(
- f"Invalid script entry point: {entry_point} - A callable "
- "suffix is required. Cf https://packaging.python.org/"
- "specifications/entry-points/#use-for-scripts for more "
- "information."
- )
-
-
-def _raise_for_invalid_entrypoint(specification: str) -> None:
- entry = get_export_entry(specification)
- if entry is not None and entry.suffix is None:
- raise MissingCallableSuffix(str(entry))
-
-
-class PipScriptMaker(ScriptMaker):
- def make(
- self, specification: str, options: Optional[Dict[str, Any]] = None
- ) -> List[str]:
- _raise_for_invalid_entrypoint(specification)
- return super().make(specification, options)
-
-
-def _install_wheel(
- name: str,
- wheel_zip: ZipFile,
- wheel_path: str,
- scheme: Scheme,
- pycompile: bool = True,
- warn_script_location: bool = True,
- direct_url: Optional[DirectUrl] = None,
- requested: bool = False,
-) -> None:
- """Install a wheel.
-
- :param name: Name of the project to install
- :param wheel_zip: open ZipFile for wheel being installed
- :param scheme: Distutils scheme dictating the install directories
-    :param wheel_path: Path of the wheel file being installed, used in
-        error messages
- :param pycompile: Whether to byte-compile installed Python files
- :param warn_script_location: Whether to check that scripts are installed
- into a directory on PATH
- :raises UnsupportedWheel:
- * when the directory holds an unpacked wheel with incompatible
- Wheel-Version
- * when the .dist-info dir does not match the wheel
- """
- info_dir, metadata = parse_wheel(wheel_zip, name)
-
- if wheel_root_is_purelib(metadata):
- lib_dir = scheme.purelib
- else:
- lib_dir = scheme.platlib
-
- # Record details of the files moved
- # installed = files copied from the wheel to the destination
- # changed = files changed while installing (scripts #! line typically)
- # generated = files newly generated during the install (script wrappers)
- installed: Dict[RecordPath, RecordPath] = {}
- changed: Set[RecordPath] = set()
- generated: List[str] = []
-
- def record_installed(
- srcfile: RecordPath, destfile: str, modified: bool = False
- ) -> None:
- """Map archive RECORD paths to installation RECORD paths."""
- newpath = _fs_to_record_path(destfile, lib_dir)
- installed[srcfile] = newpath
- if modified:
- changed.add(newpath)
-
- def is_dir_path(path: RecordPath) -> bool:
- return path.endswith("/")
-
- def assert_no_path_traversal(dest_dir_path: str, target_path: str) -> None:
- if not is_within_directory(dest_dir_path, target_path):
- message = (
- "The wheel {!r} has a file {!r} trying to install"
- " outside the target directory {!r}"
- )
- raise InstallationError(
- message.format(wheel_path, target_path, dest_dir_path)
- )
-
- def root_scheme_file_maker(
- zip_file: ZipFile, dest: str
- ) -> Callable[[RecordPath], "File"]:
- def make_root_scheme_file(record_path: RecordPath) -> "File":
- normed_path = os.path.normpath(record_path)
- dest_path = os.path.join(dest, normed_path)
- assert_no_path_traversal(dest, dest_path)
- return ZipBackedFile(record_path, dest_path, zip_file)
-
- return make_root_scheme_file
-
- def data_scheme_file_maker(
- zip_file: ZipFile, scheme: Scheme
- ) -> Callable[[RecordPath], "File"]:
- scheme_paths = {key: getattr(scheme, key) for key in SCHEME_KEYS}
-
- def make_data_scheme_file(record_path: RecordPath) -> "File":
- normed_path = os.path.normpath(record_path)
- try:
- _, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
- except ValueError:
- message = (
- "Unexpected file in {}: {!r}. .data directory contents"
- " should be named like: '<scheme key>/<path>'."
- ).format(wheel_path, record_path)
- raise InstallationError(message)
-
- try:
- scheme_path = scheme_paths[scheme_key]
- except KeyError:
- valid_scheme_keys = ", ".join(sorted(scheme_paths))
- message = (
- "Unknown scheme key used in {}: {} (for file {!r}). .data"
- " directory contents should be in subdirectories named"
- " with a valid scheme key ({})"
- ).format(wheel_path, scheme_key, record_path, valid_scheme_keys)
- raise InstallationError(message)
-
- dest_path = os.path.join(scheme_path, dest_subpath)
- assert_no_path_traversal(scheme_path, dest_path)
- return ZipBackedFile(record_path, dest_path, zip_file)
-
- return make_data_scheme_file
-
- def is_data_scheme_path(path: RecordPath) -> bool:
- return path.split("/", 1)[0].endswith(".data")
-
- paths = cast(List[RecordPath], wheel_zip.namelist())
- file_paths = filterfalse(is_dir_path, paths)
- root_scheme_paths, data_scheme_paths = partition(is_data_scheme_path, file_paths)
-
- make_root_scheme_file = root_scheme_file_maker(wheel_zip, lib_dir)
- files: Iterator[File] = map(make_root_scheme_file, root_scheme_paths)
-
- def is_script_scheme_path(path: RecordPath) -> bool:
- parts = path.split("/", 2)
- return len(parts) > 2 and parts[0].endswith(".data") and parts[1] == "scripts"
-
- other_scheme_paths, script_scheme_paths = partition(
- is_script_scheme_path, data_scheme_paths
- )
-
- make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
- other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
- files = chain(files, other_scheme_files)
-
- # Get the defined entry points
- distribution = get_wheel_distribution(
- FilesystemWheel(wheel_path),
- canonicalize_name(name),
- )
- console, gui = get_entrypoints(distribution)
-
- def is_entrypoint_wrapper(file: "File") -> bool:
- # EP, EP.exe and EP-script.py are scripts generated for
- # entry point EP by setuptools
- path = file.dest_path
- name = os.path.basename(path)
- if name.lower().endswith(".exe"):
- matchname = name[:-4]
- elif name.lower().endswith("-script.py"):
- matchname = name[:-10]
- elif name.lower().endswith(".pya"):
- matchname = name[:-4]
- else:
- matchname = name
- # Ignore setuptools-generated scripts
- return matchname in console or matchname in gui
-
- script_scheme_files: Iterator[File] = map(
- make_data_scheme_file, script_scheme_paths
- )
- script_scheme_files = filterfalse(is_entrypoint_wrapper, script_scheme_files)
- script_scheme_files = map(ScriptFile, script_scheme_files)
- files = chain(files, script_scheme_files)
-
- for file in files:
- file.save()
- record_installed(file.src_record_path, file.dest_path, file.changed)
-
- def pyc_source_file_paths() -> Generator[str, None, None]:
- # We de-duplicate installation paths, since there can be overlap (e.g.
- # file in .data maps to same location as file in wheel root).
- # Sorting installation paths makes it easier to reproduce and debug
- # issues related to permissions on existing files.
- for installed_path in sorted(set(installed.values())):
- full_installed_path = os.path.join(lib_dir, installed_path)
- if not os.path.isfile(full_installed_path):
- continue
- if not full_installed_path.endswith(".py"):
- continue
- yield full_installed_path
-
- def pyc_output_path(path: str) -> str:
- """Return the path the pyc file would have been written to."""
- return importlib.util.cache_from_source(path)
-
- # Compile all of the pyc files for the installed files
- if pycompile:
- with captured_stdout() as stdout:
- with warnings.catch_warnings():
- warnings.filterwarnings("ignore")
- for path in pyc_source_file_paths():
- success = compileall.compile_file(path, force=True, quiet=True)
- if success:
- pyc_path = pyc_output_path(path)
- assert os.path.exists(pyc_path)
- pyc_record_path = cast(
- "RecordPath", pyc_path.replace(os.path.sep, "/")
- )
- record_installed(pyc_record_path, pyc_path)
- logger.debug(stdout.getvalue())
-
- maker = PipScriptMaker(None, scheme.scripts)
-
- # Ensure old scripts are overwritten.
- # See https://github.com/pypa/pip/issues/1800
- maker.clobber = True
-
- # Ensure we don't generate any variants for scripts because this is almost
- # never what somebody wants.
- # See https://bitbucket.org/pypa/distlib/issue/35/
- maker.variants = {""}
-
- # This is required because otherwise distlib creates scripts that are not
- # executable.
- # See https://bitbucket.org/pypa/distlib/issue/32/
- maker.set_mode = True
-
- # Generate the console and GUI entry points specified in the wheel
- scripts_to_generate = get_console_script_specs(console)
-
- gui_scripts_to_generate = list(starmap("{} = {}".format, gui.items()))
-
- generated_console_scripts = maker.make_multiple(scripts_to_generate)
- generated.extend(generated_console_scripts)
-
- generated.extend(maker.make_multiple(gui_scripts_to_generate, {"gui": True}))
-
- if warn_script_location:
- msg = message_about_scripts_not_on_PATH(generated_console_scripts)
- if msg is not None:
- logger.warning(msg)
-
- generated_file_mode = 0o666 & ~current_umask()
-
- @contextlib.contextmanager
- def _generate_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
- with adjacent_tmp_file(path, **kwargs) as f:
- yield f
- os.chmod(f.name, generated_file_mode)
- replace(f.name, path)
-
- dest_info_dir = os.path.join(lib_dir, info_dir)
-
- # Record pip as the installer
- installer_path = os.path.join(dest_info_dir, "INSTALLER")
- with _generate_file(installer_path) as installer_file:
- installer_file.write(b"pip\n")
- generated.append(installer_path)
-
- # Record the PEP 610 direct URL reference
- if direct_url is not None:
- direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
- with _generate_file(direct_url_path) as direct_url_file:
- direct_url_file.write(direct_url.to_json().encode("utf-8"))
- generated.append(direct_url_path)
-
- # Record the REQUESTED file
- if requested:
- requested_path = os.path.join(dest_info_dir, "REQUESTED")
- with open(requested_path, "wb"):
- pass
- generated.append(requested_path)
-
- record_text = distribution.read_text("RECORD")
- record_rows = list(csv.reader(record_text.splitlines()))
-
- rows = get_csv_rows_for_installed(
- record_rows,
- installed=installed,
- changed=changed,
- generated=generated,
- lib_dir=lib_dir,
- )
-
- # Record details of all files installed
- record_path = os.path.join(dest_info_dir, "RECORD")
-
- with _generate_file(record_path, **csv_io_kwargs("w")) as record_file:
- # Explicitly cast to typing.IO[str] as a workaround for the mypy error:
- # "writer" has incompatible type "BinaryIO"; expected "_Writer"
- writer = csv.writer(cast("IO[str]", record_file))
- writer.writerows(_normalized_outrows(rows))
-
-
-@contextlib.contextmanager
-def req_error_context(req_description: str) -> Generator[None, None, None]:
- try:
- yield
- except InstallationError as e:
- message = f"For req: {req_description}. {e.args[0]}"
- raise InstallationError(message) from e
-
-
-def install_wheel(
- name: str,
- wheel_path: str,
- scheme: Scheme,
- req_description: str,
- pycompile: bool = True,
- warn_script_location: bool = True,
- direct_url: Optional[DirectUrl] = None,
- requested: bool = False,
-) -> None:
- with ZipFile(wheel_path, allowZip64=True) as z:
- with req_error_context(req_description):
- _install_wheel(
- name=name,
- wheel_zip=z,
- wheel_path=wheel_path,
- scheme=scheme,
- pycompile=pycompile,
- warn_script_location=warn_script_location,
- direct_url=direct_url,
- requested=requested,
- )
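
The digest written by rehash() above is the wheel RECORD format: sha256 encoded as URL-safe base64 with the trailing padding stripped (PEP 376/PEP 427). A standalone sketch producing one RECORD-style (digest, size) pair:

    import hashlib
    from base64 import urlsafe_b64encode
    from typing import Tuple

    def record_hash(path: str, blocksize: int = 1 << 20) -> Tuple[str, str]:
        """Return (digest, size) formatted as they appear in a RECORD row."""
        h = hashlib.sha256()
        length = 0
        with open(path, "rb") as f:
            for block in iter(lambda: f.read(blocksize), b""):
                h.update(block)
                length += len(block)
        digest = "sha256=" + urlsafe_b64encode(h.digest()).decode("latin1").rstrip("=")
        return digest, str(length)
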
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/operations/prepare.py b/venv/lib/python3.11/site-packages/pip/_internal/operations/prepare.py
deleted file mode 100644
index 956717d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/operations/prepare.py
+++ /dev/null
@@ -1,730 +0,0 @@
-"""Prepares a distribution for installation.
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import mimetypes
-import os
-import shutil
-from pathlib import Path
-from typing import Dict, Iterable, List, Optional
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.distributions import make_distribution_for_install_requirement
-from pip._internal.distributions.installed import InstalledDistribution
-from pip._internal.exceptions import (
- DirectoryUrlHashUnsupported,
- HashMismatch,
- HashUnpinned,
- InstallationError,
- MetadataInconsistent,
- NetworkConnectionError,
- VcsHashUnsupported,
-)
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution, get_metadata_distribution
-from pip._internal.models.direct_url import ArchiveInfo
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.network.download import BatchDownloader, Downloader
-from pip._internal.network.lazy_wheel import (
- HTTPRangeRequestUnsupported,
- dist_from_wheel_url,
-)
-from pip._internal.network.session import PipSession
-from pip._internal.operations.build.build_tracker import BuildTracker
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils._log import getLogger
-from pip._internal.utils.direct_url_helpers import (
- direct_url_for_editable,
- direct_url_from_link,
-)
-from pip._internal.utils.hashes import Hashes, MissingHashes
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import (
- display_path,
- hash_file,
- hide_url,
- redact_auth_from_requirement,
-)
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.unpacking import unpack_file
-from pip._internal.vcs import vcs
-
-logger = getLogger(__name__)
-
-
-def _get_prepared_distribution(
- req: InstallRequirement,
- build_tracker: BuildTracker,
- finder: PackageFinder,
- build_isolation: bool,
- check_build_deps: bool,
-) -> BaseDistribution:
- """Prepare a distribution for installation."""
- abstract_dist = make_distribution_for_install_requirement(req)
- tracker_id = abstract_dist.build_tracker_id
- if tracker_id is not None:
- with build_tracker.track(req, tracker_id):
- abstract_dist.prepare_distribution_metadata(
- finder, build_isolation, check_build_deps
- )
- return abstract_dist.get_metadata_distribution()
-
-
-def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
- vcs_backend = vcs.get_backend_for_scheme(link.scheme)
- assert vcs_backend is not None
- vcs_backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
-
-
-class File:
- def __init__(self, path: str, content_type: Optional[str]) -> None:
- self.path = path
- if content_type is None:
- self.content_type = mimetypes.guess_type(path)[0]
- else:
- self.content_type = content_type
-
-
-def get_http_url(
- link: Link,
- download: Downloader,
- download_dir: Optional[str] = None,
- hashes: Optional[Hashes] = None,
-) -> File:
- temp_dir = TempDirectory(kind="unpack", globally_managed=True)
- # If a download dir is specified, is the file already downloaded there?
- already_downloaded_path = None
- if download_dir:
- already_downloaded_path = _check_download_dir(link, download_dir, hashes)
-
- if already_downloaded_path:
- from_path = already_downloaded_path
- content_type = None
- else:
- # let's download to a tmp dir
- from_path, content_type = download(link, temp_dir.path)
- if hashes:
- hashes.check_against_path(from_path)
-
- return File(from_path, content_type)
-
-
-def get_file_url(
- link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
-) -> File:
- """Get file and optionally check its hash."""
- # If a download dir is specified, is the file already there and valid?
- already_downloaded_path = None
- if download_dir:
- already_downloaded_path = _check_download_dir(link, download_dir, hashes)
-
- if already_downloaded_path:
- from_path = already_downloaded_path
- else:
- from_path = link.file_path
-
- # If --require-hashes is off, `hashes` is either empty, the
- # link's embedded hash, or MissingHashes; it is required to
- # match. If --require-hashes is on, we are satisfied by any
- # hash in `hashes` matching: a URL-based or an option-based
- # one; no internet-sourced hash will be in `hashes`.
- if hashes:
- hashes.check_against_path(from_path)
- return File(from_path, None)
-
-
-def unpack_url(
- link: Link,
- location: str,
- download: Downloader,
- verbosity: int,
- download_dir: Optional[str] = None,
- hashes: Optional[Hashes] = None,
-) -> Optional[File]:
- """Unpack link into location, downloading if required.
-
- :param hashes: A Hashes object, one of whose embedded hashes must match,
- or HashMismatch will be raised. If the Hashes is empty, no matches are
- required, and unhashable types of requirements (like VCS ones, which
- would ordinarily raise HashUnsupported) are allowed.
- """
- # non-editable vcs urls
- if link.is_vcs:
- unpack_vcs_link(link, location, verbosity=verbosity)
- return None
-
- assert not link.is_existing_dir()
-
- # file urls
- if link.is_file:
- file = get_file_url(link, download_dir, hashes=hashes)
-
- # http urls
- else:
- file = get_http_url(
- link,
- download,
- download_dir,
- hashes=hashes,
- )
-
- # unpack the archive to the build dir location. even when only downloading
- # archives, they have to be unpacked to parse dependencies, except wheels
- if not link.is_wheel:
- unpack_file(file.path, location, file.content_type)
-
- return file
-
-
-def _check_download_dir(
- link: Link,
- download_dir: str,
- hashes: Optional[Hashes],
- warn_on_hash_mismatch: bool = True,
-) -> Optional[str]:
- """Check download_dir for previously downloaded file with correct hash
- If a correct file is found return its path else None
- """
- download_path = os.path.join(download_dir, link.filename)
-
- if not os.path.exists(download_path):
- return None
-
- # If already downloaded, does its hash match?
- logger.info("File was already downloaded %s", download_path)
- if hashes:
- try:
- hashes.check_against_path(download_path)
- except HashMismatch:
- if warn_on_hash_mismatch:
- logger.warning(
- "Previously-downloaded file %s has bad hash. Re-downloading.",
- download_path,
- )
- os.unlink(download_path)
- return None
- return download_path
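-
-
-# Editor's note: a minimal standalone sketch of the reuse check above,
-# using only hashlib. `expected_sha256` is an illustrative parameter, not
-# a pip API; a file is reused only when its digest is among the expected ones.
-import hashlib
-
-def reuse_if_hash_matches(path: str, expected_sha256: set) -> bool:
- with open(path, "rb") as f:
- digest = hashlib.sha256(f.read()).hexdigest()
- return digest in expected_sha256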
-
-
-class RequirementPreparer:
- """Prepares a Requirement"""
-
- def __init__(
- self,
- build_dir: str,
- download_dir: Optional[str],
- src_dir: str,
- build_isolation: bool,
- check_build_deps: bool,
- build_tracker: BuildTracker,
- session: PipSession,
- progress_bar: str,
- finder: PackageFinder,
- require_hashes: bool,
- use_user_site: bool,
- lazy_wheel: bool,
- verbosity: int,
- legacy_resolver: bool,
- ) -> None:
- super().__init__()
-
- self.src_dir = src_dir
- self.build_dir = build_dir
- self.build_tracker = build_tracker
- self._session = session
- self._download = Downloader(session, progress_bar)
- self._batch_download = BatchDownloader(session, progress_bar)
- self.finder = finder
-
- # Where still-packed archives should be written to. If None, they are
- # not saved, and are deleted immediately after unpacking.
- self.download_dir = download_dir
-
- # Is build isolation allowed?
- self.build_isolation = build_isolation
-
- # Should check build dependencies?
- self.check_build_deps = check_build_deps
-
- # Should hash-checking be required?
- self.require_hashes = require_hashes
-
- # Should install in user site-packages?
- self.use_user_site = use_user_site
-
- # Should wheels be downloaded lazily?
- self.use_lazy_wheel = lazy_wheel
-
- # How verbose should underlying tooling be?
- self.verbosity = verbosity
-
- # Are we using the legacy resolver?
- self.legacy_resolver = legacy_resolver
-
- # Memoized downloaded files, as mapping of url: path.
- self._downloaded: Dict[str, str] = {}
-
- # Previous "header" printed for a link-based InstallRequirement
- self._previous_requirement_header = ("", "")
-
- def _log_preparing_link(self, req: InstallRequirement) -> None:
- """Provide context for the requirement being prepared."""
- if req.link.is_file and not req.is_wheel_from_cache:
- message = "Processing %s"
- information = str(display_path(req.link.file_path))
- else:
- message = "Collecting %s"
- information = redact_auth_from_requirement(req.req) if req.req else str(req)
-
- # If we used req.req, inject requirement source if available (this
- # would already be included if we used req directly)
- if req.req and req.comes_from:
- if isinstance(req.comes_from, str):
- comes_from: Optional[str] = req.comes_from
- else:
- comes_from = req.comes_from.from_path()
- if comes_from:
- information += f" (from {comes_from})"
-
- if (message, information) != self._previous_requirement_header:
- self._previous_requirement_header = (message, information)
- logger.info(message, information)
-
- if req.is_wheel_from_cache:
- with indent_log():
- logger.info("Using cached %s", req.link.filename)
-
- def _ensure_link_req_src_dir(
- self, req: InstallRequirement, parallel_builds: bool
- ) -> None:
- """Ensure source_dir of a linked InstallRequirement."""
- # At this point source_dir is only set for editable requirements.
- if req.link.is_wheel:
- # We don't need to unpack wheels, so no need for a source
- # directory.
- return
- assert req.source_dir is None
- if req.link.is_existing_dir():
- # build local directories in-tree
- req.source_dir = req.link.file_path
- return
-
- # We always delete unpacked sdists after pip runs.
- req.ensure_has_source_dir(
- self.build_dir,
- autodelete=True,
- parallel_builds=parallel_builds,
- )
- req.ensure_pristine_source_checkout()
-
- def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
- # By the time this is called, the requirement's link should have
- been checked, so we can tell what kind of requirement req is
- # and raise some more informative errors than otherwise.
- # (For example, we can raise VcsHashUnsupported for a VCS URL
- # rather than HashMissing.)
- if not self.require_hashes:
- return req.hashes(trust_internet=True)
-
- # We could check these first 2 conditions inside unpack_url
- # and save repetition of conditions, but then we would
- # report less-useful error messages for unhashable
- # requirements, complaining that there's no hash provided.
- if req.link.is_vcs:
- raise VcsHashUnsupported()
- if req.link.is_existing_dir():
- raise DirectoryUrlHashUnsupported()
-
- # Unpinned packages are asking for trouble when a new version
- # is uploaded. This isn't a security check, but it saves users
- # a surprising hash mismatch in the future.
- # file:/// URLs aren't pinnable, so don't complain about them
- # not being pinned.
- if not req.is_direct and not req.is_pinned:
- raise HashUnpinned()
-
- # If known-good hashes are missing for this requirement,
- # shim it with a facade object that will provoke hash
- # computation and then raise a HashMissing exception
- # showing the user what the hash should be.
- return req.hashes(trust_internet=False) or MissingHashes()
-
- def _fetch_metadata_only(
- self,
- req: InstallRequirement,
- ) -> Optional[BaseDistribution]:
- if self.legacy_resolver:
- logger.debug(
- "Metadata-only fetching is not used in the legacy resolver",
- )
- return None
- if self.require_hashes:
- logger.debug(
- "Metadata-only fetching is not used as hash checking is required",
- )
- return None
- # Try PEP 658 metadata first, then fall back to lazy wheel if unavailable.
- return self._fetch_metadata_using_link_data_attr(
- req
- ) or self._fetch_metadata_using_lazy_wheel(req.link)
-
- def _fetch_metadata_using_link_data_attr(
- self,
- req: InstallRequirement,
- ) -> Optional[BaseDistribution]:
- """Fetch metadata from the data-dist-info-metadata attribute, if possible."""
- # (1) Get the link to the metadata file, if provided by the backend.
- metadata_link = req.link.metadata_link()
- if metadata_link is None:
- return None
- assert req.req is not None
- logger.verbose(
- "Obtaining dependency information for %s from %s",
- req.req,
- metadata_link,
- )
- # (2) Download the contents of the METADATA file, separate from the dist itself.
- metadata_file = get_http_url(
- metadata_link,
- self._download,
- hashes=metadata_link.as_hashes(),
- )
- with open(metadata_file.path, "rb") as f:
- metadata_contents = f.read()
- # (3) Generate a dist just from those file contents.
- metadata_dist = get_metadata_distribution(
- metadata_contents,
- req.link.filename,
- req.req.name,
- )
- # (4) Ensure the Name: field from the METADATA file matches the name from the
- # install requirement.
- #
- # NB: raw_name will fall back to the name from the install requirement if
- # the Name: field is not present, but it's noted in the raw_name docstring
- # that that should NEVER happen anyway.
- if canonicalize_name(metadata_dist.raw_name) != canonicalize_name(req.req.name):
- raise MetadataInconsistent(
- req, "Name", req.req.name, metadata_dist.raw_name
- )
- return metadata_dist
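-
- # Editor's sketch (not pip code): under PEP 658, an index serves the
- # METADATA file at the distribution's own URL with ".metadata" appended,
- # which is what metadata_link() above ultimately resolves to.
- def pep658_metadata_url(dist_url: str) -> str:
- # ".../foo-1.0-py3-none-any.whl" -> ".../foo-1.0-py3-none-any.whl.metadata"
- return dist_url + ".metadata"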
-
- def _fetch_metadata_using_lazy_wheel(
- self,
- link: Link,
- ) -> Optional[BaseDistribution]:
- """Fetch metadata using lazy wheel, if possible."""
- # --use-feature=fast-deps must be provided.
- if not self.use_lazy_wheel:
- return None
- if link.is_file or not link.is_wheel:
- logger.debug(
- "Lazy wheel is not used as %r does not point to a remote wheel",
- link,
- )
- return None
-
- wheel = Wheel(link.filename)
- name = canonicalize_name(wheel.name)
- logger.info(
- "Obtaining dependency information from %s %s",
- name,
- wheel.version,
- )
- url = link.url.split("#", 1)[0]
- try:
- return dist_from_wheel_url(name, url, self._session)
- except HTTPRangeRequestUnsupported:
- logger.debug("%s does not support range requests", url)
- return None
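-
- # Editor's illustration: the fast-deps path above depends on HTTP range
- # requests. A rough standalone probe (hypothetical helper, not part of
- # pip): servers that ignore the Range header answer 200 rather than 206.
- import urllib.request
-
- def server_supports_ranges(url: str) -> bool:
- request = urllib.request.Request(url, headers={"Range": "bytes=0-0"})
- with urllib.request.urlopen(request) as response:
- return response.status == 206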
-
- def _complete_partial_requirements(
- self,
- partially_downloaded_reqs: Iterable[InstallRequirement],
- parallel_builds: bool = False,
- ) -> None:
- """Download any requirements which were only fetched by metadata."""
- # Download to a temporary directory. These will be copied over as
- # needed for downstream 'download', 'wheel', and 'install' commands.
- temp_dir = TempDirectory(kind="unpack", globally_managed=True).path
-
- # Map each link to the requirement that owns it. This allows us to set
- # `req.local_file_path` on the appropriate requirement after passing
- # all the links at once into BatchDownloader.
- links_to_fully_download: Dict[Link, InstallRequirement] = {}
- for req in partially_downloaded_reqs:
- assert req.link
- links_to_fully_download[req.link] = req
-
- batch_download = self._batch_download(
- links_to_fully_download.keys(),
- temp_dir,
- )
- for link, (filepath, _) in batch_download:
- logger.debug("Downloading link %s to %s", link, filepath)
- req = links_to_fully_download[link]
- # Record the downloaded file path so wheel reqs can extract a Distribution
- # in .get_dist().
- req.local_file_path = filepath
- # Record that the file is downloaded so we don't do it again in
- # _prepare_linked_requirement().
- self._downloaded[req.link.url] = filepath
-
- # If this is an sdist, we need to unpack it after downloading, but the
- # .source_dir won't be set up until we are in _prepare_linked_requirement().
- # Add the downloaded archive to the install requirement to unpack after
- # preparing the source dir.
- if not req.is_wheel:
- req.needs_unpacked_archive(Path(filepath))
-
- # This step is necessary to ensure all lazy wheels are processed
- # successfully by the 'download', 'wheel', and 'install' commands.
- for req in partially_downloaded_reqs:
- self._prepare_linked_requirement(req, parallel_builds)
-
- def prepare_linked_requirement(
- self, req: InstallRequirement, parallel_builds: bool = False
- ) -> BaseDistribution:
- """Prepare a requirement to be obtained from req.link."""
- assert req.link
- self._log_preparing_link(req)
- with indent_log():
- # Check if the relevant file is already available
- # in the download directory
- file_path = None
- if self.download_dir is not None and req.link.is_wheel:
- hashes = self._get_linked_req_hashes(req)
- file_path = _check_download_dir(
- req.link,
- self.download_dir,
- hashes,
- # When a locally built wheel has been found in cache, we don't warn
- # about re-downloading when the already downloaded wheel hash does
- # not match. This is because the hash must be checked against the
- original link, not the cached link. In that case the already
- # downloaded file will be removed and re-fetched from cache (which
- # implies a hash check against the cache entry's origin.json).
- warn_on_hash_mismatch=not req.is_wheel_from_cache,
- )
-
- if file_path is not None:
- # The file is already available, so mark it as downloaded
- self._downloaded[req.link.url] = file_path
- else:
- # The file is not available, attempt to fetch only metadata
- metadata_dist = self._fetch_metadata_only(req)
- if metadata_dist is not None:
- req.needs_more_preparation = True
- return metadata_dist
-
- # None of the optimizations worked, fully prepare the requirement
- return self._prepare_linked_requirement(req, parallel_builds)
-
- def prepare_linked_requirements_more(
- self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
- ) -> None:
- """Prepare linked requirements more, if needed."""
- reqs = [req for req in reqs if req.needs_more_preparation]
- for req in reqs:
- # Determine if any of these requirements were already downloaded.
- if self.download_dir is not None and req.link.is_wheel:
- hashes = self._get_linked_req_hashes(req)
- file_path = _check_download_dir(req.link, self.download_dir, hashes)
- if file_path is not None:
- self._downloaded[req.link.url] = file_path
- req.needs_more_preparation = False
-
- # Prepare requirements we found were already downloaded for some
- # reason. The other downloads will be completed separately.
- partially_downloaded_reqs: List[InstallRequirement] = []
- for req in reqs:
- if req.needs_more_preparation:
- partially_downloaded_reqs.append(req)
- else:
- self._prepare_linked_requirement(req, parallel_builds)
-
- # TODO: separate this part out from RequirementPreparer when the v1
- # resolver can be removed!
- self._complete_partial_requirements(
- partially_downloaded_reqs,
- parallel_builds=parallel_builds,
- )
-
- def _prepare_linked_requirement(
- self, req: InstallRequirement, parallel_builds: bool
- ) -> BaseDistribution:
- assert req.link
- link = req.link
-
- hashes = self._get_linked_req_hashes(req)
-
- if hashes and req.is_wheel_from_cache:
- assert req.download_info is not None
- assert link.is_wheel
- assert link.is_file
- # We need to verify hashes, and we have found the requirement in the cache
- # of locally built wheels.
- if (
- isinstance(req.download_info.info, ArchiveInfo)
- and req.download_info.info.hashes
- and hashes.has_one_of(req.download_info.info.hashes)
- ):
- # At this point we know the requirement was built from a hashable source
- # artifact, and we verified that the cache entry's hash of the original
- # artifact matches one of the hashes we expect. We don't verify hashes
- # against the cached wheel, because the wheel is not the original.
- hashes = None
- else:
- logger.warning(
- "The hashes of the source archive found in cache entry "
- "don't match, ignoring cached built wheel "
- "and re-downloading source."
- )
- req.link = req.cached_wheel_source_link
- link = req.link
-
- self._ensure_link_req_src_dir(req, parallel_builds)
-
- if link.is_existing_dir():
- local_file = None
- elif link.url not in self._downloaded:
- try:
- local_file = unpack_url(
- link,
- req.source_dir,
- self._download,
- self.verbosity,
- self.download_dir,
- hashes,
- )
- except NetworkConnectionError as exc:
- raise InstallationError(
- f"Could not install requirement {req} because of HTTP "
- f"error {exc} for URL {link}"
- )
- else:
- file_path = self._downloaded[link.url]
- if hashes:
- hashes.check_against_path(file_path)
- local_file = File(file_path, content_type=None)
-
- # If download_info is set, we got it from the wheel cache.
- if req.download_info is None:
- # Editables don't go through this function (see
- # prepare_editable_requirement).
- assert not req.editable
- req.download_info = direct_url_from_link(link, req.source_dir)
- # Make sure we have a hash in download_info. If we got it as part of the
- # URL, it will have been verified and we can rely on it. Otherwise we
- # compute it from the downloaded file.
- # FIXME: https://github.com/pypa/pip/issues/11943
- if (
- isinstance(req.download_info.info, ArchiveInfo)
- and not req.download_info.info.hashes
- and local_file
- ):
- hash = hash_file(local_file.path)[0].hexdigest()
- # We populate info.hash for backward compatibility.
- # This will automatically populate info.hashes.
- req.download_info.info.hash = f"sha256={hash}"
-
- # For use in later processing,
- # preserve the file path on the requirement.
- if local_file:
- req.local_file_path = local_file.path
-
- dist = _get_prepared_distribution(
- req,
- self.build_tracker,
- self.finder,
- self.build_isolation,
- self.check_build_deps,
- )
- return dist
-
- def save_linked_requirement(self, req: InstallRequirement) -> None:
- assert self.download_dir is not None
- assert req.link is not None
- link = req.link
- if link.is_vcs or (link.is_existing_dir() and req.editable):
- # Make a .zip of the source_dir we already created.
- req.archive(self.download_dir)
- return
-
- if link.is_existing_dir():
- logger.debug(
- "Not copying link to destination directory "
- "since it is a directory: %s",
- link,
- )
- return
- if req.local_file_path is None:
- # No distribution was downloaded for this requirement.
- return
-
- download_location = os.path.join(self.download_dir, link.filename)
- if not os.path.exists(download_location):
- shutil.copy(req.local_file_path, download_location)
- download_path = display_path(download_location)
- logger.info("Saved %s", download_path)
-
- def prepare_editable_requirement(
- self,
- req: InstallRequirement,
- ) -> BaseDistribution:
- """Prepare an editable requirement."""
- assert req.editable, "cannot prepare a non-editable req as editable"
-
- logger.info("Obtaining %s", req)
-
- with indent_log():
- if self.require_hashes:
- raise InstallationError(
- f"The editable requirement {req} cannot be installed when "
- "requiring hashes, because there is no single file to "
- "hash."
- )
- req.ensure_has_source_dir(self.src_dir)
- req.update_editable()
- assert req.source_dir
- req.download_info = direct_url_for_editable(req.unpacked_source_directory)
-
- dist = _get_prepared_distribution(
- req,
- self.build_tracker,
- self.finder,
- self.build_isolation,
- self.check_build_deps,
- )
-
- req.check_if_exists(self.use_user_site)
-
- return dist
-
- def prepare_installed_requirement(
- self,
- req: InstallRequirement,
- skip_reason: str,
- ) -> BaseDistribution:
- """Prepare an already-installed requirement."""
- assert req.satisfied_by, "req should have been satisfied but isn't"
- assert skip_reason is not None, (
- "did not get skip reason skipped but req.satisfied_by "
- f"is set to {req.satisfied_by}"
- )
- logger.info(
- "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
- )
- with indent_log():
- if self.require_hashes:
- logger.debug(
- "Since it is already installed, we are trusting this "
- "package without checking its hash. To ensure a "
- "completely repeatable environment, install into an "
- "empty virtualenv."
- )
- return InstalledDistribution(req).get_metadata_distribution()
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/pyproject.py b/venv/lib/python3.11/site-packages/pip/_internal/pyproject.py
deleted file mode 100644
index 8de36b8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/pyproject.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import importlib.util
-import os
-from collections import namedtuple
-from typing import Any, List, Optional
-
-from pip._vendor import tomli
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-
-from pip._internal.exceptions import (
- InstallationError,
- InvalidPyProjectBuildRequires,
- MissingPyProjectBuildRequires,
-)
-
-
-def _is_list_of_str(obj: Any) -> bool:
- return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
-
-
-def make_pyproject_path(unpacked_source_directory: str) -> str:
- return os.path.join(unpacked_source_directory, "pyproject.toml")
-
-
-BuildSystemDetails = namedtuple(
- "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
-)
-
-
-def load_pyproject_toml(
- use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
-) -> Optional[BuildSystemDetails]:
- """Load the pyproject.toml file.
-
- Parameters:
- use_pep517 - Has the user requested PEP 517 processing? None
- means the user hasn't explicitly specified.
- pyproject_toml - Location of the project's pyproject.toml file
- setup_py - Location of the project's setup.py file
- req_name - The name of the requirement we're processing (for
- error reporting)
-
- Returns:
- None if we should use the legacy code path, otherwise a tuple
- (
- requirements from pyproject.toml,
- name of PEP 517 backend,
- requirements we should check are installed after setting
- up the build environment,
- directory paths to import the backend from (backend-path),
- relative to the project root.
- )
- """
- has_pyproject = os.path.isfile(pyproject_toml)
- has_setup = os.path.isfile(setup_py)
-
- if not has_pyproject and not has_setup:
- raise InstallationError(
- f"{req_name} does not appear to be a Python project: "
- f"neither 'setup.py' nor 'pyproject.toml' found."
- )
-
- if has_pyproject:
- with open(pyproject_toml, encoding="utf-8") as f:
- pp_toml = tomli.loads(f.read())
- build_system = pp_toml.get("build-system")
- else:
- build_system = None
-
- # The following cases must use PEP 517
- # We check for use_pep517 being non-None and falsey because that means
- # the user explicitly requested --no-use-pep517. The value 0 as
- # opposed to False can occur when the value is provided via an
- # environment variable or config file option (due to the quirk of
- # strtobool() returning an integer in pip's configuration code).
- if has_pyproject and not has_setup:
- if use_pep517 is not None and not use_pep517:
- raise InstallationError(
- "Disabling PEP 517 processing is invalid: "
- "project does not have a setup.py"
- )
- use_pep517 = True
- elif build_system and "build-backend" in build_system:
- if use_pep517 is not None and not use_pep517:
- raise InstallationError(
- "Disabling PEP 517 processing is invalid: "
- "project specifies a build backend of {} "
- "in pyproject.toml".format(build_system["build-backend"])
- )
- use_pep517 = True
-
- # If we haven't worked out whether to use PEP 517 yet,
- # and the user hasn't explicitly stated a preference,
- # we do so if the project has a pyproject.toml file
- # or if we cannot import setuptools or wheel.
-
- # We fall back to PEP 517 when setuptools or the wheel package is missing,
- # so that setuptools can be installed as a default build backend.
- # For more info see:
- # https://discuss.python.org/t/pip-without-setuptools-could-the-experience-be-improved/11810/9
- # https://github.com/pypa/pip/issues/8559
- elif use_pep517 is None:
- use_pep517 = (
- has_pyproject
- or not importlib.util.find_spec("setuptools")
- or not importlib.util.find_spec("wheel")
- )
-
- # At this point, we know whether we're going to use PEP 517.
- assert use_pep517 is not None
-
- # If we're using the legacy code path, there is nothing further
- # for us to do here.
- if not use_pep517:
- return None
-
- if build_system is None:
- # Either the user has a pyproject.toml with no build-system
- # section, or the user has no pyproject.toml, but has opted in
- # explicitly via --use-pep517.
- # In the absence of any explicit backend specification, we
- # assume the setuptools backend that most closely emulates the
- # traditional direct setup.py execution, and require wheel and
- # a version of setuptools that supports that backend.
-
- build_system = {
- "requires": ["setuptools>=40.8.0"],
- "build-backend": "setuptools.build_meta:__legacy__",
- }
-
- # If we're using PEP 517, we have build system information (either
- # from pyproject.toml, or defaulted by the code above).
- # Note that at this point, we do not know if the user has actually
- # specified a backend, though.
- assert build_system is not None
-
- # Ensure that the build-system section in pyproject.toml conforms
- # to PEP 518.
-
- # Specifying the build-system table but not the requires key is invalid
- if "requires" not in build_system:
- raise MissingPyProjectBuildRequires(package=req_name)
-
- # Error out if requires is not a list of strings
- requires = build_system["requires"]
- if not _is_list_of_str(requires):
- raise InvalidPyProjectBuildRequires(
- package=req_name,
- reason="It is not a list of strings.",
- )
-
- # Each requirement must be valid as per PEP 508
- for requirement in requires:
- try:
- Requirement(requirement)
- except InvalidRequirement as error:
- raise InvalidPyProjectBuildRequires(
- package=req_name,
- reason=f"It contains an invalid requirement: {requirement!r}",
- ) from error
-
- backend = build_system.get("build-backend")
- backend_path = build_system.get("backend-path", [])
- check: List[str] = []
- if backend is None:
- # If the user didn't specify a backend, we assume they want to use
- # the setuptools backend. But we can't be sure they have included
- # a version of setuptools which supplies the backend. So we
- # make a note to check that this requirement is present once
- # we have set up the environment.
- # This is quite a lot of work to check for a very specific case. But
- # the problem is, that case is potentially quite common - projects that
- # adopted PEP 518 early for the ability to specify requirements to
- # execute setup.py, but never considered needing to mention the build
- # tools themselves. The original PEP 518 code had a similar check (but
- # implemented in a different way).
- backend = "setuptools.build_meta:__legacy__"
- check = ["setuptools>=40.8.0"]
-
- return BuildSystemDetails(requires, backend, check, backend_path)
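-
-
-# Editor's usage sketch for the function above. `src_dir` is a hypothetical
-# unpacked source tree; the call relies on this module's imports.
-src_dir = "/tmp/example-project"
-details = load_pyproject_toml(
- use_pep517=None,
- pyproject_toml=make_pyproject_path(src_dir),
- setup_py=os.path.join(src_dir, "setup.py"),
- req_name="example-project",
-)
-if details is None:
- pass # legacy setup.py code path
-else:
- requires, backend, check, backend_path = details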
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/req/__init__.py
deleted file mode 100644
index 16de903..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__init__.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import collections
-import logging
-from typing import Generator, List, Optional, Sequence, Tuple
-
-from pip._internal.utils.logging import indent_log
-
-from .req_file import parse_requirements
-from .req_install import InstallRequirement
-from .req_set import RequirementSet
-
-__all__ = [
- "RequirementSet",
- "InstallRequirement",
- "parse_requirements",
- "install_given_reqs",
-]
-
-logger = logging.getLogger(__name__)
-
-
-class InstallationResult:
- def __init__(self, name: str) -> None:
- self.name = name
-
- def __repr__(self) -> str:
- return f"InstallationResult(name={self.name!r})"
-
-
-def _validate_requirements(
- requirements: List[InstallRequirement],
-) -> Generator[Tuple[str, InstallRequirement], None, None]:
- for req in requirements:
- assert req.name, f"invalid to-be-installed requirement: {req}"
- yield req.name, req
-
-
-def install_given_reqs(
- requirements: List[InstallRequirement],
- global_options: Sequence[str],
- root: Optional[str],
- home: Optional[str],
- prefix: Optional[str],
- warn_script_location: bool,
- use_user_site: bool,
- pycompile: bool,
-) -> List[InstallationResult]:
- """
- Install everything in the given list.
-
- (to be called after having downloaded and unpacked the packages)
- """
- to_install = collections.OrderedDict(_validate_requirements(requirements))
-
- if to_install:
- logger.info(
- "Installing collected packages: %s",
- ", ".join(to_install.keys()),
- )
-
- installed = []
-
- with indent_log():
- for req_name, requirement in to_install.items():
- if requirement.should_reinstall:
- logger.info("Attempting uninstall: %s", req_name)
- with indent_log():
- uninstalled_pathset = requirement.uninstall(auto_confirm=True)
- else:
- uninstalled_pathset = None
-
- try:
- requirement.install(
- global_options,
- root=root,
- home=home,
- prefix=prefix,
- warn_script_location=warn_script_location,
- use_user_site=use_user_site,
- pycompile=pycompile,
- )
- except Exception:
- # if install did not succeed, rollback previous uninstall
- if uninstalled_pathset and not requirement.install_succeeded:
- uninstalled_pathset.rollback()
- raise
- else:
- if uninstalled_pathset and requirement.install_succeeded:
- uninstalled_pathset.commit()
-
- installed.append(InstallationResult(req_name))
-
- return installed
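-
-
-# Editor's note: the loop above follows an "uninstall first, commit on
-# success, roll back on failure" pattern. A stripped-down standalone sketch
-# of that shape (hypothetical callables, not pip APIs):
-def replace_package(uninstall, install):
- undo = uninstall() # may return None when nothing was installed
- try:
- install()
- except Exception:
- if undo is not None:
- undo.rollback()
- raise
- else:
- if undo is not None:
- undo.commit()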
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 53d6f7d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc
deleted file mode 100644
index 7404820..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/constructors.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc
deleted file mode 100644
index bee7822..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_file.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc
deleted file mode 100644
index 424829d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_install.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc
deleted file mode 100644
index 5c280f3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_set.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc
deleted file mode 100644
index a77e730..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/constructors.py b/venv/lib/python3.11/site-packages/pip/_internal/req/constructors.py
deleted file mode 100644
index 7e2d0e5..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/constructors.py
+++ /dev/null
@@ -1,576 +0,0 @@
-"""Backing implementation for InstallRequirement's various constructors
-
-The idea here is that these formed a major chunk of InstallRequirement's size,
-so moving them, and the support code dedicated to them, outside of that class
-makes the rest of the code easier to understand.
-
-These are meant to be used elsewhere within pip to create instances of
-InstallRequirement.
-"""
-
-import copy
-import logging
-import os
-import re
-from typing import Collection, Dict, List, Optional, Set, Tuple, Union
-
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
-from pip._vendor.packaging.specifiers import Specifier
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.models.index import PyPI, TestPyPI
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.req.req_file import ParsedRequirement
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.filetypes import is_archive_file
-from pip._internal.utils.misc import is_installable_dir
-from pip._internal.utils.packaging import get_requirement
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs import is_url, vcs
-
-__all__ = [
- "install_req_from_editable",
- "install_req_from_line",
- "parse_editable",
-]
-
-logger = logging.getLogger(__name__)
-operators = Specifier._operators.keys()
-
-
-def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
- m = re.match(r"^(.+)(\[[^\]]+\])$", path)
- extras = None
- if m:
- path_no_extras = m.group(1)
- extras = m.group(2)
- else:
- path_no_extras = path
-
- return path_no_extras, extras
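-
-
-# Editor's illustration of _strip_extras on typical inputs:
-assert _strip_extras("./proj[dev,test]") == ("./proj", "[dev,test]")
-assert _strip_extras("./proj") == ("./proj", None)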
-
-
-def convert_extras(extras: Optional[str]) -> Set[str]:
- if not extras:
- return set()
- return get_requirement("placeholder" + extras.lower()).extras
-
-
-def _set_requirement_extras(req: Requirement, new_extras: Set[str]) -> Requirement:
- """
- Returns a new requirement based on the given one, with the supplied extras.
- If the given requirement already has extras, those are replaced (or dropped
- if no new extras are given).
- """
- match: Optional[re.Match[str]] = re.fullmatch(
- # see https://peps.python.org/pep-0508/#complete-grammar
- r"([\w\t .-]+)(\[[^\]]*\])?(.*)",
- str(req),
- flags=re.ASCII,
- )
- # ireq.req is a valid requirement so the regex should always match
- assert (
- match is not None
- ), f"regex match on requirement {req} failed, this should never happen"
- pre: Optional[str] = match.group(1)
- post: Optional[str] = match.group(3)
- assert (
- pre is not None and post is not None
- ), f"regex group selection for requirement {req} failed, this should never happen"
- extras: str = "[%s]" % ",".join(sorted(new_extras)) if new_extras else ""
- return Requirement(f"{pre}{extras}{post}")
-
-
-def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
- """Parses an editable requirement into:
- - a requirement name
- - an URL
- - extras
- - editable options
- Accepted requirements:
- svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
- .[some_extra]
- """
-
- url = editable_req
-
- # If a file path is specified with extras, strip off the extras.
- url_no_extras, extras = _strip_extras(url)
-
- if os.path.isdir(url_no_extras):
- # Treating it as code that has already been checked out
- url_no_extras = path_to_url(url_no_extras)
-
- if url_no_extras.lower().startswith("file:"):
- package_name = Link(url_no_extras).egg_fragment
- if extras:
- return (
- package_name,
- url_no_extras,
- get_requirement("placeholder" + extras.lower()).extras,
- )
- else:
- return package_name, url_no_extras, set()
-
- for version_control in vcs:
- if url.lower().startswith(f"{version_control}:"):
- url = f"{version_control}+{url}"
- break
-
- link = Link(url)
-
- if not link.is_vcs:
- backends = ", ".join(vcs.all_schemes)
- raise InstallationError(
- f"{editable_req} is not a valid editable requirement. "
- f"It should either be a path to a local project or a VCS URL "
- f"(beginning with {backends})."
- )
-
- package_name = link.egg_fragment
- if not package_name:
- raise InstallationError(
- "Could not detect requirement name for '{}', please specify one "
- "with #egg=your_package_name".format(editable_req)
- )
- return package_name, url, set()
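-
-
-# Editor's worked examples of the shapes parse_editable accepts (URLs and
-# paths are illustrative):
-# parse_editable("git+https://example.com/repo.git#egg=foo")
-# -> ("foo", "git+https://example.com/repo.git#egg=foo", set())
-# parse_editable(".[dev]"), with "." an existing project directory,
-# -> (None, "file:///abs/path/to/project", {"dev"})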
-
-
-def check_first_requirement_in_file(filename: str) -> None:
- """Check if file is parsable as a requirements file.
-
- This is heavily based on ``pkg_resources.parse_requirements``, but
- simplified to just check the first meaningful line.
-
- :raises InvalidRequirement: If the first meaningful line cannot be parsed
- as a requirement.
- """
- with open(filename, encoding="utf-8", errors="ignore") as f:
- # Create a steppable iterator, so we can handle \-continuations.
- lines = (
- line
- for line in (line.strip() for line in f)
- if line and not line.startswith("#") # Skip blank lines/comments.
- )
-
- for line in lines:
- # Drop comments -- a hash without a space may be in a URL.
- if " #" in line:
- line = line[: line.find(" #")]
- # If there is a line continuation, drop it, and append the next line.
- if line.endswith("\\"):
- line = line[:-2].strip() + next(lines, "")
- Requirement(line)
- return
-
-
-def deduce_helpful_msg(req: str) -> str:
- """Returns helpful msg in case requirements file does not exist,
- or cannot be parsed.
-
- :param req: Requirements file path
- """
- if not os.path.exists(req):
- return f" File '{req}' does not exist."
- msg = " The path does exist. "
- # Try to parse and check if it is a requirements file.
- try:
- check_first_requirement_in_file(req)
- except InvalidRequirement:
- logger.debug("Cannot parse '%s' as requirements file", req)
- else:
- msg += (
- f"The argument you provided "
- f"({req}) appears to be a"
- f" requirements file. If that is the"
- f" case, use the '-r' flag to install"
- f" the packages specified within it."
- )
- return msg
-
-
-class RequirementParts:
- def __init__(
- self,
- requirement: Optional[Requirement],
- link: Optional[Link],
- markers: Optional[Marker],
- extras: Set[str],
- ):
- self.requirement = requirement
- self.link = link
- self.markers = markers
- self.extras = extras
-
-
-def parse_req_from_editable(editable_req: str) -> RequirementParts:
- name, url, extras_override = parse_editable(editable_req)
-
- if name is not None:
- try:
- req: Optional[Requirement] = Requirement(name)
- except InvalidRequirement:
- raise InstallationError(f"Invalid requirement: '{name}'")
- else:
- req = None
-
- link = Link(url)
-
- return RequirementParts(req, link, None, extras_override)
-
-
-# ---- The actual constructors follow ----
-
-
-def install_req_from_editable(
- editable_req: str,
- comes_from: Optional[Union[InstallRequirement, str]] = None,
- *,
- use_pep517: Optional[bool] = None,
- isolated: bool = False,
- global_options: Optional[List[str]] = None,
- hash_options: Optional[Dict[str, List[str]]] = None,
- constraint: bool = False,
- user_supplied: bool = False,
- permit_editable_wheels: bool = False,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
-) -> InstallRequirement:
- parts = parse_req_from_editable(editable_req)
-
- return InstallRequirement(
- parts.requirement,
- comes_from=comes_from,
- user_supplied=user_supplied,
- editable=True,
- permit_editable_wheels=permit_editable_wheels,
- link=parts.link,
- constraint=constraint,
- use_pep517=use_pep517,
- isolated=isolated,
- global_options=global_options,
- hash_options=hash_options,
- config_settings=config_settings,
- extras=parts.extras,
- )
-
-
-def _looks_like_path(name: str) -> bool:
- """Checks whether the string "looks like" a path on the filesystem.
-
- This does not check whether the target actually exists; it only judges
- by appearance.
-
- Returns true if any of the following conditions is true:
- * a path separator is found (either os.path.sep or os.path.altsep);
- * the name starts with a dot (representing the current directory).
- """
- if os.path.sep in name:
- return True
- if os.path.altsep is not None and os.path.altsep in name:
- return True
- if name.startswith("."):
- return True
- return False
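-
-
-# Editor's illustration: names with a separator or a leading dot count as
-# paths, while bare project names do not.
-assert _looks_like_path("./requests")
-assert _looks_like_path("vendor/requests")
-assert not _looks_like_path("requests")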
-
-
-def _get_url_from_path(path: str, name: str) -> Optional[str]:
- """
- First, check whether the provided path is an installable directory; if it
- is, return its URL.
-
- Otherwise, check whether the path is an archive file (such as a .whl); if
- it is not, return None. If the archive file exists, return its URL. If it
- does not exist but the name contains an '@' with a non-path-like prefix,
- return None so the name is treated as a PEP 440 URL requirement instead.
- """
- if _looks_like_path(name) and os.path.isdir(path):
- if is_installable_dir(path):
- return path_to_url(path)
- # TODO: The is_installable_dir test here might not be necessary
- # now that it is done in load_pyproject_toml too.
- raise InstallationError(
- f"Directory {name!r} is not installable. Neither 'setup.py' "
- "nor 'pyproject.toml' found."
- )
- if not is_archive_file(path):
- return None
- if os.path.isfile(path):
- return path_to_url(path)
- urlreq_parts = name.split("@", 1)
- if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
- # If the path contains '@' and the part before it does not look
- # like a path, try to treat it as a PEP 440 URL req instead.
- return None
- logger.warning(
- "Requirement %r looks like a filename, but the file does not exist",
- name,
- )
- return path_to_url(path)
-
-
-def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
- if is_url(name):
- marker_sep = "; "
- else:
- marker_sep = ";"
- if marker_sep in name:
- name, markers_as_string = name.split(marker_sep, 1)
- markers_as_string = markers_as_string.strip()
- if not markers_as_string:
- markers = None
- else:
- markers = Marker(markers_as_string)
- else:
- markers = None
- name = name.strip()
- req_as_string = None
- path = os.path.normpath(os.path.abspath(name))
- link = None
- extras_as_string = None
-
- if is_url(name):
- link = Link(name)
- else:
- p, extras_as_string = _strip_extras(path)
- url = _get_url_from_path(p, name)
- if url is not None:
- link = Link(url)
-
- # it's a local file, dir, or url
- if link:
- # Handle relative file URLs
- if link.scheme == "file" and re.search(r"\.\./", link.url):
- link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
- # wheel file
- if link.is_wheel:
- wheel = Wheel(link.filename) # can raise InvalidWheelFilename
- req_as_string = f"{wheel.name}=={wheel.version}"
- else:
- # set the req to the egg fragment. when it's not there, this
- # will become an 'unnamed' requirement
- req_as_string = link.egg_fragment
-
- # a requirement specifier
- else:
- req_as_string = name
-
- extras = convert_extras(extras_as_string)
-
- def with_source(text: str) -> str:
- if not line_source:
- return text
- return f"{text} (from {line_source})"
-
- def _parse_req_string(req_as_string: str) -> Requirement:
- try:
- req = get_requirement(req_as_string)
- except InvalidRequirement:
- if os.path.sep in req_as_string:
- add_msg = "It looks like a path."
- add_msg += deduce_helpful_msg(req_as_string)
- elif "=" in req_as_string and not any(
- op in req_as_string for op in operators
- ):
- add_msg = "= is not a valid operator. Did you mean == ?"
- else:
- add_msg = ""
- msg = with_source(f"Invalid requirement: {req_as_string!r}")
- if add_msg:
- msg += f"\nHint: {add_msg}"
- raise InstallationError(msg)
- else:
- # Deprecate extras after specifiers: "name>=1.0[extras]"
- # This currently works by accident because _strip_extras() parses
- # any extras at the end of the string and those are saved in
- # RequirementParts
- for spec in req.specifier:
- spec_str = str(spec)
- if spec_str.endswith("]"):
- msg = f"Extras after version '{spec_str}'."
- raise InstallationError(msg)
- return req
-
- if req_as_string is not None:
- req: Optional[Requirement] = _parse_req_string(req_as_string)
- else:
- req = None
-
- return RequirementParts(req, link, markers, extras)
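-
-
-# Editor's worked examples of parse_req_from_line outcomes (file names are
-# illustrative; the wheel is assumed to exist on disk):
-# "requests>=2.0; python_version<'3.12'" -> a named requirement plus a
-# parsed environment marker, and no link
-# "./dist/foo-1.0-py3-none-any.whl" -> requirement "foo==1.0" plus a
-# file: link derived from the wheel filename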
-
-
-def install_req_from_line(
- name: str,
- comes_from: Optional[Union[str, InstallRequirement]] = None,
- *,
- use_pep517: Optional[bool] = None,
- isolated: bool = False,
- global_options: Optional[List[str]] = None,
- hash_options: Optional[Dict[str, List[str]]] = None,
- constraint: bool = False,
- line_source: Optional[str] = None,
- user_supplied: bool = False,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
-) -> InstallRequirement:
- """Creates an InstallRequirement from a name, which might be a
- requirement, directory containing 'setup.py', filename, or URL.
-
- :param line_source: An optional string describing where the line is from,
- for logging purposes in case of an error.
- """
- parts = parse_req_from_line(name, line_source)
-
- return InstallRequirement(
- parts.requirement,
- comes_from,
- link=parts.link,
- markers=parts.markers,
- use_pep517=use_pep517,
- isolated=isolated,
- global_options=global_options,
- hash_options=hash_options,
- config_settings=config_settings,
- constraint=constraint,
- extras=parts.extras,
- user_supplied=user_supplied,
- )
-
-
-def install_req_from_req_string(
- req_string: str,
- comes_from: Optional[InstallRequirement] = None,
- isolated: bool = False,
- use_pep517: Optional[bool] = None,
- user_supplied: bool = False,
-) -> InstallRequirement:
- try:
- req = get_requirement(req_string)
- except InvalidRequirement:
- raise InstallationError(f"Invalid requirement: '{req_string}'")
-
- domains_not_allowed = [
- PyPI.file_storage_domain,
- TestPyPI.file_storage_domain,
- ]
- if (
- req.url
- and comes_from
- and comes_from.link
- and comes_from.link.netloc in domains_not_allowed
- ):
- # Explicitly disallow pypi packages that depend on external urls
- raise InstallationError(
- "Packages installed from PyPI cannot depend on packages "
- "which are not also hosted on PyPI.\n"
- f"{comes_from.name} depends on {req} "
- )
-
- return InstallRequirement(
- req,
- comes_from,
- isolated=isolated,
- use_pep517=use_pep517,
- user_supplied=user_supplied,
- )
-
-
-def install_req_from_parsed_requirement(
- parsed_req: ParsedRequirement,
- isolated: bool = False,
- use_pep517: Optional[bool] = None,
- user_supplied: bool = False,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
-) -> InstallRequirement:
- if parsed_req.is_editable:
- req = install_req_from_editable(
- parsed_req.requirement,
- comes_from=parsed_req.comes_from,
- use_pep517=use_pep517,
- constraint=parsed_req.constraint,
- isolated=isolated,
- user_supplied=user_supplied,
- config_settings=config_settings,
- )
-
- else:
- req = install_req_from_line(
- parsed_req.requirement,
- comes_from=parsed_req.comes_from,
- use_pep517=use_pep517,
- isolated=isolated,
- global_options=(
- parsed_req.options.get("global_options", [])
- if parsed_req.options
- else []
- ),
- hash_options=(
- parsed_req.options.get("hashes", {}) if parsed_req.options else {}
- ),
- constraint=parsed_req.constraint,
- line_source=parsed_req.line_source,
- user_supplied=user_supplied,
- config_settings=config_settings,
- )
- return req
-
-
-def install_req_from_link_and_ireq(
- link: Link, ireq: InstallRequirement
-) -> InstallRequirement:
- return InstallRequirement(
- req=ireq.req,
- comes_from=ireq.comes_from,
- editable=ireq.editable,
- link=link,
- markers=ireq.markers,
- use_pep517=ireq.use_pep517,
- isolated=ireq.isolated,
- global_options=ireq.global_options,
- hash_options=ireq.hash_options,
- config_settings=ireq.config_settings,
- user_supplied=ireq.user_supplied,
- )
-
-
-def install_req_drop_extras(ireq: InstallRequirement) -> InstallRequirement:
- """
- Creates a new InstallRequirement using the given template but without
- any extras. Sets the original requirement as the new one's parent
- (comes_from).
- """
- return InstallRequirement(
- req=(
- _set_requirement_extras(ireq.req, set()) if ireq.req is not None else None
- ),
- comes_from=ireq,
- editable=ireq.editable,
- link=ireq.link,
- markers=ireq.markers,
- use_pep517=ireq.use_pep517,
- isolated=ireq.isolated,
- global_options=ireq.global_options,
- hash_options=ireq.hash_options,
- constraint=ireq.constraint,
- extras=[],
- config_settings=ireq.config_settings,
- user_supplied=ireq.user_supplied,
- permit_editable_wheels=ireq.permit_editable_wheels,
- )
-
-
-def install_req_extend_extras(
- ireq: InstallRequirement,
- extras: Collection[str],
-) -> InstallRequirement:
- """
- Returns a copy of an installation requirement with some additional extras.
- Makes a shallow copy of the ireq object.
- """
- result = copy.copy(ireq)
- result.extras = {*ireq.extras, *extras}
- result.req = (
- _set_requirement_extras(ireq.req, result.extras)
- if ireq.req is not None
- else None
- )
- return result
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/req_file.py b/venv/lib/python3.11/site-packages/pip/_internal/req/req_file.py
deleted file mode 100644
index 1ef3d5e..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/req_file.py
+++ /dev/null
@@ -1,554 +0,0 @@
-"""
-Requirements file parsing
-"""
-
-import logging
-import optparse
-import os
-import re
-import shlex
-import urllib.parse
-from optparse import Values
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Dict,
- Generator,
- Iterable,
- List,
- Optional,
- Tuple,
-)
-
-from pip._internal.cli import cmdoptions
-from pip._internal.exceptions import InstallationError, RequirementsFileParseError
-from pip._internal.models.search_scope import SearchScope
-from pip._internal.network.session import PipSession
-from pip._internal.network.utils import raise_for_status
-from pip._internal.utils.encoding import auto_decode
-from pip._internal.utils.urls import get_url_scheme
-
-if TYPE_CHECKING:
- # NoReturn introduced in 3.6.2; imported only for type checking to maintain
- # pip compatibility with older patch versions of Python 3.6
- from typing import NoReturn
-
- from pip._internal.index.package_finder import PackageFinder
-
-__all__ = ["parse_requirements"]
-
-ReqFileLines = Iterable[Tuple[int, str]]
-
-LineParser = Callable[[str], Tuple[str, Values]]
-
-SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
-COMMENT_RE = re.compile(r"(^|\s+)#.*$")
-
-# Matches environment variable-style values in '${MY_VARIABLE_1}' with the
-# variable name consisting of only uppercase letters, digits or the '_'
-# (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
-# 2013 Edition.
-ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
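-
-# Editor's illustration: findall returns (variable, name) pairs, e.g.
-# ENV_VAR_RE.findall("https://${API_TOKEN}@example.com/simple")
-# -> [("${API_TOKEN}", "API_TOKEN")]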
-
-SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
- cmdoptions.index_url,
- cmdoptions.extra_index_url,
- cmdoptions.no_index,
- cmdoptions.constraints,
- cmdoptions.requirements,
- cmdoptions.editable,
- cmdoptions.find_links,
- cmdoptions.no_binary,
- cmdoptions.only_binary,
- cmdoptions.prefer_binary,
- cmdoptions.require_hashes,
- cmdoptions.pre,
- cmdoptions.trusted_host,
- cmdoptions.use_new_feature,
-]
-
-# options to be passed to requirements
-SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
- cmdoptions.global_options,
- cmdoptions.hash,
- cmdoptions.config_settings,
-]
-
-SUPPORTED_OPTIONS_EDITABLE_REQ: List[Callable[..., optparse.Option]] = [
- cmdoptions.config_settings,
-]
-
-
-# the 'dest' string values
-SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
-SUPPORTED_OPTIONS_EDITABLE_REQ_DEST = [
- str(o().dest) for o in SUPPORTED_OPTIONS_EDITABLE_REQ
-]
-
-logger = logging.getLogger(__name__)
-
-
-class ParsedRequirement:
- def __init__(
- self,
- requirement: str,
- is_editable: bool,
- comes_from: str,
- constraint: bool,
- options: Optional[Dict[str, Any]] = None,
- line_source: Optional[str] = None,
- ) -> None:
- self.requirement = requirement
- self.is_editable = is_editable
- self.comes_from = comes_from
- self.options = options
- self.constraint = constraint
- self.line_source = line_source
-
-
-class ParsedLine:
- def __init__(
- self,
- filename: str,
- lineno: int,
- args: str,
- opts: Values,
- constraint: bool,
- ) -> None:
- self.filename = filename
- self.lineno = lineno
- self.opts = opts
- self.constraint = constraint
-
- if args:
- self.is_requirement = True
- self.is_editable = False
- self.requirement = args
- elif opts.editables:
- self.is_requirement = True
- self.is_editable = True
- # We don't support multiple -e on one line
- self.requirement = opts.editables[0]
- else:
- self.is_requirement = False
-
-
-def parse_requirements(
- filename: str,
- session: PipSession,
- finder: Optional["PackageFinder"] = None,
- options: Optional[optparse.Values] = None,
- constraint: bool = False,
-) -> Generator[ParsedRequirement, None, None]:
- """Parse a requirements file and yield ParsedRequirement instances.
-
- :param filename: Path or url of requirements file.
- :param session: PipSession instance.
- :param finder: Instance of PackageFinder.
- :param options: CLI options.
- :param constraint: If true, parse a constraints file rather than a
- requirements file.
- """
- line_parser = get_line_parser(finder)
- parser = RequirementsFileParser(session, line_parser)
-
- for parsed_line in parser.parse(filename, constraint):
- parsed_req = handle_line(
- parsed_line, options=options, finder=finder, session=session
- )
- if parsed_req is not None:
- yield parsed_req
-
-
-def preprocess(content: str) -> ReqFileLines:
- """Split, filter, and join lines, and return a line iterator
-
- :param content: the content of the requirements file
- """
- lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
- lines_enum = join_lines(lines_enum)
- lines_enum = ignore_comments(lines_enum)
- lines_enum = expand_env_variables(lines_enum)
- return lines_enum
-
-
-def handle_requirement_line(
- line: ParsedLine,
- options: Optional[optparse.Values] = None,
-) -> ParsedRequirement:
- # preserve for the nested code path
- line_comes_from = "{} {} (line {})".format(
- "-c" if line.constraint else "-r",
- line.filename,
- line.lineno,
- )
-
- assert line.is_requirement
-
- # get the options that apply to requirements
- if line.is_editable:
- supported_dest = SUPPORTED_OPTIONS_EDITABLE_REQ_DEST
- else:
- supported_dest = SUPPORTED_OPTIONS_REQ_DEST
- req_options = {}
- for dest in supported_dest:
- if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
- req_options[dest] = line.opts.__dict__[dest]
-
- line_source = f"line {line.lineno} of {line.filename}"
- return ParsedRequirement(
- requirement=line.requirement,
- is_editable=line.is_editable,
- comes_from=line_comes_from,
- constraint=line.constraint,
- options=req_options,
- line_source=line_source,
- )
-
-
-def handle_option_line(
- opts: Values,
- filename: str,
- lineno: int,
- finder: Optional["PackageFinder"] = None,
- options: Optional[optparse.Values] = None,
- session: Optional[PipSession] = None,
-) -> None:
- if opts.hashes:
- logger.warning(
- "%s line %s has --hash but no requirement, and will be ignored.",
- filename,
- lineno,
- )
-
- if options:
- # percolate options upward
- if opts.require_hashes:
- options.require_hashes = opts.require_hashes
- if opts.features_enabled:
- options.features_enabled.extend(
- f for f in opts.features_enabled if f not in options.features_enabled
- )
-
- # set finder options
- if finder:
- find_links = finder.find_links
- index_urls = finder.index_urls
- no_index = finder.search_scope.no_index
- if opts.no_index is True:
- no_index = True
- index_urls = []
- if opts.index_url and not no_index:
- index_urls = [opts.index_url]
- if opts.extra_index_urls and not no_index:
- index_urls.extend(opts.extra_index_urls)
- if opts.find_links:
- # FIXME: it would be nice to keep track of the source
- # of the find_links: support a find-links local path
- # relative to a requirements file.
- value = opts.find_links[0]
- req_dir = os.path.dirname(os.path.abspath(filename))
- relative_to_reqs_file = os.path.join(req_dir, value)
- if os.path.exists(relative_to_reqs_file):
- value = relative_to_reqs_file
- find_links.append(value)
-
- if session:
- # We need to update the auth urls in session
- session.update_index_urls(index_urls)
-
- search_scope = SearchScope(
- find_links=find_links,
- index_urls=index_urls,
- no_index=no_index,
- )
- finder.search_scope = search_scope
-
- if opts.pre:
- finder.set_allow_all_prereleases()
-
- if opts.prefer_binary:
- finder.set_prefer_binary()
-
- if session:
- for host in opts.trusted_hosts or []:
- source = f"line {lineno} of {filename}"
- session.add_trusted_host(host, source=source)
-
-
-def handle_line(
- line: ParsedLine,
- options: Optional[optparse.Values] = None,
- finder: Optional["PackageFinder"] = None,
- session: Optional[PipSession] = None,
-) -> Optional[ParsedRequirement]:
- """Handle a single parsed requirements line; This can result in
- creating/yielding requirements, or updating the finder.
-
- :param line: The parsed line to be processed.
- :param options: CLI options.
- :param finder: The finder - updated by non-requirement lines.
- :param session: The session - updated by non-requirement lines.
-
- Returns a ParsedRequirement object if the line is a requirement line,
- otherwise returns None.
-
- For lines that contain requirements, the only options that have an effect
- are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
- requirement. Other options from SUPPORTED_OPTIONS may be present, but are
- ignored.
-
- For lines that do not contain requirements, the only options that have an
- effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
- be present, but are ignored. These lines may contain multiple options
- (although our docs imply only one is supported), and all are parsed and
- affect the finder.
- """
-
- if line.is_requirement:
- parsed_req = handle_requirement_line(line, options)
- return parsed_req
- else:
- handle_option_line(
- line.opts,
- line.filename,
- line.lineno,
- finder,
- options,
- session,
- )
- return None
-
-
-class RequirementsFileParser:
- def __init__(
- self,
- session: PipSession,
- line_parser: LineParser,
- ) -> None:
- self._session = session
- self._line_parser = line_parser
-
- def parse(
- self, filename: str, constraint: bool
- ) -> Generator[ParsedLine, None, None]:
- """Parse a given file, yielding parsed lines."""
- yield from self._parse_and_recurse(filename, constraint)
-
- def _parse_and_recurse(
- self, filename: str, constraint: bool
- ) -> Generator[ParsedLine, None, None]:
- for line in self._parse_file(filename, constraint):
- if not line.is_requirement and (
- line.opts.requirements or line.opts.constraints
- ):
- # parse a nested requirements file
- if line.opts.requirements:
- req_path = line.opts.requirements[0]
- nested_constraint = False
- else:
- req_path = line.opts.constraints[0]
- nested_constraint = True
-
- # original file is over http
- if SCHEME_RE.search(filename):
- # do a url join so relative paths work
- req_path = urllib.parse.urljoin(filename, req_path)
- # original file and nested file are paths
- elif not SCHEME_RE.search(req_path):
- # do a join so relative paths work
- req_path = os.path.join(
- os.path.dirname(filename),
- req_path,
- )
-
- yield from self._parse_and_recurse(req_path, nested_constraint)
- else:
- yield line
-
- def _parse_file(
- self, filename: str, constraint: bool
- ) -> Generator[ParsedLine, None, None]:
- _, content = get_file_content(filename, self._session)
-
- lines_enum = preprocess(content)
-
- for line_number, line in lines_enum:
- try:
- args_str, opts = self._line_parser(line)
- except OptionParsingError as e:
- # add offending line
- msg = f"Invalid requirement: {line}\n{e.msg}"
- raise RequirementsFileParseError(msg)
-
- yield ParsedLine(
- filename,
- line_number,
- args_str,
- opts,
- constraint,
- )
-
-
-def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
- def parse_line(line: str) -> Tuple[str, Values]:
- # Build new parser for each line since it accumulates appendable
- # options.
- parser = build_parser()
- defaults = parser.get_default_values()
- defaults.index_url = None
- if finder:
- defaults.format_control = finder.format_control
-
- args_str, options_str = break_args_options(line)
-
- try:
- options = shlex.split(options_str)
- except ValueError as e:
- raise OptionParsingError(f"Could not split options: {options_str}") from e
-
- opts, _ = parser.parse_args(options, defaults)
-
- return args_str, opts
-
- return parse_line
-
-
-def break_args_options(line: str) -> Tuple[str, str]:
- """Break up the line into an args and options string. We only want to shlex
- (and then optparse) the options, not the args. args can contain markers
- which are corrupted by shlex.
- """
- tokens = line.split(" ")
- args = []
- options = tokens[:]
- for token in tokens:
- if token.startswith("-"):
- break
- else:
- args.append(token)
- options.pop(0)
- return " ".join(args), " ".join(options)
-
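A usage sketch of the helper above; note that pip._internal is not a public API, so this import path may change between pip releases:

    from pip._internal.req.req_file import break_args_options

    # Everything before the first dash-prefixed token stays in the args
    # string; the remainder is what later gets shlex'd and optparse'd.
    args, opts = break_args_options("requests>=2.0 --hash=sha256:0123abcd")
    assert args == "requests>=2.0"
    assert opts == "--hash=sha256:0123abcd"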
-
-class OptionParsingError(Exception):
- def __init__(self, msg: str) -> None:
- self.msg = msg
-
-
-def build_parser() -> optparse.OptionParser:
- """
- Return a parser for parsing requirement lines
- """
- parser = optparse.OptionParser(add_help_option=False)
-
- option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
- for option_factory in option_factories:
- option = option_factory()
- parser.add_option(option)
-
- # By default optparse sys.exits on parsing errors. We want to wrap
- # that in our own exception.
- def parser_exit(self: Any, msg: str) -> "NoReturn":
- raise OptionParsingError(msg)
-
- # NOTE: mypy disallows assigning to a method
- # https://github.com/python/mypy/issues/2427
- parser.exit = parser_exit # type: ignore
-
- return parser
-
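A minimal sketch of driving this parser directly (internal API, shown only to illustrate that parse errors surface as OptionParsingError rather than a sys.exit):

    from pip._internal.req.req_file import OptionParsingError, build_parser

    parser = build_parser()
    opts, _ = parser.parse_args(
        ["--index-url", "https://example.com/simple"],
        parser.get_default_values(),
    )
    print(opts.index_url)  # https://example.com/simple

    try:
        parser.parse_args(["--no-such-flag"], parser.get_default_values())
    except OptionParsingError as exc:
        print(exc.msg)  # message names the unknown flag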
-
-def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
- """Joins a line ending in '\' with the previous line (except when following
- comments). The joined line takes on the index of the first line.
- """
- primary_line_number = None
- new_line: List[str] = []
- for line_number, line in lines_enum:
- if not line.endswith("\\") or COMMENT_RE.match(line):
- if COMMENT_RE.match(line):
- # this ensures comments are always matched later
- line = " " + line
- if new_line:
- new_line.append(line)
- assert primary_line_number is not None
- yield primary_line_number, "".join(new_line)
- new_line = []
- else:
- yield line_number, line
- else:
- if not new_line:
- primary_line_number = line_number
- new_line.append(line.strip("\\"))
-
- # last line contains \
- if new_line:
- assert primary_line_number is not None
- yield primary_line_number, "".join(new_line)
-
- # TODO: handle space after '\'.
-
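A quick behavioral sketch (the line numbers are those produced by the enumerate in preprocess(), so the joined line keeps the first line's number):

    from pip._internal.req.req_file import join_lines

    lines = [(1, "numpy \\"), (2, ">=1.21")]
    print(list(join_lines(iter(lines))))
    # [(1, 'numpy >=1.21')]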
-
-def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
- """
- Strips comments and filters out empty lines.
- """
- for line_number, line in lines_enum:
- line = COMMENT_RE.sub("", line)
- line = line.strip()
- if line:
- yield line_number, line
-
-
-def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
- """Replace all environment variables that can be retrieved via `os.getenv`.
-
- The only allowed format for environment variables defined in the
- requirement file is `${MY_VARIABLE_1}` to ensure two things:
-
- 1. Strings that contain a `$` aren't accidentally (partially) expanded.
- 2. Requirement files behave consistently across platforms.
-
- These points are the result of a discussion on the `github pull
- request #3514 <https://github.com/pypa/pip/pull/3514>`_.
-
- Valid characters in variable names follow the `POSIX standard
- <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
- to uppercase letters, digits and the `_` (underscore).
- """
- for line_number, line in lines_enum:
- for env_var, var_name in ENV_VAR_RE.findall(line):
- value = os.getenv(var_name)
- if not value:
- continue
-
- line = line.replace(env_var, value)
-
- yield line_number, line
-
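A short sketch with a hypothetical API_TOKEN variable; names not matching the ${UPPER_CASE} form, or unset variables, are left untouched:

    import os

    from pip._internal.req.req_file import expand_env_variables

    os.environ["API_TOKEN"] = "s3cret"  # hypothetical value for this demo
    lines = [(1, "https://user:${API_TOKEN}@example.com/simple")]
    print(list(expand_env_variables(iter(lines))))
    # [(1, 'https://user:s3cret@example.com/simple')]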
-
-def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
- """Gets the content of a file; it may be a filename, file: URL, or
- http: URL. Returns (location, content). Content is unicode.
- Respects # -*- coding: declarations on the retrieved files.
-
- :param url: File path or url.
- :param session: PipSession instance.
- """
- scheme = get_url_scheme(url)
-
- # Pip has special support for file:// URLs (LocalFSAdapter).
- if scheme in ["http", "https", "file"]:
- resp = session.get(url)
- raise_for_status(resp)
- return resp.url, resp.text
-
- # Assume this is a bare path.
- try:
- with open(url, "rb") as f:
- content = auto_decode(f.read())
- except OSError as exc:
- raise InstallationError(f"Could not open requirements file: {exc}")
- return url, content
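Taken together, join_lines, ignore_comments and expand_env_variables make up the preprocess() pipeline consumed by _parse_file() above. A hedged end-to-end sketch of that composition:

    from pip._internal.req.req_file import (
        expand_env_variables,
        ignore_comments,
        join_lines,
    )

    content = "numpy \\\n>=1.21  # pinned floor\n\n-e ./pkg\n"
    lines = enumerate(content.splitlines(), start=1)
    for number, line in expand_env_variables(ignore_comments(join_lines(lines))):
        print(number, line)
    # 1 numpy >=1.21
    # 4 -e ./pkg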
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/req_install.py b/venv/lib/python3.11/site-packages/pip/_internal/req/req_install.py
deleted file mode 100644
index a65611c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/req_install.py
+++ /dev/null
@@ -1,923 +0,0 @@
-import functools
-import logging
-import os
-import shutil
-import sys
-import uuid
-import zipfile
-from optparse import Values
-from pathlib import Path
-from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
-
-from pip._vendor.packaging.markers import Marker
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import Version
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-
-from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
-from pip._internal.exceptions import InstallationError, PreviousBuildDirError
-from pip._internal.locations import get_scheme
-from pip._internal.metadata import (
- BaseDistribution,
- get_default_environment,
- get_directory_distribution,
- get_wheel_distribution,
-)
-from pip._internal.metadata.base import FilesystemWheel
-from pip._internal.models.direct_url import DirectUrl
-from pip._internal.models.link import Link
-from pip._internal.operations.build.metadata import generate_metadata
-from pip._internal.operations.build.metadata_editable import generate_editable_metadata
-from pip._internal.operations.build.metadata_legacy import (
- generate_metadata as generate_metadata_legacy,
-)
-from pip._internal.operations.install.editable_legacy import (
- install_editable as install_editable_legacy,
-)
-from pip._internal.operations.install.wheel import install_wheel
-from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
-from pip._internal.req.req_uninstall import UninstallPathSet
-from pip._internal.utils.deprecation import deprecated
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.misc import (
- ConfiguredBuildBackendHookCaller,
- ask_path_exists,
- backup_dir,
- display_path,
- hide_url,
- is_installable_dir,
- redact_auth_from_requirement,
- redact_auth_from_url,
-)
-from pip._internal.utils.packaging import safe_extra
-from pip._internal.utils.subprocess import runner_with_spinner_message
-from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
-from pip._internal.utils.unpacking import unpack_file
-from pip._internal.utils.virtualenv import running_under_virtualenv
-from pip._internal.vcs import vcs
-
-logger = logging.getLogger(__name__)
-
-
-class InstallRequirement:
- """
- Represents something that may be installed later on, may have information
- about where to fetch the relevant requirement, and also contains logic for
- installing it.
- """
-
- def __init__(
- self,
- req: Optional[Requirement],
- comes_from: Optional[Union[str, "InstallRequirement"]],
- editable: bool = False,
- link: Optional[Link] = None,
- markers: Optional[Marker] = None,
- use_pep517: Optional[bool] = None,
- isolated: bool = False,
- *,
- global_options: Optional[List[str]] = None,
- hash_options: Optional[Dict[str, List[str]]] = None,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- constraint: bool = False,
- extras: Collection[str] = (),
- user_supplied: bool = False,
- permit_editable_wheels: bool = False,
- ) -> None:
- assert req is None or isinstance(req, Requirement), req
- self.req = req
- self.comes_from = comes_from
- self.constraint = constraint
- self.editable = editable
- self.permit_editable_wheels = permit_editable_wheels
-
- # source_dir is the local directory where the linked requirement is
- # located, or unpacked. In case unpacking is needed, creating and
- # populating source_dir is done by the RequirementPreparer. Note this
- # is not necessarily the directory where pyproject.toml or setup.py is
- # located - that one is obtained via unpacked_source_directory.
- self.source_dir: Optional[str] = None
- if self.editable:
- assert link
- if link.is_file:
- self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
-
- # original_link is the direct URL that was provided by the user for the
- # requirement, either directly or via a constraints file.
- if link is None and req and req.url:
- # PEP 508 URL requirement
- link = Link(req.url)
- self.link = self.original_link = link
-
- # When this InstallRequirement is a wheel obtained from the cache of locally
- # built wheels, this is the source link corresponding to the cache entry, which
- # was used to download and build the cached wheel.
- self.cached_wheel_source_link: Optional[Link] = None
-
- # Information about the location of the artifact that was downloaded. This
- # property is guaranteed to be set in resolver results.
- self.download_info: Optional[DirectUrl] = None
-
- # Path to any downloaded or already-existing package.
- self.local_file_path: Optional[str] = None
- if self.link and self.link.is_file:
- self.local_file_path = self.link.file_path
-
- if extras:
- self.extras = extras
- elif req:
- self.extras = req.extras
- else:
- self.extras = set()
- if markers is None and req:
- markers = req.marker
- self.markers = markers
-
- # This holds the Distribution object if this requirement is already installed.
- self.satisfied_by: Optional[BaseDistribution] = None
- # Whether the installation process should try to uninstall an existing
- # distribution before installing this requirement.
- self.should_reinstall = False
- # Temporary build location
- self._temp_build_dir: Optional[TempDirectory] = None
- # Set to True after successful installation
- self.install_succeeded: Optional[bool] = None
- # Supplied options
- self.global_options = global_options if global_options else []
- self.hash_options = hash_options if hash_options else {}
- self.config_settings = config_settings
- # Set to True after successful preparation of this requirement
- self.prepared = False
- # User supplied requirements are explicitly requested for installation
- # by the user via CLI arguments or requirements files, as opposed to,
- # e.g. dependencies, extras or constraints.
- self.user_supplied = user_supplied
-
- self.isolated = isolated
- self.build_env: BuildEnvironment = NoOpBuildEnvironment()
-
- # For PEP 517, the directory where we request the project metadata
- # gets stored. We need this to pass to build_wheel, so the backend
- # can ensure that the wheel matches the metadata (see the PEP for
- # details).
- self.metadata_directory: Optional[str] = None
-
- # The static build requirements (from pyproject.toml)
- self.pyproject_requires: Optional[List[str]] = None
-
- # Build requirements that we will check are available
- self.requirements_to_check: List[str] = []
-
- # The PEP 517 backend we should use to build the project
- self.pep517_backend: Optional[BuildBackendHookCaller] = None
-
- # Are we using PEP 517 for this requirement?
- # After pyproject.toml has been loaded, the only valid values are True
- # and False. Before loading, None is valid (meaning "use the default").
- # Setting an explicit value before loading pyproject.toml is supported,
- # but after loading this flag should be treated as read only.
- self.use_pep517 = use_pep517
-
- # If config settings are provided, enforce PEP 517.
- if self.config_settings:
- if self.use_pep517 is False:
- logger.warning(
- "--no-use-pep517 ignored for %s "
- "because --config-settings are specified.",
- self,
- )
- self.use_pep517 = True
-
- # This requirement needs more preparation before it can be built
- self.needs_more_preparation = False
-
- # This requirement needs to be unpacked before it can be installed.
- self._archive_source: Optional[Path] = None
-
- def __str__(self) -> str:
- if self.req:
- s = redact_auth_from_requirement(self.req)
- if self.link:
- s += f" from {redact_auth_from_url(self.link.url)}"
- elif self.link:
- s = redact_auth_from_url(self.link.url)
- else:
- s = "<InstallRequirement>"
- if self.satisfied_by is not None:
- if self.satisfied_by.location is not None:
- location = display_path(self.satisfied_by.location)
- else:
- location = "<memory>"
- s += f" in {location}"
- if self.comes_from:
- if isinstance(self.comes_from, str):
- comes_from: Optional[str] = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += f" (from {comes_from})"
- return s
-
- def __repr__(self) -> str:
- return "<{} object: {} editable={!r}>".format(
- self.__class__.__name__, str(self), self.editable
- )
-
- def format_debug(self) -> str:
- """An un-tested helper for getting state, for debugging."""
- attributes = vars(self)
- names = sorted(attributes)
-
- state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
- return "<{name} object: {{{state}}}>".format(
- name=self.__class__.__name__,
- state=", ".join(state),
- )
-
- # Things that are valid for all kinds of requirements?
- @property
- def name(self) -> Optional[str]:
- if self.req is None:
- return None
- return self.req.name
-
- @functools.lru_cache() # use cached_property in python 3.8+
- def supports_pyproject_editable(self) -> bool:
- if not self.use_pep517:
- return False
- assert self.pep517_backend
- with self.build_env:
- runner = runner_with_spinner_message(
- "Checking if build backend supports build_editable"
- )
- with self.pep517_backend.subprocess_runner(runner):
- return "build_editable" in self.pep517_backend._supported_features()
-
- @property
- def specifier(self) -> SpecifierSet:
- assert self.req is not None
- return self.req.specifier
-
- @property
- def is_direct(self) -> bool:
- """Whether this requirement was specified as a direct URL."""
- return self.original_link is not None
-
- @property
- def is_pinned(self) -> bool:
- """Return whether I am pinned to an exact version.
-
- For example, some-package==1.2 is pinned; some-package>1.2 is not.
- """
- assert self.req is not None
- specifiers = self.req.specifier
- return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
-
- def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
- if not extras_requested:
- # Provide an extra to safely evaluate the markers
- # without matching any extra
- extras_requested = ("",)
- if self.markers is not None:
- return any(
- self.markers.evaluate({"extra": extra})
- # TODO: Remove these two variants when packaging is upgraded to
- # support the marker comparison logic specified in PEP 685.
- or self.markers.evaluate({"extra": safe_extra(extra)})
- or self.markers.evaluate({"extra": canonicalize_name(extra)})
- for extra in extras_requested
- )
- else:
- return True
-
- @property
- def has_hash_options(self) -> bool:
- """Return whether any known-good hashes are specified as options.
-
- These activate --require-hashes mode; hashes specified as part of a
- URL do not.
-
- """
- return bool(self.hash_options)
-
- def hashes(self, trust_internet: bool = True) -> Hashes:
- """Return a hash-comparer that considers my option- and URL-based
- hashes to be known-good.
-
- Hashes in URLs--ones embedded in the requirements file, not ones
- downloaded from an index server--are almost peers with ones from
- flags. They satisfy --require-hashes (whether it was implicitly or
- explicitly activated) but do not activate it. md5 and sha224 are not
- allowed in flags, which should nudge people toward good algos. We
- always OR all hashes together, even ones from URLs.
-
- :param trust_internet: Whether to trust URL-based (#md5=...) hashes
- downloaded from the internet, as by populate_link()
-
- """
- good_hashes = self.hash_options.copy()
- if trust_internet:
- link = self.link
- elif self.is_direct and self.user_supplied:
- link = self.original_link
- else:
- link = None
- if link and link.hash:
- assert link.hash_name is not None
- good_hashes.setdefault(link.hash_name, []).append(link.hash)
- return Hashes(good_hashes)
-
- def from_path(self) -> Optional[str]:
- """Format a nice indicator to show where this "comes from" """
- if self.req is None:
- return None
- s = str(self.req)
- if self.comes_from:
- comes_from: Optional[str]
- if isinstance(self.comes_from, str):
- comes_from = self.comes_from
- else:
- comes_from = self.comes_from.from_path()
- if comes_from:
- s += "->" + comes_from
- return s
-
- def ensure_build_location(
- self, build_dir: str, autodelete: bool, parallel_builds: bool
- ) -> str:
- assert build_dir is not None
- if self._temp_build_dir is not None:
- assert self._temp_build_dir.path
- return self._temp_build_dir.path
- if self.req is None:
- # Some systems have /tmp as a symlink which confuses custom
- # builds (such as numpy). Thus, we ensure that the real path
- # is returned.
- self._temp_build_dir = TempDirectory(
- kind=tempdir_kinds.REQ_BUILD, globally_managed=True
- )
-
- return self._temp_build_dir.path
-
- # This is the only remaining place where we manually determine the path
- # for the temporary directory. It is only needed for editables where
- # it is the value of the --src option.
-
- # When parallel builds are enabled, add a UUID to the build directory
- # name so multiple builds do not interfere with each other.
- dir_name: str = canonicalize_name(self.req.name)
- if parallel_builds:
- dir_name = f"{dir_name}_{uuid.uuid4().hex}"
-
- # FIXME: Is there a better place to create the build_dir? (hg and bzr
- # need this)
- if not os.path.exists(build_dir):
- logger.debug("Creating directory %s", build_dir)
- os.makedirs(build_dir)
- actual_build_dir = os.path.join(build_dir, dir_name)
- # `None` indicates that we respect the globally-configured deletion
- # settings, which is what we actually want when auto-deleting.
- delete_arg = None if autodelete else False
- return TempDirectory(
- path=actual_build_dir,
- delete=delete_arg,
- kind=tempdir_kinds.REQ_BUILD,
- globally_managed=True,
- ).path
-
- def _set_requirement(self) -> None:
- """Set requirement after generating metadata."""
- assert self.req is None
- assert self.metadata is not None
- assert self.source_dir is not None
-
- # Construct a Requirement object from the generated metadata
- if isinstance(parse_version(self.metadata["Version"]), Version):
- op = "=="
- else:
- op = "==="
-
- self.req = Requirement(
- "".join(
- [
- self.metadata["Name"],
- op,
- self.metadata["Version"],
- ]
- )
- )
-
- def warn_on_mismatching_name(self) -> None:
- assert self.req is not None
- metadata_name = canonicalize_name(self.metadata["Name"])
- if canonicalize_name(self.req.name) == metadata_name:
- # Everything is fine.
- return
-
- # If we're here, there's a mismatch. Log a warning about it.
- logger.warning(
- "Generating metadata for package %s "
- "produced metadata for project name %s. Fix your "
- "#egg=%s fragments.",
- self.name,
- metadata_name,
- self.name,
- )
- self.req = Requirement(metadata_name)
-
- def check_if_exists(self, use_user_site: bool) -> None:
- """Find an installed distribution that satisfies or conflicts
- with this requirement, and set self.satisfied_by or
- self.should_reinstall appropriately.
- """
- if self.req is None:
- return
- existing_dist = get_default_environment().get_distribution(self.req.name)
- if not existing_dist:
- return
-
- version_compatible = self.req.specifier.contains(
- existing_dist.version,
- prereleases=True,
- )
- if not version_compatible:
- self.satisfied_by = None
- if use_user_site:
- if existing_dist.in_usersite:
- self.should_reinstall = True
- elif running_under_virtualenv() and existing_dist.in_site_packages:
- raise InstallationError(
- f"Will not install to the user site because it will "
- f"lack sys.path precedence to {existing_dist.raw_name} "
- f"in {existing_dist.location}"
- )
- else:
- self.should_reinstall = True
- else:
- if self.editable:
- self.should_reinstall = True
- # when installing editables, nothing pre-existing should ever
- # satisfy
- self.satisfied_by = None
- else:
- self.satisfied_by = existing_dist
-
- # Things valid for wheels
- @property
- def is_wheel(self) -> bool:
- if not self.link:
- return False
- return self.link.is_wheel
-
- @property
- def is_wheel_from_cache(self) -> bool:
- # When True, it means that this InstallRequirement is a local wheel file in the
- # cache of locally built wheels.
- return self.cached_wheel_source_link is not None
-
- # Things valid for sdists
- @property
- def unpacked_source_directory(self) -> str:
- assert self.source_dir, f"No source dir for {self}"
- return os.path.join(
- self.source_dir, self.link and self.link.subdirectory_fragment or ""
- )
-
- @property
- def setup_py_path(self) -> str:
- assert self.source_dir, f"No source dir for {self}"
- setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
-
- return setup_py
-
- @property
- def setup_cfg_path(self) -> str:
- assert self.source_dir, f"No source dir for {self}"
- setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
-
- return setup_cfg
-
- @property
- def pyproject_toml_path(self) -> str:
- assert self.source_dir, f"No source dir for {self}"
- return make_pyproject_path(self.unpacked_source_directory)
-
- def load_pyproject_toml(self) -> None:
- """Load the pyproject.toml file.
-
- After calling this routine, all of the attributes related to PEP 517
- processing for this requirement have been set. In particular, the
- use_pep517 attribute can be used to determine whether we should
- follow the PEP 517 or legacy (setup.py) code path.
- """
- pyproject_toml_data = load_pyproject_toml(
- self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
- )
-
- if pyproject_toml_data is None:
- assert not self.config_settings
- self.use_pep517 = False
- return
-
- self.use_pep517 = True
- requires, backend, check, backend_path = pyproject_toml_data
- self.requirements_to_check = check
- self.pyproject_requires = requires
- self.pep517_backend = ConfiguredBuildBackendHookCaller(
- self,
- self.unpacked_source_directory,
- backend,
- backend_path=backend_path,
- )
-
- def isolated_editable_sanity_check(self) -> None:
- """Check that an editable requirement if valid for use with PEP 517/518.
-
- This verifies that an editable that has a pyproject.toml either supports PEP 660
- or as a setup.py or a setup.cfg
- """
- if (
- self.editable
- and self.use_pep517
- and not self.supports_pyproject_editable()
- and not os.path.isfile(self.setup_py_path)
- and not os.path.isfile(self.setup_cfg_path)
- ):
- raise InstallationError(
- f"Project {self} has a 'pyproject.toml' and its build "
- f"backend is missing the 'build_editable' hook. Since it does not "
- f"have a 'setup.py' nor a 'setup.cfg', "
- f"it cannot be installed in editable mode. "
- f"Consider using a build backend that supports PEP 660."
- )
-
- def prepare_metadata(self) -> None:
- """Ensure that project metadata is available.
-
- Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
- Under legacy processing, call setup.py egg-info.
- """
- assert self.source_dir, f"No source dir for {self}"
- details = self.name or f"from {self.link}"
-
- if self.use_pep517:
- assert self.pep517_backend is not None
- if (
- self.editable
- and self.permit_editable_wheels
- and self.supports_pyproject_editable()
- ):
- self.metadata_directory = generate_editable_metadata(
- build_env=self.build_env,
- backend=self.pep517_backend,
- details=details,
- )
- else:
- self.metadata_directory = generate_metadata(
- build_env=self.build_env,
- backend=self.pep517_backend,
- details=details,
- )
- else:
- self.metadata_directory = generate_metadata_legacy(
- build_env=self.build_env,
- setup_py_path=self.setup_py_path,
- source_dir=self.unpacked_source_directory,
- isolated=self.isolated,
- details=details,
- )
-
- # Act on the newly generated metadata, based on the name and version.
- if not self.name:
- self._set_requirement()
- else:
- self.warn_on_mismatching_name()
-
- self.assert_source_matches_version()
-
- @property
- def metadata(self) -> Any:
- if not hasattr(self, "_metadata"):
- self._metadata = self.get_dist().metadata
-
- return self._metadata
-
- def get_dist(self) -> BaseDistribution:
- if self.metadata_directory:
- return get_directory_distribution(self.metadata_directory)
- elif self.local_file_path and self.is_wheel:
- assert self.req is not None
- return get_wheel_distribution(
- FilesystemWheel(self.local_file_path),
- canonicalize_name(self.req.name),
- )
- raise AssertionError(
- f"InstallRequirement {self} has no metadata directory and no wheel: "
- f"can't make a distribution."
- )
-
- def assert_source_matches_version(self) -> None:
- assert self.source_dir, f"No source dir for {self}"
- version = self.metadata["version"]
- if self.req and self.req.specifier and version not in self.req.specifier:
- logger.warning(
- "Requested %s, but installing version %s",
- self,
- version,
- )
- else:
- logger.debug(
- "Source in %s has version %s, which satisfies requirement %s",
- display_path(self.source_dir),
- version,
- self,
- )
-
- # For both source distributions and editables
- def ensure_has_source_dir(
- self,
- parent_dir: str,
- autodelete: bool = False,
- parallel_builds: bool = False,
- ) -> None:
- """Ensure that a source_dir is set.
-
- This will create a temporary build dir if the name of the requirement
- isn't known yet.
-
- :param parent_dir: The ideal pip parent_dir for the source_dir.
- Generally src_dir for editables and build_dir for sdists.
- :return: self.source_dir
- """
- if self.source_dir is None:
- self.source_dir = self.ensure_build_location(
- parent_dir,
- autodelete=autodelete,
- parallel_builds=parallel_builds,
- )
-
- def needs_unpacked_archive(self, archive_source: Path) -> None:
- assert self._archive_source is None
- self._archive_source = archive_source
-
- def ensure_pristine_source_checkout(self) -> None:
- """Ensure the source directory has not yet been built in."""
- assert self.source_dir is not None
- if self._archive_source is not None:
- unpack_file(str(self._archive_source), self.source_dir)
- elif is_installable_dir(self.source_dir):
- # If a checkout exists, it's unwise to keep going.
- # version inconsistencies are logged later, but do not fail
- # the installation.
- raise PreviousBuildDirError(
- f"pip can't proceed with requirements '{self}' due to a "
- f"pre-existing build directory ({self.source_dir}). This is likely "
- "due to a previous installation that failed . pip is "
- "being responsible and not assuming it can delete this. "
- "Please delete it and try again."
- )
-
- # For editable installations
- def update_editable(self) -> None:
- if not self.link:
- logger.debug(
- "Cannot update repository at %s; repository location is unknown",
- self.source_dir,
- )
- return
- assert self.editable
- assert self.source_dir
- if self.link.scheme == "file":
- # Static paths don't get updated
- return
- vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
- # Editable requirements are validated in Requirement constructors.
- # So here, if it's neither a path nor a valid VCS URL, it's a bug.
- assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
- hidden_url = hide_url(self.link.url)
- vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
-
- # Top-level Actions
- def uninstall(
- self, auto_confirm: bool = False, verbose: bool = False
- ) -> Optional[UninstallPathSet]:
- """
- Uninstall the distribution currently satisfying this requirement.
-
- Prompts before removing or modifying files unless
- ``auto_confirm`` is True.
-
- Refuses to delete or modify files outside of ``sys.prefix`` -
- thus uninstallation within a virtual environment can only
- modify that virtual environment, even if the virtualenv is
- linked to global site-packages.
-
- """
- assert self.req
- dist = get_default_environment().get_distribution(self.req.name)
- if not dist:
- logger.warning("Skipping %s as it is not installed.", self.name)
- return None
- logger.info("Found existing installation: %s", dist)
-
- uninstalled_pathset = UninstallPathSet.from_dist(dist)
- uninstalled_pathset.remove(auto_confirm, verbose)
- return uninstalled_pathset
-
- def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
- def _clean_zip_name(name: str, prefix: str) -> str:
- assert name.startswith(
- prefix + os.path.sep
- ), f"name {name!r} doesn't start with prefix {prefix!r}"
- name = name[len(prefix) + 1 :]
- name = name.replace(os.path.sep, "/")
- return name
-
- assert self.req is not None
- path = os.path.join(parentdir, path)
- name = _clean_zip_name(path, rootdir)
- return self.req.name + "/" + name
-
- def archive(self, build_dir: Optional[str]) -> None:
- """Saves archive to provided build_dir.
-
- Used for saving downloaded VCS requirements as part of `pip download`.
- """
- assert self.source_dir
- if build_dir is None:
- return
-
- create_archive = True
- archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
- archive_path = os.path.join(build_dir, archive_name)
-
- if os.path.exists(archive_path):
- response = ask_path_exists(
- f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
- "(b)ackup, (a)bort ",
- ("i", "w", "b", "a"),
- )
- if response == "i":
- create_archive = False
- elif response == "w":
- logger.warning("Deleting %s", display_path(archive_path))
- os.remove(archive_path)
- elif response == "b":
- dest_file = backup_dir(archive_path)
- logger.warning(
- "Backing up %s to %s",
- display_path(archive_path),
- display_path(dest_file),
- )
- shutil.move(archive_path, dest_file)
- elif response == "a":
- sys.exit(-1)
-
- if not create_archive:
- return
-
- zip_output = zipfile.ZipFile(
- archive_path,
- "w",
- zipfile.ZIP_DEFLATED,
- allowZip64=True,
- )
- with zip_output:
- dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
- for dirpath, dirnames, filenames in os.walk(dir):
- for dirname in dirnames:
- dir_arcname = self._get_archive_name(
- dirname,
- parentdir=dirpath,
- rootdir=dir,
- )
- zipdir = zipfile.ZipInfo(dir_arcname + "/")
- zipdir.external_attr = 0x1ED << 16 # 0o755
- zip_output.writestr(zipdir, "")
- for filename in filenames:
- file_arcname = self._get_archive_name(
- filename,
- parentdir=dirpath,
- rootdir=dir,
- )
- filename = os.path.join(dirpath, filename)
- zip_output.write(filename, file_arcname)
-
- logger.info("Saved %s", display_path(archive_path))
-
- def install(
- self,
- global_options: Optional[Sequence[str]] = None,
- root: Optional[str] = None,
- home: Optional[str] = None,
- prefix: Optional[str] = None,
- warn_script_location: bool = True,
- use_user_site: bool = False,
- pycompile: bool = True,
- ) -> None:
- assert self.req is not None
- scheme = get_scheme(
- self.req.name,
- user=use_user_site,
- home=home,
- root=root,
- isolated=self.isolated,
- prefix=prefix,
- )
-
- if self.editable and not self.is_wheel:
- if self.config_settings:
- logger.warning(
- "--config-settings ignored for legacy editable install of %s. "
- "Consider upgrading to a version of setuptools "
- "that supports PEP 660 (>= 64).",
- self,
- )
- install_editable_legacy(
- global_options=global_options if global_options is not None else [],
- prefix=prefix,
- home=home,
- use_user_site=use_user_site,
- name=self.req.name,
- setup_py_path=self.setup_py_path,
- isolated=self.isolated,
- build_env=self.build_env,
- unpacked_source_directory=self.unpacked_source_directory,
- )
- self.install_succeeded = True
- return
-
- assert self.is_wheel
- assert self.local_file_path
-
- install_wheel(
- self.req.name,
- self.local_file_path,
- scheme=scheme,
- req_description=str(self.req),
- pycompile=pycompile,
- warn_script_location=warn_script_location,
- direct_url=self.download_info if self.is_direct else None,
- requested=self.user_supplied,
- )
- self.install_succeeded = True
-
-
-def check_invalid_constraint_type(req: InstallRequirement) -> str:
- # Check for unsupported forms
- problem = ""
- if not req.name:
- problem = "Unnamed requirements are not allowed as constraints"
- elif req.editable:
- problem = "Editable requirements are not allowed as constraints"
- elif req.extras:
- problem = "Constraints cannot have extras"
-
- if problem:
- deprecated(
- reason=(
- "Constraints are only allowed to take the form of a package "
- "name and a version specifier. Other forms were originally "
- "permitted as an accident of the implementation, but were "
- "undocumented. The new implementation of the resolver no "
- "longer supports these forms."
- ),
- replacement="replacing the constraint with a requirement",
- # No plan yet for when the new resolver becomes default
- gone_in=None,
- issue=8210,
- )
-
- return problem
-
-
-def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
- if getattr(options, option, None):
- return True
- for req in reqs:
- if getattr(req, option, None):
- return True
- return False
-
-
-def check_legacy_setup_py_options(
- options: Values,
- reqs: List[InstallRequirement],
-) -> None:
- has_build_options = _has_option(options, reqs, "build_options")
- has_global_options = _has_option(options, reqs, "global_options")
- if has_build_options or has_global_options:
- deprecated(
- reason="--build-option and --global-option are deprecated.",
- issue=11859,
- replacement="to use --config-settings",
- gone_in="24.2",
- )
- logger.warning(
- "Implying --no-binary=:all: due to the presence of "
- "--build-option / --global-option. "
- )
- options.format_control.disallow_binaries()
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/req_set.py b/venv/lib/python3.11/site-packages/pip/_internal/req/req_set.py
deleted file mode 100644
index bf36114..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/req_set.py
+++ /dev/null
@@ -1,119 +0,0 @@
-import logging
-from collections import OrderedDict
-from typing import Dict, List
-
-from pip._vendor.packaging.specifiers import LegacySpecifier
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.packaging.version import LegacyVersion
-
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.deprecation import deprecated
-
-logger = logging.getLogger(__name__)
-
-
-class RequirementSet:
- def __init__(self, check_supported_wheels: bool = True) -> None:
- """Create a RequirementSet."""
-
- self.requirements: Dict[str, InstallRequirement] = OrderedDict()
- self.check_supported_wheels = check_supported_wheels
-
- self.unnamed_requirements: List[InstallRequirement] = []
-
- def __str__(self) -> str:
- requirements = sorted(
- (req for req in self.requirements.values() if not req.comes_from),
- key=lambda req: canonicalize_name(req.name or ""),
- )
- return " ".join(str(req.req) for req in requirements)
-
- def __repr__(self) -> str:
- requirements = sorted(
- self.requirements.values(),
- key=lambda req: canonicalize_name(req.name or ""),
- )
-
- format_string = "<{classname} object; {count} requirement(s): {reqs}>"
- return format_string.format(
- classname=self.__class__.__name__,
- count=len(requirements),
- reqs=", ".join(str(req.req) for req in requirements),
- )
-
- def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
- assert not install_req.name
- self.unnamed_requirements.append(install_req)
-
- def add_named_requirement(self, install_req: InstallRequirement) -> None:
- assert install_req.name
-
- project_name = canonicalize_name(install_req.name)
- self.requirements[project_name] = install_req
-
- def has_requirement(self, name: str) -> bool:
- project_name = canonicalize_name(name)
-
- return (
- project_name in self.requirements
- and not self.requirements[project_name].constraint
- )
-
- def get_requirement(self, name: str) -> InstallRequirement:
- project_name = canonicalize_name(name)
-
- if project_name in self.requirements:
- return self.requirements[project_name]
-
- raise KeyError(f"No project with the name {name!r}")
-
- @property
- def all_requirements(self) -> List[InstallRequirement]:
- return self.unnamed_requirements + list(self.requirements.values())
-
- @property
- def requirements_to_install(self) -> List[InstallRequirement]:
- """Return the list of requirements that need to be installed.
-
- TODO remove this property together with the legacy resolver, since the new
- resolver only returns requirements that need to be installed.
- """
- return [
- install_req
- for install_req in self.all_requirements
- if not install_req.constraint and not install_req.satisfied_by
- ]
-
- def warn_legacy_versions_and_specifiers(self) -> None:
- for req in self.requirements_to_install:
- version = req.get_dist().version
- if isinstance(version, LegacyVersion):
- deprecated(
- reason=(
- f"pip has selected the non standard version {version} "
- f"of {req}. In the future this version will be "
- f"ignored as it isn't standard compliant."
- ),
- replacement=(
- "set or update constraints to select another version "
- "or contact the package author to fix the version number"
- ),
- issue=12063,
- gone_in="24.1",
- )
- for dep in req.get_dist().iter_dependencies():
- if any(isinstance(spec, LegacySpecifier) for spec in dep.specifier):
- deprecated(
- reason=(
- f"pip has selected {req} {version} which has non "
- f"standard dependency specifier {dep}. "
- f"In the future this version of {req} will be "
- f"ignored as it isn't standard compliant."
- ),
- replacement=(
- "set or update constraints to select another version "
- "or contact the package author to fix the version number"
- ),
- issue=12063,
- gone_in="24.1",
- )
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/req/req_uninstall.py b/venv/lib/python3.11/site-packages/pip/_internal/req/req_uninstall.py
deleted file mode 100644
index 707fde1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/req/req_uninstall.py
+++ /dev/null
@@ -1,649 +0,0 @@
-import functools
-import os
-import sys
-import sysconfig
-from importlib.util import cache_from_source
-from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, Set, Tuple
-
-from pip._internal.exceptions import UninstallationError
-from pip._internal.locations import get_bin_prefix, get_bin_user
-from pip._internal.metadata import BaseDistribution
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.egg_link import egg_link_path_from_location
-from pip._internal.utils.logging import getLogger, indent_log
-from pip._internal.utils.misc import ask, normalize_path, renames, rmtree
-from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-logger = getLogger(__name__)
-
-
-def _script_names(
- bin_dir: str, script_name: str, is_gui: bool
-) -> Generator[str, None, None]:
- """Create the fully qualified name of the files created by
- {console,gui}_scripts for the given ``dist``.
- Returns the list of file names
- """
- exe_name = os.path.join(bin_dir, script_name)
- yield exe_name
- if not WINDOWS:
- return
- yield f"{exe_name}.exe"
- yield f"{exe_name}.exe.manifest"
- if is_gui:
- yield f"{exe_name}-script.pyw"
- else:
- yield f"{exe_name}-script.py"
-
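A sketch of this private helper (underscore-prefixed, so internal); on POSIX only the bare script path is yielded, while Windows additionally yields .exe, .exe.manifest and a -script.py/-script.pyw variant:

    from pip._internal.req.req_uninstall import _script_names

    print(list(_script_names("/venv/bin", "pip", is_gui=False)))
    # ['/venv/bin/pip']  (on a non-Windows platform)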
-
-def _unique(
- fn: Callable[..., Generator[Any, None, None]]
-) -> Callable[..., Generator[Any, None, None]]:
- @functools.wraps(fn)
- def unique(*args: Any, **kw: Any) -> Generator[Any, None, None]:
- seen: Set[Any] = set()
- for item in fn(*args, **kw):
- if item not in seen:
- seen.add(item)
- yield item
-
- return unique
-
-
-@_unique
-def uninstallation_paths(dist: BaseDistribution) -> Generator[str, None, None]:
- """
- Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
-
- Yield paths to all the files in RECORD. For each .py file in RECORD, add
- the .pyc and .pyo in the same directory.
-
- UninstallPathSet.add() takes care of the __pycache__ .py[co].
-
- If RECORD is not found, raises UninstallationError,
- with possible information from the INSTALLER file.
-
- https://packaging.python.org/specifications/recording-installed-packages/
- """
- location = dist.location
- assert location is not None, "not installed"
-
- entries = dist.iter_declared_entries()
- if entries is None:
- msg = f"Cannot uninstall {dist}, RECORD file not found."
- installer = dist.installer
- if not installer or installer == "pip":
- dep = f"{dist.raw_name}=={dist.version}"
- msg += (
- " You might be able to recover from this via: "
- f"'pip install --force-reinstall --no-deps {dep}'."
- )
- else:
- msg += f" Hint: The package was installed by {installer}."
- raise UninstallationError(msg)
-
- for entry in entries:
- path = os.path.join(location, entry)
- yield path
- if path.endswith(".py"):
- dn, fn = os.path.split(path)
- base = fn[:-3]
- path = os.path.join(dn, base + ".pyc")
- yield path
- path = os.path.join(dn, base + ".pyo")
- yield path
-
-
-def compact(paths: Iterable[str]) -> Set[str]:
- """Compact a path set to contain the minimal number of paths
- necessary to contain all paths in the set. If /a/path/ and
- /a/path/to/a/file.txt are both in the set, leave only the
- shorter path."""
-
- sep = os.path.sep
- short_paths: Set[str] = set()
- for path in sorted(paths, key=len):
- should_skip = any(
- path.startswith(shortpath.rstrip("*"))
- and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
- for shortpath in short_paths
- )
- if not should_skip:
- short_paths.add(path)
- return short_paths
-
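A quick sketch on POSIX paths: nested paths collapse onto their shortest covering directory, while unrelated paths are kept:

    from pip._internal.req.req_uninstall import compact

    print(compact({"/a/path/", "/a/path/to/a/file.txt", "/b/file.py"}))
    # {'/a/path/', '/b/file.py'}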
-
-def compress_for_rename(paths: Iterable[str]) -> Set[str]:
- """Returns a set containing the paths that need to be renamed.
-
- This set may include directories when the original sequence of paths
- included every file on disk.
- """
- case_map = {os.path.normcase(p): p for p in paths}
- remaining = set(case_map)
- unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
- wildcards: Set[str] = set()
-
- def norm_join(*a: str) -> str:
- return os.path.normcase(os.path.join(*a))
-
- for root in unchecked:
- if any(os.path.normcase(root).startswith(w) for w in wildcards):
- # This directory has already been handled.
- continue
-
- all_files: Set[str] = set()
- all_subdirs: Set[str] = set()
- for dirname, subdirs, files in os.walk(root):
- all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
- all_files.update(norm_join(root, dirname, f) for f in files)
- # If all the files we found are in our remaining set of files to
- # remove, then remove them from the latter set and add a wildcard
- # for the directory.
- if not (all_files - remaining):
- remaining.difference_update(all_files)
- wildcards.add(root + os.sep)
-
- return set(map(case_map.__getitem__, remaining)) | wildcards
-
-
-def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
- """Returns a tuple of 2 sets of which paths to display to user
-
- The first set contains paths that would be deleted. Files of a package
- are not added and the top-level directory of the package has a '*' added
- at the end - to signify that all its contents are removed.
-
- The second set contains files that would have been skipped in the above
- folders.
- """
-
- will_remove = set(paths)
- will_skip = set()
-
- # Determine folders and files
- folders = set()
- files = set()
- for path in will_remove:
- if path.endswith(".pyc"):
- continue
- if path.endswith("__init__.py") or ".dist-info" in path:
- folders.add(os.path.dirname(path))
- files.add(path)
-
- _normcased_files = set(map(os.path.normcase, files))
-
- folders = compact(folders)
-
- # This walks the tree using os.walk to not miss extra folders
- # that might get added.
- for folder in folders:
- for dirpath, _, dirfiles in os.walk(folder):
- for fname in dirfiles:
- if fname.endswith(".pyc"):
- continue
-
- file_ = os.path.join(dirpath, fname)
- if (
- os.path.isfile(file_)
- and os.path.normcase(file_) not in _normcased_files
- ):
- # We are skipping this file. Add it to the set.
- will_skip.add(file_)
-
- will_remove = files | {os.path.join(folder, "*") for folder in folders}
-
- return will_remove, will_skip
-
-
-class StashedUninstallPathSet:
- """A set of file rename operations to stash files while
- tentatively uninstalling them."""
-
- def __init__(self) -> None:
- # Mapping from source file root to [Adjacent]TempDirectory
- # for files under that directory.
- self._save_dirs: Dict[str, TempDirectory] = {}
- # (old path, new path) tuples for each move that may need
- # to be undone.
- self._moves: List[Tuple[str, str]] = []
-
- def _get_directory_stash(self, path: str) -> str:
- """Stashes a directory.
-
- Directories are stashed adjacent to their original location if
- possible, or else moved/copied into the user's temp dir."""
-
- try:
- save_dir: TempDirectory = AdjacentTempDirectory(path)
- except OSError:
- save_dir = TempDirectory(kind="uninstall")
- self._save_dirs[os.path.normcase(path)] = save_dir
-
- return save_dir.path
-
- def _get_file_stash(self, path: str) -> str:
- """Stashes a file.
-
- If no root has been provided, one will be created for the directory
- in the user's temp directory."""
- path = os.path.normcase(path)
- head, old_head = os.path.dirname(path), None
- save_dir = None
-
- while head != old_head:
- try:
- save_dir = self._save_dirs[head]
- break
- except KeyError:
- pass
- head, old_head = os.path.dirname(head), head
- else:
- # Did not find any suitable root
- head = os.path.dirname(path)
- save_dir = TempDirectory(kind="uninstall")
- self._save_dirs[head] = save_dir
-
- relpath = os.path.relpath(path, head)
- if relpath and relpath != os.path.curdir:
- return os.path.join(save_dir.path, relpath)
- return save_dir.path
-
- def stash(self, path: str) -> str:
- """Stashes the directory or file and returns its new location.
- Handle symlinks as files to avoid modifying the symlink targets.
- """
- path_is_dir = os.path.isdir(path) and not os.path.islink(path)
- if path_is_dir:
- new_path = self._get_directory_stash(path)
- else:
- new_path = self._get_file_stash(path)
-
- self._moves.append((path, new_path))
- if path_is_dir and os.path.isdir(new_path):
- # If we're moving a directory, we need to
- # remove the destination first or else it will be
- # moved to inside the existing directory.
- # We just created new_path ourselves, so it will
- # be removable.
- os.rmdir(new_path)
- renames(path, new_path)
- return new_path
-
- def commit(self) -> None:
- """Commits the uninstall by removing stashed files."""
- for save_dir in self._save_dirs.values():
- save_dir.cleanup()
- self._moves = []
- self._save_dirs = {}
-
- def rollback(self) -> None:
- """Undoes the uninstall by moving stashed files back."""
- for p in self._moves:
- logger.info("Moving to %s\n from %s", *p)
-
- for new_path, path in self._moves:
- try:
- logger.debug("Replacing %s from %s", new_path, path)
- if os.path.isfile(new_path) or os.path.islink(new_path):
- os.unlink(new_path)
- elif os.path.isdir(new_path):
- rmtree(new_path)
- renames(path, new_path)
- except OSError as ex:
- logger.error("Failed to restore %s", new_path)
- logger.debug("Exception: %s", ex)
-
- self.commit()
-
- @property
- def can_rollback(self) -> bool:
- return bool(self._moves)
-
-
-class UninstallPathSet:
- """A set of file paths to be removed in the uninstallation of a
- requirement."""
-
- def __init__(self, dist: BaseDistribution) -> None:
- self._paths: Set[str] = set()
- self._refuse: Set[str] = set()
- self._pth: Dict[str, UninstallPthEntries] = {}
- self._dist = dist
- self._moved_paths = StashedUninstallPathSet()
- # Create local cache of normalize_path results. Creating an UninstallPathSet
- # can result in hundreds/thousands of redundant calls to normalize_path with
- # the same args, which hurts performance.
- self._normalize_path_cached = functools.lru_cache()(normalize_path)
-
- def _permitted(self, path: str) -> bool:
- """
- Return True if the given path is one we are permitted to
- remove/modify, False otherwise.
-
- """
- # aka is_local, but caching normalized sys.prefix
- if not running_under_virtualenv():
- return True
- return path.startswith(self._normalize_path_cached(sys.prefix))
-
- def add(self, path: str) -> None:
- head, tail = os.path.split(path)
-
- # we normalize the head to resolve parent directory symlinks, but not
- # the tail, since we only want to uninstall symlinks, not their targets
- path = os.path.join(self._normalize_path_cached(head), os.path.normcase(tail))
-
- if not os.path.exists(path):
- return
- if self._permitted(path):
- self._paths.add(path)
- else:
- self._refuse.add(path)
-
- # __pycache__ files can show up after 'installed-files.txt' is created,
- # due to imports
- if os.path.splitext(path)[1] == ".py":
- self.add(cache_from_source(path))
-
- def add_pth(self, pth_file: str, entry: str) -> None:
- pth_file = self._normalize_path_cached(pth_file)
- if self._permitted(pth_file):
- if pth_file not in self._pth:
- self._pth[pth_file] = UninstallPthEntries(pth_file)
- self._pth[pth_file].add(entry)
- else:
- self._refuse.add(pth_file)
-
- def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
- """Remove paths in ``self._paths`` with confirmation (unless
- ``auto_confirm`` is True)."""
-
- if not self._paths:
- logger.info(
- "Can't uninstall '%s'. No files were found to uninstall.",
- self._dist.raw_name,
- )
- return
-
- dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
- logger.info("Uninstalling %s:", dist_name_version)
-
- with indent_log():
- if auto_confirm or self._allowed_to_proceed(verbose):
- moved = self._moved_paths
-
- for_rename = compress_for_rename(self._paths)
-
- for path in sorted(compact(for_rename)):
- moved.stash(path)
- logger.verbose("Removing file or directory %s", path)
-
- for pth in self._pth.values():
- pth.remove()
-
- logger.info("Successfully uninstalled %s", dist_name_version)
-
- def _allowed_to_proceed(self, verbose: bool) -> bool:
- """Display which files would be deleted and prompt for confirmation"""
-
- def _display(msg: str, paths: Iterable[str]) -> None:
- if not paths:
- return
-
- logger.info(msg)
- with indent_log():
- for path in sorted(compact(paths)):
- logger.info(path)
-
- if not verbose:
- will_remove, will_skip = compress_for_output_listing(self._paths)
- else:
- # In verbose mode, display all the files that are going to be
- # deleted.
- will_remove = set(self._paths)
- will_skip = set()
-
- _display("Would remove:", will_remove)
- _display("Would not remove (might be manually added):", will_skip)
- _display("Would not remove (outside of prefix):", self._refuse)
- if verbose:
- _display("Will actually move:", compress_for_rename(self._paths))
-
- return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
-
- def rollback(self) -> None:
- """Rollback the changes previously made by remove()."""
- if not self._moved_paths.can_rollback:
- logger.error(
- "Can't roll back %s; was not uninstalled",
- self._dist.raw_name,
- )
- return
- logger.info("Rolling back uninstall of %s", self._dist.raw_name)
- self._moved_paths.rollback()
- for pth in self._pth.values():
- pth.rollback()
-
- def commit(self) -> None:
- """Remove temporary save dir: rollback will no longer be possible."""
- self._moved_paths.commit()
-
- @classmethod
- def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
- dist_location = dist.location
- info_location = dist.info_location
- if dist_location is None:
- logger.info(
- "Not uninstalling %s since it is not installed",
- dist.canonical_name,
- )
- return cls(dist)
-
- normalized_dist_location = normalize_path(dist_location)
- if not dist.local:
- logger.info(
- "Not uninstalling %s at %s, outside environment %s",
- dist.canonical_name,
- normalized_dist_location,
- sys.prefix,
- )
- return cls(dist)
-
- if normalized_dist_location in {
- p
- for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
- if p
- }:
- logger.info(
- "Not uninstalling %s at %s, as it is in the standard library.",
- dist.canonical_name,
- normalized_dist_location,
- )
- return cls(dist)
-
- paths_to_remove = cls(dist)
- develop_egg_link = egg_link_path_from_location(dist.raw_name)
-
- # Distribution is installed with metadata in a "flat" .egg-info
- # directory. This means it is not a modern .dist-info installation, an
- # egg, or legacy editable.
- setuptools_flat_installation = (
- dist.installed_with_setuptools_egg_info
- and info_location is not None
- and os.path.exists(info_location)
- # If dist is editable and the location points to a ``.egg-info``,
- # we are in fact in the legacy editable case.
- and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
- )
-
- # The order of these uninstall cases matters: given two installs of the
- # same package, pip needs to uninstall the currently detected version.
- if setuptools_flat_installation:
- if info_location is not None:
- paths_to_remove.add(info_location)
- installed_files = dist.iter_declared_entries()
- if installed_files is not None:
- for installed_file in installed_files:
- paths_to_remove.add(os.path.join(dist_location, installed_file))
- # FIXME: need a test for this elif block
- # occurs with --single-version-externally-managed/--record outside
- # of pip
- elif dist.is_file("top_level.txt"):
- try:
- namespace_packages = dist.read_text("namespace_packages.txt")
- except FileNotFoundError:
- namespaces = []
- else:
- namespaces = namespace_packages.splitlines(keepends=False)
- for top_level_pkg in [
- p
- for p in dist.read_text("top_level.txt").splitlines()
- if p and p not in namespaces
- ]:
- path = os.path.join(dist_location, top_level_pkg)
- paths_to_remove.add(path)
- paths_to_remove.add(f"{path}.py")
- paths_to_remove.add(f"{path}.pyc")
- paths_to_remove.add(f"{path}.pyo")
-
- elif dist.installed_by_distutils:
- raise UninstallationError(
- "Cannot uninstall {!r}. It is a distutils installed project "
- "and thus we cannot accurately determine which files belong "
- "to it which would lead to only a partial uninstall.".format(
- dist.raw_name,
- )
- )
-
- elif dist.installed_as_egg:
- # package installed by easy_install
- # We cannot match on dist.egg_name because it can slightly vary
- # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
- paths_to_remove.add(dist_location)
- easy_install_egg = os.path.split(dist_location)[1]
- easy_install_pth = os.path.join(
- os.path.dirname(dist_location),
- "easy-install.pth",
- )
- paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
-
- elif dist.installed_with_dist_info:
- for path in uninstallation_paths(dist):
- paths_to_remove.add(path)
-
- elif develop_egg_link:
- # PEP 660 modern editable is handled in the ``.dist-info`` case
- # above, so this only covers the setuptools-style editable.
- with open(develop_egg_link) as fh:
- link_pointer = os.path.normcase(fh.readline().strip())
- normalized_link_pointer = paths_to_remove._normalize_path_cached(
- link_pointer
- )
- assert os.path.samefile(
- normalized_link_pointer, normalized_dist_location
- ), (
- f"Egg-link {develop_egg_link} (to {link_pointer}) does not match "
- f"installed location of {dist.raw_name} (at {dist_location})"
- )
- paths_to_remove.add(develop_egg_link)
- easy_install_pth = os.path.join(
- os.path.dirname(develop_egg_link), "easy-install.pth"
- )
- paths_to_remove.add_pth(easy_install_pth, dist_location)
-
- else:
- logger.debug(
- "Not sure how to uninstall: %s - Check: %s",
- dist,
- dist_location,
- )
-
- if dist.in_usersite:
- bin_dir = get_bin_user()
- else:
- bin_dir = get_bin_prefix()
-
- # find distutils scripts= scripts
- try:
- for script in dist.iter_distutils_script_names():
- paths_to_remove.add(os.path.join(bin_dir, script))
- if WINDOWS:
- paths_to_remove.add(os.path.join(bin_dir, f"{script}.bat"))
- except (FileNotFoundError, NotADirectoryError):
- pass
-
- # find console_scripts and gui_scripts
- def iter_scripts_to_remove(
- dist: BaseDistribution,
- bin_dir: str,
- ) -> Generator[str, None, None]:
- for entry_point in dist.iter_entry_points():
- if entry_point.group == "console_scripts":
- yield from _script_names(bin_dir, entry_point.name, False)
- elif entry_point.group == "gui_scripts":
- yield from _script_names(bin_dir, entry_point.name, True)
-
- for s in iter_scripts_to_remove(dist, bin_dir):
- paths_to_remove.add(s)
-
- return paths_to_remove
-
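-def _example_uninstall_flow(dist):
-    # Editor's sketch, not part of the original module: how the path set
-    # built by ``UninstallPathSet.from_dist`` above is typically consumed.
-    # ``dist`` is assumed to be an installed ``BaseDistribution``.
-    pathset = UninstallPathSet.from_dist(dist)
-    try:
-        pathset.remove(auto_confirm=True)  # stash the files, then delete them
-        pathset.commit()  # on success, drop the stashed copies for good
-    except OSError:
-        pathset.rollback()  # restore every removed file from the stash
-        raise
-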
-
-class UninstallPthEntries:
- def __init__(self, pth_file: str) -> None:
- self.file = pth_file
- self.entries: Set[str] = set()
- self._saved_lines: Optional[List[bytes]] = None
-
- def add(self, entry: str) -> None:
- entry = os.path.normcase(entry)
- # On Windows, os.path.normcase converts the entry to use
- # backslashes. This is correct for entries that describe absolute
- # paths outside of site-packages, but all the others use forward
- # slashes.
- # os.path.splitdrive is used instead of os.path.isabs because isabs
- # treats non-absolute paths with drive letter markings like c:foo\bar
- # as absolute paths. It also does not recognize UNC paths if they don't
- # have more than "\\server\share". Valid examples: "\\server\share\" or
- # "\\server\share\folder".
- if WINDOWS and not os.path.splitdrive(entry)[0]:
- entry = entry.replace("\\", "/")
- self.entries.add(entry)
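- # Editor's illustration (not original code): on Windows an absolute entry
- # like C:\Python311\Lib\site-packages\pkg keeps its backslashes because it
- # has a drive prefix, while a relative .\pkg.egg entry written by
- # easy-install is stored back as ./pkg.egg.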
-
- def remove(self) -> None:
- logger.verbose("Removing pth entries from %s:", self.file)
-
- # If the file doesn't exist, log a warning and return
- if not os.path.isfile(self.file):
- logger.warning("Cannot remove entries from nonexistent file %s", self.file)
- return
- with open(self.file, "rb") as fh:
- # Detect the line-ending convention the file already uses, so the
- # rewritten lines match it.
- lines = fh.readlines()
- self._saved_lines = lines
- if any(b"\r\n" in line for line in lines):
- endline = "\r\n"
- else:
- endline = "\n"
- # handle missing trailing newline
- if lines and not lines[-1].endswith(endline.encode("utf-8")):
- lines[-1] = lines[-1] + endline.encode("utf-8")
- for entry in self.entries:
- try:
- logger.verbose("Removing entry: %s", entry)
- lines.remove((entry + endline).encode("utf-8"))
- except ValueError:
- pass
- with open(self.file, "wb") as fh:
- fh.writelines(lines)
-
- def rollback(self) -> bool:
- if self._saved_lines is None:
- logger.error("Cannot roll back changes to %s, none were made", self.file)
- return False
- logger.debug("Rolling %s back to previous state", self.file)
- with open(self.file, "wb") as fh:
- fh.writelines(self._saved_lines)
- return True
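-
-# Editor's sketch (not part of the original module) of the UninstallPthEntries
-# lifecycle used by the uninstall machinery above:
-#
-#     pth = UninstallPthEntries(pth_file)      # pth_file is hypothetical
-#     pth.add("./example_pkg-1.0-py3.11.egg")
-#     pth.remove()    # rewrite the file without the entry, saving a backup
-#     pth.rollback()  # restore the saved lines verbatim if needed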
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 692bdf9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 9d8bba9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py
deleted file mode 100644
index 42dade1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/base.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from typing import Callable, List, Optional
-
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_set import RequirementSet
-
-InstallRequirementProvider = Callable[
- [str, Optional[InstallRequirement]], InstallRequirement
-]
-
-
-class BaseResolver:
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- raise NotImplementedError()
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- raise NotImplementedError()
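-
-# Editor's note (not part of the original module): both of pip's resolvers
-# implement this interface, along the lines of
-#
-#     class Resolver(BaseResolver):
-#         def resolve(self, root_reqs, check_supported_wheels):
-#             ...  # build and return a RequirementSet
-#         def get_installation_order(self, req_set):
-#             ...  # topologically sort req_set.requirements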
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 124d601..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc
deleted file mode 100644
index 8e0796b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py
deleted file mode 100644
index 5ddb848..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/legacy/resolver.py
+++ /dev/null
@@ -1,598 +0,0 @@
-"""Dependency Resolution
-
-The dependency resolution in pip is performed as follows:
-
-for top-level requirements:
- a. only one spec is allowed per project, whether or not the specs
- conflict; otherwise a "double requirement" exception is raised
- b. they override sub-dependency requirements.
-for sub-dependencies
- a. "first found, wins" (where the order is breadth first)
-"""
-
-# The following comment should be removed at some point in the future.
-# mypy: strict-optional=False
-
-import logging
-import sys
-from collections import defaultdict
-from itertools import chain
-from typing import DefaultDict, Iterable, List, Optional, Set, Tuple
-
-from pip._vendor.packaging import specifiers
-from pip._vendor.packaging.requirements import Requirement
-
-from pip._internal.cache import WheelCache
-from pip._internal.exceptions import (
- BestVersionAlreadyInstalled,
- DistributionNotFound,
- HashError,
- HashErrors,
- InstallationError,
- NoneMetadataError,
- UnsupportedPythonVersion,
-)
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_invalid_constraint_type,
-)
-from pip._internal.req.req_set import RequirementSet
-from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
-from pip._internal.utils import compatibility_tags
-from pip._internal.utils.compatibility_tags import get_supported
-from pip._internal.utils.direct_url_helpers import direct_url_from_link
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import normalize_version_info
-from pip._internal.utils.packaging import check_requires_python
-
-logger = logging.getLogger(__name__)
-
-DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]
-
-
-def _check_dist_requires_python(
- dist: BaseDistribution,
- version_info: Tuple[int, int, int],
- ignore_requires_python: bool = False,
-) -> None:
- """
- Check whether the given Python version is compatible with a distribution's
- "Requires-Python" value.
-
- :param version_info: A 3-tuple of ints representing the Python
- major-minor-micro version to check.
- :param ignore_requires_python: Whether to ignore the "Requires-Python"
- value if the given Python version isn't compatible.
-
- :raises UnsupportedPythonVersion: When the given Python version isn't
- compatible.
- """
- # This idiosyncratically converts the SpecifierSet to str and lets
- # check_requires_python parse it back into a SpecifierSet. But this
- # is the legacy resolver so I'm just not going to bother refactoring.
- try:
- requires_python = str(dist.requires_python)
- except FileNotFoundError as e:
- raise NoneMetadataError(dist, str(e))
- try:
- is_compatible = check_requires_python(
- requires_python,
- version_info=version_info,
- )
- except specifiers.InvalidSpecifier as exc:
- logger.warning(
- "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
- )
- return
-
- if is_compatible:
- return
-
- version = ".".join(map(str, version_info))
- if ignore_requires_python:
- logger.debug(
- "Ignoring failed Requires-Python check for package %r: %s not in %r",
- dist.raw_name,
- version,
- requires_python,
- )
- return
-
- raise UnsupportedPythonVersion(
- "Package {!r} requires a different Python: {} not in {!r}".format(
- dist.raw_name, version, requires_python
- )
- )
-
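-# Editor's sketch (not in the original module) of the check above, assuming a
-# hypothetical dist whose metadata declares ``Requires-Python: >=3.8``:
-#
-#     _check_dist_requires_python(dist, version_info=(3, 11, 4))
-#     # -> returns silently; 3.11.4 satisfies >=3.8
-#     _check_dist_requires_python(dist, version_info=(3, 7, 0))
-#     # -> raises UnsupportedPythonVersion("... 3.7.0 not in '>=3.8'")
-#     _check_dist_requires_python(
-#         dist, version_info=(3, 7, 0), ignore_requires_python=True
-#     )
-#     # -> only logs a debug message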
-
-class Resolver(BaseResolver):
- """Resolves which packages need to be installed/uninstalled to perform \
- the requested operation without breaking the requirements of any package.
- """
-
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(
- self,
- preparer: RequirementPreparer,
- finder: PackageFinder,
- wheel_cache: Optional[WheelCache],
- make_install_req: InstallRequirementProvider,
- use_user_site: bool,
- ignore_dependencies: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- force_reinstall: bool,
- upgrade_strategy: str,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ) -> None:
- super().__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- if py_version_info is None:
- py_version_info = sys.version_info[:3]
- else:
- py_version_info = normalize_version_info(py_version_info)
-
- self._py_version_info = py_version_info
-
- self.preparer = preparer
- self.finder = finder
- self.wheel_cache = wheel_cache
-
- self.upgrade_strategy = upgrade_strategy
- self.force_reinstall = force_reinstall
- self.ignore_dependencies = ignore_dependencies
- self.ignore_installed = ignore_installed
- self.ignore_requires_python = ignore_requires_python
- self.use_user_site = use_user_site
- self._make_install_req = make_install_req
-
- self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)
-
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- """Resolve what operations need to be done
-
- As a side-effect of this method, the packages (and their dependencies)
- are downloaded, unpacked and prepared for installation. This
- preparation is done by ``pip.operations.prepare``.
-
- Once PyPI has static dependency metadata available, it would be
- possible to move the preparation to a step separate from
- dependency resolution.
- """
- requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
- for req in root_reqs:
- if req.constraint:
- check_invalid_constraint_type(req)
- self._add_requirement_to_set(requirement_set, req)
-
- # Actually prepare the files, and collect any exceptions. Most hash
- # exceptions cannot be checked ahead of time, because
- # _populate_link() needs to be called before we can make decisions
- # based on link type.
- discovered_reqs: List[InstallRequirement] = []
- hash_errors = HashErrors()
- for req in chain(requirement_set.all_requirements, discovered_reqs):
- try:
- discovered_reqs.extend(self._resolve_one(requirement_set, req))
- except HashError as exc:
- exc.req = req
- hash_errors.append(exc)
-
- if hash_errors:
- raise hash_errors
-
- return requirement_set
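-
- # Editor's note on ``resolve`` above (not original code): iterating
- # ``chain(requirement_set.all_requirements, discovered_reqs)`` while
- # ``_resolve_one`` keeps appending to ``discovered_reqs`` is what makes the
- # traversal breadth-first; newly discovered sub-dependencies are visited
- # after everything already queued. A minimal model of the idiom:
- #
- #     items = [1, 2]
- #     for x in chain([0], items):
- #         if x < 2:
- #             items.append(x + 10)
- #     # visit order: 0, 1, 2, 10, 11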
-
- def _add_requirement_to_set(
- self,
- requirement_set: RequirementSet,
- install_req: InstallRequirement,
- parent_req_name: Optional[str] = None,
- extras_requested: Optional[Iterable[str]] = None,
- ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
- """Add install_req as a requirement to install.
-
- :param parent_req_name: The name of the requirement that needed this
- added. The name is used because when multiple unnamed requirements
- resolve to the same name, we could otherwise end up with dependency
- links that point outside the Requirements set. parent_req must
- already be added. Note that None implies that this is a user
- supplied requirement, vs an inferred one.
- :param extras_requested: an iterable of extras used to evaluate the
- environment markers.
- :return: Additional requirements to scan. That is either [] if
- the requirement is not applicable, or [install_req] if the
- requirement is applicable and has just been added.
- """
- # If the markers do not match, ignore this requirement.
- if not install_req.match_markers(extras_requested):
- logger.info(
- "Ignoring %s: markers '%s' don't match your environment",
- install_req.name,
- install_req.markers,
- )
- return [], None
-
- # If the wheel is not supported, raise an error.
- # Should check this after filtering out based on environment markers to
- # allow specifying different wheels based on the environment/OS, in a
- # single requirements file.
- if install_req.link and install_req.link.is_wheel:
- wheel = Wheel(install_req.link.filename)
- tags = compatibility_tags.get_supported()
- if requirement_set.check_supported_wheels and not wheel.supported(tags):
- raise InstallationError(
- f"{wheel.filename} is not a supported wheel on this platform."
- )
-
- # This next bit is really a sanity check.
- assert (
- not install_req.user_supplied or parent_req_name is None
- ), "a user supplied req shouldn't have a parent"
-
- # Unnamed requirements are scanned again and the requirement won't be
- # added as a dependency until after scanning.
- if not install_req.name:
- requirement_set.add_unnamed_requirement(install_req)
- return [install_req], None
-
- try:
- existing_req: Optional[
- InstallRequirement
- ] = requirement_set.get_requirement(install_req.name)
- except KeyError:
- existing_req = None
-
- has_conflicting_requirement = (
- parent_req_name is None
- and existing_req
- and not existing_req.constraint
- and existing_req.extras == install_req.extras
- and existing_req.req
- and install_req.req
- and existing_req.req.specifier != install_req.req.specifier
- )
- if has_conflicting_requirement:
- raise InstallationError(
- "Double requirement given: {} (already in {}, name={!r})".format(
- install_req, existing_req, install_req.name
- )
- )
-
- # When no existing requirement exists, add the requirement as a
- # dependency and it will be scanned again after.
- if not existing_req:
- requirement_set.add_named_requirement(install_req)
- # We'd want to rescan this requirement later
- return [install_req], install_req
-
- # Assume there's no need to scan, and that we've already
- # encountered this for scanning.
- if install_req.constraint or not existing_req.constraint:
- return [], existing_req
-
- does_not_satisfy_constraint = install_req.link and not (
- existing_req.link and install_req.link.path == existing_req.link.path
- )
- if does_not_satisfy_constraint:
- raise InstallationError(
- f"Could not satisfy constraints for '{install_req.name}': "
- "installation from path or url cannot be "
- "constrained to a version"
- )
- # If we're now installing a constraint, mark the existing
- # object for real installation.
- existing_req.constraint = False
- # If we're now installing a user supplied requirement,
- # mark the existing object as such.
- if install_req.user_supplied:
- existing_req.user_supplied = True
- existing_req.extras = tuple(
- sorted(set(existing_req.extras) | set(install_req.extras))
- )
- logger.debug(
- "Setting %s extras to: %s",
- existing_req,
- existing_req.extras,
- )
- # Return the existing requirement for addition to the parent and
- # scanning again.
- return [existing_req], existing_req
-
- def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
- if self.upgrade_strategy == "to-satisfy-only":
- return False
- elif self.upgrade_strategy == "eager":
- return True
- else:
- assert self.upgrade_strategy == "only-if-needed"
- return req.user_supplied or req.constraint
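-
- # Editor's note (not original code): these strategies roughly map to CLI
- # usage -- "to-satisfy-only" when no upgrade was requested, and, under
- # ``pip install -U``, "only-if-needed" (the default) or "eager" via
- # ``--upgrade-strategy eager``, which also upgrades already-satisfied
- # dependencies.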
-
- def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
- """
- Mark a requirement for (re)installation, clearing its satisfied_by.
- """
- # Don't uninstall the conflict if doing a user install and the
- # conflict is not a user install.
- if not self.use_user_site or req.satisfied_by.in_usersite:
- req.should_reinstall = True
- req.satisfied_by = None
-
- def _check_skip_installed(
- self, req_to_install: InstallRequirement
- ) -> Optional[str]:
- """Check if req_to_install should be skipped.
-
- This will check if the req is installed, and whether we should upgrade
- or reinstall it, taking into account all the relevant user options.
-
- After calling this req_to_install will only have satisfied_by set to
- None if the req_to_install is to be upgraded/reinstalled etc. Any
- other value will be a dist recording the current thing installed that
- satisfies the requirement.
-
- Note that for VCS URLs and the like we can't assess skipping in this
- routine - we simply identify that we need to pull the thing down,
- and later on it is pulled down and introspected to assess
- upgrades/reinstalls etc.
-
- :return: A text reason for why it was skipped, or None.
- """
- if self.ignore_installed:
- return None
-
- req_to_install.check_if_exists(self.use_user_site)
- if not req_to_install.satisfied_by:
- return None
-
- if self.force_reinstall:
- self._set_req_to_reinstall(req_to_install)
- return None
-
- if not self._is_upgrade_allowed(req_to_install):
- if self.upgrade_strategy == "only-if-needed":
- return "already satisfied, skipping upgrade"
- return "already satisfied"
-
- # Check for the possibility of an upgrade. For link-based
- # requirements we have to pull the tree down and inspect to assess
- # the version #, so it's handled way down.
- if not req_to_install.link:
- try:
- self.finder.find_requirement(req_to_install, upgrade=True)
- except BestVersionAlreadyInstalled:
- # Then the best version is installed.
- return "already up-to-date"
- except DistributionNotFound:
- # No distribution found, so we squash the error. It will
- # be raised later when we re-try later to do the install.
- # Why don't we just raise here?
- pass
-
- self._set_req_to_reinstall(req_to_install)
- return None
-
- def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
- upgrade = self._is_upgrade_allowed(req)
- best_candidate = self.finder.find_requirement(req, upgrade)
- if not best_candidate:
- return None
-
- # Log a warning per PEP 592 if necessary before returning.
- link = best_candidate.link
- if link.is_yanked:
- reason = link.yanked_reason or "<none given>"
- msg = (
- "The candidate selected for download or install is a "
- f"yanked version: {best_candidate}\n"
- f"Reason for being yanked: {reason}"
- )
- logger.warning(msg)
-
- return link
-
- def _populate_link(self, req: InstallRequirement) -> None:
- """Ensure that if a link can be found for this, that it is found.
-
- Note that req.link may still be None - if the requirement is already
- installed and not needed to be upgraded based on the return value of
- _is_upgrade_allowed().
-
- If preparer.require_hashes is True, don't use the wheel cache, because
- cached wheels, always built locally, have different hashes than the
- files downloaded from the index server and thus throw false hash
- mismatches. Furthermore, cached wheels at present have nondeterministic
- contents due to file modification times.
- """
- if req.link is None:
- req.link = self._find_requirement_link(req)
-
- if self.wheel_cache is None or self.preparer.require_hashes:
- return
- cache_entry = self.wheel_cache.get_cache_entry(
- link=req.link,
- package_name=req.name,
- supported_tags=get_supported(),
- )
- if cache_entry is not None:
- logger.debug("Using cached wheel link: %s", cache_entry.link)
- if req.link is req.original_link and cache_entry.persistent:
- req.cached_wheel_source_link = req.link
- if cache_entry.origin is not None:
- req.download_info = cache_entry.origin
- else:
- # Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hashes field.
- req.download_info = direct_url_from_link(
- req.link, link_is_in_wheel_cache=cache_entry.persistent
- )
- req.link = cache_entry.link
-
- def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
- """Takes a InstallRequirement and returns a single AbstractDist \
- representing a prepared variant of the same.
- """
- if req.editable:
- return self.preparer.prepare_editable_requirement(req)
-
- # satisfied_by is only evaluated by calling _check_skip_installed,
- # so it must be None here.
- assert req.satisfied_by is None
- skip_reason = self._check_skip_installed(req)
-
- if req.satisfied_by:
- return self.preparer.prepare_installed_requirement(req, skip_reason)
-
- # We eagerly populate the link, since that's our "legacy" behavior.
- self._populate_link(req)
- dist = self.preparer.prepare_linked_requirement(req)
-
- # NOTE
- # The following portion is for determining if a certain package is
- # going to be re-installed/upgraded or not and reporting to the user.
- # This should probably get cleaned up in a future refactor.
-
- # req.req is only available after unpacking, for URL-based packages;
- # repeat check_if_exists so that we can uninstall-on-upgrade
- # (#14)
- if not self.ignore_installed:
- req.check_if_exists(self.use_user_site)
-
- if req.satisfied_by:
- should_modify = (
- self.upgrade_strategy != "to-satisfy-only"
- or self.force_reinstall
- or self.ignore_installed
- or req.link.scheme == "file"
- )
- if should_modify:
- self._set_req_to_reinstall(req)
- else:
- logger.info(
- "Requirement already satisfied (use --upgrade to upgrade): %s",
- req,
- )
- return dist
-
- def _resolve_one(
- self,
- requirement_set: RequirementSet,
- req_to_install: InstallRequirement,
- ) -> List[InstallRequirement]:
- """Prepare a single requirements file.
-
- :return: A list of additional InstallRequirements to also install.
- """
- # Tell user what we are doing for this requirement:
- # obtain (editable), skipping, processing (local url), collecting
- # (remote url or package name)
- if req_to_install.constraint or req_to_install.prepared:
- return []
-
- req_to_install.prepared = True
-
- # Parse and return dependencies
- dist = self._get_dist_for(req_to_install)
- # This will raise UnsupportedPythonVersion if the given Python
- # version isn't compatible with the distribution's Requires-Python.
- _check_dist_requires_python(
- dist,
- version_info=self._py_version_info,
- ignore_requires_python=self.ignore_requires_python,
- )
-
- more_reqs: List[InstallRequirement] = []
-
- def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
- # This idiosyncratically converts the Requirement to str and lets
- # make_install_req parse it back into a Requirement. But this is
- # the legacy resolver so I'm just not going to bother refactoring.
- sub_install_req = self._make_install_req(str(subreq), req_to_install)
- parent_req_name = req_to_install.name
- to_scan_again, add_to_parent = self._add_requirement_to_set(
- requirement_set,
- sub_install_req,
- parent_req_name=parent_req_name,
- extras_requested=extras_requested,
- )
- if parent_req_name and add_to_parent:
- self._discovered_dependencies[parent_req_name].append(add_to_parent)
- more_reqs.extend(to_scan_again)
-
- with indent_log():
- # We add req_to_install before its dependencies, so that we
- # can refer to it when adding dependencies.
- if not requirement_set.has_requirement(req_to_install.name):
- # 'unnamed' requirements will get added here
- # 'unnamed' requirements can only come from being directly
- # provided by the user.
- assert req_to_install.user_supplied
- self._add_requirement_to_set(
- requirement_set, req_to_install, parent_req_name=None
- )
-
- if not self.ignore_dependencies:
- if req_to_install.extras:
- logger.debug(
- "Installing extra requirements: %r",
- ",".join(req_to_install.extras),
- )
- missing_requested = sorted(
- set(req_to_install.extras) - set(dist.iter_provided_extras())
- )
- for missing in missing_requested:
- logger.warning(
- "%s %s does not provide the extra '%s'",
- dist.raw_name,
- dist.version,
- missing,
- )
-
- available_requested = sorted(
- set(dist.iter_provided_extras()) & set(req_to_install.extras)
- )
- for subreq in dist.iter_dependencies(available_requested):
- add_req(subreq, extras_requested=available_requested)
-
- return more_reqs
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- """Create the installation order.
-
- The installation order is topological - requirements are installed
- before the requiring thing. We break cycles at an arbitrary point,
- and make no other guarantees.
- """
- # The current implementation, which we may change at any point,
- # installs the user-specified things in the order given, except when
- # dependencies must come earlier to achieve topological order.
- order = []
- ordered_reqs: Set[InstallRequirement] = set()
-
- def schedule(req: InstallRequirement) -> None:
- if req.satisfied_by or req in ordered_reqs:
- return
- if req.constraint:
- return
- ordered_reqs.add(req)
- for dep in self._discovered_dependencies[req.name]:
- schedule(dep)
- order.append(req)
-
- for install_req in req_set.requirements.values():
- schedule(install_req)
- return order
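-
- # Editor's illustration (not original code): with discovered dependencies
- # app -> lib -> base, ``schedule(app)`` recurses into lib and base before
- # appending app, so the order comes out as [base, lib, app] -- each
- # requirement lands before the things that require it. The
- # ``req in ordered_reqs`` guard breaks cycles: a re-entered node returns
- # immediately instead of recursing forever.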
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index ae5d57a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 74e9558..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc
deleted file mode 100644
index 0990251..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc
deleted file mode 100644
index 30dac5d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc
deleted file mode 100644
index 492b5bd..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc
deleted file mode 100644
index 03ae80a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc
deleted file mode 100644
index e8a1b66..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc
deleted file mode 100644
index fbe135d..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc
deleted file mode 100644
index f79ed40..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py
deleted file mode 100644
index 9c0ef5c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/base.py
+++ /dev/null
@@ -1,141 +0,0 @@
-from typing import FrozenSet, Iterable, Optional, Tuple, Union
-
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName
-from pip._vendor.packaging.version import LegacyVersion, Version
-
-from pip._internal.models.link import Link, links_equivalent
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.hashes import Hashes
-
-CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
-CandidateVersion = Union[LegacyVersion, Version]
-
-
-def format_name(project: NormalizedName, extras: FrozenSet[NormalizedName]) -> str:
- if not extras:
- return project
- extras_expr = ",".join(sorted(extras))
- return f"{project}[{extras_expr}]"
-
-
-class Constraint:
- def __init__(
- self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
- ) -> None:
- self.specifier = specifier
- self.hashes = hashes
- self.links = links
-
- @classmethod
- def empty(cls) -> "Constraint":
- return Constraint(SpecifierSet(), Hashes(), frozenset())
-
- @classmethod
- def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
- links = frozenset([ireq.link]) if ireq.link else frozenset()
- return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)
-
- def __bool__(self) -> bool:
- return bool(self.specifier) or bool(self.hashes) or bool(self.links)
-
- def __and__(self, other: InstallRequirement) -> "Constraint":
- if not isinstance(other, InstallRequirement):
- return NotImplemented
- specifier = self.specifier & other.specifier
- hashes = self.hashes & other.hashes(trust_internet=False)
- links = self.links
- if other.link:
- links = links.union([other.link])
- return Constraint(specifier, hashes, links)
-
- def is_satisfied_by(self, candidate: "Candidate") -> bool:
- # Reject if there are any mismatched URL constraints on this package.
- if self.links and not all(_match_link(link, candidate) for link in self.links):
- return False
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- return self.specifier.contains(candidate.version, prereleases=True)
-
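-# Editor's sketch (not part of the original module) of how ``Constraint``
-# accumulates: each ``&`` with an InstallRequirement narrows the specifier and
-# unions the hashes and links. Assuming two hypothetical constraint ireqs for
-# "foo":
-#
-#     c = Constraint.empty() & ireq_a   # carries foo>=1.0
-#     c = c & ireq_b                    # carries foo<2.0
-#     str(c.specifier)                  # -> "<2.0,>=1.0"
-#     c.is_satisfied_by(candidate)      # True only for matching 1.x versions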
-
-class Requirement:
- @property
- def project_name(self) -> NormalizedName:
- """The "project name" of a requirement.
-
- This is different from ``name`` if this requirement contains extras,
- in which case ``name`` would contain the ``[...]`` part, while this
- refers to the name of the project.
- """
- raise NotImplementedError("Subclass should override")
-
- @property
- def name(self) -> str:
- """The name identifying this requirement in the resolver.
-
- This is different from ``project_name`` if this requirement contains
- extras, where ``project_name`` would not contain the ``[...]`` part.
- """
- raise NotImplementedError("Subclass should override")
-
- def is_satisfied_by(self, candidate: "Candidate") -> bool:
- return False
-
- def get_candidate_lookup(self) -> CandidateLookup:
- raise NotImplementedError("Subclass should override")
-
- def format_for_error(self) -> str:
- raise NotImplementedError("Subclass should override")
-
-
-def _match_link(link: Link, candidate: "Candidate") -> bool:
- if candidate.source_link:
- return links_equivalent(link, candidate.source_link)
- return False
-
-
-class Candidate:
- @property
- def project_name(self) -> NormalizedName:
- """The "project name" of the candidate.
-
- This is different from ``name`` if this candidate contains extras,
- in which case ``name`` would contain the ``[...]`` part, while this
- refers to the name of the project.
- """
- raise NotImplementedError("Override in subclass")
-
- @property
- def name(self) -> str:
- """The name identifying this candidate in the resolver.
-
- This is different from ``project_name`` if this candidate contains
- extras, where ``project_name`` would not contain the ``[...]`` part.
- """
- raise NotImplementedError("Override in subclass")
-
- @property
- def version(self) -> CandidateVersion:
- raise NotImplementedError("Override in subclass")
-
- @property
- def is_installed(self) -> bool:
- raise NotImplementedError("Override in subclass")
-
- @property
- def is_editable(self) -> bool:
- raise NotImplementedError("Override in subclass")
-
- @property
- def source_link(self) -> Optional[Link]:
- raise NotImplementedError("Override in subclass")
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- raise NotImplementedError("Override in subclass")
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- raise NotImplementedError("Override in subclass")
-
- def format_for_error(self) -> str:
- raise NotImplementedError("Subclass should override")
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py
deleted file mode 100644
index 4125cda..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/candidates.py
+++ /dev/null
@@ -1,597 +0,0 @@
-import logging
-import sys
-from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast
-
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.packaging.version import Version
-
-from pip._internal.exceptions import (
- HashError,
- InstallationSubprocessError,
- MetadataInconsistent,
-)
-from pip._internal.metadata import BaseDistribution
-from pip._internal.models.link import Link, links_equivalent
-from pip._internal.models.wheel import Wheel
-from pip._internal.req.constructors import (
- install_req_from_editable,
- install_req_from_line,
-)
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.direct_url_helpers import direct_url_from_link
-from pip._internal.utils.misc import normalize_version_info
-
-from .base import Candidate, CandidateVersion, Requirement, format_name
-
-if TYPE_CHECKING:
- from .factory import Factory
-
-logger = logging.getLogger(__name__)
-
-BaseCandidate = Union[
- "AlreadyInstalledCandidate",
- "EditableCandidate",
- "LinkCandidate",
-]
-
-# Avoid conflicting with the PyPI package "Python".
-REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")
-
-
-def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
- """The runtime version of BaseCandidate."""
- base_candidate_classes = (
- AlreadyInstalledCandidate,
- EditableCandidate,
- LinkCandidate,
- )
- if isinstance(candidate, base_candidate_classes):
- return candidate
- return None
-
-
-def make_install_req_from_link(
- link: Link, template: InstallRequirement
-) -> InstallRequirement:
- assert not template.editable, "template is editable"
- if template.req:
- line = str(template.req)
- else:
- line = link.url
- ireq = install_req_from_line(
- line,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.original_link = template.original_link
- ireq.link = link
- ireq.extras = template.extras
- return ireq
-
-
-def make_install_req_from_editable(
- link: Link, template: InstallRequirement
-) -> InstallRequirement:
- assert template.editable, "template not editable"
- ireq = install_req_from_editable(
- link.url,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- permit_editable_wheels=template.permit_editable_wheels,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.extras = template.extras
- return ireq
-
-
-def _make_install_req_from_dist(
- dist: BaseDistribution, template: InstallRequirement
-) -> InstallRequirement:
- if template.req:
- line = str(template.req)
- elif template.link:
- line = f"{dist.canonical_name} @ {template.link.url}"
- else:
- line = f"{dist.canonical_name}=={dist.version}"
- ireq = install_req_from_line(
- line,
- user_supplied=template.user_supplied,
- comes_from=template.comes_from,
- use_pep517=template.use_pep517,
- isolated=template.isolated,
- constraint=template.constraint,
- global_options=template.global_options,
- hash_options=template.hash_options,
- config_settings=template.config_settings,
- )
- ireq.satisfied_by = dist
- return ireq
-
-
-class _InstallRequirementBackedCandidate(Candidate):
- """A candidate backed by an ``InstallRequirement``.
-
- This represents a package request with the target not being already
- in the environment, and needs to be fetched and installed. The backing
- ``InstallRequirement`` is responsible for most of the leg work; this
- class exposes appropriate information to the resolver.
-
- :param link: The link passed to the ``InstallRequirement``. The backing
- ``InstallRequirement`` will use this link to fetch the distribution.
- :param source_link: The link this candidate "originates" from. This is
- different from ``link`` when the link is found in the wheel cache.
- ``link`` would point to the wheel cache, while this points to the
- found remote link (e.g. from pypi.org).
- """
-
- dist: BaseDistribution
- is_installed = False
-
- def __init__(
- self,
- link: Link,
- source_link: Link,
- ireq: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- self._link = link
- self._source_link = source_link
- self._factory = factory
- self._ireq = ireq
- self._name = name
- self._version = version
- self.dist = self._prepare()
-
- def __str__(self) -> str:
- return f"{self.name} {self.version}"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._link)!r})"
-
- def __hash__(self) -> int:
- return hash((self.__class__, self._link))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return links_equivalent(self._link, other._link)
- return False
-
- @property
- def source_link(self) -> Optional[Link]:
- return self._source_link
-
- @property
- def project_name(self) -> NormalizedName:
- """The normalised name of the project the candidate refers to"""
- if self._name is None:
- self._name = self.dist.canonical_name
- return self._name
-
- @property
- def name(self) -> str:
- return self.project_name
-
- @property
- def version(self) -> CandidateVersion:
- if self._version is None:
- self._version = self.dist.version
- return self._version
-
- def format_for_error(self) -> str:
- return "{} {} (from {})".format(
- self.name,
- self.version,
- self._link.file_path if self._link.is_file else self._link,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- raise NotImplementedError("Override in subclass")
-
- def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
- """Check for consistency of project name and version of dist."""
- if self._name is not None and self._name != dist.canonical_name:
- raise MetadataInconsistent(
- self._ireq,
- "name",
- self._name,
- dist.canonical_name,
- )
- if self._version is not None and self._version != dist.version:
- raise MetadataInconsistent(
- self._ireq,
- "version",
- str(self._version),
- str(dist.version),
- )
-
- def _prepare(self) -> BaseDistribution:
- try:
- dist = self._prepare_distribution()
- except HashError as e:
- # Provide HashError the underlying ireq that caused it. This
- # provides context for the resulting error message to show the
- # offending line to the user.
- e.req = self._ireq
- raise
- except InstallationSubprocessError as exc:
- # The output has been presented already, so don't duplicate it.
- exc.context = "See above for output."
- raise
-
- self._check_metadata_consistency(dist)
- return dist
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- requires = self.dist.iter_dependencies() if with_requires else ()
- for r in requires:
- yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
- yield self._factory.make_requires_python_requirement(self.dist.requires_python)
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return self._ireq
-
-
-class LinkCandidate(_InstallRequirementBackedCandidate):
- is_editable = False
-
- def __init__(
- self,
- link: Link,
- template: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- source_link = link
- cache_entry = factory.get_wheel_cache_entry(source_link, name)
- if cache_entry is not None:
- logger.debug("Using cached wheel link: %s", cache_entry.link)
- link = cache_entry.link
- ireq = make_install_req_from_link(link, template)
- assert ireq.link == link
- if ireq.link.is_wheel and not ireq.link.is_file:
- wheel = Wheel(ireq.link.filename)
- wheel_name = canonicalize_name(wheel.name)
- assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
- # Version may not be present for PEP 508 direct URLs
- if version is not None:
- wheel_version = Version(wheel.version)
- assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
- version, wheel_version, name
- )
-
- if cache_entry is not None:
- assert ireq.link.is_wheel
- assert ireq.link.is_file
- if cache_entry.persistent and template.link is template.original_link:
- ireq.cached_wheel_source_link = source_link
- if cache_entry.origin is not None:
- ireq.download_info = cache_entry.origin
- else:
- # Legacy cache entry that does not have origin.json.
- # download_info may miss the archive_info.hashes field.
- ireq.download_info = direct_url_from_link(
- source_link, link_is_in_wheel_cache=cache_entry.persistent
- )
-
- super().__init__(
- link=link,
- source_link=source_link,
- ireq=ireq,
- factory=factory,
- name=name,
- version=version,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- preparer = self._factory.preparer
- return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)
-
-
-class EditableCandidate(_InstallRequirementBackedCandidate):
- is_editable = True
-
- def __init__(
- self,
- link: Link,
- template: InstallRequirement,
- factory: "Factory",
- name: Optional[NormalizedName] = None,
- version: Optional[CandidateVersion] = None,
- ) -> None:
- super().__init__(
- link=link,
- source_link=link,
- ireq=make_install_req_from_editable(link, template),
- factory=factory,
- name=name,
- version=version,
- )
-
- def _prepare_distribution(self) -> BaseDistribution:
- return self._factory.preparer.prepare_editable_requirement(self._ireq)
-
-
-class AlreadyInstalledCandidate(Candidate):
- is_installed = True
- source_link = None
-
- def __init__(
- self,
- dist: BaseDistribution,
- template: InstallRequirement,
- factory: "Factory",
- ) -> None:
- self.dist = dist
- self._ireq = _make_install_req_from_dist(dist, template)
- self._factory = factory
- self._version = None
-
- # This is just logging some messages, so we can do it eagerly.
- # The returned dist would be exactly the same as self.dist because we
- # set satisfied_by in _make_install_req_from_dist.
- # TODO: Supply reason based on force_reinstall and upgrade_strategy.
- skip_reason = "already satisfied"
- factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)
-
- def __str__(self) -> str:
- return str(self.dist)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self.dist!r})"
-
- def __hash__(self) -> int:
- return hash((self.__class__, self.name, self.version))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return self.name == other.name and self.version == other.version
- return False
-
- @property
- def project_name(self) -> NormalizedName:
- return self.dist.canonical_name
-
- @property
- def name(self) -> str:
- return self.project_name
-
- @property
- def version(self) -> CandidateVersion:
- if self._version is None:
- self._version = self.dist.version
- return self._version
-
- @property
- def is_editable(self) -> bool:
- return self.dist.editable
-
- def format_for_error(self) -> str:
- return f"{self.name} {self.version} (Installed)"
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- if not with_requires:
- return
- for r in self.dist.iter_dependencies():
- yield from self._factory.make_requirements_from_spec(str(r), self._ireq)
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return None
-
-
-class ExtrasCandidate(Candidate):
- """A candidate that has 'extras', indicating additional dependencies.
-
- Requirements can be for a project with extras, something like
- foo[extra]. The extras don't affect the project/version being installed
- directly, but indicate that we need additional dependencies. We model that
- by having an artificial ExtrasCandidate that wraps the "base" candidate.
-
- The ExtrasCandidate differs from the base in the following ways:
-
- 1. It has a unique name, of the form foo[extra]. This causes the resolver
- to treat it as a separate node in the dependency graph.
- 2. When we're getting the candidate's dependencies,
- a) We specify that we want the extra dependencies as well.
- b) We add a dependency on the base candidate.
- See below for why this is needed.
- 3. We return None for the underlying InstallRequirement, as the base
- candidate will provide it, and we don't want to end up with duplicates.
-
- The dependency on the base candidate is needed so that the resolver can't
- decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
- version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
- respectively forces the resolver to recognise that this is a conflict.
- """
-
- def __init__(
- self,
- base: BaseCandidate,
- extras: FrozenSet[str],
- *,
- comes_from: Optional[InstallRequirement] = None,
- ) -> None:
- """
- :param comes_from: the InstallRequirement that led to this candidate if it
- differs from the base's InstallRequirement. This is often the case:
- this candidate's requirement has the extras while the base's does
- not. Unlike the InstallRequirement-backed candidates, this
- requirement is used solely for reporting purposes; it does not do
- any leg work.
- """
- self.base = base
- self.extras = frozenset(canonicalize_name(e) for e in extras)
- # If any extras are requested in their non-normalized forms, keep track
- # of their raw values. This is needed when we look up dependencies
- # since PEP 685 has not been implemented for marker-matching, and using
- # the non-normalized extra for lookup ensures the user can select a
- # non-normalized extra in a package with its non-normalized form.
- # TODO: Remove this attribute when packaging is upgraded to support the
- # marker comparison logic specified in PEP 685.
- self._unnormalized_extras = extras.difference(self.extras)
- self._comes_from = comes_from if comes_from is not None else self.base._ireq
-
- def __str__(self) -> str:
- name, rest = str(self.base).split(" ", 1)
- return "{}[{}] {}".format(name, ",".join(self.extras), rest)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}(base={self.base!r}, extras={self.extras!r})"
-
- def __hash__(self) -> int:
- return hash((self.base, self.extras))
-
- def __eq__(self, other: Any) -> bool:
- if isinstance(other, self.__class__):
- return self.base == other.base and self.extras == other.extras
- return False
-
- @property
- def project_name(self) -> NormalizedName:
- return self.base.project_name
-
- @property
- def name(self) -> str:
- """The normalised name of the project the candidate refers to"""
- return format_name(self.base.project_name, self.extras)
-
- @property
- def version(self) -> CandidateVersion:
- return self.base.version
-
- def format_for_error(self) -> str:
- return "{} [{}]".format(
- self.base.format_for_error(), ", ".join(sorted(self.extras))
- )
-
- @property
- def is_installed(self) -> bool:
- return self.base.is_installed
-
- @property
- def is_editable(self) -> bool:
- return self.base.is_editable
-
- @property
- def source_link(self) -> Optional[Link]:
- return self.base.source_link
-
- def _warn_invalid_extras(
- self,
- requested: FrozenSet[str],
- valid: FrozenSet[str],
- ) -> None:
- """Emit warnings for invalid extras being requested.
-
- This emits a warning for each requested extra that is not in the
- candidate's ``Provides-Extra`` list.
- """
- invalid_extras_to_warn = frozenset(
- extra
- for extra in requested
- if extra not in valid
- # If an extra is requested in an unnormalized form, skip warning
- # about the normalized form being missing.
- and extra in self.extras
- )
- if not invalid_extras_to_warn:
- return
- for extra in sorted(invalid_extras_to_warn):
- logger.warning(
- "%s %s does not provide the extra '%s'",
- self.base.name,
- self.version,
- extra,
- )
-
- def _calculate_valid_requested_extras(self) -> FrozenSet[str]:
- """Get a list of valid extras requested by this candidate.
-
- The user (or upstream dependant) may have specified extras that the
- candidate doesn't support. Any unsupported extras are dropped, and each
- cause a warning to be logged here.
- """
- requested_extras = self.extras.union(self._unnormalized_extras)
- valid_extras = frozenset(
- extra
- for extra in requested_extras
- if self.base.dist.is_extra_provided(extra)
- )
- self._warn_invalid_extras(requested_extras, valid_extras)
- return valid_extras
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- factory = self.base._factory
-
- # Add a dependency on the exact base
- # (See note 2b in the class docstring)
- yield factory.make_requirement_from_candidate(self.base)
- if not with_requires:
- return
-
- valid_extras = self._calculate_valid_requested_extras()
- for r in self.base.dist.iter_dependencies(valid_extras):
- yield from factory.make_requirements_from_spec(
- str(r),
- self._comes_from,
- valid_extras,
- )
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- # We don't return anything here, because we always
- # depend on the base candidate, and we'll get the
- # install requirement from that.
- return None
-
-
-class RequiresPythonCandidate(Candidate):
- is_installed = False
- source_link = None
-
- def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
- if py_version_info is not None:
- version_info = normalize_version_info(py_version_info)
- else:
- version_info = sys.version_info[:3]
- self._version = Version(".".join(str(c) for c in version_info))
-
- # We don't need to implement __eq__() and __ne__() since there is always
- # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
- # The built-in object.__eq__() and object.__ne__() do exactly what we want.
-
- def __str__(self) -> str:
- return f"Python {self._version}"
-
- @property
- def project_name(self) -> NormalizedName:
- return REQUIRES_PYTHON_IDENTIFIER
-
- @property
- def name(self) -> str:
- return REQUIRES_PYTHON_IDENTIFIER
-
- @property
- def version(self) -> CandidateVersion:
- return self._version
-
- def format_for_error(self) -> str:
- return f"Python {self.version}"
-
- def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
- return ()
-
- def get_install_requirement(self) -> Optional[InstallRequirement]:
- return None
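-
-# Editor's note (not part of the original module): RequiresPythonCandidate is
-# the only candidate ever offered for the synthetic
-# "<Python from Requires-Python>" identifier, so a distribution whose
-# Requires-Python excludes the running interpreter surfaces as an ordinary
-# unsatisfiable requirement rather than needing a special case in the
-# resolver.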
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py
deleted file mode 100644
index 4adeb43..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/factory.py
+++ /dev/null
@@ -1,812 +0,0 @@
-import contextlib
-import functools
-import logging
-from typing import (
- TYPE_CHECKING,
- Dict,
- FrozenSet,
- Iterable,
- Iterator,
- List,
- Mapping,
- NamedTuple,
- Optional,
- Sequence,
- Set,
- Tuple,
- TypeVar,
- cast,
-)
-
-from pip._vendor.packaging.requirements import InvalidRequirement
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-from pip._vendor.resolvelib import ResolutionImpossible
-
-from pip._internal.cache import CacheEntry, WheelCache
-from pip._internal.exceptions import (
- DistributionNotFound,
- InstallationError,
- MetadataInconsistent,
- UnsupportedPythonVersion,
- UnsupportedWheel,
-)
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import BaseDistribution, get_default_environment
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import (
- install_req_drop_extras,
- install_req_from_link_and_ireq,
-)
-from pip._internal.req.req_install import (
- InstallRequirement,
- check_invalid_constraint_type,
-)
-from pip._internal.resolution.base import InstallRequirementProvider
-from pip._internal.utils.compatibility_tags import get_supported
-from pip._internal.utils.hashes import Hashes
-from pip._internal.utils.packaging import get_requirement
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-from .base import Candidate, CandidateVersion, Constraint, Requirement
-from .candidates import (
- AlreadyInstalledCandidate,
- BaseCandidate,
- EditableCandidate,
- ExtrasCandidate,
- LinkCandidate,
- RequiresPythonCandidate,
- as_base_candidate,
-)
-from .found_candidates import FoundCandidates, IndexCandidateInfo
-from .requirements import (
- ExplicitRequirement,
- RequiresPythonRequirement,
- SpecifierRequirement,
- SpecifierWithoutExtrasRequirement,
- UnsatisfiableRequirement,
-)
-
-if TYPE_CHECKING:
- from typing import Protocol
-
- class ConflictCause(Protocol):
- requirement: RequiresPythonRequirement
- parent: Candidate
-
-
-logger = logging.getLogger(__name__)
-
-C = TypeVar("C")
-Cache = Dict[Link, C]
-
-
-class CollectedRootRequirements(NamedTuple):
- requirements: List[Requirement]
- constraints: Dict[str, Constraint]
- user_requested: Dict[str, int]
-
-
-class Factory:
- def __init__(
- self,
- finder: PackageFinder,
- preparer: RequirementPreparer,
- make_install_req: InstallRequirementProvider,
- wheel_cache: Optional[WheelCache],
- use_user_site: bool,
- force_reinstall: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ) -> None:
- self._finder = finder
- self.preparer = preparer
- self._wheel_cache = wheel_cache
- self._python_candidate = RequiresPythonCandidate(py_version_info)
- self._make_install_req_from_spec = make_install_req
- self._use_user_site = use_user_site
- self._force_reinstall = force_reinstall
- self._ignore_requires_python = ignore_requires_python
-
- self._build_failures: Cache[InstallationError] = {}
- self._link_candidate_cache: Cache[LinkCandidate] = {}
- self._editable_candidate_cache: Cache[EditableCandidate] = {}
- self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
- self._extras_candidate_cache: Dict[
- Tuple[int, FrozenSet[NormalizedName]], ExtrasCandidate
- ] = {}
-
- if not ignore_installed:
- env = get_default_environment()
- self._installed_dists = {
- dist.canonical_name: dist
- for dist in env.iter_installed_distributions(local_only=False)
- }
- else:
- self._installed_dists = {}
-
- @property
- def force_reinstall(self) -> bool:
- return self._force_reinstall
-
- def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
- if not link.is_wheel:
- return
- wheel = Wheel(link.filename)
- if wheel.supported(self._finder.target_python.get_unsorted_tags()):
- return
- msg = f"{link.filename} is not a supported wheel on this platform."
- raise UnsupportedWheel(msg)
-
- def _make_extras_candidate(
- self,
- base: BaseCandidate,
- extras: FrozenSet[str],
- *,
- comes_from: Optional[InstallRequirement] = None,
- ) -> ExtrasCandidate:
- cache_key = (id(base), frozenset(canonicalize_name(e) for e in extras))
- try:
- candidate = self._extras_candidate_cache[cache_key]
- except KeyError:
- candidate = ExtrasCandidate(base, extras, comes_from=comes_from)
- self._extras_candidate_cache[cache_key] = candidate
- return candidate
-
- def _make_candidate_from_dist(
- self,
- dist: BaseDistribution,
- extras: FrozenSet[str],
- template: InstallRequirement,
- ) -> Candidate:
- try:
- base = self._installed_candidate_cache[dist.canonical_name]
- except KeyError:
- base = AlreadyInstalledCandidate(dist, template, factory=self)
- self._installed_candidate_cache[dist.canonical_name] = base
- if not extras:
- return base
- return self._make_extras_candidate(base, extras, comes_from=template)
-
- def _make_candidate_from_link(
- self,
- link: Link,
- extras: FrozenSet[str],
- template: InstallRequirement,
- name: Optional[NormalizedName],
- version: Optional[CandidateVersion],
- ) -> Optional[Candidate]:
- base: Optional[BaseCandidate] = self._make_base_candidate_from_link(
- link, template, name, version
- )
- if not extras or base is None:
- return base
- return self._make_extras_candidate(base, extras, comes_from=template)
-
- def _make_base_candidate_from_link(
- self,
- link: Link,
- template: InstallRequirement,
- name: Optional[NormalizedName],
- version: Optional[CandidateVersion],
- ) -> Optional[BaseCandidate]:
- # TODO: Check already installed candidate, and use it if the link and
- # editable flag match.
-
- if link in self._build_failures:
- # We already tried this candidate before, and it does not build.
- # Don't bother trying again.
- return None
-
- if template.editable:
- if link not in self._editable_candidate_cache:
- try:
- self._editable_candidate_cache[link] = EditableCandidate(
- link,
- template,
- factory=self,
- name=name,
- version=version,
- )
- except MetadataInconsistent as e:
- logger.info(
- "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
- link,
- e,
- extra={"markup": True},
- )
- self._build_failures[link] = e
- return None
-
- return self._editable_candidate_cache[link]
- else:
- if link not in self._link_candidate_cache:
- try:
- self._link_candidate_cache[link] = LinkCandidate(
- link,
- template,
- factory=self,
- name=name,
- version=version,
- )
- except MetadataInconsistent as e:
- logger.info(
- "Discarding [blue underline]%s[/]: [yellow]%s[reset]",
- link,
- e,
- extra={"markup": True},
- )
- self._build_failures[link] = e
- return None
- return self._link_candidate_cache[link]
-
- def _iter_found_candidates(
- self,
- ireqs: Sequence[InstallRequirement],
- specifier: SpecifierSet,
- hashes: Hashes,
- prefers_installed: bool,
- incompatible_ids: Set[int],
- ) -> Iterable[Candidate]:
- if not ireqs:
- return ()
-
- # The InstallRequirement implementation requires us to give it a
- # "template". Here we just choose the first requirement to represent
- # all of them.
- # Hopefully the Project model can correct this mismatch in the future.
- template = ireqs[0]
- assert template.req, "Candidates found on index must be PEP 508"
- name = canonicalize_name(template.req.name)
-
- extras: FrozenSet[str] = frozenset()
- for ireq in ireqs:
- assert ireq.req, "Candidates found on index must be PEP 508"
- specifier &= ireq.req.specifier
- hashes &= ireq.hashes(trust_internet=False)
- extras |= frozenset(ireq.extras)
-
- def _get_installed_candidate() -> Optional[Candidate]:
- """Get the candidate for the currently-installed version."""
- # If --force-reinstall is set, we want the version from the index
- # instead, so we "pretend" there is nothing installed.
- if self._force_reinstall:
- return None
- try:
- installed_dist = self._installed_dists[name]
- except KeyError:
- return None
- # Don't use the installed distribution if its version does not fit
- # the current dependency graph.
- if not specifier.contains(installed_dist.version, prereleases=True):
- return None
- candidate = self._make_candidate_from_dist(
- dist=installed_dist,
- extras=extras,
- template=template,
- )
- # The candidate is a known incompatibility. Don't use it.
- if id(candidate) in incompatible_ids:
- return None
- return candidate
-
- def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
- result = self._finder.find_best_candidate(
- project_name=name,
- specifier=specifier,
- hashes=hashes,
- )
- icans = list(result.iter_applicable())
-
- # PEP 592: Yanked releases are ignored unless the specifier
- # explicitly pins a version (via '==' or '===') that can be
- # solely satisfied by a yanked release.
- all_yanked = all(ican.link.is_yanked for ican in icans)
-
- def is_pinned(specifier: SpecifierSet) -> bool:
- for sp in specifier:
- if sp.operator == "===":
- return True
- if sp.operator != "==":
- continue
- if sp.version.endswith(".*"):
- continue
- return True
- return False
-
- pinned = is_pinned(specifier)
-
- # PackageFinder returns earlier versions first, so we reverse.
- for ican in reversed(icans):
- if not (all_yanked and pinned) and ican.link.is_yanked:
- continue
- func = functools.partial(
- self._make_candidate_from_link,
- link=ican.link,
- extras=extras,
- template=template,
- name=name,
- version=ican.version,
- )
- yield ican.version, func
-
- return FoundCandidates(
- iter_index_candidate_infos,
- _get_installed_candidate(),
- prefers_installed,
- incompatible_ids,
- )
-
- def _iter_explicit_candidates_from_base(
- self,
- base_requirements: Iterable[Requirement],
- extras: FrozenSet[str],
- ) -> Iterator[Candidate]:
- """Produce explicit candidates from the base given an extra-ed package.
-
- :param base_requirements: Requirements known to the resolver. The
- requirements are guaranteed to not have extras.
- :param extras: The extras to inject into the explicit requirements'
- candidates.
- """
- for req in base_requirements:
- lookup_cand, _ = req.get_candidate_lookup()
- if lookup_cand is None: # Not explicit.
- continue
- # We've stripped extras from the identifier, and should always
- # get a BaseCandidate here, unless there's a bug elsewhere.
- base_cand = as_base_candidate(lookup_cand)
- assert base_cand is not None, "no extras here"
- yield self._make_extras_candidate(base_cand, extras)
-
- def _iter_candidates_from_constraints(
- self,
- identifier: str,
- constraint: Constraint,
- template: InstallRequirement,
- ) -> Iterator[Candidate]:
- """Produce explicit candidates from constraints.
-
- This creates "fake" InstallRequirement objects that are basically clones
- of what "should" be the template, but with original_link set to link.
- """
- for link in constraint.links:
- self._fail_if_link_is_unsupported_wheel(link)
- candidate = self._make_base_candidate_from_link(
- link,
- template=install_req_from_link_and_ireq(link, template),
- name=canonicalize_name(identifier),
- version=None,
- )
- if candidate:
- yield candidate
-
- def find_candidates(
- self,
- identifier: str,
- requirements: Mapping[str, Iterable[Requirement]],
- incompatibilities: Mapping[str, Iterator[Candidate]],
- constraint: Constraint,
- prefers_installed: bool,
- ) -> Iterable[Candidate]:
- # Collect basic lookup information from the requirements.
- explicit_candidates: Set[Candidate] = set()
- ireqs: List[InstallRequirement] = []
- for req in requirements[identifier]:
- cand, ireq = req.get_candidate_lookup()
- if cand is not None:
- explicit_candidates.add(cand)
- if ireq is not None:
- ireqs.append(ireq)
-
- # If the current identifier contains extras, add requirements and explicit
- # candidates from the entries of the extra-less identifier.
- with contextlib.suppress(InvalidRequirement):
- parsed_requirement = get_requirement(identifier)
- if parsed_requirement.name != identifier:
- explicit_candidates.update(
- self._iter_explicit_candidates_from_base(
- requirements.get(parsed_requirement.name, ()),
- frozenset(parsed_requirement.extras),
- ),
- )
- for req in requirements.get(parsed_requirement.name, []):
- _, ireq = req.get_candidate_lookup()
- if ireq is not None:
- ireqs.append(ireq)
-
- # Add explicit candidates from constraints. We only do this if there are
- # known ireqs, which represent requirements not already explicit. If
- # there are no ireqs, we're constraining already-explicit requirements,
- # which is handled later when we return the explicit candidates.
- if ireqs:
- try:
- explicit_candidates.update(
- self._iter_candidates_from_constraints(
- identifier,
- constraint,
- template=ireqs[0],
- ),
- )
- except UnsupportedWheel:
- # If we're constrained to install a wheel incompatible with the
- # target architecture, no candidates will ever be valid.
- return ()
-
- # Since we cache all the candidates, incompatibility identification
- # can be made quicker by comparing only the id() values.
- incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}
-
- # If none of the requirements want an explicit candidate, we can ask
- # the finder for candidates.
- if not explicit_candidates:
- return self._iter_found_candidates(
- ireqs,
- constraint.specifier,
- constraint.hashes,
- prefers_installed,
- incompat_ids,
- )
-
- return (
- c
- for c in explicit_candidates
- if id(c) not in incompat_ids
- and constraint.is_satisfied_by(c)
- and all(req.is_satisfied_by(c) for req in requirements[identifier])
- )
-
- def _make_requirements_from_install_req(
- self, ireq: InstallRequirement, requested_extras: Iterable[str]
- ) -> Iterator[Requirement]:
- """
- Returns requirement objects associated with the given InstallRequirement. In
- most cases this will be a single object but the following special cases exist:
- - the InstallRequirement has markers that do not apply -> result is empty
- - the InstallRequirement has both a constraint (or link) and extras
- -> result is split into two requirement objects: one with the constraint
- (or link) and one with the extra. This allows centralized constraint
- handling for the base, resulting in fewer candidate rejections.
- """
- if not ireq.match_markers(requested_extras):
- logger.info(
- "Ignoring %s: markers '%s' don't match your environment",
- ireq.name,
- ireq.markers,
- )
- elif not ireq.link:
- if ireq.extras and ireq.req is not None and ireq.req.specifier:
- yield SpecifierWithoutExtrasRequirement(ireq)
- yield SpecifierRequirement(ireq)
- else:
- self._fail_if_link_is_unsupported_wheel(ireq.link)
- # Always make the link candidate for the base requirement to make it
- # available to `find_candidates` for explicit candidate lookup for any
- # set of extras.
- # The extras are required separately via a second requirement.
- cand = self._make_base_candidate_from_link(
- ireq.link,
- template=install_req_drop_extras(ireq) if ireq.extras else ireq,
- name=canonicalize_name(ireq.name) if ireq.name else None,
- version=None,
- )
- if cand is None:
- # There's no way we can satisfy a URL requirement if the underlying
- # candidate fails to build. An unnamed URL must be user-supplied, so
- # we fail eagerly. If the URL is named, an unsatisfiable requirement
- # can make the resolver do the right thing, either backtrack (and
- # maybe find some other requirement that's buildable) or raise a
- # ResolutionImpossible eventually.
- if not ireq.name:
- raise self._build_failures[ireq.link]
- yield UnsatisfiableRequirement(canonicalize_name(ireq.name))
- else:
- # require the base from the link
- yield self.make_requirement_from_candidate(cand)
- if ireq.extras:
- # require the extras on top of the base candidate
- yield self.make_requirement_from_candidate(
- self._make_extras_candidate(cand, frozenset(ireq.extras))
- )
-
- def collect_root_requirements(
- self, root_ireqs: List[InstallRequirement]
- ) -> CollectedRootRequirements:
- collected = CollectedRootRequirements([], {}, {})
- for i, ireq in enumerate(root_ireqs):
- if ireq.constraint:
- # Ensure we only accept valid constraints
- problem = check_invalid_constraint_type(ireq)
- if problem:
- raise InstallationError(problem)
- if not ireq.match_markers():
- continue
- assert ireq.name, "Constraint must be named"
- name = canonicalize_name(ireq.name)
- if name in collected.constraints:
- collected.constraints[name] &= ireq
- else:
- collected.constraints[name] = Constraint.from_ireq(ireq)
- else:
- reqs = list(
- self._make_requirements_from_install_req(
- ireq,
- requested_extras=(),
- )
- )
- if not reqs:
- continue
- template = reqs[0]
- if ireq.user_supplied and template.name not in collected.user_requested:
- collected.user_requested[template.name] = i
- collected.requirements.extend(reqs)
- # Put requirements with extras at the end of the root requires. This does not
- # affect resolvelib's picking preference but it does affect its initial criteria
- # population: by putting extras at the end we enable the candidate finder to
- # present resolvelib with a smaller set of candidates, already
- # taking into account any non-transient constraints on the associated base. This
- # means resolvelib will have fewer candidates to visit and reject.
- # Python's list sort is stable, meaning relative order is kept for objects with
- # the same key.
- collected.requirements.sort(key=lambda r: r.name != r.project_name)
- return collected
-
- def make_requirement_from_candidate(
- self, candidate: Candidate
- ) -> ExplicitRequirement:
- return ExplicitRequirement(candidate)
-
- def make_requirements_from_spec(
- self,
- specifier: str,
- comes_from: Optional[InstallRequirement],
- requested_extras: Iterable[str] = (),
- ) -> Iterator[Requirement]:
- """
- Returns requirement objects associated with the given specifier. In most cases
- this will be a single object but the following special cases exist:
- - the specifier has markers that do not apply -> result is empty
- - the specifier has both a constraint and extras -> result is split
- into two requirement objects: one with the constraint and one with the
- extra. This allows centralized constraint handling for the base,
- resulting in fewer candidate rejections.
- """
- ireq = self._make_install_req_from_spec(specifier, comes_from)
- return self._make_requirements_from_install_req(ireq, requested_extras)
-
- def make_requires_python_requirement(
- self,
- specifier: SpecifierSet,
- ) -> Optional[Requirement]:
- if self._ignore_requires_python:
- return None
- # Don't bother creating a dependency for an empty Requires-Python.
- if not str(specifier):
- return None
- return RequiresPythonRequirement(specifier, self._python_candidate)
-
- def get_wheel_cache_entry(
- self, link: Link, name: Optional[str]
- ) -> Optional[CacheEntry]:
- """Look up the link in the wheel cache.
-
- If ``preparer.require_hashes`` is True, don't use the wheel cache,
- because cached wheels, always built locally, have different hashes
- than the files downloaded from the index server and thus throw false
- hash mismatches. Furthermore, cached wheels at present have
- nondeterministic contents due to file modification times.
- """
- if self._wheel_cache is None:
- return None
- return self._wheel_cache.get_cache_entry(
- link=link,
- package_name=name,
- supported_tags=get_supported(),
- )
-
- def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
- # TODO: Are there more cases this needs to return True? Editable?
- dist = self._installed_dists.get(candidate.project_name)
- if dist is None: # Not installed, no uninstallation required.
- return None
-
- # We're installing into global site. The current installation must
- # be uninstalled, no matter whether it's in global or user site, because the
- # user site installation has precedence over global.
- if not self._use_user_site:
- return dist
-
- # We're installing into user site. Remove the user site installation.
- if dist.in_usersite:
- return dist
-
- # We're installing into user site, but the installed incompatible
- # package is in global site. We can't uninstall that, and would let
- # the new user installation "shadow" it. But shadowing won't work
- # in virtual environments, so we error out.
- if running_under_virtualenv() and dist.in_site_packages:
- message = (
- f"Will not install to the user site because it will lack "
- f"sys.path precedence to {dist.raw_name} in {dist.location}"
- )
- raise InstallationError(message)
- return None
-
- def _report_requires_python_error(
- self, causes: Sequence["ConflictCause"]
- ) -> UnsupportedPythonVersion:
- assert causes, "Requires-Python error reported with no cause"
-
- version = self._python_candidate.version
-
- if len(causes) == 1:
- specifier = str(causes[0].requirement.specifier)
- message = (
- f"Package {causes[0].parent.name!r} requires a different "
- f"Python: {version} not in {specifier!r}"
- )
- return UnsupportedPythonVersion(message)
-
- message = f"Packages require a different Python. {version} not in:"
- for cause in causes:
- package = cause.parent.format_for_error()
- specifier = str(cause.requirement.specifier)
- message += f"\n{specifier!r} (required by {package})"
- return UnsupportedPythonVersion(message)
-
- def _report_single_requirement_conflict(
- self, req: Requirement, parent: Optional[Candidate]
- ) -> DistributionNotFound:
- if parent is None:
- req_disp = str(req)
- else:
- req_disp = f"{req} (from {parent.name})"
-
- cands = self._finder.find_all_candidates(req.project_name)
- skipped_by_requires_python = self._finder.requires_python_skipped_reasons()
-
- versions_set: Set[CandidateVersion] = set()
- yanked_versions_set: Set[CandidateVersion] = set()
- for c in cands:
- is_yanked = c.link.is_yanked if c.link else False
- if is_yanked:
- yanked_versions_set.add(c.version)
- else:
- versions_set.add(c.version)
-
- versions = [str(v) for v in sorted(versions_set)]
- yanked_versions = [str(v) for v in sorted(yanked_versions_set)]
-
- if yanked_versions:
- # Saying "version X is yanked" isn't entirely accurate.
- # https://github.com/pypa/pip/issues/11745#issuecomment-1402805842
- logger.critical(
- "Ignored the following yanked versions: %s",
- ", ".join(yanked_versions) or "none",
- )
- if skipped_by_requires_python:
- logger.critical(
- "Ignored the following versions that require a different python "
- "version: %s",
- "; ".join(skipped_by_requires_python) or "none",
- )
- logger.critical(
- "Could not find a version that satisfies the requirement %s "
- "(from versions: %s)",
- req_disp,
- ", ".join(versions) or "none",
- )
- if str(req) == "requirements.txt":
- logger.info(
- "HINT: You are attempting to install a package literally "
- 'named "requirements.txt" (which cannot exist). Consider '
- "using the '-r' flag to install the packages listed in "
- "requirements.txt"
- )
-
- return DistributionNotFound(f"No matching distribution found for {req}")
-
- def get_installation_error(
- self,
- e: "ResolutionImpossible[Requirement, Candidate]",
- constraints: Dict[str, Constraint],
- ) -> InstallationError:
- assert e.causes, "Installation error reported with no cause"
-
- # If one of the things we can't solve is "we need Python X.Y",
- # that is what we report.
- requires_python_causes = [
- cause
- for cause in e.causes
- if isinstance(cause.requirement, RequiresPythonRequirement)
- and not cause.requirement.is_satisfied_by(self._python_candidate)
- ]
- if requires_python_causes:
- # The comprehension above makes sure all Requirement instances are
- # RequiresPythonRequirement, so let's cast for convenience.
- return self._report_requires_python_error(
- cast("Sequence[ConflictCause]", requires_python_causes),
- )
-
- # Otherwise, we have a set of causes which can't all be satisfied
- # at once.
-
- # The simplest case is when we have *one* cause that can't be
- # satisfied. We just report that case.
- if len(e.causes) == 1:
- req, parent = e.causes[0]
- if req.name not in constraints:
- return self._report_single_requirement_conflict(req, parent)
-
- # OK, we now have a list of requirements that can't all be
- # satisfied at once.
-
- # A couple of formatting helpers
- def text_join(parts: List[str]) -> str:
- if len(parts) == 1:
- return parts[0]
-
- return ", ".join(parts[:-1]) + " and " + parts[-1]
-
- def describe_trigger(parent: Candidate) -> str:
- ireq = parent.get_install_requirement()
- if not ireq or not ireq.comes_from:
- return f"{parent.name}=={parent.version}"
- if isinstance(ireq.comes_from, InstallRequirement):
- return str(ireq.comes_from.name)
- return str(ireq.comes_from)
-
- triggers = set()
- for req, parent in e.causes:
- if parent is None:
- # This is a root requirement, so we can report it directly
- trigger = req.format_for_error()
- else:
- trigger = describe_trigger(parent)
- triggers.add(trigger)
-
- if triggers:
- info = text_join(sorted(triggers))
- else:
- info = "the requested packages"
-
- msg = (
- f"Cannot install {info} because these package versions "
- "have conflicting dependencies."
- )
- logger.critical(msg)
- msg = "\nThe conflict is caused by:"
-
- relevant_constraints = set()
- for req, parent in e.causes:
- if req.name in constraints:
- relevant_constraints.add(req.name)
- msg = msg + "\n "
- if parent:
- msg = msg + f"{parent.name} {parent.version} depends on "
- else:
- msg = msg + "The user requested "
- msg = msg + req.format_for_error()
- for key in relevant_constraints:
- spec = constraints[key].specifier
- msg += f"\n The user requested (constraint) {key}{spec}"
-
- msg = (
- msg
- + "\n\n"
- + "To fix this you could try to:\n"
- + "1. loosen the range of package versions you've specified\n"
- + "2. remove package versions to allow pip attempt to solve "
- + "the dependency conflict\n"
- )
-
- logger.info(msg)
-
- return DistributionNotFound(
- "ResolutionImpossible: for help visit "
- "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
- "#dealing-with-dependency-conflicts"
- )
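
The PEP 592 handling in _iter_found_candidates above hinges on the is_pinned helper:
yanked files are only eligible when every applicable file is yanked and the specifier
pins an exact version. In isolation, the pinning test behaves roughly like this
(a standalone sketch assuming only the `packaging` library):

    from packaging.specifiers import SpecifierSet

    def is_pinned(specifier: SpecifierSet) -> bool:
        # "===" always pins; "==" pins unless it uses a ".*" wildcard.
        for sp in specifier:
            if sp.operator == "===":
                return True
            if sp.operator == "==" and not sp.version.endswith(".*"):
                return True
        return False

    print(is_pinned(SpecifierSet("==1.2.3")))  # True
    print(is_pinned(SpecifierSet("==1.2.*")))  # False
    print(is_pinned(SpecifierSet(">=1.0")))    # False
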
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
deleted file mode 100644
index 8663097..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
+++ /dev/null
@@ -1,155 +0,0 @@
-"""Utilities to lazily create and visit candidates found.
-
-Creating and visiting a candidate is a *very* costly operation. It involves
-fetching, extracting, potentially building modules from source, and verifying
-distribution metadata. It is therefore crucial for performance to keep
-everything here lazy all the way down, so we only touch candidates that we
-absolutely need, and not "download the world" when we only need one version of
-something.
-"""
-
-import functools
-from collections.abc import Sequence
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple
-
-from pip._vendor.packaging.version import _BaseVersion
-
-from .base import Candidate
-
-IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]
-
-if TYPE_CHECKING:
- SequenceCandidate = Sequence[Candidate]
-else:
- # For compatibility: Python before 3.9 does not support using [] on the
- # Sequence class.
- #
- # >>> from collections.abc import Sequence
- # >>> Sequence[str]
- # Traceback (most recent call last):
- # File "<stdin>", line 1, in <module>
- # TypeError: 'ABCMeta' object is not subscriptable
- #
- # TODO: Remove this block after dropping Python 3.8 support.
- SequenceCandidate = Sequence
-
-
-def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the package is not already installed. Candidates
- from the index come in their normal ordering.
- """
- versions_found: Set[_BaseVersion] = set()
- for version, func in infos:
- if version in versions_found:
- continue
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
-
-def _iter_built_with_prepended(
- installed: Candidate, infos: Iterator[IndexCandidateInfo]
-) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the resolver prefers the already-installed
- candidate and does NOT want to upgrade. The installed candidate is
- therefore always yielded first, and candidates from the index come later
- in their normal ordering, except that the installed version is skipped.
- """
- yield installed
- versions_found: Set[_BaseVersion] = {installed.version}
- for version, func in infos:
- if version in versions_found:
- continue
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
-
-def _iter_built_with_inserted(
- installed: Candidate, infos: Iterator[IndexCandidateInfo]
-) -> Iterator[Candidate]:
- """Iterator for ``FoundCandidates``.
-
- This iterator is used when the resolver prefers to upgrade an
- already-installed package. Candidates from the index are returned in
- their normal ordering, except that the installed version replaces its
- index equivalent.
-
- The implementation iterates through and yields other candidates, inserting
- the installed candidate exactly once before we start yielding older or
- equivalent candidates, or after all other candidates if they are all newer.
- """
- versions_found: Set[_BaseVersion] = set()
- for version, func in infos:
- if version in versions_found:
- continue
- # If the installed candidate is better, yield it first.
- if installed.version >= version:
- yield installed
- versions_found.add(installed.version)
- candidate = func()
- if candidate is None:
- continue
- yield candidate
- versions_found.add(version)
-
- # If the installed candidate is older than all other candidates.
- if installed.version not in versions_found:
- yield installed
-
-
-class FoundCandidates(SequenceCandidate):
- """A lazy sequence to provide candidates to the resolver.
-
- The intended usage is to return this from `find_matches()` so the resolver
- can iterate through the sequence multiple times, but only access the index
- page when remote packages are actually needed. This improves performance
- when suitable candidates are already installed on disk.
- """
-
- def __init__(
- self,
- get_infos: Callable[[], Iterator[IndexCandidateInfo]],
- installed: Optional[Candidate],
- prefers_installed: bool,
- incompatible_ids: Set[int],
- ):
- self._get_infos = get_infos
- self._installed = installed
- self._prefers_installed = prefers_installed
- self._incompatible_ids = incompatible_ids
-
- def __getitem__(self, index: Any) -> Any:
- # Implemented to satisfy the ABC check. This is not needed by the
- # resolver, and should not be used by the provider either (for
- # performance reasons).
- raise NotImplementedError("don't do this")
-
- def __iter__(self) -> Iterator[Candidate]:
- infos = self._get_infos()
- if not self._installed:
- iterator = _iter_built(infos)
- elif self._prefers_installed:
- iterator = _iter_built_with_prepended(self._installed, infos)
- else:
- iterator = _iter_built_with_inserted(self._installed, infos)
- return (c for c in iterator if id(c) not in self._incompatible_ids)
-
- def __len__(self) -> int:
- # Implemented to satisfy the ABC check. This is not needed by the
- # resolver, and should not be used by the provider either (for
- # performance reasons).
- raise NotImplementedError("don't do this")
-
- @functools.lru_cache(maxsize=1)
- def __bool__(self) -> bool:
- if self._prefers_installed and self._installed:
- return True
- return any(self)
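
The three module-level iterators above differ only in where the installed candidate
is spliced into the index candidates' newest-first ordering. A simplified toy with
integers standing in for versions (deduplication against `versions_found` is folded
into a single flag here):

    def splice_installed(installed: int, newest_first: list) -> list:
        # Insert the installed version just before the first index version it
        # beats, and last if every index version is newer.
        result, emitted = [], False
        for version in newest_first:
            if not emitted and installed >= version:
                result.append(installed)
                emitted = True
            if version != installed:
                result.append(version)
        if not emitted:
            result.append(installed)
        return result

    print(splice_installed(3, [5, 4, 2, 1]))  # [5, 4, 3, 2, 1]
    print(splice_installed(9, [5, 4]))        # [9, 5, 4]
    print(splice_installed(0, [5, 4]))        # [5, 4, 0]
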
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py
deleted file mode 100644
index 315fb9c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/provider.py
+++ /dev/null
@@ -1,255 +0,0 @@
-import collections
-import math
-from typing import (
- TYPE_CHECKING,
- Dict,
- Iterable,
- Iterator,
- Mapping,
- Sequence,
- TypeVar,
- Union,
-)
-
-from pip._vendor.resolvelib.providers import AbstractProvider
-
-from .base import Candidate, Constraint, Requirement
-from .candidates import REQUIRES_PYTHON_IDENTIFIER
-from .factory import Factory
-
-if TYPE_CHECKING:
- from pip._vendor.resolvelib.providers import Preference
- from pip._vendor.resolvelib.resolvers import RequirementInformation
-
- PreferenceInformation = RequirementInformation[Requirement, Candidate]
-
- _ProviderBase = AbstractProvider[Requirement, Candidate, str]
-else:
- _ProviderBase = AbstractProvider
-
-# Notes on the relationship between the provider, the factory, and the
-# candidate and requirement classes.
-#
-# The provider is a direct implementation of the resolvelib class. Its role
-# is to deliver the API that resolvelib expects.
-#
-# Rather than work with completely abstract "requirement" and "candidate"
-# concepts as resolvelib does, pip has concrete classes implementing these two
- # ideas. The APIs of Requirement and Candidate objects are defined in the base
-# classes, but essentially map fairly directly to the equivalent provider
-# methods. In particular, `find_matches` and `is_satisfied_by` are
-# requirement methods, and `get_dependencies` is a candidate method.
-#
-# The factory is the interface to pip's internal mechanisms. It is stateless,
-# and is created by the resolver and held as a property of the provider. It is
-# responsible for creating Requirement and Candidate objects, and provides
-# services to those objects (access to pip's finder and preparer).
-
-
-D = TypeVar("D")
-V = TypeVar("V")
-
-
-def _get_with_identifier(
- mapping: Mapping[str, V],
- identifier: str,
- default: D,
-) -> Union[D, V]:
- """Get item from a package name lookup mapping with a resolver identifier.
-
- This is needed when the target mapping is keyed by package name, which
- cannot be directly looked up with an identifier that may contain
- requested extras. In that case we fall back to looking up a value with
- the extras stripped from the identifier.
- """
- if identifier in mapping:
- return mapping[identifier]
- # HACK: Theoretically we should check whether this identifier is a valid
- # "NAME[EXTRAS]" format, and parse out the name part with packaging or
- # some regular expression. But since pip's resolver only spits out three
- # kinds of identifiers: normalized PEP 503 names, normalized names plus
- # extras, and Requires-Python, we can cheat a bit here.
- name, open_bracket, _ = identifier.partition("[")
- if open_bracket and name in mapping:
- return mapping[name]
- return default
-
-
-class PipProvider(_ProviderBase):
- """Pip's provider implementation for resolvelib.
-
- :param constraints: A mapping of constraints specified by the user. Keys
- are canonicalized project names.
- :param ignore_dependencies: Whether the user specified ``--no-deps``.
- :param upgrade_strategy: The user-specified upgrade strategy.
- :param user_requested: A set of canonicalized package names that the user
- supplied for pip to install/upgrade.
- """
-
- def __init__(
- self,
- factory: Factory,
- constraints: Dict[str, Constraint],
- ignore_dependencies: bool,
- upgrade_strategy: str,
- user_requested: Dict[str, int],
- ) -> None:
- self._factory = factory
- self._constraints = constraints
- self._ignore_dependencies = ignore_dependencies
- self._upgrade_strategy = upgrade_strategy
- self._user_requested = user_requested
- self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)
-
- def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
- return requirement_or_candidate.name
-
- def get_preference(
- self,
- identifier: str,
- resolutions: Mapping[str, Candidate],
- candidates: Mapping[str, Iterator[Candidate]],
- information: Mapping[str, Iterable["PreferenceInformation"]],
- backtrack_causes: Sequence["PreferenceInformation"],
- ) -> "Preference":
- """Produce a sort key for given requirement based on preference.
-
- The lower the return value is, the more preferred this group of
- arguments is.
-
- Currently pip considers the following in order:
-
- * Prefer if any of the known requirements is "direct", e.g. points to an
- explicit URL.
- * If equal, prefer if any requirement is "pinned", i.e. contains
- operator ``===`` or ``==``.
- * If equal, calculate an approximate "depth" and resolve requirements
- closer to the user-specified requirements first. If the depth cannot
- be determined (e.g. due to no matching parents), it is considered
- infinite.
- * If equal, order user-specified requirements by the order they are
- specified.
- * If equal, prefer "non-free" requirements, i.e. those containing at
- least one operator, such as ``>=`` or ``<``.
- * If equal, order alphabetically for consistency (helps debuggability).
- """
- try:
- next(iter(information[identifier]))
- except StopIteration:
- # There is no information for this identifier, so there are no known
- # candidates.
- has_information = False
- else:
- has_information = True
-
- if has_information:
- lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
- candidate, ireqs = zip(*lookups)
- else:
- candidate, ireqs = None, ()
-
- operators = [
- specifier.operator
- for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
- for specifier in specifier_set
- ]
-
- direct = candidate is not None
- pinned = any(op[:2] == "==" for op in operators)
- unfree = bool(operators)
-
- try:
- requested_order: Union[int, float] = self._user_requested[identifier]
- except KeyError:
- requested_order = math.inf
- if has_information:
- parent_depths = (
- self._known_depths[parent.name] if parent is not None else 0.0
- for _, parent in information[identifier]
- )
- inferred_depth = min(parent_depths) + 1.0
- else:
- inferred_depth = math.inf
- else:
- inferred_depth = 1.0
- self._known_depths[identifier] = inferred_depth
-
- requested_order = self._user_requested.get(identifier, math.inf)
-
- # Requires-Python has only one candidate and the check is basically
- # free, so we always do it first to avoid needless work if it fails.
- requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER
-
- # Prefer the causes of backtracking on the assumption that the problem
- # resolving the dependency tree is related to the failures that caused
- # the backtracking
- backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)
-
- return (
- not requires_python,
- not direct,
- not pinned,
- not backtrack_cause,
- inferred_depth,
- requested_order,
- not unfree,
- identifier,
- )
-
- def find_matches(
- self,
- identifier: str,
- requirements: Mapping[str, Iterator[Requirement]],
- incompatibilities: Mapping[str, Iterator[Candidate]],
- ) -> Iterable[Candidate]:
- def _eligible_for_upgrade(identifier: str) -> bool:
- """Are upgrades allowed for this project?
-
- This checks the upgrade strategy, and whether the project was one
- that the user specified in the command line, in order to decide
- whether we should upgrade if there's a newer version available.
-
- (Note that we don't need access to the `--upgrade` flag, because
- an upgrade strategy of "to-satisfy-only" means that `--upgrade`
- was not specified).
- """
- if self._upgrade_strategy == "eager":
- return True
- elif self._upgrade_strategy == "only-if-needed":
- user_order = _get_with_identifier(
- self._user_requested,
- identifier,
- default=None,
- )
- return user_order is not None
- return False
-
- constraint = _get_with_identifier(
- self._constraints,
- identifier,
- default=Constraint.empty(),
- )
- return self._factory.find_candidates(
- identifier=identifier,
- requirements=requirements,
- constraint=constraint,
- prefers_installed=(not _eligible_for_upgrade(identifier)),
- incompatibilities=incompatibilities,
- )
-
- def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
- return requirement.is_satisfied_by(candidate)
-
- def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
- with_requires = not self._ignore_dependencies
- return [r for r in candidate.iter_dependencies(with_requires) if r is not None]
-
- @staticmethod
- def is_backtrack_cause(
- identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
- ) -> bool:
- for backtrack_cause in backtrack_causes:
- if identifier == backtrack_cause.requirement.name:
- return True
- if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
- return True
- return False
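
Because get_preference returns a plain tuple, resolvelib's choice of which identifier
to work on next falls out of ordinary tuple comparison, with False sorting ahead of
True in each negated flag. A toy comparison with invented values for three identifiers:

    import math

    # (not requires_python, not direct, not pinned, not backtrack_cause,
    #  inferred_depth, requested_order, not unfree, identifier)
    preferences = {
        "<Requires-Python>": (False, False, False, True, 1.0, math.inf, False, "<python>"),
        "requests": (True, True, False, True, 1.0, 0, False, "requests"),
        "idna": (True, True, True, True, 2.0, math.inf, False, "idna"),
    }
    # The smallest tuple wins, so the Requires-Python check always runs first.
    print(min(preferences, key=preferences.__getitem__))  # <Requires-Python>
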
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py
deleted file mode 100644
index 12adeff..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/reporter.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from collections import defaultdict
-from logging import getLogger
-from typing import Any, DefaultDict
-
-from pip._vendor.resolvelib.reporters import BaseReporter
-
-from .base import Candidate, Requirement
-
-logger = getLogger(__name__)
-
-
-class PipReporter(BaseReporter):
- def __init__(self) -> None:
- self.reject_count_by_package: DefaultDict[str, int] = defaultdict(int)
-
- self._messages_at_reject_count = {
- 1: (
- "pip is looking at multiple versions of {package_name} to "
- "determine which version is compatible with other "
- "requirements. This could take a while."
- ),
- 8: (
- "pip is still looking at multiple versions of {package_name} to "
- "determine which version is compatible with other "
- "requirements. This could take a while."
- ),
- 13: (
- "This is taking longer than usual. You might need to provide "
- "the dependency resolver with stricter constraints to reduce "
- "runtime. See https://pip.pypa.io/warnings/backtracking for "
- "guidance. If you want to abort this run, press Ctrl + C."
- ),
- }
-
- def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
- self.reject_count_by_package[candidate.name] += 1
-
- count = self.reject_count_by_package[candidate.name]
- if count not in self._messages_at_reject_count:
- return
-
- message = self._messages_at_reject_count[count]
- logger.info("INFO: %s", message.format(package_name=candidate.name))
-
- msg = "Will try a different candidate, due to conflict:"
- for req_info in criterion.information:
- req, parent = req_info.requirement, req_info.parent
- # Inspired by Factory.get_installation_error
- msg += "\n "
- if parent:
- msg += f"{parent.name} {parent.version} depends on "
- else:
- msg += "The user requested "
- msg += req.format_for_error()
- logger.debug(msg)
-
-
-class PipDebuggingReporter(BaseReporter):
- """A reporter that does an info log for every event it sees."""
-
- def starting(self) -> None:
- logger.info("Reporter.starting()")
-
- def starting_round(self, index: int) -> None:
- logger.info("Reporter.starting_round(%r)", index)
-
- def ending_round(self, index: int, state: Any) -> None:
- logger.info("Reporter.ending_round(%r, state)", index)
-
- def ending(self, state: Any) -> None:
- logger.info("Reporter.ending(%r)", state)
-
- def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
- logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)
-
- def rejecting_candidate(self, criterion: Any, candidate: Candidate) -> None:
- logger.info("Reporter.rejecting_candidate(%r, %r)", criterion, candidate)
-
- def pinning(self, candidate: Candidate) -> None:
- logger.info("Reporter.pinning(%r)", candidate)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py
deleted file mode 100644
index 4af4a9f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/requirements.py
+++ /dev/null
@@ -1,166 +0,0 @@
-from pip._vendor.packaging.specifiers import SpecifierSet
-from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
-
-from pip._internal.req.constructors import install_req_drop_extras
-from pip._internal.req.req_install import InstallRequirement
-
-from .base import Candidate, CandidateLookup, Requirement, format_name
-
-
-class ExplicitRequirement(Requirement):
- def __init__(self, candidate: Candidate) -> None:
- self.candidate = candidate
-
- def __str__(self) -> str:
- return str(self.candidate)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({self.candidate!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- # No need to canonicalize - the candidate did this
- return self.candidate.project_name
-
- @property
- def name(self) -> str:
- # No need to canonicalize - the candidate did this
- return self.candidate.name
-
- def format_for_error(self) -> str:
- return self.candidate.format_for_error()
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return self.candidate, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- return candidate == self.candidate
-
-
-class SpecifierRequirement(Requirement):
- def __init__(self, ireq: InstallRequirement) -> None:
- assert ireq.link is None, "This is a link, not a specifier"
- self._ireq = ireq
- self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
-
- def __str__(self) -> str:
- return str(self._ireq.req)
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._ireq.req)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
- return canonicalize_name(self._ireq.req.name)
-
- @property
- def name(self) -> str:
- return format_name(self.project_name, self._extras)
-
- def format_for_error(self) -> str:
- # Convert comma-separated specifiers into "A, B, ..., F and G"
- # This makes the specifier a bit more "human readable", without
- # risking a change in meaning. (Hopefully! Not all edge cases have
- # been checked)
- parts = [s.strip() for s in str(self).split(",")]
- if len(parts) == 0:
- return ""
- elif len(parts) == 1:
- return parts[0]
-
- return ", ".join(parts[:-1]) + " and " + parts[-1]
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return None, self._ireq
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- assert candidate.name == self.name, (
- f"Internal issue: Candidate is not for this requirement "
- f"{candidate.name} vs {self.name}"
- )
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- assert self._ireq.req, "Specifier-backed ireq is always PEP 508"
- spec = self._ireq.req.specifier
- return spec.contains(candidate.version, prereleases=True)
-
-
-class SpecifierWithoutExtrasRequirement(SpecifierRequirement):
- """
- Requirement backed by an install requirement on a base package.
- Trims extras from its install requirement if there are any.
- """
-
- def __init__(self, ireq: InstallRequirement) -> None:
- assert ireq.link is None, "This is a link, not a specifier"
- self._ireq = install_req_drop_extras(ireq)
- self._extras = frozenset(canonicalize_name(e) for e in self._ireq.extras)
-
-
-class RequiresPythonRequirement(Requirement):
- """A requirement representing Requires-Python metadata."""
-
- def __init__(self, specifier: SpecifierSet, match: Candidate) -> None:
- self.specifier = specifier
- self._candidate = match
-
- def __str__(self) -> str:
- return f"Python {self.specifier}"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self.specifier)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- return self._candidate.project_name
-
- @property
- def name(self) -> str:
- return self._candidate.name
-
- def format_for_error(self) -> str:
- return str(self)
-
- def get_candidate_lookup(self) -> CandidateLookup:
- if self.specifier.contains(self._candidate.version, prereleases=True):
- return self._candidate, None
- return None, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- assert candidate.name == self._candidate.name, "Not Python candidate"
- # We can safely always allow prereleases here since PackageFinder
- # already implements the prerelease logic, and would have filtered out
- # prerelease candidates if the user does not expect them.
- return self.specifier.contains(candidate.version, prereleases=True)
-
-
-class UnsatisfiableRequirement(Requirement):
- """A requirement that cannot be satisfied."""
-
- def __init__(self, name: NormalizedName) -> None:
- self._name = name
-
- def __str__(self) -> str:
- return f"{self._name} (unavailable)"
-
- def __repr__(self) -> str:
- return f"{self.__class__.__name__}({str(self._name)!r})"
-
- @property
- def project_name(self) -> NormalizedName:
- return self._name
-
- @property
- def name(self) -> str:
- return self._name
-
- def format_for_error(self) -> str:
- return str(self)
-
- def get_candidate_lookup(self) -> CandidateLookup:
- return None, None
-
- def is_satisfied_by(self, candidate: Candidate) -> bool:
- return False
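
The format_for_error rewrite in SpecifierRequirement above is a plain string
transformation; on its own it looks like this:

    def humanize_specifier(spec: str) -> str:
        # ">=1.0,!=1.3.*,<2.0" -> ">=1.0, !=1.3.* and <2.0"
        parts = [s.strip() for s in spec.split(",")]
        if len(parts) == 1:
            return parts[0]
        return ", ".join(parts[:-1]) + " and " + parts[-1]

    print(humanize_specifier(">=1.0,!=1.3.*,<2.0"))  # >=1.0, !=1.3.* and <2.0
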
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py b/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py
deleted file mode 100644
index c12beef..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/resolution/resolvelib/resolver.py
+++ /dev/null
@@ -1,317 +0,0 @@
-import contextlib
-import functools
-import logging
-import os
-from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast
-
-from pip._vendor.packaging.utils import canonicalize_name
-from pip._vendor.resolvelib import BaseReporter, ResolutionImpossible
-from pip._vendor.resolvelib import Resolver as RLResolver
-from pip._vendor.resolvelib.structs import DirectedGraph
-
-from pip._internal.cache import WheelCache
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.operations.prepare import RequirementPreparer
-from pip._internal.req.constructors import install_req_extend_extras
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.req.req_set import RequirementSet
-from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
-from pip._internal.resolution.resolvelib.provider import PipProvider
-from pip._internal.resolution.resolvelib.reporter import (
- PipDebuggingReporter,
- PipReporter,
-)
-from pip._internal.utils.packaging import get_requirement
-
-from .base import Candidate, Requirement
-from .factory import Factory
-
-if TYPE_CHECKING:
- from pip._vendor.resolvelib.resolvers import Result as RLResult
-
- Result = RLResult[Requirement, Candidate, str]
-
-
-logger = logging.getLogger(__name__)
-
-
-class Resolver(BaseResolver):
- _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}
-
- def __init__(
- self,
- preparer: RequirementPreparer,
- finder: PackageFinder,
- wheel_cache: Optional[WheelCache],
- make_install_req: InstallRequirementProvider,
- use_user_site: bool,
- ignore_dependencies: bool,
- ignore_installed: bool,
- ignore_requires_python: bool,
- force_reinstall: bool,
- upgrade_strategy: str,
- py_version_info: Optional[Tuple[int, ...]] = None,
- ):
- super().__init__()
- assert upgrade_strategy in self._allowed_strategies
-
- self.factory = Factory(
- finder=finder,
- preparer=preparer,
- make_install_req=make_install_req,
- wheel_cache=wheel_cache,
- use_user_site=use_user_site,
- force_reinstall=force_reinstall,
- ignore_installed=ignore_installed,
- ignore_requires_python=ignore_requires_python,
- py_version_info=py_version_info,
- )
- self.ignore_dependencies = ignore_dependencies
- self.upgrade_strategy = upgrade_strategy
- self._result: Optional[Result] = None
-
- def resolve(
- self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
- ) -> RequirementSet:
- collected = self.factory.collect_root_requirements(root_reqs)
- provider = PipProvider(
- factory=self.factory,
- constraints=collected.constraints,
- ignore_dependencies=self.ignore_dependencies,
- upgrade_strategy=self.upgrade_strategy,
- user_requested=collected.user_requested,
- )
- if "PIP_RESOLVER_DEBUG" in os.environ:
- reporter: BaseReporter = PipDebuggingReporter()
- else:
- reporter = PipReporter()
- resolver: RLResolver[Requirement, Candidate, str] = RLResolver(
- provider,
- reporter,
- )
-
- try:
- limit_how_complex_resolution_can_be = 200000
- result = self._result = resolver.resolve(
- collected.requirements, max_rounds=limit_how_complex_resolution_can_be
- )
-
- except ResolutionImpossible as e:
- error = self.factory.get_installation_error(
- cast("ResolutionImpossible[Requirement, Candidate]", e),
- collected.constraints,
- )
- raise error from e
-
- req_set = RequirementSet(check_supported_wheels=check_supported_wheels)
- # Process candidates with extras last to ensure their base equivalent is
- # already in the req_set if appropriate.
- # Python's sort is stable, so using a boolean key function keeps relative
- # order within both subsets.
- for candidate in sorted(
- result.mapping.values(), key=lambda c: c.name != c.project_name
- ):
- ireq = candidate.get_install_requirement()
- if ireq is None:
- if candidate.name != candidate.project_name:
- # extend existing req's extras
- with contextlib.suppress(KeyError):
- req = req_set.get_requirement(candidate.project_name)
- req_set.add_named_requirement(
- install_req_extend_extras(
- req, get_requirement(candidate.name).extras
- )
- )
- continue
-
- # Check if there is already an installation under the same name,
- # and set a flag for later stages to uninstall it, if needed.
- installed_dist = self.factory.get_dist_to_uninstall(candidate)
- if installed_dist is None:
- # There is no existing installation -- nothing to uninstall.
- ireq.should_reinstall = False
- elif self.factory.force_reinstall:
- # The --force-reinstall flag is set -- reinstall.
- ireq.should_reinstall = True
- elif installed_dist.version != candidate.version:
- # The installation is different in version -- reinstall.
- ireq.should_reinstall = True
- elif candidate.is_editable or installed_dist.editable:
- # The incoming distribution is editable, or different in
- # editable-ness to installation -- reinstall.
- ireq.should_reinstall = True
- elif candidate.source_link and candidate.source_link.is_file:
- # The incoming distribution is under file://
- if candidate.source_link.is_wheel:
- # is a local wheel -- do nothing.
- logger.info(
- "%s is already installed with the same version as the "
- "provided wheel. Use --force-reinstall to force an "
- "installation of the wheel.",
- ireq.name,
- )
- continue
-
- # is a local sdist or path -- reinstall
- ireq.should_reinstall = True
- else:
- continue
-
- link = candidate.source_link
- if link and link.is_yanked:
- # The reason can contain non-ASCII characters; explicit Unicode
- # handling was only required for Python 2.
- msg = (
- "The candidate selected for download or install is a "
- "yanked version: {name!r} candidate (version {version} "
- "at {link})\nReason for being yanked: {reason}"
- ).format(
- name=candidate.name,
- version=candidate.version,
- link=link,
- reason=link.yanked_reason or "<none given>",
- )
- logger.warning(msg)
-
- req_set.add_named_requirement(ireq)
-
- reqs = req_set.all_requirements
- self.factory.preparer.prepare_linked_requirements_more(reqs)
- for req in reqs:
- req.prepared = True
- req.needs_more_preparation = False
- return req_set
-
- def get_installation_order(
- self, req_set: RequirementSet
- ) -> List[InstallRequirement]:
- """Get order for installation of requirements in RequirementSet.
-
- The returned list contains a requirement before another that depends on
- it. This helps ensure that the environment is kept consistent as they
- get installed one-by-one.
-
- The current implementation creates a topological ordering of the
- dependency graph, giving more weight to packages with fewer
- or no dependencies, while breaking any cycles in the graph at
- arbitrary points. We make no guarantees about where the cycle
- would be broken, other than it *would* be broken.
- """
- assert self._result is not None, "must call resolve() first"
-
- if not req_set.requirements:
- # Nothing is left to install, so we do not need an order.
- return []
-
- graph = self._result.graph
- weights = get_topological_weights(graph, set(req_set.requirements.keys()))
-
- sorted_items = sorted(
- req_set.requirements.items(),
- key=functools.partial(_req_set_item_sorter, weights=weights),
- reverse=True,
- )
- return [ireq for _, ireq in sorted_items]
-
-
-def get_topological_weights(
- graph: "DirectedGraph[Optional[str]]", requirement_keys: Set[str]
-) -> Dict[Optional[str], int]:
- """Assign weights to each node based on how "deep" they are.
-
- This implementation may change at any point in the future without prior
- notice.
-
- We first simplify the dependency graph by pruning any leaves and giving them
- the highest weight: a package without any dependencies should be installed
- first. This is done again and again in the same way, giving ever less weight
- to the newly found leaves. The loop stops when no leaves are left: all
- remaining packages have at least one dependency left in the graph.
-
- Then we continue with the remaining graph, taking the length of the
- longest path from the root to each node, ignoring any paths that contain
- a single node twice (i.e. cycles). This is done with a depth-first search
- of the graph, while keeping track of the path to the node.
-
- A cycle in the graph would result in a node being revisited while it is
- also on its own path. In that case we take no action, which ensures we
- don't get stuck in the cycle.
-
- When assigning weight, the longer path (i.e. larger length) is preferred.
-
- We are only interested in the weights of packages that are in the
- requirement_keys.
- """
- path: Set[Optional[str]] = set()
- weights: Dict[Optional[str], int] = {}
-
- def visit(node: Optional[str]) -> None:
- if node in path:
- # We hit a cycle, so we'll break it here.
- return
-
- # Time to visit the children!
- path.add(node)
- for child in graph.iter_children(node):
- visit(child)
- path.remove(node)
-
- if node not in requirement_keys:
- return
-
- last_known_parent_count = weights.get(node, 0)
- weights[node] = max(last_known_parent_count, len(path))
-
- # Simplify the graph, pruning leaves that have no dependencies.
- # This is needed for large graphs (say over 200 packages) because the
-    # `visit` function is exponentially slower otherwise, taking minutes.
- # See https://github.com/pypa/pip/issues/10557
- # We will loop until we explicitly break the loop.
- while True:
- leaves = set()
- for key in graph:
- if key is None:
- continue
- for _child in graph.iter_children(key):
- # This means we have at least one child
- break
- else:
- # No child.
- leaves.add(key)
- if not leaves:
- # We are done simplifying.
- break
- # Calculate the weight for the leaves.
- weight = len(graph) - 1
- for leaf in leaves:
- if leaf not in requirement_keys:
- continue
- weights[leaf] = weight
- # Remove the leaves from the graph, making it simpler.
- for leaf in leaves:
- graph.remove(leaf)
-
- # Visit the remaining graph.
- # `None` is guaranteed to be the root node by resolvelib.
- visit(None)
-
- # Sanity check: all requirement keys should be in the weights,
- # and no other keys should be in the weights.
- difference = set(weights.keys()).difference(requirement_keys)
- assert not difference, difference
-
- return weights
-
-
-def _req_set_item_sorter(
- item: Tuple[str, InstallRequirement],
- weights: Dict[Optional[str], int],
-) -> Tuple[int, str]:
- """Key function used to sort install requirements for installation.
-
- Based on the "weight" mapping calculated in ``get_installation_order()``.
- The canonical package name is returned as the second member as a tie-
- breaker to ensure the result is predictable, which is useful in tests.
- """
- name = canonicalize_name(item[0])
- return weights[name], name
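
A minimal, self-contained sketch of the weighting scheme described in the
docstrings above, using a plain dict-of-sets in place of resolvelib's
DirectedGraph and ignoring the requirement_keys filtering (the graph, node
names, and helper name are illustrative, not pip API):

    # Toy re-implementation of the leaf-pruning + DFS weighting idea.
    # `graph` maps each node to the set of its children; None is the root.
    def toy_topological_weights(graph):
        weights = {}
        # Repeatedly peel off leaves (nodes without children); the earlier
        # a leaf is peeled, the higher its weight.
        while True:
            leaves = {n for n, cs in graph.items() if n is not None and not cs}
            if not leaves:
                break
            weight = len(graph) - 1
            for leaf in leaves:
                weights[leaf] = weight
                del graph[leaf]
            for children in graph.values():
                children -= leaves
        # Weight whatever remains by the longest root-to-node path,
        # breaking cycles by refusing to revisit a node on its own path.
        path = set()

        def visit(node):
            if node in path:
                return  # cycle: break it here
            path.add(node)
            for child in graph.get(node, ()):
                visit(child)
            path.remove(node)
            if node is not None:
                weights[node] = max(weights.get(node, 0), len(path))

        visit(None)
        return weights

    # Root depends on a and b; a depends on b. b is pruned first.
    print(toy_topological_weights({None: {"a", "b"}, "a": {"b"}, "b": set()}))
    # {'b': 2, 'a': 1}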
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/self_outdated_check.py b/venv/lib/python3.11/site-packages/pip/_internal/self_outdated_check.py
deleted file mode 100644
index 0f64ae0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/self_outdated_check.py
+++ /dev/null
@@ -1,248 +0,0 @@
-import datetime
-import functools
-import hashlib
-import json
-import logging
-import optparse
-import os.path
-import sys
-from dataclasses import dataclass
-from typing import Any, Callable, Dict, Optional
-
-from pip._vendor.packaging.version import parse as parse_version
-from pip._vendor.rich.console import Group
-from pip._vendor.rich.markup import escape
-from pip._vendor.rich.text import Text
-
-from pip._internal.index.collector import LinkCollector
-from pip._internal.index.package_finder import PackageFinder
-from pip._internal.metadata import get_default_environment
-from pip._internal.metadata.base import DistributionVersion
-from pip._internal.models.selection_prefs import SelectionPreferences
-from pip._internal.network.session import PipSession
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.entrypoints import (
- get_best_invocation_for_this_pip,
- get_best_invocation_for_this_python,
-)
-from pip._internal.utils.filesystem import adjacent_tmp_file, check_path_owner, replace
-from pip._internal.utils.misc import ensure_dir
-
-_WEEK = datetime.timedelta(days=7)
-
-logger = logging.getLogger(__name__)
-
-
-def _get_statefile_name(key: str) -> str:
- key_bytes = key.encode()
- name = hashlib.sha224(key_bytes).hexdigest()
- return name
-
-
-def _convert_date(isodate: str) -> datetime.datetime:
-    """Convert an ISO 8601 format string to a datetime.
-
- Handles the format 2020-01-22T14:24:01Z (trailing Z)
- which is not supported by older versions of fromisoformat.
- """
- return datetime.datetime.fromisoformat(isodate.replace("Z", "+00:00"))
-
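
For illustration, the trailing-Z normalisation this helper performs (the
timestamp is made up):

    # fromisoformat() on older Pythons rejects a trailing "Z", so it is
    # rewritten as an explicit UTC offset before parsing.
    print(_convert_date("2020-01-22T14:24:01Z"))
    # 2020-01-22 14:24:01+00:00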
-
-class SelfCheckState:
- def __init__(self, cache_dir: str) -> None:
- self._state: Dict[str, Any] = {}
- self._statefile_path = None
-
- # Try to load the existing state
- if cache_dir:
- self._statefile_path = os.path.join(
- cache_dir, "selfcheck", _get_statefile_name(self.key)
- )
- try:
- with open(self._statefile_path, encoding="utf-8") as statefile:
- self._state = json.load(statefile)
- except (OSError, ValueError, KeyError):
- # Explicitly suppressing exceptions, since we don't want to
- # error out if the cache file is invalid.
- pass
-
- @property
- def key(self) -> str:
- return sys.prefix
-
- def get(self, current_time: datetime.datetime) -> Optional[str]:
- """Check if we have a not-outdated version loaded already."""
- if not self._state:
- return None
-
- if "last_check" not in self._state:
- return None
-
- if "pypi_version" not in self._state:
- return None
-
- # Determine if we need to refresh the state
- last_check = _convert_date(self._state["last_check"])
- time_since_last_check = current_time - last_check
- if time_since_last_check > _WEEK:
- return None
-
- return self._state["pypi_version"]
-
- def set(self, pypi_version: str, current_time: datetime.datetime) -> None:
- # If we do not have a path to cache in, don't bother saving.
- if not self._statefile_path:
- return
-
- # Check to make sure that we own the directory
- if not check_path_owner(os.path.dirname(self._statefile_path)):
- return
-
- # Now that we've ensured the directory is owned by this user, we'll go
- # ahead and make sure that all our directories are created.
- ensure_dir(os.path.dirname(self._statefile_path))
-
- state = {
- # Include the key so it's easy to tell which pip wrote the
- # file.
- "key": self.key,
- "last_check": current_time.isoformat(),
- "pypi_version": pypi_version,
- }
-
- text = json.dumps(state, sort_keys=True, separators=(",", ":"))
-
- with adjacent_tmp_file(self._statefile_path) as f:
- f.write(text.encode())
-
- try:
- # Since we have a prefix-specific state file, we can just
- # overwrite whatever is there, no need to check.
- replace(f.name, self._statefile_path)
- except OSError:
- # Best effort.
- pass
-
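
The set() method above follows a write-to-temp-then-rename idiom so a crash
never leaves a half-written state file. A standalone sketch of the same
pattern, assuming nothing from pip (the file name and payload are
illustrative):

    import json
    import os
    import tempfile

    def atomic_write_json(path, payload):
        # Write to a temporary file in the target's directory, then rename
        # it over the target; os.replace() is atomic on POSIX and Windows.
        fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or ".", suffix=".tmp")
        try:
            with os.fdopen(fd, "w", encoding="utf-8") as f:
                json.dump(payload, f, sort_keys=True, separators=(",", ":"))
                f.flush()
                os.fsync(f.fileno())
            os.replace(tmp, path)
        except BaseException:
            os.unlink(tmp)
            raise

    atomic_write_json("selfcheck.json", {"pypi_version": "24.0"})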
-
-@dataclass
-class UpgradePrompt:
- old: str
- new: str
-
- def __rich__(self) -> Group:
- if WINDOWS:
- pip_cmd = f"{get_best_invocation_for_this_python()} -m pip"
- else:
- pip_cmd = get_best_invocation_for_this_pip()
-
- notice = "[bold][[reset][blue]notice[reset][bold]][reset]"
- return Group(
- Text(),
- Text.from_markup(
- f"{notice} A new release of pip is available: "
- f"[red]{self.old}[reset] -> [green]{self.new}[reset]"
- ),
- Text.from_markup(
- f"{notice} To update, run: "
- f"[green]{escape(pip_cmd)} install --upgrade pip"
- ),
- )
-
-
-def was_installed_by_pip(pkg: str) -> bool:
- """Checks whether pkg was installed by pip
-
-    This is used to avoid displaying the upgrade message when pip was in
-    fact installed by a system package manager, such as dnf on Fedora.
- """
- dist = get_default_environment().get_distribution(pkg)
- return dist is not None and "pip" == dist.installer
-
-
-def _get_current_remote_pip_version(
- session: PipSession, options: optparse.Values
-) -> Optional[str]:
-    # Let's use PackageFinder to see what the latest pip version is
- link_collector = LinkCollector.create(
- session,
- options=options,
- suppress_no_index=True,
- )
-
- # Pass allow_yanked=False so we don't suggest upgrading to a
- # yanked version.
- selection_prefs = SelectionPreferences(
- allow_yanked=False,
- allow_all_prereleases=False, # Explicitly set to False
- )
-
- finder = PackageFinder.create(
- link_collector=link_collector,
- selection_prefs=selection_prefs,
- )
- best_candidate = finder.find_best_candidate("pip").best_candidate
- if best_candidate is None:
- return None
-
- return str(best_candidate.version)
-
-
-def _self_version_check_logic(
- *,
- state: SelfCheckState,
- current_time: datetime.datetime,
- local_version: DistributionVersion,
- get_remote_version: Callable[[], Optional[str]],
-) -> Optional[UpgradePrompt]:
- remote_version_str = state.get(current_time)
- if remote_version_str is None:
- remote_version_str = get_remote_version()
- if remote_version_str is None:
- logger.debug("No remote pip version found")
- return None
- state.set(remote_version_str, current_time)
-
- remote_version = parse_version(remote_version_str)
- logger.debug("Remote version of pip: %s", remote_version)
- logger.debug("Local version of pip: %s", local_version)
-
- pip_installed_by_pip = was_installed_by_pip("pip")
- logger.debug("Was pip installed by pip? %s", pip_installed_by_pip)
- if not pip_installed_by_pip:
- return None # Only suggest upgrade if pip is installed by pip.
-
- local_version_is_older = (
- local_version < remote_version
- and local_version.base_version != remote_version.base_version
- )
- if local_version_is_older:
- return UpgradePrompt(old=str(local_version), new=remote_version_str)
-
- return None
-
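
Why local_version_is_older needs both conditions: a post-release of the same
base version compares as newer but should not trigger the prompt. A quick
check using the vendored packaging (version strings are made up):

    from pip._vendor.packaging.version import parse

    local, remote = parse("24.0"), parse("24.0.post1")
    print(local < remote)                             # True
    print(local.base_version != remote.base_version)  # False -> no prompt

    local, remote = parse("23.3.2"), parse("24.0")
    print(local < remote and local.base_version != remote.base_version)  # True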
-
-def pip_self_version_check(session: PipSession, options: optparse.Values) -> None:
- """Check for an update for pip.
-
- Limit the frequency of checks to once per week. State is stored either in
- the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix
- of the pip script path.
- """
- installed_dist = get_default_environment().get_distribution("pip")
- if not installed_dist:
- return
-
- try:
- upgrade_prompt = _self_version_check_logic(
- state=SelfCheckState(cache_dir=options.cache_dir),
- current_time=datetime.datetime.now(datetime.timezone.utc),
- local_version=installed_dist.version,
- get_remote_version=functools.partial(
- _get_current_remote_pip_version, session, options
- ),
- )
- if upgrade_prompt is not None:
- logger.warning("%s", upgrade_prompt, extra={"rich": True})
- except Exception:
- logger.warning("There was an error checking the latest version of pip.")
- logger.debug("See below for error", exc_info=True)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 2712bd4..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc
deleted file mode 100644
index d0880f6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_jaraco_text.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc
deleted file mode 100644
index 3277409..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/_log.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc
deleted file mode 100644
index 22028e0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc
deleted file mode 100644
index 45679be..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compat.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc
deleted file mode 100644
index f18cbf8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/compatibility_tags.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc
deleted file mode 100644
index 4ce98d1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc
deleted file mode 100644
index 5b921fe..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc
deleted file mode 100644
index a2800ef..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc
deleted file mode 100644
index 8ff07bf..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc
deleted file mode 100644
index 1294f2b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc
deleted file mode 100644
index dda9323..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc
deleted file mode 100644
index 35a6eac..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filesystem.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc
deleted file mode 100644
index 892570c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/filetypes.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc
deleted file mode 100644
index 7d25934..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc
deleted file mode 100644
index 76a6e21..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc
deleted file mode 100644
index cf9c199..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/logging.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc
deleted file mode 100644
index 4564edb..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/misc.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc
deleted file mode 100644
index be5d24b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/models.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc
deleted file mode 100644
index fb4bb4f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc
deleted file mode 100644
index 653bca9..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc
deleted file mode 100644
index a23f442..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/subprocess.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc
deleted file mode 100644
index b13fd48..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc
deleted file mode 100644
index 206cb25..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc
deleted file mode 100644
index 02683a7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/urls.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc
deleted file mode 100644
index f6b9af8..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/virtualenv.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc
deleted file mode 100644
index ad0b2e6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/__pycache__/wheel.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py
deleted file mode 100644
index e06947c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/_jaraco_text.py
+++ /dev/null
@@ -1,109 +0,0 @@
-"""Functions brought over from jaraco.text.
-
-These functions are not supposed to be used within `pip._internal`. These are
-helper functions brought over from `jaraco.text` to enable vendoring newer
-copies of `pkg_resources` without having to vendor `jaraco.text` and its entire
-dependency cone; something that our vendoring setup is not currently capable of
-handling.
-
-License reproduced from original source below:
-
-Copyright Jason R. Coombs
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to
-deal in the Software without restriction, including without limitation the
-rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
-sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
-IN THE SOFTWARE.
-"""
-
-import functools
-import itertools
-
-
-def _nonblank(str):
- return str and not str.startswith("#")
-
-
-@functools.singledispatch
-def yield_lines(iterable):
- r"""
- Yield valid lines of a string or iterable.
-
- >>> list(yield_lines(''))
- []
- >>> list(yield_lines(['foo', 'bar']))
- ['foo', 'bar']
- >>> list(yield_lines('foo\nbar'))
- ['foo', 'bar']
- >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
- ['foo', 'baz #comment']
- >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
- ['foo', 'bar', 'baz', 'bing']
- """
- return itertools.chain.from_iterable(map(yield_lines, iterable))
-
-
-@yield_lines.register(str)
-def _(text):
- return filter(_nonblank, map(str.strip, text.splitlines()))
-
-
-def drop_comment(line):
- """
- Drop comments.
-
- >>> drop_comment('foo # bar')
- 'foo'
-
- A hash without a space may be in a URL.
-
- >>> drop_comment('http://example.com/foo#bar')
- 'http://example.com/foo#bar'
- """
- return line.partition(" #")[0]
-
-
-def join_continuation(lines):
- r"""
- Join lines continued by a trailing backslash.
-
- >>> list(join_continuation(['foo \\', 'bar', 'baz']))
- ['foobar', 'baz']
- >>> list(join_continuation(['foo \\', 'bar', 'baz']))
- ['foobar', 'baz']
- >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
- ['foobarbaz']
-
- Not sure why, but...
-    The character preceding the backslash is also elided.
-
- >>> list(join_continuation(['goo\\', 'dly']))
- ['godly']
-
- A terrible idea, but...
- If no line is available to continue, suppress the lines.
-
- >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
- ['foo']
- """
- lines = iter(lines)
- for item in lines:
- while item.endswith("\\"):
- try:
- item = item[:-2].strip() + next(lines)
- except StopIteration:
- return
- yield item
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/_log.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/_log.py
deleted file mode 100644
index 92c4c6a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/_log.py
+++ /dev/null
@@ -1,38 +0,0 @@
-"""Customize logging
-
-Defines a custom logger class for the `logger.verbose(...)` method.
-
-init_logging() must be called before any other modules that call logging.getLogger.
-"""
-
-import logging
-from typing import Any, cast
-
-# custom log level for `--verbose` output
-# between DEBUG and INFO
-VERBOSE = 15
-
-
-class VerboseLogger(logging.Logger):
- """Custom Logger, defining a verbose log-level
-
- VERBOSE is between INFO and DEBUG.
- """
-
- def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
- return self.log(VERBOSE, msg, *args, **kwargs)
-
-
-def getLogger(name: str) -> VerboseLogger:
- """logging.getLogger, but ensures our VerboseLogger class is returned"""
- return cast(VerboseLogger, logging.getLogger(name))
-
-
-def init_logging() -> None:
- """Register our VerboseLogger and VERBOSE log level.
-
- Should be called before any calls to getLogger(),
- i.e. in pip._internal.__init__
- """
- logging.setLoggerClass(VerboseLogger)
- logging.addLevelName(VERBOSE, "VERBOSE")
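
A sketch of how these pieces fit together, assuming this module's namespace
(the logger name and message are illustrative):

    init_logging()  # must run before any logging.getLogger() calls

    logger = getLogger("pip._internal.example")
    logging.basicConfig(level=VERBOSE)
    logger.verbose("shown at --verbose; hidden at the default INFO level")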
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/appdirs.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/appdirs.py
deleted file mode 100644
index 16933bf..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/appdirs.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-This code wraps the vendored appdirs module so that the return values are
-compatible with the current pip code base.
-
-The intention is to rewrite current usages gradually, keeping the tests passing,
-and eventually drop this after all usages are changed.
-"""
-
-import os
-import sys
-from typing import List
-
-from pip._vendor import platformdirs as _appdirs
-
-
-def user_cache_dir(appname: str) -> str:
- return _appdirs.user_cache_dir(appname, appauthor=False)
-
-
-def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
-    # Use ~/Library/Application Support/<appname>, if the directory exists.
- path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
- if os.path.isdir(path):
- return path
-
- # Use a Linux-like ~/.config/pip, by default.
- linux_like_path = "~/.config/"
- if appname:
- linux_like_path = os.path.join(linux_like_path, appname)
-
- return os.path.expanduser(linux_like_path)
-
-
-def user_config_dir(appname: str, roaming: bool = True) -> str:
- if sys.platform == "darwin":
- return _macos_user_config_dir(appname, roaming)
-
- return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
-
-
-# for the discussion regarding site_config_dir locations
-# see <https://github.com/pypa/pip/issues/1733>
-def site_config_dirs(appname: str) -> List[str]:
- if sys.platform == "darwin":
- return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
-
- dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
- if sys.platform == "win32":
- return [dirval]
-
- # Unix-y system. Look in /etc as well.
- return dirval.split(os.pathsep) + ["/etc"]
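
On a typical Linux system with XDG_CONFIG_DIRS unset, the multipath lookup
plus the /etc fallback would expand roughly as follows (paths are
illustrative and platform-dependent):

    print(site_config_dirs("pip"))
    # ['/etc/xdg/pip', '/etc']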
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/compat.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/compat.py
deleted file mode 100644
index 3f4d300..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/compat.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""Stuff that differs in different Python versions and platform
-distributions."""
-
-import logging
-import os
-import sys
-
-__all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
-
-
-logger = logging.getLogger(__name__)
-
-
-def has_tls() -> bool:
- try:
- import _ssl # noqa: F401 # ignore unused
-
- return True
- except ImportError:
- pass
-
- from pip._vendor.urllib3.util import IS_PYOPENSSL
-
- return IS_PYOPENSSL
-
-
-def get_path_uid(path: str) -> int:
- """
- Return path's uid.
-
- Does not follow symlinks:
- https://github.com/pypa/pip/pull/935#discussion_r5307003
-
- Placed this function in compat due to differences on AIX and
-    Jython, which should eventually go away.
-
- :raises OSError: When path is a symlink or can't be read.
- """
- if hasattr(os, "O_NOFOLLOW"):
- fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
- file_uid = os.fstat(fd).st_uid
- os.close(fd)
- else: # AIX and Jython
- # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
- if not os.path.islink(path):
- # older versions of Jython don't have `os.fstat`
- file_uid = os.stat(path).st_uid
- else:
- # raise OSError for parity with os.O_NOFOLLOW above
- raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
- return file_uid
-
-
-# packages in the stdlib that may have installation metadata, but should not be
-# considered 'installed'. this theoretically could be determined based on
-# dist.location (py27:`sysconfig.get_paths()['stdlib']`,
-# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
-# make this ineffective, so hard-coding
-stdlib_pkgs = {"python", "wsgiref", "argparse"}
-
-
-# windows detection, covers cpython and ironpython
-WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py
deleted file mode 100644
index b6ed9a7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/compatibility_tags.py
+++ /dev/null
@@ -1,165 +0,0 @@
-"""Generate and work with PEP 425 Compatibility Tags.
-"""
-
-import re
-from typing import List, Optional, Tuple
-
-from pip._vendor.packaging.tags import (
- PythonVersion,
- Tag,
- compatible_tags,
- cpython_tags,
- generic_tags,
- interpreter_name,
- interpreter_version,
- mac_platforms,
-)
-
-_osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
-
-
-def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
- # Only use up to the first two numbers.
- return "".join(map(str, version_info[:2]))
-
-
-def _mac_platforms(arch: str) -> List[str]:
- match = _osx_arch_pat.match(arch)
- if match:
- name, major, minor, actual_arch = match.groups()
- mac_version = (int(major), int(minor))
- arches = [
- # Since we have always only checked that the platform starts
- # with "macosx", for backwards-compatibility we extract the
- # actual prefix provided by the user in case they provided
- # something like "macosxcustom_". It may be good to remove
- # this as undocumented or deprecate it in the future.
- "{}_{}".format(name, arch[len("macosx_") :])
- for arch in mac_platforms(mac_version, actual_arch)
- ]
- else:
- # arch pattern didn't match (?!)
- arches = [arch]
- return arches
-
-
-def _custom_manylinux_platforms(arch: str) -> List[str]:
- arches = [arch]
- arch_prefix, arch_sep, arch_suffix = arch.partition("_")
- if arch_prefix == "manylinux2014":
- # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
- # with the exception of wheels depending on ncurses. PEP 599 states
- # manylinux1/manylinux2010 wheels should be considered
- # manylinux2014 wheels:
- # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
- if arch_suffix in {"i686", "x86_64"}:
- arches.append("manylinux2010" + arch_sep + arch_suffix)
- arches.append("manylinux1" + arch_sep + arch_suffix)
- elif arch_prefix == "manylinux2010":
- # manylinux1 wheels run on most manylinux2010 systems with the
- # exception of wheels depending on ncurses. PEP 571 states
- # manylinux1 wheels should be considered manylinux2010 wheels:
- # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
- arches.append("manylinux1" + arch_sep + arch_suffix)
- return arches
-
-
-def _get_custom_platforms(arch: str) -> List[str]:
- arch_prefix, arch_sep, arch_suffix = arch.partition("_")
- if arch.startswith("macosx"):
- arches = _mac_platforms(arch)
- elif arch_prefix in ["manylinux2014", "manylinux2010"]:
- arches = _custom_manylinux_platforms(arch)
- else:
- arches = [arch]
- return arches
-
-
-def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
- if not platforms:
- return None
-
- seen = set()
- result = []
-
- for p in platforms:
- if p in seen:
- continue
- additions = [c for c in _get_custom_platforms(p) if c not in seen]
- seen.update(additions)
- result.extend(additions)
-
- return result
-
-
-def _get_python_version(version: str) -> PythonVersion:
- if len(version) > 1:
- return int(version[0]), int(version[1:])
- else:
- return (int(version[0]),)
-
-
-def _get_custom_interpreter(
- implementation: Optional[str] = None, version: Optional[str] = None
-) -> str:
- if implementation is None:
- implementation = interpreter_name()
- if version is None:
- version = interpreter_version()
- return f"{implementation}{version}"
-
-
-def get_supported(
- version: Optional[str] = None,
- platforms: Optional[List[str]] = None,
- impl: Optional[str] = None,
- abis: Optional[List[str]] = None,
-) -> List[Tag]:
-    """Return a list of supported tags for the given interpreter version,
-    platforms, implementation, and ABIs.
-
- :param version: a string version, of the form "33" or "32",
- or None. The version will be assumed to support our ABI.
-    :param platforms: specify a list of platforms you want valid
- tags for, or None. If None, use the local system platform.
- :param impl: specify the exact implementation you want valid
- tags for, or None. If None, use the local interpreter impl.
- :param abis: specify a list of abis you want valid
- tags for, or None. If None, use the local interpreter abi.
- """
- supported: List[Tag] = []
-
- python_version: Optional[PythonVersion] = None
- if version is not None:
- python_version = _get_python_version(version)
-
- interpreter = _get_custom_interpreter(impl, version)
-
- platforms = _expand_allowed_platforms(platforms)
-
- is_cpython = (impl or interpreter_name()) == "cp"
- if is_cpython:
- supported.extend(
- cpython_tags(
- python_version=python_version,
- abis=abis,
- platforms=platforms,
- )
- )
- else:
- supported.extend(
- generic_tags(
- interpreter=interpreter,
- abis=abis,
- platforms=platforms,
- )
- )
- supported.extend(
- compatible_tags(
- python_version=python_version,
- interpreter=interpreter,
- platforms=platforms,
- )
- )
-
- return supported
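
To illustrate the alias expansion performed by _get_custom_platforms() above
(expected output shown as comments, per the PEP 599/PEP 571 compatibility
rules):

    print(_get_custom_platforms("manylinux2014_x86_64"))
    # ['manylinux2014_x86_64', 'manylinux2010_x86_64', 'manylinux1_x86_64']

    print(_get_custom_platforms("manylinux2010_i686"))
    # ['manylinux2010_i686', 'manylinux1_i686']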
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/datetime.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/datetime.py
deleted file mode 100644
index 8668b3b..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/datetime.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""For when pip wants to check the date or time.
-"""
-
-import datetime
-
-
-def today_is_later_than(year: int, month: int, day: int) -> bool:
- today = datetime.date.today()
- given = datetime.date(year, month, day)
-
- return today > given
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/deprecation.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/deprecation.py
deleted file mode 100644
index 72bd6f2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/deprecation.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
-A module that implements tooling to enable easy warnings about deprecations.
-"""
-
-import logging
-import warnings
-from typing import Any, Optional, TextIO, Type, Union
-
-from pip._vendor.packaging.version import parse
-
-from pip import __version__ as current_version # NOTE: tests patch this name.
-
-DEPRECATION_MSG_PREFIX = "DEPRECATION: "
-
-
-class PipDeprecationWarning(Warning):
- pass
-
-
-_original_showwarning: Any = None
-
-
-# Warnings <-> Logging Integration
-def _showwarning(
- message: Union[Warning, str],
- category: Type[Warning],
- filename: str,
- lineno: int,
- file: Optional[TextIO] = None,
- line: Optional[str] = None,
-) -> None:
- if file is not None:
- if _original_showwarning is not None:
- _original_showwarning(message, category, filename, lineno, file, line)
- elif issubclass(category, PipDeprecationWarning):
- # We use a specially named logger which will handle all of the
- # deprecation messages for pip.
- logger = logging.getLogger("pip._internal.deprecations")
- logger.warning(message)
- else:
- _original_showwarning(message, category, filename, lineno, file, line)
-
-
-def install_warning_logger() -> None:
- # Enable our Deprecation Warnings
- warnings.simplefilter("default", PipDeprecationWarning, append=True)
-
- global _original_showwarning
-
- if _original_showwarning is None:
- _original_showwarning = warnings.showwarning
- warnings.showwarning = _showwarning
-
-
-def deprecated(
- *,
- reason: str,
- replacement: Optional[str],
- gone_in: Optional[str],
- feature_flag: Optional[str] = None,
- issue: Optional[int] = None,
-) -> None:
- """Helper to deprecate existing functionality.
-
- reason:
- Textual reason shown to the user about why this functionality has
- been deprecated. Should be a complete sentence.
- replacement:
- Textual suggestion shown to the user about what alternative
- functionality they can use.
- gone_in:
-        The version of pip in which this functionality should get removed.
- Raises an error if pip's current version is greater than or equal to
- this.
- feature_flag:
- Command-line flag of the form --use-feature={feature_flag} for testing
- upcoming functionality.
- issue:
- Issue number on the tracker that would serve as a useful place for
- users to find related discussion and provide feedback.
- """
-
- # Determine whether or not the feature is already gone in this version.
- is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
-
- message_parts = [
- (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
- (
- gone_in,
- "pip {} will enforce this behaviour change."
- if not is_gone
- else "Since pip {}, this is no longer supported.",
- ),
- (
- replacement,
- "A possible replacement is {}.",
- ),
- (
- feature_flag,
- "You can use the flag --use-feature={} to test the upcoming behaviour."
- if not is_gone
- else None,
- ),
- (
- issue,
- "Discussion can be found at https://github.com/pypa/pip/issues/{}",
- ),
- ]
-
- message = " ".join(
- format_str.format(value)
- for value, format_str in message_parts
- if format_str is not None and value is not None
- )
-
- # Raise as an error if this behaviour is deprecated.
- if is_gone:
- raise PipDeprecationWarning(message)
-
- warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
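
A hypothetical invocation showing how the message parts compose (the flag,
version, and issue number are made up; this assumes the running pip is older
than gone_in, otherwise PipDeprecationWarning is raised as an error):

    deprecated(
        reason="--example-option is deprecated.",
        replacement="--config-settings",
        gone_in="99.0",
        issue=12345,
    )
    # Warns with a message roughly like:
    #   DEPRECATION: --example-option is deprecated. pip 99.0 will enforce
    #   this behaviour change. A possible replacement is --config-settings.
    #   Discussion can be found at https://github.com/pypa/pip/issues/12345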
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py
deleted file mode 100644
index 0e8e5e1..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/direct_url_helpers.py
+++ /dev/null
@@ -1,87 +0,0 @@
-from typing import Optional
-
-from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
-from pip._internal.models.link import Link
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs import vcs
-
-
-def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
- """Convert a DirectUrl to a pip requirement string."""
- direct_url.validate() # if invalid, this is a pip bug
- requirement = name + " @ "
- fragments = []
- if isinstance(direct_url.info, VcsInfo):
- requirement += "{}+{}@{}".format(
- direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
- )
- elif isinstance(direct_url.info, ArchiveInfo):
- requirement += direct_url.url
- if direct_url.info.hash:
- fragments.append(direct_url.info.hash)
- else:
- assert isinstance(direct_url.info, DirInfo)
- requirement += direct_url.url
- if direct_url.subdirectory:
- fragments.append("subdirectory=" + direct_url.subdirectory)
- if fragments:
- requirement += "#" + "&".join(fragments)
- return requirement
-
-
-def direct_url_for_editable(source_dir: str) -> DirectUrl:
- return DirectUrl(
- url=path_to_url(source_dir),
- info=DirInfo(editable=True),
- )
-
-
-def direct_url_from_link(
- link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
-) -> DirectUrl:
- if link.is_vcs:
- vcs_backend = vcs.get_backend_for_scheme(link.scheme)
- assert vcs_backend
- url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
- link.url_without_fragment
- )
- # For VCS links, we need to find out and add commit_id.
- if link_is_in_wheel_cache:
- # If the requested VCS link corresponds to a cached
- # wheel, it means the requested revision was an
- # immutable commit hash, otherwise it would not have
- # been cached. In that case we don't have a source_dir
- # with the VCS checkout.
- assert requested_revision
- commit_id = requested_revision
- else:
- # If the wheel was not in cache, it means we have
- # had to checkout from VCS to build and we have a source_dir
- # which we can inspect to find out the commit id.
- assert source_dir
- commit_id = vcs_backend.get_revision(source_dir)
- return DirectUrl(
- url=url,
- info=VcsInfo(
- vcs=vcs_backend.name,
- commit_id=commit_id,
- requested_revision=requested_revision,
- ),
- subdirectory=link.subdirectory_fragment,
- )
- elif link.is_existing_dir():
- return DirectUrl(
- url=link.url_without_fragment,
- info=DirInfo(),
- subdirectory=link.subdirectory_fragment,
- )
- else:
- hash = None
- hash_name = link.hash_name
- if hash_name:
- hash = f"{hash_name}={link.hash}"
- return DirectUrl(
- url=link.url_without_fragment,
- info=ArchiveInfo(hash=hash),
- subdirectory=link.subdirectory_fragment,
- )
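
A quick sketch of the requirement string produced for a VCS direct URL (the
URL and commit id are made up):

    from pip._internal.models.direct_url import DirectUrl, VcsInfo

    direct_url = DirectUrl(
        url="https://github.com/pypa/pip.git",
        info=VcsInfo(vcs="git", commit_id="abc123", requested_revision="main"),
    )
    print(direct_url_as_pep440_direct_reference(direct_url, "pip"))
    # pip @ git+https://github.com/pypa/pip.git@abc123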
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/egg_link.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/egg_link.py
deleted file mode 100644
index 4a384a6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/egg_link.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import os
-import re
-import sys
-from typing import List, Optional
-
-from pip._internal.locations import site_packages, user_site
-from pip._internal.utils.virtualenv import (
- running_under_virtualenv,
- virtualenv_no_global,
-)
-
-__all__ = [
- "egg_link_path_from_sys_path",
- "egg_link_path_from_location",
-]
-
-
-def _egg_link_names(raw_name: str) -> List[str]:
- """
- Convert a Name metadata value to a .egg-link name, by applying
- the same substitution as pkg_resources's safe_name function.
- Note: we cannot use canonicalize_name because it has a different logic.
-
- We also look for the raw name (without normalization) as setuptools 69 changed
- the way it names .egg-link files (https://github.com/pypa/setuptools/issues/4167).
- """
- return [
- re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link",
- f"{raw_name}.egg-link",
- ]
-
-
-def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
- """
- Look for a .egg-link file for project name, by walking sys.path.
- """
- egg_link_names = _egg_link_names(raw_name)
- for path_item in sys.path:
- for egg_link_name in egg_link_names:
- egg_link = os.path.join(path_item, egg_link_name)
- if os.path.isfile(egg_link):
- return egg_link
- return None
-
-
-def egg_link_path_from_location(raw_name: str) -> Optional[str]:
- """
- Return the path for the .egg-link file if it exists, otherwise, None.
-
-    There are 3 scenarios:
- 1) not in a virtualenv
- try to find in site.USER_SITE, then site_packages
- 2) in a no-global virtualenv
- try to find in site_packages
- 3) in a yes-global virtualenv
- try to find in site_packages, then site.USER_SITE
- (don't look in global location)
-
-    For #1 and #3, there could be odd cases where there's an egg-link in 2
- locations.
-
- This method will just return the first one found.
- """
- sites: List[str] = []
- if running_under_virtualenv():
- sites.append(site_packages)
- if not virtualenv_no_global() and user_site:
- sites.append(user_site)
- else:
- if user_site:
- sites.append(user_site)
- sites.append(site_packages)
-
- egg_link_names = _egg_link_names(raw_name)
- for site in sites:
- for egg_link_name in egg_link_names:
- egglink = os.path.join(site, egg_link_name)
- if os.path.isfile(egglink):
- return egglink
- return None
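
For illustration, the two candidate file names generated for a project whose
metadata Name contains characters outside [A-Za-z0-9.] (the name is made up):

    print(_egg_link_names("my_project Example"))
    # ['my-project-Example.egg-link', 'my_project Example.egg-link']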
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/encoding.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/encoding.py
deleted file mode 100644
index 008f06a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/encoding.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import codecs
-import locale
-import re
-import sys
-from typing import List, Tuple
-
-BOMS: List[Tuple[bytes, str]] = [
- (codecs.BOM_UTF8, "utf-8"),
- (codecs.BOM_UTF16, "utf-16"),
- (codecs.BOM_UTF16_BE, "utf-16-be"),
- (codecs.BOM_UTF16_LE, "utf-16-le"),
- (codecs.BOM_UTF32, "utf-32"),
- (codecs.BOM_UTF32_BE, "utf-32-be"),
- (codecs.BOM_UTF32_LE, "utf-32-le"),
-]
-
-ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")
-
-
-def auto_decode(data: bytes) -> str:
- """Check a bytes string for a BOM to correctly detect the encoding
-
-    Falls back to locale.getpreferredencoding(False), like open() on Python 3."""
- for bom, encoding in BOMS:
- if data.startswith(bom):
- return data[len(bom) :].decode(encoding)
-    # Let's check the first two lines, as in PEP 263
- for line in data.split(b"\n")[:2]:
- if line[0:1] == b"#" and ENCODING_RE.search(line):
- result = ENCODING_RE.search(line)
- assert result is not None
- encoding = result.groups()[0].decode("ascii")
- return data.decode(encoding)
- return data.decode(
- locale.getpreferredencoding(False) or sys.getdefaultencoding(),
- )
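
Two illustrative calls, one hitting the BOM branch and one the PEP 263
coding-comment branch (the byte strings are made up):

    import codecs

    print(auto_decode(codecs.BOM_UTF8 + "résumé".encode("utf-8")))
    # résumé

    print(auto_decode(b"# -*- coding: latin-1 -*-\nname = '\xe9'"))
    # # -*- coding: latin-1 -*-
    # name = 'é'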
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py
deleted file mode 100644
index 1501369..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/entrypoints.py
+++ /dev/null
@@ -1,84 +0,0 @@
-import itertools
-import os
-import shutil
-import sys
-from typing import List, Optional
-
-from pip._internal.cli.main import main
-from pip._internal.utils.compat import WINDOWS
-
-_EXECUTABLE_NAMES = [
- "pip",
- f"pip{sys.version_info.major}",
- f"pip{sys.version_info.major}.{sys.version_info.minor}",
-]
-if WINDOWS:
- _allowed_extensions = {"", ".exe"}
- _EXECUTABLE_NAMES = [
- "".join(parts)
- for parts in itertools.product(_EXECUTABLE_NAMES, _allowed_extensions)
- ]
-
-
-def _wrapper(args: Optional[List[str]] = None) -> int:
- """Central wrapper for all old entrypoints.
-
- Historically pip has had several entrypoints defined. Because of issues
- arising from PATH, sys.path, multiple Pythons, their interactions, and most
- of them having a pip installed, users suffer every time an entrypoint gets
- moved.
-
- To alleviate this pain, and provide a mechanism for warning users and
- directing them to an appropriate place for help, we now define all of
- our old entrypoints as wrappers for the current one.
- """
- sys.stderr.write(
- "WARNING: pip is being invoked by an old script wrapper. This will "
- "fail in a future version of pip.\n"
- "Please see https://github.com/pypa/pip/issues/5599 for advice on "
- "fixing the underlying issue.\n"
- "To avoid this problem you can invoke Python with '-m pip' instead of "
- "running pip directly.\n"
- )
- return main(args)
-
-
-def get_best_invocation_for_this_pip() -> str:
- """Try to figure out the best way to invoke pip in the current environment."""
- binary_directory = "Scripts" if WINDOWS else "bin"
- binary_prefix = os.path.join(sys.prefix, binary_directory)
-
- # Try to use pip[X[.Y]] names, if those executables for this environment are
- # the first on PATH with that name.
- path_parts = os.path.normcase(os.environ.get("PATH", "")).split(os.pathsep)
- exe_are_in_PATH = os.path.normcase(binary_prefix) in path_parts
- if exe_are_in_PATH:
- for exe_name in _EXECUTABLE_NAMES:
- found_executable = shutil.which(exe_name)
- binary_executable = os.path.join(binary_prefix, exe_name)
- if (
- found_executable
- and os.path.exists(binary_executable)
- and os.path.samefile(
- found_executable,
- binary_executable,
- )
- ):
- return exe_name
-
- # Use the `-m` invocation, if there's no "nice" invocation.
- return f"{get_best_invocation_for_this_python()} -m pip"
-
-
-def get_best_invocation_for_this_python() -> str:
- """Try to figure out the best way to invoke the current Python."""
- exe = sys.executable
- exe_name = os.path.basename(exe)
-
- # Try to use the basename, if it's the first executable.
- found_executable = shutil.which(exe_name)
- if found_executable and os.path.samefile(found_executable, exe):
- return exe_name
-
- # Use the full executable name, because we couldn't find something simpler.
- return exe
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/filesystem.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/filesystem.py
deleted file mode 100644
index 83c2df7..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/filesystem.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import fnmatch
-import os
-import os.path
-import random
-import sys
-from contextlib import contextmanager
-from tempfile import NamedTemporaryFile
-from typing import Any, BinaryIO, Generator, List, Union, cast
-
-from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
-
-from pip._internal.utils.compat import get_path_uid
-from pip._internal.utils.misc import format_size
-
-
-def check_path_owner(path: str) -> bool:
- # If we don't have a way to check the effective uid of this process, then
- # we'll just assume that we own the directory.
- if sys.platform == "win32" or not hasattr(os, "geteuid"):
- return True
-
- assert os.path.isabs(path)
-
- previous = None
- while path != previous:
- if os.path.lexists(path):
- # Check if path is writable by current user.
- if os.geteuid() == 0:
- # Special handling for root user in order to handle properly
- # cases where users use sudo without -H flag.
- try:
- path_uid = get_path_uid(path)
- except OSError:
- return False
- return path_uid == 0
- else:
- return os.access(path, os.W_OK)
- else:
- previous, path = path, os.path.dirname(path)
- return False # assume we don't own the path
-
-
-@contextmanager
-def adjacent_tmp_file(path: str, **kwargs: Any) -> Generator[BinaryIO, None, None]:
- """Return a file-like object pointing to a tmp file next to path.
-
- The file is created securely and is ensured to be written to disk
- after the context reaches its end.
-
- kwargs will be passed to tempfile.NamedTemporaryFile to control
- the way the temporary file will be opened.
- """
- with NamedTemporaryFile(
- delete=False,
- dir=os.path.dirname(path),
- prefix=os.path.basename(path),
- suffix=".tmp",
- **kwargs,
- ) as f:
- result = cast(BinaryIO, f)
- try:
- yield result
- finally:
- result.flush()
- os.fsync(result.fileno())
-
-
-# Tenacity raises RetryError by default, explicitly raise the original exception
-_replace_retry = retry(reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25))
-
-replace = _replace_retry(os.replace)
-
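
The same tenacity pattern can wrap any other flaky filesystem call; a small
sketch (wrapping os.unlink here is illustrative, not something pip does):

    import os

    from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed

    # Retry os.unlink for up to one second at 250 ms intervals, re-raising
    # the final OSError instead of tenacity's RetryError.
    unlink_with_retry = retry(
        reraise=True, stop=stop_after_delay(1), wait=wait_fixed(0.25)
    )(os.unlink)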
-
-# test_writable_dir and _test_writable_dir_win are copied from Flit,
-# with the author's agreement to also place them under pip's license.
-def test_writable_dir(path: str) -> bool:
- """Check if a directory is writable.
-
- Uses os.access() on POSIX, tries creating files on Windows.
- """
- # If the directory doesn't exist, find the closest parent that does.
- while not os.path.isdir(path):
- parent = os.path.dirname(path)
- if parent == path:
- break # Should never get here, but infinite loops are bad
- path = parent
-
- if os.name == "posix":
- return os.access(path, os.W_OK)
-
- return _test_writable_dir_win(path)
-
-
-def _test_writable_dir_win(path: str) -> bool:
- # os.access doesn't work on Windows: http://bugs.python.org/issue2528
- # and we can't use tempfile: http://bugs.python.org/issue22107
- basename = "accesstest_deleteme_fishfingers_custard_"
- alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
- for _ in range(10):
- name = basename + "".join(random.choice(alphabet) for _ in range(6))
- file = os.path.join(path, name)
- try:
- fd = os.open(file, os.O_RDWR | os.O_CREAT | os.O_EXCL)
- except FileExistsError:
- pass
- except PermissionError:
- # This could be because there's a directory with the same name.
- # But it's highly unlikely there's a directory called that,
- # so we'll assume it's because the parent dir is not writable.
- # This could as well be because the parent dir is not readable,
- # due to non-privileged user access.
- return False
- else:
- os.close(fd)
- os.unlink(file)
- return True
-
- # This should never be reached
- raise OSError("Unexpected condition testing for writable directory")
-
-
-def find_files(path: str, pattern: str) -> List[str]:
- """Returns a list of absolute paths of files beneath path, recursively,
-    with filenames that match the UNIX-style shell glob pattern."""
- result: List[str] = []
- for root, _, files in os.walk(path):
- matches = fnmatch.filter(files, pattern)
- result.extend(os.path.join(root, f) for f in matches)
- return result
-
-
-def file_size(path: str) -> Union[int, float]:
- # If it's a symlink, return 0.
- if os.path.islink(path):
- return 0
- return os.path.getsize(path)
-
-
-def format_file_size(path: str) -> str:
- return format_size(file_size(path))
-
-
-def directory_size(path: str) -> Union[int, float]:
- size = 0.0
- for root, _dirs, files in os.walk(path):
- for filename in files:
- file_path = os.path.join(root, filename)
- size += file_size(file_path)
- return size
-
-
-def format_directory_size(path: str) -> str:
- return format_size(directory_size(path))
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/filetypes.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/filetypes.py
deleted file mode 100644
index 5948570..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/filetypes.py
+++ /dev/null
@@ -1,27 +0,0 @@
-"""Filetype information.
-"""
-
-from typing import Tuple
-
-from pip._internal.utils.misc import splitext
-
-WHEEL_EXTENSION = ".whl"
-BZ2_EXTENSIONS: Tuple[str, ...] = (".tar.bz2", ".tbz")
-XZ_EXTENSIONS: Tuple[str, ...] = (
- ".tar.xz",
- ".txz",
- ".tlz",
- ".tar.lz",
- ".tar.lzma",
-)
-ZIP_EXTENSIONS: Tuple[str, ...] = (".zip", WHEEL_EXTENSION)
-TAR_EXTENSIONS: Tuple[str, ...] = (".tar.gz", ".tgz", ".tar")
-ARCHIVE_EXTENSIONS = ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
-
-
-def is_archive_file(name: str) -> bool:
-    """Return True if `name` is considered an archive file."""
- ext = splitext(name)[1].lower()
- if ext in ARCHIVE_EXTENSIONS:
- return True
- return False
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py
deleted file mode 100644
index 81342af..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/glibc.py
+++ /dev/null
@@ -1,88 +0,0 @@
-import os
-import sys
-from typing import Optional, Tuple
-
-
-def glibc_version_string() -> Optional[str]:
- "Returns glibc version string, or None if not using glibc."
- return glibc_version_string_confstr() or glibc_version_string_ctypes()
-
-
-def glibc_version_string_confstr() -> Optional[str]:
- "Primary implementation of glibc_version_string using os.confstr."
- # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
- # to be broken or missing. This strategy is used in the standard library
- # platform module:
- # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
- if sys.platform == "win32":
- return None
- try:
- gnu_libc_version = os.confstr("CS_GNU_LIBC_VERSION")
- if gnu_libc_version is None:
- return None
- # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
- _, version = gnu_libc_version.split()
- except (AttributeError, OSError, ValueError):
- # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
- return None
- return version
-
-
-def glibc_version_string_ctypes() -> Optional[str]:
- "Fallback implementation of glibc_version_string using ctypes."
-
- try:
- import ctypes
- except ImportError:
- return None
-
- # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
- # manpage says, "If filename is NULL, then the returned handle is for the
- # main program". This way we can let the linker do the work to figure out
- # which libc our process is actually using.
- process_namespace = ctypes.CDLL(None)
- try:
- gnu_get_libc_version = process_namespace.gnu_get_libc_version
- except AttributeError:
- # Symbol doesn't exist -> therefore, we are not linked to
- # glibc.
- return None
-
- # Call gnu_get_libc_version, which returns a string like "2.5"
- gnu_get_libc_version.restype = ctypes.c_char_p
- version_str = gnu_get_libc_version()
- # py2 / py3 compatibility:
- if not isinstance(version_str, str):
- version_str = version_str.decode("ascii")
-
- return version_str
-
-
-# platform.libc_ver regularly returns completely nonsensical glibc
-# versions. E.g. on my computer, platform says:
-#
-# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.7')
-# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
-# ('glibc', '2.9')
-#
-# But the truth is:
-#
-# ~$ ldd --version
-# ldd (Debian GLIBC 2.22-11) 2.22
-#
-# This is unfortunate, because it means that the linehaul data on libc
-# versions that was generated by pip 8.1.2 and earlier is useless and
-# misleading. Solution: instead of using platform, use our code that actually
-# works.
-def libc_ver() -> Tuple[str, str]:
- """Try to determine the glibc version
-
- Returns a tuple of strings (lib, version) which default to empty strings
- in case the lookup fails.
- """
- glibc_version = glibc_version_string()
- if glibc_version is None:
- return ("", "")
- else:
- return ("glibc", glibc_version)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/hashes.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/hashes.py
deleted file mode 100644
index 843cffc..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/hashes.py
+++ /dev/null
@@ -1,151 +0,0 @@
-import hashlib
-from typing import TYPE_CHECKING, BinaryIO, Dict, Iterable, List, Optional
-
-from pip._internal.exceptions import HashMismatch, HashMissing, InstallationError
-from pip._internal.utils.misc import read_chunks
-
-if TYPE_CHECKING:
- from hashlib import _Hash
-
- # NoReturn introduced in 3.6.2; imported only for type checking to maintain
- # pip compatibility with older patch versions of Python 3.6
- from typing import NoReturn
-
-
-# The recommended hash algo of the moment. Change this whenever the state of
-# the art changes; it won't hurt backward compatibility.
-FAVORITE_HASH = "sha256"
-
-
-# Names of hashlib algorithms allowed by the --hash option and ``pip hash``
-# Currently, those are the ones at least as collision-resistant as sha256.
-STRONG_HASHES = ["sha256", "sha384", "sha512"]
-
-
-class Hashes:
- """A wrapper that builds multiple hashes at once and checks them against
- known-good values
-
- """
-
- def __init__(self, hashes: Optional[Dict[str, List[str]]] = None) -> None:
- """
- :param hashes: A dict of algorithm names pointing to lists of allowed
- hex digests
- """
- allowed = {}
- if hashes is not None:
- for alg, keys in hashes.items():
- # Make sure values are always sorted (to ease equality checks)
- allowed[alg] = sorted(keys)
- self._allowed = allowed
-
- def __and__(self, other: "Hashes") -> "Hashes":
- if not isinstance(other, Hashes):
- return NotImplemented
-
-        # If either of the Hashes objects is entirely empty (i.e. no hash
- # specified at all), all hashes from the other object are allowed.
- if not other:
- return self
- if not self:
- return other
-
-        # Otherwise, only hashes present in both objects are allowed.
- new = {}
- for alg, values in other._allowed.items():
- if alg not in self._allowed:
- continue
- new[alg] = [v for v in values if v in self._allowed[alg]]
- return Hashes(new)
-
- @property
- def digest_count(self) -> int:
- return sum(len(digests) for digests in self._allowed.values())
-
- def is_hash_allowed(self, hash_name: str, hex_digest: str) -> bool:
- """Return whether the given hex digest is allowed."""
- return hex_digest in self._allowed.get(hash_name, [])
-
- def check_against_chunks(self, chunks: Iterable[bytes]) -> None:
- """Check good hashes against ones built from iterable of chunks of
- data.
-
- Raise HashMismatch if none match.
-
- """
- gots = {}
- for hash_name in self._allowed.keys():
- try:
- gots[hash_name] = hashlib.new(hash_name)
- except (ValueError, TypeError):
- raise InstallationError(f"Unknown hash name: {hash_name}")
-
- for chunk in chunks:
- for hash in gots.values():
- hash.update(chunk)
-
- for hash_name, got in gots.items():
- if got.hexdigest() in self._allowed[hash_name]:
- return
- self._raise(gots)
-
- def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
- raise HashMismatch(self._allowed, gots)
-
- def check_against_file(self, file: BinaryIO) -> None:
- """Check good hashes against a file-like object
-
- Raise HashMismatch if none match.
-
- """
- return self.check_against_chunks(read_chunks(file))
-
- def check_against_path(self, path: str) -> None:
- with open(path, "rb") as file:
- return self.check_against_file(file)
-
- def has_one_of(self, hashes: Dict[str, str]) -> bool:
- """Return whether any of the given hashes are allowed."""
- for hash_name, hex_digest in hashes.items():
- if self.is_hash_allowed(hash_name, hex_digest):
- return True
- return False
-
- def __bool__(self) -> bool:
- """Return whether I know any known-good hashes."""
- return bool(self._allowed)
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, Hashes):
- return NotImplemented
- return self._allowed == other._allowed
-
- def __hash__(self) -> int:
- return hash(
- ",".join(
- sorted(
- ":".join((alg, digest))
- for alg, digest_list in self._allowed.items()
- for digest in digest_list
- )
- )
- )
-
-
-class MissingHashes(Hashes):
- """A workalike for Hashes used when we're missing a hash for a requirement
-
- It computes the actual hash of the requirement and raises a HashMissing
- exception showing it to the user.
-
- """
-
- def __init__(self) -> None:
- """Don't offer the ``hashes`` kwarg."""
- # Pass our favorite hash in to generate a "gotten hash". With the
- # empty list, it will never match, so an error will always raise.
- super().__init__(hashes={FAVORITE_HASH: []})
-
- def _raise(self, gots: Dict[str, "_Hash"]) -> "NoReturn":
- raise HashMissing(gots[FAVORITE_HASH].hexdigest())
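Stripped of pip's exception types, the core of check_against_chunks() is a few lines of hashlib. A minimal sketch of the "any allowed digest matches" rule (helper name is illustrative):

    import hashlib
    from typing import Dict, Iterable, List

    def chunks_match(chunks: Iterable[bytes], allowed: Dict[str, List[str]]) -> bool:
        # One hasher per algorithm; every chunk feeds all of them.
        hashers = {name: hashlib.new(name) for name in allowed}
        for chunk in chunks:
            for h in hashers.values():
                h.update(chunk)
        # Accept if any computed digest is on its algorithm's allow-list.
        return any(h.hexdigest() in allowed[name] for name, h in hashers.items())

    good = hashlib.sha256(b"hello world").hexdigest()
    assert chunks_match([b"hello ", b"world"], {"sha256": [good]})
    assert not chunks_match([b"tampered"], {"sha256": [good]})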
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/logging.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/logging.py
deleted file mode 100644
index 95982df..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/logging.py
+++ /dev/null
@@ -1,348 +0,0 @@
-import contextlib
-import errno
-import logging
-import logging.config
-import logging.handlers
-import os
-import sys
-import threading
-from dataclasses import dataclass
-from io import TextIOWrapper
-from logging import Filter
-from typing import Any, ClassVar, Generator, List, Optional, TextIO, Type
-
-from pip._vendor.rich.console import (
- Console,
- ConsoleOptions,
- ConsoleRenderable,
- RenderableType,
- RenderResult,
- RichCast,
-)
-from pip._vendor.rich.highlighter import NullHighlighter
-from pip._vendor.rich.logging import RichHandler
-from pip._vendor.rich.segment import Segment
-from pip._vendor.rich.style import Style
-
-from pip._internal.utils._log import VERBOSE, getLogger
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
-from pip._internal.utils.misc import ensure_dir
-
-_log_state = threading.local()
-subprocess_logger = getLogger("pip.subprocessor")
-
-
-class BrokenStdoutLoggingError(Exception):
- """
- Raised if BrokenPipeError occurs for the stdout stream while logging.
- """
-
-
-def _is_broken_pipe_error(exc_class: Type[BaseException], exc: BaseException) -> bool:
- if exc_class is BrokenPipeError:
- return True
-
- # On Windows, a broken pipe can show up as EINVAL rather than EPIPE:
- # https://bugs.python.org/issue19612
- # https://bugs.python.org/issue30418
- if not WINDOWS:
- return False
-
- return isinstance(exc, OSError) and exc.errno in (errno.EINVAL, errno.EPIPE)
-
-
-@contextlib.contextmanager
-def indent_log(num: int = 2) -> Generator[None, None, None]:
- """
- A context manager which will cause the log output to be indented for any
- log messages emitted inside it.
- """
- # For thread-safety
- _log_state.indentation = get_indentation()
- _log_state.indentation += num
- try:
- yield
- finally:
- _log_state.indentation -= num
-
-
-def get_indentation() -> int:
- return getattr(_log_state, "indentation", 0)
-
-
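The same thread-local indentation trick works with plain logging outside pip. A self-contained sketch (class and function names are illustrative):

    import contextlib
    import logging
    import threading

    _state = threading.local()

    @contextlib.contextmanager
    def indented(width: int = 2):
        # Bump a thread-local counter for the duration of the block.
        _state.depth = getattr(_state, "depth", 0) + width
        try:
            yield
        finally:
            _state.depth -= width

    class IndentFormatter(logging.Formatter):
        def format(self, record: logging.LogRecord) -> str:
            # Read the counter per record, like IndentingFormatter below.
            return " " * getattr(_state, "depth", 0) + super().format(record)

    handler = logging.StreamHandler()
    handler.setFormatter(IndentFormatter("%(message)s"))
    log = logging.getLogger("demo")
    log.addHandler(handler)
    log.setLevel(logging.INFO)

    log.info("top level")
    with indented():
        log.info("nested by two spaces")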
-class IndentingFormatter(logging.Formatter):
- default_time_format = "%Y-%m-%dT%H:%M:%S"
-
- def __init__(
- self,
- *args: Any,
- add_timestamp: bool = False,
- **kwargs: Any,
- ) -> None:
- """
- A logging.Formatter that obeys the indent_log() context manager.
-
- :param add_timestamp: A bool indicating output lines should be prefixed
- with their record's timestamp.
- """
- self.add_timestamp = add_timestamp
- super().__init__(*args, **kwargs)
-
- def get_message_start(self, formatted: str, levelno: int) -> str:
- """
- Return the start of the formatted log message (not counting the
- prefix to add to each line).
- """
- if levelno < logging.WARNING:
- return ""
- if formatted.startswith(DEPRECATION_MSG_PREFIX):
- # Then the message already has a prefix. We don't want it to
- # look like "WARNING: DEPRECATION: ...."
- return ""
- if levelno < logging.ERROR:
- return "WARNING: "
-
- return "ERROR: "
-
- def format(self, record: logging.LogRecord) -> str:
- """
- Calls the standard formatter, but will indent all of the log message
- lines by our current indentation level.
- """
- formatted = super().format(record)
- message_start = self.get_message_start(formatted, record.levelno)
- formatted = message_start + formatted
-
- prefix = ""
- if self.add_timestamp:
- prefix = f"{self.formatTime(record)} "
- prefix += " " * get_indentation()
- formatted = "".join([prefix + line for line in formatted.splitlines(True)])
- return formatted
-
-
-@dataclass
-class IndentedRenderable:
- renderable: RenderableType
- indent: int
-
- def __rich_console__(
- self, console: Console, options: ConsoleOptions
- ) -> RenderResult:
- segments = console.render(self.renderable, options)
- lines = Segment.split_lines(segments)
- for line in lines:
- yield Segment(" " * self.indent)
- yield from line
- yield Segment("\n")
-
-
-class RichPipStreamHandler(RichHandler):
- KEYWORDS: ClassVar[Optional[List[str]]] = []
-
- def __init__(self, stream: Optional[TextIO], no_color: bool) -> None:
- super().__init__(
- console=Console(file=stream, no_color=no_color, soft_wrap=True),
- show_time=False,
- show_level=False,
- show_path=False,
- highlighter=NullHighlighter(),
- )
-
-    # Our custom override of RichHandler.emit, handling indentation and level-based colors.
- def emit(self, record: logging.LogRecord) -> None:
- style: Optional[Style] = None
-
- # If we are given a diagnostic error to present, present it with indentation.
- assert isinstance(record.args, tuple)
- if getattr(record, "rich", False):
- (rich_renderable,) = record.args
- assert isinstance(
- rich_renderable, (ConsoleRenderable, RichCast, str)
- ), f"{rich_renderable} is not rich-console-renderable"
-
- renderable: RenderableType = IndentedRenderable(
- rich_renderable, indent=get_indentation()
- )
- else:
- message = self.format(record)
- renderable = self.render_message(record, message)
- if record.levelno is not None:
- if record.levelno >= logging.ERROR:
- style = Style(color="red")
- elif record.levelno >= logging.WARNING:
- style = Style(color="yellow")
-
- try:
- self.console.print(renderable, overflow="ignore", crop=False, style=style)
- except Exception:
- self.handleError(record)
-
- def handleError(self, record: logging.LogRecord) -> None:
- """Called when logging is unable to log some output."""
-
- exc_class, exc = sys.exc_info()[:2]
- # If a broken pipe occurred while calling write() or flush() on the
- # stdout stream in logging's Handler.emit(), then raise our special
- # exception so we can handle it in main() instead of logging the
- # broken pipe error and continuing.
- if (
- exc_class
- and exc
- and self.console.file is sys.stdout
- and _is_broken_pipe_error(exc_class, exc)
- ):
- raise BrokenStdoutLoggingError()
-
- return super().handleError(record)
-
-
-class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
- def _open(self) -> TextIOWrapper:
- ensure_dir(os.path.dirname(self.baseFilename))
- return super()._open()
-
-
-class MaxLevelFilter(Filter):
- def __init__(self, level: int) -> None:
- self.level = level
-
- def filter(self, record: logging.LogRecord) -> bool:
- return record.levelno < self.level
-
-
-class ExcludeLoggerFilter(Filter):
-
- """
- A logging Filter that excludes records from a logger (or its children).
- """
-
- def filter(self, record: logging.LogRecord) -> bool:
- # The base Filter class allows only records from a logger (or its
- # children).
- return not super().filter(record)
-
-
-def setup_logging(verbosity: int, no_color: bool, user_log_file: Optional[str]) -> int:
- """Configures and sets up all of the logging
-
- Returns the requested logging level, as its integer value.
- """
-
- # Determine the level to be logging at.
- if verbosity >= 2:
- level_number = logging.DEBUG
- elif verbosity == 1:
- level_number = VERBOSE
- elif verbosity == -1:
- level_number = logging.WARNING
- elif verbosity == -2:
- level_number = logging.ERROR
- elif verbosity <= -3:
- level_number = logging.CRITICAL
- else:
- level_number = logging.INFO
-
- level = logging.getLevelName(level_number)
-
- # The "root" logger should match the "console" level *unless* we also need
- # to log to a user log file.
- include_user_log = user_log_file is not None
- if include_user_log:
- additional_log_file = user_log_file
- root_level = "DEBUG"
- else:
- additional_log_file = "/dev/null"
- root_level = level
-
-    # For vendored libraries, disable any logging besides WARNING unless
-    # we have DEBUG level logging enabled.
- vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
-
- # Shorthands for clarity
- log_streams = {
- "stdout": "ext://sys.stdout",
- "stderr": "ext://sys.stderr",
- }
- handler_classes = {
- "stream": "pip._internal.utils.logging.RichPipStreamHandler",
- "file": "pip._internal.utils.logging.BetterRotatingFileHandler",
- }
- handlers = ["console", "console_errors", "console_subprocess"] + (
- ["user_log"] if include_user_log else []
- )
-
- logging.config.dictConfig(
- {
- "version": 1,
- "disable_existing_loggers": False,
- "filters": {
- "exclude_warnings": {
- "()": "pip._internal.utils.logging.MaxLevelFilter",
- "level": logging.WARNING,
- },
- "restrict_to_subprocess": {
- "()": "logging.Filter",
- "name": subprocess_logger.name,
- },
- "exclude_subprocess": {
- "()": "pip._internal.utils.logging.ExcludeLoggerFilter",
- "name": subprocess_logger.name,
- },
- },
- "formatters": {
- "indent": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- },
- "indent_with_timestamp": {
- "()": IndentingFormatter,
- "format": "%(message)s",
- "add_timestamp": True,
- },
- },
- "handlers": {
- "console": {
- "level": level,
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stdout"],
- "filters": ["exclude_subprocess", "exclude_warnings"],
- "formatter": "indent",
- },
- "console_errors": {
- "level": "WARNING",
- "class": handler_classes["stream"],
- "no_color": no_color,
- "stream": log_streams["stderr"],
- "filters": ["exclude_subprocess"],
- "formatter": "indent",
- },
- # A handler responsible for logging to the console messages
- # from the "subprocessor" logger.
- "console_subprocess": {
- "level": level,
- "class": handler_classes["stream"],
- "stream": log_streams["stderr"],
- "no_color": no_color,
- "filters": ["restrict_to_subprocess"],
- "formatter": "indent",
- },
- "user_log": {
- "level": "DEBUG",
- "class": handler_classes["file"],
- "filename": additional_log_file,
- "encoding": "utf-8",
- "delay": True,
- "formatter": "indent_with_timestamp",
- },
- },
- "root": {
- "level": root_level,
- "handlers": handlers,
- },
- "loggers": {"pip._vendor": {"level": vendored_log_level}},
- }
- )
-
- return level_number
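The stdout/stderr split above ("console" below WARNING, "console_errors" at WARNING and up) generalizes to any application. A minimal dictConfig sketch of the same routing, with illustrative names:

    import logging
    import logging.config

    class MaxLevel(logging.Filter):
        # Pass only records strictly below the cutoff so stdout never
        # duplicates what the stderr handler already prints.
        def __init__(self, level: int) -> None:
            super().__init__()
            self.level = level

        def filter(self, record: logging.LogRecord) -> bool:
            return record.levelno < self.level

    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "filters": {"below_warning": {"()": MaxLevel, "level": logging.WARNING}},
        "handlers": {
            "out": {"class": "logging.StreamHandler", "stream": "ext://sys.stdout",
                    "filters": ["below_warning"]},
            "err": {"class": "logging.StreamHandler", "stream": "ext://sys.stderr",
                    "level": "WARNING"},
        },
        "root": {"level": "INFO", "handlers": ["out", "err"]},
    })

    logging.getLogger("demo").info("stdout")
    logging.getLogger("demo").warning("stderr")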
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/misc.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/misc.py
deleted file mode 100644
index 1ad3f61..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/misc.py
+++ /dev/null
@@ -1,783 +0,0 @@
-import contextlib
-import errno
-import getpass
-import hashlib
-import io
-import logging
-import os
-import posixpath
-import shutil
-import stat
-import sys
-import sysconfig
-import urllib.parse
-from functools import partial
-from io import StringIO
-from itertools import filterfalse, tee, zip_longest
-from pathlib import Path
-from types import FunctionType, TracebackType
-from typing import (
- Any,
- BinaryIO,
- Callable,
- ContextManager,
- Dict,
- Generator,
- Iterable,
- Iterator,
- List,
- Optional,
- TextIO,
- Tuple,
- Type,
- TypeVar,
- Union,
- cast,
-)
-
-from pip._vendor.packaging.requirements import Requirement
-from pip._vendor.pyproject_hooks import BuildBackendHookCaller
-from pip._vendor.tenacity import retry, stop_after_delay, wait_fixed
-
-from pip import __version__
-from pip._internal.exceptions import CommandError, ExternallyManagedEnvironment
-from pip._internal.locations import get_major_minor_version
-from pip._internal.utils.compat import WINDOWS
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-__all__ = [
- "rmtree",
- "display_path",
- "backup_dir",
- "ask",
- "splitext",
- "format_size",
- "is_installable_dir",
- "normalize_path",
- "renames",
- "get_prog",
- "captured_stdout",
- "ensure_dir",
- "remove_auth_from_url",
- "check_externally_managed",
- "ConfiguredBuildBackendHookCaller",
-]
-
-logger = logging.getLogger(__name__)
-
-T = TypeVar("T")
-ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType]
-VersionInfo = Tuple[int, int, int]
-NetlocTuple = Tuple[str, Tuple[Optional[str], Optional[str]]]
-OnExc = Callable[[FunctionType, Path, BaseException], Any]
-OnErr = Callable[[FunctionType, Path, ExcInfo], Any]
-
-
-def get_pip_version() -> str:
- pip_pkg_dir = os.path.join(os.path.dirname(__file__), "..", "..")
- pip_pkg_dir = os.path.abspath(pip_pkg_dir)
-
- return f"pip {__version__} from {pip_pkg_dir} (python {get_major_minor_version()})"
-
-
-def normalize_version_info(py_version_info: Tuple[int, ...]) -> Tuple[int, int, int]:
- """
- Convert a tuple of ints representing a Python version to one of length
- three.
-
-    :param py_version_info: a tuple of ints representing a Python version.
-        The tuple can have any length.
-
-    :return: a tuple of length three, padded with zeros or truncated as
-        needed.
- """
- if len(py_version_info) < 3:
- py_version_info += (3 - len(py_version_info)) * (0,)
- elif len(py_version_info) > 3:
- py_version_info = py_version_info[:3]
-
- return cast("VersionInfo", py_version_info)
-
-
-def ensure_dir(path: str) -> None:
- """os.path.makedirs without EEXIST."""
- try:
- os.makedirs(path)
- except OSError as e:
- # Windows can raise spurious ENOTEMPTY errors. See #6426.
- if e.errno != errno.EEXIST and e.errno != errno.ENOTEMPTY:
- raise
-
-
-def get_prog() -> str:
- try:
- prog = os.path.basename(sys.argv[0])
- if prog in ("__main__.py", "-c"):
- return f"{sys.executable} -m pip"
- else:
- return prog
- except (AttributeError, TypeError, IndexError):
- pass
- return "pip"
-
-
-# Retry every half second for up to 3 seconds
-# Tenacity raises RetryError by default, explicitly raise the original exception
-@retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
-def rmtree(
- dir: str,
- ignore_errors: bool = False,
- onexc: Optional[OnExc] = None,
-) -> None:
- if ignore_errors:
- onexc = _onerror_ignore
- if onexc is None:
- onexc = _onerror_reraise
- handler: OnErr = partial(
- # `[func, path, Union[ExcInfo, BaseException]] -> Any` is equivalent to
- # `Union[([func, path, ExcInfo] -> Any), ([func, path, BaseException] -> Any)]`.
- cast(Union[OnExc, OnErr], rmtree_errorhandler),
- onexc=onexc,
- )
- if sys.version_info >= (3, 12):
- # See https://docs.python.org/3.12/whatsnew/3.12.html#shutil.
- shutil.rmtree(dir, onexc=handler) # type: ignore
- else:
- shutil.rmtree(dir, onerror=handler) # type: ignore
-
-
-def _onerror_ignore(*_args: Any) -> None:
- pass
-
-
-def _onerror_reraise(*_args: Any) -> None:
- raise
-
-
-def rmtree_errorhandler(
- func: FunctionType,
- path: Path,
- exc_info: Union[ExcInfo, BaseException],
- *,
- onexc: OnExc = _onerror_reraise,
-) -> None:
- """
- `rmtree` error handler to 'force' a file remove (i.e. like `rm -f`).
-
-    * If a file is readonly then its write flag is set and the operation is
-      retried.
-
-    * `onexc` is the original callback from `rmtree(... onexc=onexc)`
-      that is chained at the end if the "rm -f" still fails.
- """
- try:
- st_mode = os.stat(path).st_mode
- except OSError:
- # it's equivalent to os.path.exists
- return
-
- if not st_mode & stat.S_IWRITE:
- # convert to read/write
- try:
- os.chmod(path, st_mode | stat.S_IWRITE)
- except OSError:
- pass
- else:
- # use the original function to repeat the operation
- try:
- func(path)
- return
- except OSError:
- pass
-
- if not isinstance(exc_info, BaseException):
- _, exc_info, _ = exc_info
- onexc(func, path, exc_info)
-
-
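Outside pip, the same retry envelope is available from the PyPI tenacity package (which pip vendors). A sketch that keeps the timing but omits the readonly-bit handler:

    import shutil

    from tenacity import retry, stop_after_delay, wait_fixed

    @retry(reraise=True, stop=stop_after_delay(3), wait=wait_fixed(0.5))
    def remove_tree(path: str) -> None:
        # Retry for up to 3 seconds at 0.5s intervals; briefly-held file
        # handles (antivirus, indexers) are the usual cause on Windows.
        shutil.rmtree(path)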
-def display_path(path: str) -> str:
- """Gives the display value for a given path, making it relative to cwd
- if possible."""
- path = os.path.normcase(os.path.abspath(path))
- if path.startswith(os.getcwd() + os.path.sep):
- path = "." + path[len(os.getcwd()) :]
- return path
-
-
-def backup_dir(dir: str, ext: str = ".bak") -> str:
- """Figure out the name of a directory to back up the given dir to
- (adding .bak, .bak2, etc)"""
- n = 1
- extension = ext
- while os.path.exists(dir + extension):
- n += 1
- extension = ext + str(n)
- return dir + extension
-
-
-def ask_path_exists(message: str, options: Iterable[str]) -> str:
- for action in os.environ.get("PIP_EXISTS_ACTION", "").split():
- if action in options:
- return action
- return ask(message, options)
-
-
-def _check_no_input(message: str) -> None:
- """Raise an error if no input is allowed."""
- if os.environ.get("PIP_NO_INPUT"):
- raise Exception(
- f"No input was expected ($PIP_NO_INPUT set); question: {message}"
- )
-
-
-def ask(message: str, options: Iterable[str]) -> str:
- """Ask the message interactively, with the given possible responses"""
-    while True:
- _check_no_input(message)
- response = input(message)
- response = response.strip().lower()
- if response not in options:
- print(
- "Your response ({!r}) was not one of the expected responses: "
- "{}".format(response, ", ".join(options))
- )
- else:
- return response
-
-
-def ask_input(message: str) -> str:
- """Ask for input interactively."""
- _check_no_input(message)
- return input(message)
-
-
-def ask_password(message: str) -> str:
- """Ask for a password interactively."""
- _check_no_input(message)
- return getpass.getpass(message)
-
-
-def strtobool(val: str) -> int:
- """Convert a string representation of truth to true (1) or false (0).
-
- True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
- are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
- 'val' is anything else.
- """
- val = val.lower()
- if val in ("y", "yes", "t", "true", "on", "1"):
- return 1
- elif val in ("n", "no", "f", "false", "off", "0"):
- return 0
- else:
- raise ValueError(f"invalid truth value {val!r}")
-
-
-def format_size(bytes: float) -> str:
- if bytes > 1000 * 1000:
- return f"{bytes / 1000.0 / 1000:.1f} MB"
- elif bytes > 10 * 1000:
- return f"{int(bytes / 1000)} kB"
- elif bytes > 1000:
- return f"{bytes / 1000.0:.1f} kB"
- else:
- return f"{int(bytes)} bytes"
-
-
-def tabulate(rows: Iterable[Iterable[Any]]) -> Tuple[List[str], List[int]]:
- """Return a list of formatted rows and a list of column sizes.
-
- For example::
-
- >>> tabulate([['foobar', 2000], [0xdeadbeef]])
-        (['foobar     2000', '3735928559'], [10, 4])
- """
- rows = [tuple(map(str, row)) for row in rows]
- sizes = [max(map(len, col)) for col in zip_longest(*rows, fillvalue="")]
- table = [" ".join(map(str.ljust, row, sizes)).rstrip() for row in rows]
- return table, sizes
-
-
-def is_installable_dir(path: str) -> bool:
- """Is path is a directory containing pyproject.toml or setup.py?
-
- If pyproject.toml exists, this is a PEP 517 project. Otherwise we look for
-    a legacy setuptools layout by identifying setup.py. We don't check for
-    setup.cfg because a setup.cfg-only layout is only possible for PEP 517
-    projects, which are already covered by the pyproject.toml check.
- """
- if not os.path.isdir(path):
- return False
- if os.path.isfile(os.path.join(path, "pyproject.toml")):
- return True
- if os.path.isfile(os.path.join(path, "setup.py")):
- return True
- return False
-
-
-def read_chunks(
- file: BinaryIO, size: int = io.DEFAULT_BUFFER_SIZE
-) -> Generator[bytes, None, None]:
- """Yield pieces of data from a file-like object until EOF."""
- while True:
- chunk = file.read(size)
- if not chunk:
- break
- yield chunk
-
-
-def normalize_path(path: str, resolve_symlinks: bool = True) -> str:
- """
- Convert a path to its canonical, case-normalized, absolute version.
-
- """
- path = os.path.expanduser(path)
- if resolve_symlinks:
- path = os.path.realpath(path)
- else:
- path = os.path.abspath(path)
- return os.path.normcase(path)
-
-
-def splitext(path: str) -> Tuple[str, str]:
- """Like os.path.splitext, but take off .tar too"""
- base, ext = posixpath.splitext(path)
- if base.lower().endswith(".tar"):
- ext = base[-4:] + ext
- base = base[:-4]
- return base, ext
-
-
-def renames(old: str, new: str) -> None:
- """Like os.renames(), but handles renaming across devices."""
- # Implementation borrowed from os.renames().
- head, tail = os.path.split(new)
- if head and tail and not os.path.exists(head):
- os.makedirs(head)
-
- shutil.move(old, new)
-
- head, tail = os.path.split(old)
- if head and tail:
- try:
- os.removedirs(head)
- except OSError:
- pass
-
-
-def is_local(path: str) -> bool:
- """
- Return True if path is within sys.prefix, if we're running in a virtualenv.
-
- If we're not in a virtualenv, all paths are considered "local."
-
- Caution: this function assumes the head of path has been normalized
- with normalize_path.
- """
- if not running_under_virtualenv():
- return True
- return path.startswith(normalize_path(sys.prefix))
-
-
-def write_output(msg: Any, *args: Any) -> None:
- logger.info(msg, *args)
-
-
-class StreamWrapper(StringIO):
- orig_stream: TextIO
-
- @classmethod
- def from_stream(cls, orig_stream: TextIO) -> "StreamWrapper":
- ret = cls()
- ret.orig_stream = orig_stream
- return ret
-
- # compileall.compile_dir() needs stdout.encoding to print to stdout
- # type ignore is because TextIOBase.encoding is writeable
- @property
- def encoding(self) -> str: # type: ignore
- return self.orig_stream.encoding
-
-
-@contextlib.contextmanager
-def captured_output(stream_name: str) -> Generator[StreamWrapper, None, None]:
- """Return a context manager used by captured_stdout/stdin/stderr
- that temporarily replaces the sys stream *stream_name* with a StringIO.
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- orig_stdout = getattr(sys, stream_name)
- setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout))
- try:
- yield getattr(sys, stream_name)
- finally:
- setattr(sys, stream_name, orig_stdout)
-
-
-def captured_stdout() -> ContextManager[StreamWrapper]:
- """Capture the output of sys.stdout:
-
- with captured_stdout() as stdout:
- print('hello')
- self.assertEqual(stdout.getvalue(), 'hello\n')
-
- Taken from Lib/support/__init__.py in the CPython repo.
- """
- return captured_output("stdout")
-
-
-def captured_stderr() -> ContextManager[StreamWrapper]:
- """
- See captured_stdout().
- """
- return captured_output("stderr")
-
-
-# Simulates an enum
-def enum(*sequential: Any, **named: Any) -> Type[Any]:
- enums = dict(zip(sequential, range(len(sequential))), **named)
- reverse = {value: key for key, value in enums.items()}
- enums["reverse_mapping"] = reverse
- return type("Enum", (), enums)
-
-
-def build_netloc(host: str, port: Optional[int]) -> str:
- """
- Build a netloc from a host-port pair
- """
- if port is None:
- return host
- if ":" in host:
- # Only wrap host with square brackets when it is IPv6
- host = f"[{host}]"
- return f"{host}:{port}"
-
-
-def build_url_from_netloc(netloc: str, scheme: str = "https") -> str:
- """
- Build a full URL from a netloc.
- """
- if netloc.count(":") >= 2 and "@" not in netloc and "[" not in netloc:
- # It must be a bare IPv6 address, so wrap it with brackets.
- netloc = f"[{netloc}]"
- return f"{scheme}://{netloc}"
-
-
-def parse_netloc(netloc: str) -> Tuple[Optional[str], Optional[int]]:
- """
- Return the host-port pair from a netloc.
- """
- url = build_url_from_netloc(netloc)
- parsed = urllib.parse.urlparse(url)
- return parsed.hostname, parsed.port
-
-
-def split_auth_from_netloc(netloc: str) -> NetlocTuple:
- """
- Parse out and remove the auth information from a netloc.
-
- Returns: (netloc, (username, password)).
- """
- if "@" not in netloc:
- return netloc, (None, None)
-
- # Split from the right because that's how urllib.parse.urlsplit()
- # behaves if more than one @ is present (which can be checked using
- # the password attribute of urlsplit()'s return value).
- auth, netloc = netloc.rsplit("@", 1)
- pw: Optional[str] = None
- if ":" in auth:
- # Split from the left because that's how urllib.parse.urlsplit()
- # behaves if more than one : is present (which again can be checked
- # using the password attribute of the return value)
- user, pw = auth.split(":", 1)
- else:
- user, pw = auth, None
-
- user = urllib.parse.unquote(user)
- if pw is not None:
- pw = urllib.parse.unquote(pw)
-
- return netloc, (user, pw)
-
-
-def redact_netloc(netloc: str) -> str:
- """
- Replace the sensitive data in a netloc with "****", if it exists.
-
- For example:
- - "user:pass@example.com" returns "user:****@example.com"
- - "accesstoken@example.com" returns "****@example.com"
- """
- netloc, (user, password) = split_auth_from_netloc(netloc)
- if user is None:
- return netloc
- if password is None:
- user = "****"
- password = ""
- else:
- user = urllib.parse.quote(user)
- password = ":****"
- return f"{user}{password}@{netloc}"
-
-
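A rough standalone equivalent can be built directly on urllib.parse, for contexts where pip's multiple-"@" handling isn't needed (function name is illustrative):

    import urllib.parse

    def redact_url(url: str) -> str:
        parts = urllib.parse.urlsplit(url)
        if parts.password is None:
            return url
        # Keep host:port, replace the password, re-quote the username.
        host = parts.netloc.rsplit("@", 1)[1]
        user = urllib.parse.quote(parts.username or "")
        return urllib.parse.urlunsplit(
            parts._replace(netloc=f"{user}:****@{host}")
        )

    print(redact_url("https://user:s3cret@example.com/simple/"))
    # https://user:****@example.com/simple/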
-def _transform_url(
- url: str, transform_netloc: Callable[[str], Tuple[Any, ...]]
-) -> Tuple[str, NetlocTuple]:
- """Transform and replace netloc in a url.
-
- transform_netloc is a function taking the netloc and returning a
- tuple. The first element of this tuple is the new netloc. The
- entire tuple is returned.
-
- Returns a tuple containing the transformed url as item 0 and the
- original tuple returned by transform_netloc as item 1.
- """
- purl = urllib.parse.urlsplit(url)
- netloc_tuple = transform_netloc(purl.netloc)
- # stripped url
- url_pieces = (purl.scheme, netloc_tuple[0], purl.path, purl.query, purl.fragment)
- surl = urllib.parse.urlunsplit(url_pieces)
- return surl, cast("NetlocTuple", netloc_tuple)
-
-
-def _get_netloc(netloc: str) -> NetlocTuple:
- return split_auth_from_netloc(netloc)
-
-
-def _redact_netloc(netloc: str) -> Tuple[str]:
- return (redact_netloc(netloc),)
-
-
-def split_auth_netloc_from_url(
- url: str,
-) -> Tuple[str, str, Tuple[Optional[str], Optional[str]]]:
- """
- Parse a url into separate netloc, auth, and url with no auth.
-
- Returns: (url_without_auth, netloc, (username, password))
- """
- url_without_auth, (netloc, auth) = _transform_url(url, _get_netloc)
- return url_without_auth, netloc, auth
-
-
-def remove_auth_from_url(url: str) -> str:
- """Return a copy of url with 'username:password@' removed."""
- # username/pass params are passed to subversion through flags
- # and are not recognized in the url.
- return _transform_url(url, _get_netloc)[0]
-
-
-def redact_auth_from_url(url: str) -> str:
- """Replace the password in a given url with ****."""
- return _transform_url(url, _redact_netloc)[0]
-
-
-def redact_auth_from_requirement(req: Requirement) -> str:
- """Replace the password in a given requirement url with ****."""
- if not req.url:
- return str(req)
- return str(req).replace(req.url, redact_auth_from_url(req.url))
-
-
-class HiddenText:
- def __init__(self, secret: str, redacted: str) -> None:
- self.secret = secret
- self.redacted = redacted
-
- def __repr__(self) -> str:
- return f"<HiddenText {str(self)!r}>"
-
- def __str__(self) -> str:
- return self.redacted
-
- # This is useful for testing.
- def __eq__(self, other: Any) -> bool:
- if type(self) != type(other):
- return False
-
- # The string being used for redaction doesn't also have to match,
- # just the raw, original string.
- return self.secret == other.secret
-
-
-def hide_value(value: str) -> HiddenText:
- return HiddenText(value, redacted="****")
-
-
-def hide_url(url: str) -> HiddenText:
- redacted = redact_auth_from_url(url)
- return HiddenText(url, redacted=redacted)
-
-
-def protect_pip_from_modification_on_windows(modifying_pip: bool) -> None:
- """Protection of pip.exe from modification on Windows
-
- On Windows, any operation modifying pip should be run as:
- python -m pip ...
- """
- pip_names = [
- "pip",
- f"pip{sys.version_info.major}",
- f"pip{sys.version_info.major}.{sys.version_info.minor}",
- ]
-
- # See https://github.com/pypa/pip/issues/1299 for more discussion
- should_show_use_python_msg = (
- modifying_pip and WINDOWS and os.path.basename(sys.argv[0]) in pip_names
- )
-
- if should_show_use_python_msg:
- new_command = [sys.executable, "-m", "pip"] + sys.argv[1:]
- raise CommandError(
- "To modify pip, please run the following command:\n{}".format(
- " ".join(new_command)
- )
- )
-
-
-def check_externally_managed() -> None:
- """Check whether the current environment is externally managed.
-
- If the ``EXTERNALLY-MANAGED`` config file is found, the current environment
- is considered externally managed, and an ExternallyManagedEnvironment is
- raised.
- """
- if running_under_virtualenv():
- return
- marker = os.path.join(sysconfig.get_path("stdlib"), "EXTERNALLY-MANAGED")
- if not os.path.isfile(marker):
- return
- raise ExternallyManagedEnvironment.from_config(marker)
-
-
-def is_console_interactive() -> bool:
- """Is this console interactive?"""
- return sys.stdin is not None and sys.stdin.isatty()
-
-
-def hash_file(path: str, blocksize: int = 1 << 20) -> Tuple[Any, int]:
- """Return (hash, length) for path using hashlib.sha256()"""
-
- h = hashlib.sha256()
- length = 0
- with open(path, "rb") as f:
- for block in read_chunks(f, size=blocksize):
- length += len(block)
- h.update(block)
- return h, length
-
-
-def pairwise(iterable: Iterable[Any]) -> Iterator[Tuple[Any, Any]]:
- """
- Return paired elements.
-
- For example:
- s -> (s0, s1), (s2, s3), (s4, s5), ...
- """
- iterable = iter(iterable)
- return zip_longest(iterable, iterable)
-
-
-def partition(
- pred: Callable[[T], bool],
- iterable: Iterable[T],
-) -> Tuple[Iterable[T], Iterable[T]]:
- """
- Use a predicate to partition entries into false entries and true entries,
- like
-
- partition(is_odd, range(10)) --> 0 2 4 6 8 and 1 3 5 7 9
- """
- t1, t2 = tee(iterable)
- return filterfalse(pred, t1), filter(pred, t2)
-
-
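For instance, splitting requirement strings into pinned and unpinned in one pass (the data here is made up for illustration):

    from itertools import filterfalse, tee

    def partition(pred, iterable):
        # Mirrors the function above: one tee'd iterator keeps the falsy
        # items, the other the truthy ones.
        t1, t2 = tee(iterable)
        return filterfalse(pred, t1), filter(pred, t2)

    reqs = ["pip==23.2", "rich", "setuptools>=61", "wheel"]
    unpinned, pinned = partition(lambda r: "==" in r, reqs)
    print(list(pinned))    # ['pip==23.2']
    print(list(unpinned))  # ['rich', 'setuptools>=61', 'wheel']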
-class ConfiguredBuildBackendHookCaller(BuildBackendHookCaller):
- def __init__(
- self,
- config_holder: Any,
- source_dir: str,
- build_backend: str,
- backend_path: Optional[str] = None,
- runner: Optional[Callable[..., None]] = None,
- python_executable: Optional[str] = None,
- ):
- super().__init__(
- source_dir, build_backend, backend_path, runner, python_executable
- )
- self.config_holder = config_holder
-
- def build_wheel(
- self,
- wheel_directory: str,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- metadata_directory: Optional[str] = None,
- ) -> str:
- cs = self.config_holder.config_settings
- return super().build_wheel(
- wheel_directory, config_settings=cs, metadata_directory=metadata_directory
- )
-
- def build_sdist(
- self,
- sdist_directory: str,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- ) -> str:
- cs = self.config_holder.config_settings
- return super().build_sdist(sdist_directory, config_settings=cs)
-
- def build_editable(
- self,
- wheel_directory: str,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- metadata_directory: Optional[str] = None,
- ) -> str:
- cs = self.config_holder.config_settings
- return super().build_editable(
- wheel_directory, config_settings=cs, metadata_directory=metadata_directory
- )
-
- def get_requires_for_build_wheel(
- self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
- ) -> List[str]:
- cs = self.config_holder.config_settings
- return super().get_requires_for_build_wheel(config_settings=cs)
-
- def get_requires_for_build_sdist(
- self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
- ) -> List[str]:
- cs = self.config_holder.config_settings
- return super().get_requires_for_build_sdist(config_settings=cs)
-
- def get_requires_for_build_editable(
- self, config_settings: Optional[Dict[str, Union[str, List[str]]]] = None
- ) -> List[str]:
- cs = self.config_holder.config_settings
- return super().get_requires_for_build_editable(config_settings=cs)
-
- def prepare_metadata_for_build_wheel(
- self,
- metadata_directory: str,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- _allow_fallback: bool = True,
- ) -> str:
- cs = self.config_holder.config_settings
- return super().prepare_metadata_for_build_wheel(
- metadata_directory=metadata_directory,
- config_settings=cs,
- _allow_fallback=_allow_fallback,
- )
-
- def prepare_metadata_for_build_editable(
- self,
- metadata_directory: str,
- config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
- _allow_fallback: bool = True,
- ) -> str:
- cs = self.config_holder.config_settings
- return super().prepare_metadata_for_build_editable(
- metadata_directory=metadata_directory,
- config_settings=cs,
- _allow_fallback=_allow_fallback,
- )
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/models.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/models.py
deleted file mode 100644
index b6bb21a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/models.py
+++ /dev/null
@@ -1,39 +0,0 @@
-"""Utilities for defining models
-"""
-
-import operator
-from typing import Any, Callable, Type
-
-
-class KeyBasedCompareMixin:
- """Provides comparison capabilities that is based on a key"""
-
- __slots__ = ["_compare_key", "_defining_class"]
-
- def __init__(self, key: Any, defining_class: Type["KeyBasedCompareMixin"]) -> None:
- self._compare_key = key
- self._defining_class = defining_class
-
- def __hash__(self) -> int:
- return hash(self._compare_key)
-
- def __lt__(self, other: Any) -> bool:
- return self._compare(other, operator.__lt__)
-
- def __le__(self, other: Any) -> bool:
- return self._compare(other, operator.__le__)
-
- def __gt__(self, other: Any) -> bool:
- return self._compare(other, operator.__gt__)
-
- def __ge__(self, other: Any) -> bool:
- return self._compare(other, operator.__ge__)
-
- def __eq__(self, other: Any) -> bool:
- return self._compare(other, operator.__eq__)
-
- def _compare(self, other: Any, method: Callable[[Any, Any], bool]) -> bool:
- if not isinstance(other, self._defining_class):
- return NotImplemented
-
- return method(self._compare_key, other._compare_key)
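The standard library's functools.total_ordering covers much of the same ground by deriving four of the six methods; pip's mixin instead spells all six out and pins comparisons to a defining class. A sketch of the stdlib pattern (class name is illustrative):

    import functools
    from typing import Any

    @functools.total_ordering
    class Versioned:
        # __eq__ and __lt__ on a compare key; total_ordering fills in
        # __le__, __gt__, and __ge__.
        def __init__(self, key: Any) -> None:
            self._key = key

        def __eq__(self, other: Any) -> bool:
            if not isinstance(other, Versioned):
                return NotImplemented
            return self._key == other._key

        def __lt__(self, other: Any) -> bool:
            if not isinstance(other, Versioned):
                return NotImplemented
            return self._key < other._key

        def __hash__(self) -> int:
            return hash(self._key)

    assert Versioned((1, 2)) < Versioned((1, 10))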
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/packaging.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/packaging.py
deleted file mode 100644
index b9f6af4..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/packaging.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import functools
-import logging
-import re
-from typing import NewType, Optional, Tuple, cast
-
-from pip._vendor.packaging import specifiers, version
-from pip._vendor.packaging.requirements import Requirement
-
-NormalizedExtra = NewType("NormalizedExtra", str)
-
-logger = logging.getLogger(__name__)
-
-
-def check_requires_python(
- requires_python: Optional[str], version_info: Tuple[int, ...]
-) -> bool:
- """
- Check if the given Python version matches a "Requires-Python" specifier.
-
- :param version_info: A 3-tuple of ints representing a Python
- major-minor-micro version to check (e.g. `sys.version_info[:3]`).
-
- :return: `True` if the given Python version satisfies the requirement.
- Otherwise, return `False`.
-
- :raises InvalidSpecifier: If `requires_python` has an invalid format.
- """
- if requires_python is None:
- # The package provides no information
- return True
- requires_python_specifier = specifiers.SpecifierSet(requires_python)
-
- python_version = version.parse(".".join(map(str, version_info)))
- return python_version in requires_python_specifier
-
-
-@functools.lru_cache(maxsize=512)
-def get_requirement(req_string: str) -> Requirement:
- """Construct a packaging.Requirement object with caching"""
- # Parsing requirement strings is expensive, and is also expected to happen
-    # with a low diversity of different arguments (at least relative to the
-    # number constructed). This method adds a cache to requirement object creation to
- # minimize repeated parsing of the same string to construct equivalent
- # Requirement objects.
- return Requirement(req_string)
-
-
-def safe_extra(extra: str) -> NormalizedExtra:
- """Convert an arbitrary string to a standard 'extra' name
-
- Any runs of non-alphanumeric characters are replaced with a single '_',
- and the result is always lowercased.
-
- This function is duplicated from ``pkg_resources``. Note that this is not
-    the same as either ``canonicalize_name`` or ``_egg_link_name``.
- """
- return cast(NormalizedExtra, re.sub("[^A-Za-z0-9.-]+", "_", extra).lower())
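Both helpers map directly onto the PyPI release of the packaging project. A short sketch using the unvendored distribution:

    import functools

    from packaging.requirements import Requirement
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # Requires-Python check, as in check_requires_python() above.
    spec = SpecifierSet(">=3.8,<3.13")
    print(Version("3.11.4") in spec)  # True
    print(Version("3.7.9") in spec)   # False

    # Cached parsing, as in get_requirement() above.
    @functools.lru_cache(maxsize=512)
    def parse_requirement(req_string: str) -> Requirement:
        return Requirement(req_string)

    assert parse_requirement("rich>=12") is parse_requirement("rich>=12")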
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py
deleted file mode 100644
index 96d1b24..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/setuptools_build.py
+++ /dev/null
@@ -1,146 +0,0 @@
-import sys
-import textwrap
-from typing import List, Optional, Sequence
-
-# Shim to wrap setup.py invocation with setuptools
-# Note that __file__ is handled via two {!r} *and* %r, to ensure that paths on
-# Windows are correctly handled (it should be "C:\\Users" not "C:\Users").
-_SETUPTOOLS_SHIM = textwrap.dedent(
- """
- exec(compile('''
- # This is <pip-setuptools-caller> -- a caller that pip uses to run setup.py
- #
- # - It imports setuptools before invoking setup.py, to enable projects that directly
- # import from `distutils.core` to work with newer packaging standards.
- # - It provides a clear error message when setuptools is not installed.
- # - It sets `sys.argv[0]` to the underlying `setup.py`, when invoking `setup.py` so
- # setuptools doesn't think the script is `-c`. This avoids the following warning:
-    #     manifest_maker: standard file '-c' not found.
- # - It generates a shim setup.py, for handling setup.cfg-only projects.
- import os, sys, tokenize
-
- try:
- import setuptools
- except ImportError as error:
- print(
- "ERROR: Can not execute `setup.py` since setuptools is not available in "
- "the build environment.",
- file=sys.stderr,
- )
- sys.exit(1)
-
- __file__ = %r
- sys.argv[0] = __file__
-
- if os.path.exists(__file__):
- filename = __file__
- with tokenize.open(__file__) as f:
- setup_py_code = f.read()
- else:
- filename = "<auto-generated setuptools caller>"
- setup_py_code = "from setuptools import setup; setup()"
-
- exec(compile(setup_py_code, filename, "exec"))
- ''' % ({!r},), "<pip-setuptools-caller>", "exec"))
- """
-).rstrip()
-
-
-def make_setuptools_shim_args(
- setup_py_path: str,
- global_options: Optional[Sequence[str]] = None,
- no_user_config: bool = False,
- unbuffered_output: bool = False,
-) -> List[str]:
- """
- Get setuptools command arguments with shim wrapped setup file invocation.
-
- :param setup_py_path: The path to setup.py to be wrapped.
- :param global_options: Additional global options.
- :param no_user_config: If True, disables personal user configuration.
- :param unbuffered_output: If True, adds the unbuffered switch to the
- argument list.
- """
- args = [sys.executable]
- if unbuffered_output:
- args += ["-u"]
- args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
- if global_options:
- args += global_options
- if no_user_config:
- args += ["--no-user-cfg"]
- return args
-
-
-def make_setuptools_bdist_wheel_args(
- setup_py_path: str,
- global_options: Sequence[str],
- build_options: Sequence[str],
- destination_dir: str,
-) -> List[str]:
-    # NOTE: Eventually, we'd want to also add -S to the flags here, when we're
- # isolating. Currently, it breaks Python in virtualenvs, because it
- # relies on site.py to find parts of the standard library outside the
- # virtualenv.
- args = make_setuptools_shim_args(
- setup_py_path, global_options=global_options, unbuffered_output=True
- )
- args += ["bdist_wheel", "-d", destination_dir]
- args += build_options
- return args
-
-
-def make_setuptools_clean_args(
- setup_py_path: str,
- global_options: Sequence[str],
-) -> List[str]:
- args = make_setuptools_shim_args(
- setup_py_path, global_options=global_options, unbuffered_output=True
- )
- args += ["clean", "--all"]
- return args
-
-
-def make_setuptools_develop_args(
- setup_py_path: str,
- *,
- global_options: Sequence[str],
- no_user_config: bool,
- prefix: Optional[str],
- home: Optional[str],
- use_user_site: bool,
-) -> List[str]:
- assert not (use_user_site and prefix)
-
- args = make_setuptools_shim_args(
- setup_py_path,
- global_options=global_options,
- no_user_config=no_user_config,
- )
-
- args += ["develop", "--no-deps"]
-
- if prefix:
- args += ["--prefix", prefix]
- if home is not None:
- args += ["--install-dir", home]
-
- if use_user_site:
- args += ["--user", "--prefix="]
-
- return args
-
-
-def make_setuptools_egg_info_args(
- setup_py_path: str,
- egg_info_dir: Optional[str],
- no_user_config: bool,
-) -> List[str]:
- args = make_setuptools_shim_args(setup_py_path, no_user_config=no_user_config)
-
- args += ["egg_info"]
-
- if egg_info_dir:
- args += ["--egg-base", egg_info_dir]
-
- return args
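The {!r}-then-%r layering that the comment at the top of the shim describes is easiest to see in miniature. A toy demonstration of pushing a Windows path through one repr layer:

    # {!r} embeds the path into generated source as a Python literal, so
    # backslashes in "C:\Users\..." survive compilation instead of being
    # read as escape sequences; the shim applies the same idea twice.
    template = "__file__ = {!r}\nprint(__file__)"
    code = template.format("C:\\Users\\build\\setup.py")
    exec(compile(code, "<pip-setuptools-caller>", "exec"))
    # prints: C:\Users\build\setup.py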
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py
deleted file mode 100644
index 79580b0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/subprocess.py
+++ /dev/null
@@ -1,260 +0,0 @@
-import logging
-import os
-import shlex
-import subprocess
-from typing import (
- TYPE_CHECKING,
- Any,
- Callable,
- Iterable,
- List,
- Mapping,
- Optional,
- Union,
-)
-
-from pip._vendor.rich.markup import escape
-
-from pip._internal.cli.spinners import SpinnerInterface, open_spinner
-from pip._internal.exceptions import InstallationSubprocessError
-from pip._internal.utils.logging import VERBOSE, subprocess_logger
-from pip._internal.utils.misc import HiddenText
-
-if TYPE_CHECKING:
- # Literal was introduced in Python 3.8.
- #
- # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
- from typing import Literal
-
-CommandArgs = List[Union[str, HiddenText]]
-
-
-def make_command(*args: Union[str, HiddenText, CommandArgs]) -> CommandArgs:
- """
- Create a CommandArgs object.
- """
- command_args: CommandArgs = []
- for arg in args:
- # Check for list instead of CommandArgs since CommandArgs is
- # only known during type-checking.
- if isinstance(arg, list):
- command_args.extend(arg)
- else:
- # Otherwise, arg is str or HiddenText.
- command_args.append(arg)
-
- return command_args
-
-
-def format_command_args(args: Union[List[str], CommandArgs]) -> str:
- """
- Format command arguments for display.
- """
- # For HiddenText arguments, display the redacted form by calling str().
- # Also, we don't apply str() to arguments that aren't HiddenText since
- # this can trigger a UnicodeDecodeError in Python 2 if the argument
- # has type unicode and includes a non-ascii character. (The type
- # checker doesn't ensure the annotations are correct in all cases.)
- return " ".join(
- shlex.quote(str(arg)) if isinstance(arg, HiddenText) else shlex.quote(arg)
- for arg in args
- )
-
-
-def reveal_command_args(args: Union[List[str], CommandArgs]) -> List[str]:
- """
- Return the arguments in their raw, unredacted form.
- """
- return [arg.secret if isinstance(arg, HiddenText) else arg for arg in args]
-
-
-def call_subprocess(
- cmd: Union[List[str], CommandArgs],
- show_stdout: bool = False,
- cwd: Optional[str] = None,
- on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
- extra_ok_returncodes: Optional[Iterable[int]] = None,
- extra_environ: Optional[Mapping[str, Any]] = None,
- unset_environ: Optional[Iterable[str]] = None,
- spinner: Optional[SpinnerInterface] = None,
- log_failed_cmd: Optional[bool] = True,
- stdout_only: Optional[bool] = False,
- *,
- command_desc: str,
-) -> str:
- """
- Args:
- show_stdout: if true, use INFO to log the subprocess's stderr and
- stdout streams. Otherwise, use DEBUG. Defaults to False.
- extra_ok_returncodes: an iterable of integer return codes that are
- acceptable, in addition to 0. Defaults to None, which means [].
- unset_environ: an iterable of environment variable names to unset
- prior to calling subprocess.Popen().
- log_failed_cmd: if false, failed commands are not logged, only raised.
- stdout_only: if true, return only stdout, else return both. When true,
- logging of both stdout and stderr occurs when the subprocess has
- terminated, else logging occurs as subprocess output is produced.
- """
- if extra_ok_returncodes is None:
- extra_ok_returncodes = []
- if unset_environ is None:
- unset_environ = []
- # Most places in pip use show_stdout=False. What this means is--
- #
- # - We connect the child's output (combined stderr and stdout) to a
- # single pipe, which we read.
- # - We log this output to stderr at DEBUG level as it is received.
- # - If DEBUG logging isn't enabled (e.g. if --verbose logging wasn't
- # requested), then we show a spinner so the user can still see the
- # subprocess is in progress.
- # - If the subprocess exits with an error, we log the output to stderr
- # at ERROR level if it hasn't already been displayed to the console
- # (e.g. if --verbose logging wasn't enabled). This way we don't log
- # the output to the console twice.
- #
- # If show_stdout=True, then the above is still done, but with DEBUG
- # replaced by INFO.
- if show_stdout:
- # Then log the subprocess output at INFO level.
- log_subprocess: Callable[..., None] = subprocess_logger.info
- used_level = logging.INFO
- else:
- # Then log the subprocess output using VERBOSE. This also ensures
- # it will be logged to the log file (aka user_log), if enabled.
- log_subprocess = subprocess_logger.verbose
- used_level = VERBOSE
-
- # Whether the subprocess will be visible in the console.
- showing_subprocess = subprocess_logger.getEffectiveLevel() <= used_level
-
- # Only use the spinner if we're not showing the subprocess output
- # and we have a spinner.
- use_spinner = not showing_subprocess and spinner is not None
-
- log_subprocess("Running command %s", command_desc)
- env = os.environ.copy()
- if extra_environ:
- env.update(extra_environ)
- for name in unset_environ:
- env.pop(name, None)
- try:
- proc = subprocess.Popen(
- # Convert HiddenText objects to the underlying str.
- reveal_command_args(cmd),
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT if not stdout_only else subprocess.PIPE,
- cwd=cwd,
- env=env,
- errors="backslashreplace",
- )
- except Exception as exc:
- if log_failed_cmd:
- subprocess_logger.critical(
- "Error %s while executing command %s",
- exc,
- command_desc,
- )
- raise
- all_output = []
- if not stdout_only:
- assert proc.stdout
- assert proc.stdin
- proc.stdin.close()
- # In this mode, stdout and stderr are in the same pipe.
- while True:
- line: str = proc.stdout.readline()
- if not line:
- break
- line = line.rstrip()
- all_output.append(line + "\n")
-
- # Show the line immediately.
- log_subprocess(line)
- # Update the spinner.
- if use_spinner:
- assert spinner
- spinner.spin()
- try:
- proc.wait()
- finally:
- if proc.stdout:
- proc.stdout.close()
- output = "".join(all_output)
- else:
- # In this mode, stdout and stderr are in different pipes.
- # We must use communicate() which is the only safe way to read both.
- out, err = proc.communicate()
- # log line by line to preserve pip log indenting
- for out_line in out.splitlines():
- log_subprocess(out_line)
- all_output.append(out)
- for err_line in err.splitlines():
- log_subprocess(err_line)
- all_output.append(err)
- output = out
-
- proc_had_error = proc.returncode and proc.returncode not in extra_ok_returncodes
- if use_spinner:
- assert spinner
- if proc_had_error:
- spinner.finish("error")
- else:
- spinner.finish("done")
- if proc_had_error:
- if on_returncode == "raise":
- error = InstallationSubprocessError(
- command_description=command_desc,
- exit_code=proc.returncode,
- output_lines=all_output if not showing_subprocess else None,
- )
- if log_failed_cmd:
- subprocess_logger.error("%s", error, extra={"rich": True})
- subprocess_logger.verbose(
- "[bold magenta]full command[/]: [blue]%s[/]",
- escape(format_command_args(cmd)),
- extra={"markup": True},
- )
- subprocess_logger.verbose(
- "[bold magenta]cwd[/]: %s",
- escape(cwd or "[inherit]"),
- extra={"markup": True},
- )
-
- raise error
- elif on_returncode == "warn":
- subprocess_logger.warning(
- 'Command "%s" had error code %s in %s',
- command_desc,
- proc.returncode,
- cwd,
- )
- elif on_returncode == "ignore":
- pass
- else:
- raise ValueError(f"Invalid value: on_returncode={on_returncode!r}")
- return output
-
-
-def runner_with_spinner_message(message: str) -> Callable[..., None]:
- """Provide a subprocess_runner that shows a spinner message.
-
-    Intended for use with BuildBackendHookCaller. Thus, the runner has
- an API that matches what's expected by BuildBackendHookCaller.subprocess_runner.
- """
-
- def runner(
- cmd: List[str],
- cwd: Optional[str] = None,
- extra_environ: Optional[Mapping[str, Any]] = None,
- ) -> None:
- with open_spinner(message) as spinner:
- call_subprocess(
- cmd,
- command_desc=message,
- cwd=cwd,
- extra_environ=extra_environ,
- spinner=spinner,
- )
-
- return runner
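The single-pipe mode (stderr merged into stdout, echoed line by line) is reusable on its own. A condensed sketch without the spinner and logging plumbing (function name is illustrative):

    import subprocess
    import sys
    from typing import List

    def stream_command(cmd: List[str]) -> int:
        # Merge stderr into stdout and echo output as it arrives, as
        # call_subprocess() does when stdout_only is false.
        proc = subprocess.Popen(
            cmd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            text=True,
            errors="backslashreplace",
        )
        assert proc.stdout is not None and proc.stdin is not None
        proc.stdin.close()
        for line in proc.stdout:
            print(line.rstrip())
        proc.stdout.close()
        return proc.wait()

    rc = stream_command([sys.executable, "-c", "print('hi'); raise SystemExit(3)"])
    print("exit:", rc)  # exit: 3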
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py
deleted file mode 100644
index 4eec5f3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/temp_dir.py
+++ /dev/null
@@ -1,296 +0,0 @@
-import errno
-import itertools
-import logging
-import os.path
-import tempfile
-import traceback
-from contextlib import ExitStack, contextmanager
-from pathlib import Path
-from typing import (
- Any,
- Callable,
- Dict,
- Generator,
- List,
- Optional,
- TypeVar,
- Union,
-)
-
-from pip._internal.utils.misc import enum, rmtree
-
-logger = logging.getLogger(__name__)
-
-_T = TypeVar("_T", bound="TempDirectory")
-
-
-# Kinds of temporary directories. Only needed for ones that are
-# globally-managed.
-tempdir_kinds = enum(
- BUILD_ENV="build-env",
- EPHEM_WHEEL_CACHE="ephem-wheel-cache",
- REQ_BUILD="req-build",
-)
-
-
-_tempdir_manager: Optional[ExitStack] = None
-
-
-@contextmanager
-def global_tempdir_manager() -> Generator[None, None, None]:
- global _tempdir_manager
- with ExitStack() as stack:
- old_tempdir_manager, _tempdir_manager = _tempdir_manager, stack
- try:
- yield
- finally:
- _tempdir_manager = old_tempdir_manager
-
-
-class TempDirectoryTypeRegistry:
- """Manages temp directory behavior"""
-
- def __init__(self) -> None:
- self._should_delete: Dict[str, bool] = {}
-
- def set_delete(self, kind: str, value: bool) -> None:
- """Indicate whether a TempDirectory of the given kind should be
- auto-deleted.
- """
- self._should_delete[kind] = value
-
- def get_delete(self, kind: str) -> bool:
- """Get configured auto-delete flag for a given TempDirectory type,
- default True.
- """
- return self._should_delete.get(kind, True)
-
-
-_tempdir_registry: Optional[TempDirectoryTypeRegistry] = None
-
-
-@contextmanager
-def tempdir_registry() -> Generator[TempDirectoryTypeRegistry, None, None]:
- """Provides a scoped global tempdir registry that can be used to dictate
- whether directories should be deleted.
- """
- global _tempdir_registry
- old_tempdir_registry = _tempdir_registry
- _tempdir_registry = TempDirectoryTypeRegistry()
- try:
- yield _tempdir_registry
- finally:
- _tempdir_registry = old_tempdir_registry
-
-
-class _Default:
- pass
-
-
-_default = _Default()
-
-
-class TempDirectory:
- """Helper class that owns and cleans up a temporary directory.
-
- This class can be used as a context manager or as an OO representation of a
- temporary directory.
-
- Attributes:
- path
-            Location of the created temporary directory
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- Methods:
- cleanup()
- Deletes the temporary directory
-
- When used as a context manager, if the delete attribute is True, on
- exiting the context the temporary directory is deleted.
- """
-
- def __init__(
- self,
- path: Optional[str] = None,
- delete: Union[bool, None, _Default] = _default,
- kind: str = "temp",
- globally_managed: bool = False,
- ignore_cleanup_errors: bool = True,
- ):
- super().__init__()
-
- if delete is _default:
- if path is not None:
- # If we were given an explicit directory, resolve delete option
- # now.
- delete = False
- else:
- # Otherwise, we wait until cleanup and see what
- # tempdir_registry says.
- delete = None
-
- # The only time we specify a path is for editables, where it
- # is the value of the --src option.
- if path is None:
- path = self._create(kind)
-
- self._path = path
- self._deleted = False
- self.delete = delete
- self.kind = kind
- self.ignore_cleanup_errors = ignore_cleanup_errors
-
- if globally_managed:
- assert _tempdir_manager is not None
- _tempdir_manager.enter_context(self)
-
- @property
- def path(self) -> str:
- assert not self._deleted, f"Attempted to access deleted path: {self._path}"
- return self._path
-
- def __repr__(self) -> str:
- return f"<{self.__class__.__name__} {self.path!r}>"
-
- def __enter__(self: _T) -> _T:
- return self
-
- def __exit__(self, exc: Any, value: Any, tb: Any) -> None:
- if self.delete is not None:
- delete = self.delete
- elif _tempdir_registry:
- delete = _tempdir_registry.get_delete(self.kind)
- else:
- delete = True
-
- if delete:
- self.cleanup()
-
- def _create(self, kind: str) -> str:
- """Create a temporary directory and store its path in self.path"""
- # We realpath here because some systems have their default tmpdir
- # symlinked to another directory. This tends to confuse build
- # scripts, so we canonicalize the path by traversing potential
- # symlinks here.
- path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
- logger.debug("Created temporary directory: %s", path)
- return path
-
- def cleanup(self) -> None:
- """Remove the temporary directory created and reset state"""
- self._deleted = True
- if not os.path.exists(self._path):
- return
-
- errors: List[BaseException] = []
-
- def onerror(
- func: Callable[..., Any],
- path: Path,
- exc_val: BaseException,
- ) -> None:
- """Log a warning for a `rmtree` error and continue"""
- formatted_exc = "\n".join(
- traceback.format_exception_only(type(exc_val), exc_val)
- )
- formatted_exc = formatted_exc.rstrip() # remove trailing new line
- if func in (os.unlink, os.remove, os.rmdir):
- logger.debug(
- "Failed to remove a temporary file '%s' due to %s.\n",
- path,
- formatted_exc,
- )
- else:
- logger.debug("%s failed with %s.", func.__qualname__, formatted_exc)
- errors.append(exc_val)
-
- if self.ignore_cleanup_errors:
- try:
- # first try strictly; the underlying rmtree retries (via tenacity) to handle ephemeral errors
- rmtree(self._path, ignore_errors=False)
- except OSError:
- # last pass ignore/log all errors
- rmtree(self._path, onexc=onerror)
- if errors:
- logger.warning(
- "Failed to remove contents in a temporary directory '%s'.\n"
- "You can safely remove it manually.",
- self._path,
- )
- else:
- rmtree(self._path)
-
-
-class AdjacentTempDirectory(TempDirectory):
- """Helper class that creates a temporary directory adjacent to a real one.
-
- Attributes:
- original
- The original directory to create a temp directory for.
- path
- After calling create() or entering, contains the full
- path to the temporary directory.
- delete
- Whether the directory should be deleted when exiting
- (when used as a contextmanager)
-
- """
-
- # The characters that may be used to name the temp directory
- # We always prepend a ~ and then rotate through these until
- # a usable name is found.
- # pkg_resources raises a different error for .dist-info folder
- # with leading '-' and invalid metadata
- LEADING_CHARS = "-~.=%0123456789"
-
- def __init__(self, original: str, delete: Optional[bool] = None) -> None:
- self.original = original.rstrip("/\\")
- super().__init__(delete=delete)
-
- @classmethod
- def _generate_names(cls, name: str) -> Generator[str, None, None]:
- """Generates a series of temporary names.
-
- The algorithm replaces the leading characters in the name
- with ones that are valid filesystem characters, but are not
- valid package names (for both Python and pip definitions of
- package).
- """
- for i in range(1, len(name)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i - 1
- ):
- new_name = "~" + "".join(candidate) + name[i:]
- if new_name != name:
- yield new_name
-
- # If we make it this far, we will have to make a longer name
- for i in range(len(cls.LEADING_CHARS)):
- for candidate in itertools.combinations_with_replacement(
- cls.LEADING_CHARS, i
- ):
- new_name = "~" + "".join(candidate) + name
- if new_name != name:
- yield new_name
-
- def _create(self, kind: str) -> str:
- root, name = os.path.split(self.original)
- for candidate in self._generate_names(name):
- path = os.path.join(root, candidate)
- try:
- os.mkdir(path)
- except OSError as ex:
- # Continue if the name exists already
- if ex.errno != errno.EEXIST:
- raise
- else:
- path = os.path.realpath(path)
- break
- else:
- # Final fallback on the default behavior.
- path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
-
- logger.debug("Created temporary directory: %s", path)
- return path
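
A short sketch of how these pieces compose, assuming the default tempdir kinds defined above; globally managed directories are entered into the active ExitStack and removed when global_tempdir_manager() exits, unless the registry says otherwise:

    from pip._internal.utils.temp_dir import (
        TempDirectory,
        global_tempdir_manager,
        tempdir_registry,
    )

    with tempdir_registry() as registry, global_tempdir_manager():
        # Hypothetical debugging scenario: keep build environments around.
        registry.set_delete("build-env", False)
        tmp = TempDirectory(kind="req-build", globally_managed=True)
        print(tmp.path)  # e.g. /tmp/pip-req-build-xxxxxxxx
    # "req-build" directories are deleted here; "build-env" ones are kept.
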
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/unpacking.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/unpacking.py
deleted file mode 100644
index 78b5c13..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/unpacking.py
+++ /dev/null
@@ -1,257 +0,0 @@
-"""Utilities related archives.
-"""
-
-import logging
-import os
-import shutil
-import stat
-import tarfile
-import zipfile
-from typing import Iterable, List, Optional
-from zipfile import ZipInfo
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils.filetypes import (
- BZ2_EXTENSIONS,
- TAR_EXTENSIONS,
- XZ_EXTENSIONS,
- ZIP_EXTENSIONS,
-)
-from pip._internal.utils.misc import ensure_dir
-
-logger = logging.getLogger(__name__)
-
-
-SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS
-
-try:
- import bz2 # noqa
-
- SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS
-except ImportError:
- logger.debug("bz2 module is not available")
-
-try:
- # In the stdlib since Python 3.3, but absent if Python was built without lzma support
- import lzma # noqa
-
- SUPPORTED_EXTENSIONS += XZ_EXTENSIONS
-except ImportError:
- logger.debug("lzma module is not available")
-
-
-def current_umask() -> int:
- """Get the current umask which involves having to set it temporarily."""
- mask = os.umask(0)
- os.umask(mask)
- return mask
-
-
-def split_leading_dir(path: str) -> List[str]:
- path = path.lstrip("/").lstrip("\\")
- if "/" in path and (
- ("\\" in path and path.find("/") < path.find("\\")) or "\\" not in path
- ):
- return path.split("/", 1)
- elif "\\" in path:
- return path.split("\\", 1)
- else:
- return [path, ""]
-
-
-def has_leading_dir(paths: Iterable[str]) -> bool:
- """Returns true if all the paths have the same leading path name
- (i.e., everything is in one subdirectory in an archive)"""
- common_prefix = None
- for path in paths:
- prefix, rest = split_leading_dir(path)
- if not prefix:
- return False
- elif common_prefix is None:
- common_prefix = prefix
- elif prefix != common_prefix:
- return False
- return True
-
-
-def is_within_directory(directory: str, target: str) -> bool:
- """
- Return true if the absolute path of target is within the directory
- """
- abs_directory = os.path.abspath(directory)
- abs_target = os.path.abspath(target)
-
- prefix = os.path.commonprefix([abs_directory, abs_target])
- return prefix == abs_directory
-
-
-def set_extracted_file_to_default_mode_plus_executable(path: str) -> None:
- """
- Make the file present at path executable for user/group/world
- ("chmod +x"). This is a no-op on Windows per the Python docs.
- """
- os.chmod(path, (0o777 & ~current_umask() | 0o111))
-
-
-def zip_item_is_executable(info: ZipInfo) -> bool:
- mode = info.external_attr >> 16
- # if mode and regular file and any execute permissions for
- # user/group/world?
- return bool(mode and stat.S_ISREG(mode) and mode & 0o111)
-
-
-def unzip_file(filename: str, location: str, flatten: bool = True) -> None:
- """
- Unzip the file (with path `filename`) to the destination `location`. All
- files are written based on system defaults and umask (i.e. permissions are
- not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- zipfp = open(filename, "rb")
- try:
- zip = zipfile.ZipFile(zipfp, allowZip64=True)
- leading = has_leading_dir(zip.namelist()) and flatten
- for info in zip.infolist():
- name = info.filename
- fn = name
- if leading:
- fn = split_leading_dir(name)[1]
- fn = os.path.join(location, fn)
- dir = os.path.dirname(fn)
- if not is_within_directory(location, fn):
- message = (
- "The zip file ({}) has a file ({}) trying to install "
- "outside target directory ({})"
- )
- raise InstallationError(message.format(filename, fn, location))
- if fn.endswith("/") or fn.endswith("\\"):
- # A directory
- ensure_dir(fn)
- else:
- ensure_dir(dir)
- # Don't use read() to avoid allocating an arbitrarily large
- # chunk of memory for the file's content
- fp = zip.open(name)
- try:
- with open(fn, "wb") as destfp:
- shutil.copyfileobj(fp, destfp)
- finally:
- fp.close()
- if zip_item_is_executable(info):
- set_extracted_file_to_default_mode_plus_executable(fn)
- finally:
- zipfp.close()
-
-
-def untar_file(filename: str, location: str) -> None:
- """
- Untar the file (with path `filename`) to the destination `location`.
- All files are written based on system defaults and umask (i.e. permissions
- are not preserved), except that regular file members with any execute
- permissions (user, group, or world) have "chmod +x" applied after being
- written. Note that for windows, any execute changes using os.chmod are
- no-ops per the python docs.
- """
- ensure_dir(location)
- if filename.lower().endswith(".gz") or filename.lower().endswith(".tgz"):
- mode = "r:gz"
- elif filename.lower().endswith(BZ2_EXTENSIONS):
- mode = "r:bz2"
- elif filename.lower().endswith(XZ_EXTENSIONS):
- mode = "r:xz"
- elif filename.lower().endswith(".tar"):
- mode = "r"
- else:
- logger.warning(
- "Cannot determine compression type for file %s",
- filename,
- )
- mode = "r:*"
- tar = tarfile.open(filename, mode, encoding="utf-8")
- try:
- leading = has_leading_dir([member.name for member in tar.getmembers()])
- for member in tar.getmembers():
- fn = member.name
- if leading:
- fn = split_leading_dir(fn)[1]
- path = os.path.join(location, fn)
- if not is_within_directory(location, path):
- message = (
- "The tar file ({}) has a file ({}) trying to install "
- "outside target directory ({})"
- )
- raise InstallationError(message.format(filename, path, location))
- if member.isdir():
- ensure_dir(path)
- elif member.issym():
- try:
- tar._extract_member(member, path)
- except Exception as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- "In the tar file %s the member %s is invalid: %s",
- filename,
- member.name,
- exc,
- )
- continue
- else:
- try:
- fp = tar.extractfile(member)
- except (KeyError, AttributeError) as exc:
- # Some corrupt tar files seem to produce this
- # (specifically bad symlinks)
- logger.warning(
- "In the tar file %s the member %s is invalid: %s",
- filename,
- member.name,
- exc,
- )
- continue
- ensure_dir(os.path.dirname(path))
- assert fp is not None
- with open(path, "wb") as destfp:
- shutil.copyfileobj(fp, destfp)
- fp.close()
- # Update the timestamp (useful for cython compiled files)
- tar.utime(member, path)
- # does the member have any execute permissions for user/group/world?
- if member.mode & 0o111:
- set_extracted_file_to_default_mode_plus_executable(path)
- finally:
- tar.close()
-
-
-def unpack_file(
- filename: str,
- location: str,
- content_type: Optional[str] = None,
-) -> None:
- filename = os.path.realpath(filename)
- if (
- content_type == "application/zip"
- or filename.lower().endswith(ZIP_EXTENSIONS)
- or zipfile.is_zipfile(filename)
- ):
- unzip_file(filename, location, flatten=not filename.endswith(".whl"))
- elif (
- content_type == "application/x-gzip"
- or tarfile.is_tarfile(filename)
- or filename.lower().endswith(TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)
- ):
- untar_file(filename, location)
- else:
- # FIXME: handle?
- # FIXME: magic signatures?
- logger.critical(
- "Cannot unpack file %s (destination: %s, content-type: %s); "
- "cannot detect archive format",
- filename,
- location,
- content_type,
- )
- raise InstallationError(f"Cannot determine archive format of {filename}")
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/urls.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/urls.py
deleted file mode 100644
index 6ba2e04..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/urls.py
+++ /dev/null
@@ -1,62 +0,0 @@
-import os
-import string
-import urllib.parse
-import urllib.request
-from typing import Optional
-
-from .compat import WINDOWS
-
-
-def get_url_scheme(url: str) -> Optional[str]:
- if ":" not in url:
- return None
- return url.split(":", 1)[0].lower()
-
-
-def path_to_url(path: str) -> str:
- """
- Convert a path to a file: URL. The path will be made absolute and have
- quoted path parts.
- """
- path = os.path.normpath(os.path.abspath(path))
- url = urllib.parse.urljoin("file:", urllib.request.pathname2url(path))
- return url
-
-
-def url_to_path(url: str) -> str:
- """
- Convert a file: URL to a path.
- """
- assert url.startswith(
- "file:"
- ), f"You can only turn file: urls into filenames (not {url!r})"
-
- _, netloc, path, _, _ = urllib.parse.urlsplit(url)
-
- if not netloc or netloc == "localhost":
- # According to RFC 8089, same as empty authority.
- netloc = ""
- elif WINDOWS:
- # If we have a UNC path, prepend UNC share notation.
- netloc = "\\\\" + netloc
- else:
- raise ValueError(
- f"non-local file URIs are not supported on this platform: {url!r}"
- )
-
- path = urllib.request.url2pathname(netloc + path)
-
- # On Windows, urlsplit parses the path as something like "/C:/Users/foo".
- # This creates issues for path-related functions like io.open(), so we try
- # to detect and strip the leading slash.
- if (
- WINDOWS
- and not netloc # Not UNC.
- and len(path) >= 3
- and path[0] == "/" # Leading slash to strip.
- and path[1] in string.ascii_letters # Drive letter.
- and path[2:4] in (":", ":/") # Colon + end of string, or colon + absolute path.
- ):
- path = path[1:]
-
- return path
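
A round-trip sketch for the two converters above, assuming a POSIX filesystem:

    >>> from pip._internal.utils.urls import path_to_url, url_to_path
    >>> path_to_url("/home/user/project")
    'file:///home/user/project'
    >>> url_to_path("file:///home/user/project")
    '/home/user/project'
    >>> # RFC 8089: a "localhost" authority is equivalent to an empty one.
    >>> url_to_path("file://localhost/home/user/project")
    '/home/user/project'
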
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py
deleted file mode 100644
index 882e36f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py
+++ /dev/null
@@ -1,104 +0,0 @@
-import logging
-import os
-import re
-import site
-import sys
-from typing import List, Optional
-
-logger = logging.getLogger(__name__)
-_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
- r"include-system-site-packages\s*=\s*(?P<value>true|false)"
-)
-
-
-def _running_under_venv() -> bool:
- """Checks if sys.base_prefix and sys.prefix match.
-
- This handles PEP 405 compliant virtual environments.
- """
- return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
-
-
-def _running_under_legacy_virtualenv() -> bool:
- """Checks if sys.real_prefix is set.
-
- This handles virtual environments created with pypa's virtualenv.
- """
- # pypa/virtualenv case
- return hasattr(sys, "real_prefix")
-
-
-def running_under_virtualenv() -> bool:
- """True if we're running inside a virtual environment, False otherwise."""
- return _running_under_venv() or _running_under_legacy_virtualenv()
-
-
-def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
- """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
-
- Returns None, if it could not read/access the file.
- """
- pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
- try:
- # Although PEP 405 does not specify, the built-in venv module always
- # writes with UTF-8. (pypa/pip#8717)
- with open(pyvenv_cfg_file, encoding="utf-8") as f:
- return f.read().splitlines() # avoids trailing newlines
- except OSError:
- return None
-
-
-def _no_global_under_venv() -> bool:
- """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
-
- PEP 405 specifies that when system site-packages are not supposed to be
- visible from a virtual environment, `pyvenv.cfg` must contain the following
- line:
-
- include-system-site-packages = false
-
- Additionally, log a warning if accessing the file fails.
- """
- cfg_lines = _get_pyvenv_cfg_lines()
- if cfg_lines is None:
- # We're not in a "sane" venv, so assume there is no system
- # site-packages access (since that's PEP 405's default state).
- logger.warning(
- "Could not access 'pyvenv.cfg' despite a virtual environment "
- "being active. Assuming global site-packages is not accessible "
- "in this environment."
- )
- return True
-
- for line in cfg_lines:
- match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
- if match is not None and match.group("value") == "false":
- return True
- return False
-
-
-def _no_global_under_legacy_virtualenv() -> bool:
- """Check if "no-global-site-packages.txt" exists beside site.py
-
- This mirrors logic in pypa/virtualenv for determining whether system
- site-packages are visible in the virtual environment.
- """
- site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
- no_global_site_packages_file = os.path.join(
- site_mod_dir,
- "no-global-site-packages.txt",
- )
- return os.path.exists(no_global_site_packages_file)
-
-
-def virtualenv_no_global() -> bool:
- """Returns a boolean, whether running in venv with no system site-packages."""
- # PEP 405 compliance needs to be checked first since virtualenv >=20 would
- # return True for both checks, but is only able to use the PEP 405 config.
- if _running_under_venv():
- return _no_global_under_venv()
-
- if _running_under_legacy_virtualenv():
- return _no_global_under_legacy_virtualenv()
-
- return False
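
A minimal sketch of the public entry points above; pip consults checks like these when deciding, for example, whether a --user install makes sense:

    from pip._internal.utils.virtualenv import (
        running_under_virtualenv,
        virtualenv_no_global,
    )

    if running_under_virtualenv() and virtualenv_no_global():
        # A PEP 405 venv (or legacy virtualenv) isolated from the
        # system site-packages.
        print("isolated virtual environment")
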
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/utils/wheel.py b/venv/lib/python3.11/site-packages/pip/_internal/utils/wheel.py
deleted file mode 100644
index 3551f8f..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/utils/wheel.py
+++ /dev/null
@@ -1,134 +0,0 @@
-"""Support functions for working with wheel files.
-"""
-
-import logging
-from email.message import Message
-from email.parser import Parser
-from typing import Tuple
-from zipfile import BadZipFile, ZipFile
-
-from pip._vendor.packaging.utils import canonicalize_name
-
-from pip._internal.exceptions import UnsupportedWheel
-
-VERSION_COMPATIBLE = (1, 0)
-
-
-logger = logging.getLogger(__name__)
-
-
-def parse_wheel(wheel_zip: ZipFile, name: str) -> Tuple[str, Message]:
- """Extract information from the provided wheel, ensuring it meets basic
- standards.
-
- Returns the name of the .dist-info directory and the parsed WHEEL metadata.
- """
- try:
- info_dir = wheel_dist_info_dir(wheel_zip, name)
- metadata = wheel_metadata(wheel_zip, info_dir)
- version = wheel_version(metadata)
- except UnsupportedWheel as e:
- raise UnsupportedWheel(f"{name} has an invalid wheel, {str(e)}")
-
- check_compatibility(version, name)
-
- return info_dir, metadata
-
-
-def wheel_dist_info_dir(source: ZipFile, name: str) -> str:
- """Returns the name of the contained .dist-info directory.
-
- Raises AssertionError or UnsupportedWheel if not found, >1 found, or
- it doesn't match the provided name.
- """
- # Zip file path separators must be /
- subdirs = {p.split("/", 1)[0] for p in source.namelist()}
-
- info_dirs = [s for s in subdirs if s.endswith(".dist-info")]
-
- if not info_dirs:
- raise UnsupportedWheel(".dist-info directory not found")
-
- if len(info_dirs) > 1:
- raise UnsupportedWheel(
- "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
- )
-
- info_dir = info_dirs[0]
-
- info_dir_name = canonicalize_name(info_dir)
- canonical_name = canonicalize_name(name)
- if not info_dir_name.startswith(canonical_name):
- raise UnsupportedWheel(
- f".dist-info directory {info_dir!r} does not start with {canonical_name!r}"
- )
-
- return info_dir
-
-
-def read_wheel_metadata_file(source: ZipFile, path: str) -> bytes:
- try:
- return source.read(path)
- # BadZipFile for general corruption, KeyError for missing entry,
- # and RuntimeError for password-protected files
- except (BadZipFile, KeyError, RuntimeError) as e:
- raise UnsupportedWheel(f"could not read {path!r} file: {e!r}")
-
-
-def wheel_metadata(source: ZipFile, dist_info_dir: str) -> Message:
- """Return the WHEEL metadata of an extracted wheel, if possible.
- Otherwise, raise UnsupportedWheel.
- """
- path = f"{dist_info_dir}/WHEEL"
- # Zip file path separators must be /
- wheel_contents = read_wheel_metadata_file(source, path)
-
- try:
- wheel_text = wheel_contents.decode()
- except UnicodeDecodeError as e:
- raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")
-
- # FeedParser (used by Parser) does not raise any exceptions. The returned
- # message may have .defects populated, but for backwards-compatibility we
- # currently ignore them.
- return Parser().parsestr(wheel_text)
-
-
-def wheel_version(wheel_data: Message) -> Tuple[int, ...]:
- """Given WHEEL metadata, return the parsed Wheel-Version.
- Otherwise, raise UnsupportedWheel.
- """
- version_text = wheel_data["Wheel-Version"]
- if version_text is None:
- raise UnsupportedWheel("WHEEL is missing Wheel-Version")
-
- version = version_text.strip()
-
- try:
- return tuple(map(int, version.split(".")))
- except ValueError:
- raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")
-
-
-def check_compatibility(version: Tuple[int, ...], name: str) -> None:
- """Raises errors or warns if called with an incompatible Wheel-Version.
-
- pip should refuse to install a Wheel-Version that's a major series
- ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
- installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
-
- version: a 2-tuple representing a Wheel-Version (Major, Minor)
- name: name of wheel or package to raise exception about
-
- :raises UnsupportedWheel: when an incompatible Wheel-Version is given
- """
- if version[0] > VERSION_COMPATIBLE[0]:
- raise UnsupportedWheel(
- "{}'s Wheel-Version ({}) is not compatible with this version "
- "of pip".format(name, ".".join(map(str, version)))
- )
- elif version > VERSION_COMPATIBLE:
- logger.warning(
- "Installing from a newer Wheel-Version (%s)",
- ".".join(map(str, version)),
- )
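
A usage sketch for the parsing entry point above; the wheel filename is a hypothetical example:

    from zipfile import ZipFile

    from pip._internal.utils.wheel import parse_wheel

    with ZipFile("example_pkg-1.0-py3-none-any.whl") as wheel_zip:  # assumed file
        info_dir, metadata = parse_wheel(wheel_zip, "example-pkg")
        print(info_dir)                   # 'example_pkg-1.0.dist-info'
        print(metadata["Wheel-Version"])  # '1.0' for current wheels
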
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__init__.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__init__.py
deleted file mode 100644
index b6beddb..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-# Expose a limited set of classes and functions so callers outside of
-# the vcs package don't need to import deeper than `pip._internal.vcs`.
-# (The test directory may still need to import from a vcs sub-package.)
-# Import all vcs modules to register each VCS in the VcsSupport object.
-import pip._internal.vcs.bazaar
-import pip._internal.vcs.git
-import pip._internal.vcs.mercurial
-import pip._internal.vcs.subversion # noqa: F401
-from pip._internal.vcs.versioncontrol import ( # noqa: F401
- RemoteNotFoundError,
- RemoteNotValidError,
- is_url,
- make_vcs_requirement_url,
- vcs,
-)
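
Importing this package is what populates the global VcsSupport singleton, so a lookup is then one call away. A sketch, assuming the get_backend/get_backend_for_dir helpers that versioncontrol.py's VcsSupport defines:

    from pip._internal.vcs import vcs

    git_backend = vcs.get_backend("git")                 # VersionControl instance, or None
    backend = vcs.get_backend_for_dir("/some/checkout")  # assumed path; probes .git, .hg, ...
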
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 058060c..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc
deleted file mode 100644
index 874b93a..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc
deleted file mode 100644
index 6c092b0..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/git.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc
deleted file mode 100644
index 4aa50d2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc
deleted file mode 100644
index 8f4e3f2..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc b/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc
deleted file mode 100644
index 0f06f47..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/bazaar.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/bazaar.py
deleted file mode 100644
index 20a17ed..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/bazaar.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import logging
-from typing import List, Optional, Tuple
-
-from pip._internal.utils.misc import HiddenText, display_path
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs.versioncontrol import (
- AuthInfo,
- RemoteNotFoundError,
- RevOptions,
- VersionControl,
- vcs,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class Bazaar(VersionControl):
- name = "bzr"
- dirname = ".bzr"
- repo_name = "branch"
- schemes = (
- "bzr+http",
- "bzr+https",
- "bzr+ssh",
- "bzr+sftp",
- "bzr+ftp",
- "bzr+lp",
- "bzr+file",
- )
-
- @staticmethod
- def get_base_rev_args(rev: str) -> List[str]:
- return ["-r", rev]
-
- def fetch_new(
- self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
- ) -> None:
- rev_display = rev_options.to_display()
- logger.info(
- "Checking out %s%s to %s",
- url,
- rev_display,
- display_path(dest),
- )
- if verbosity <= 0:
- flag = "--quiet"
- elif verbosity == 1:
- flag = ""
- else:
- flag = f"-{'v'*verbosity}"
- cmd_args = make_command(
- "checkout", "--lightweight", flag, rev_options.to_args(), url, dest
- )
- self.run_command(cmd_args)
-
- def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- self.run_command(make_command("switch", url), cwd=dest)
-
- def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- output = self.run_command(
- make_command("info"), show_stdout=False, stdout_only=True, cwd=dest
- )
- if output.startswith("Standalone "):
- # Older versions of pip used to create standalone branches.
- # Convert the standalone branch to a checkout by calling "bzr bind".
- cmd_args = make_command("bind", "-q", url)
- self.run_command(cmd_args, cwd=dest)
-
- cmd_args = make_command("update", "-q", rev_options.to_args())
- self.run_command(cmd_args, cwd=dest)
-
- @classmethod
- def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
- # hotfix the URL scheme: the base class strips "bzr+" from "bzr+ssh://", so re-add it
- url, rev, user_pass = super().get_url_rev_and_auth(url)
- if url.startswith("ssh://"):
- url = "bzr+" + url
- return url, rev, user_pass
-
- @classmethod
- def get_remote_url(cls, location: str) -> str:
- urls = cls.run_command(
- ["info"], show_stdout=False, stdout_only=True, cwd=location
- )
- for line in urls.splitlines():
- line = line.strip()
- for x in ("checkout of branch: ", "parent branch: "):
- if line.startswith(x):
- repo = line.split(x)[1]
- if cls._is_local_repository(repo):
- return path_to_url(repo)
- return repo
- raise RemoteNotFoundError
-
- @classmethod
- def get_revision(cls, location: str) -> str:
- revision = cls.run_command(
- ["revno"],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- )
- return revision.splitlines()[-1]
-
- @classmethod
- def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
- """Always assume the versions don't match"""
- return False
-
-
-vcs.register(Bazaar)
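
Micro-examples that follow directly from the code above: the revision arguments handed to bzr, and the verbosity-to-flag mapping used by fetch_new:

    >>> Bazaar.get_base_rev_args("42")
    ['-r', '42']
    >>> # fetch_new flags: verbosity 0 -> '--quiet', 1 -> '', n >= 2 -> '-' + 'v' * n
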
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/git.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/git.py
deleted file mode 100644
index 8c242cf..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/git.py
+++ /dev/null
@@ -1,526 +0,0 @@
-import logging
-import os.path
-import pathlib
-import re
-import urllib.parse
-import urllib.request
-from typing import List, Optional, Tuple
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.utils.misc import HiddenText, display_path, hide_url
-from pip._internal.utils.subprocess import make_command
-from pip._internal.vcs.versioncontrol import (
- AuthInfo,
- RemoteNotFoundError,
- RemoteNotValidError,
- RevOptions,
- VersionControl,
- find_path_to_project_root_from_repo_root,
- vcs,
-)
-
-urlsplit = urllib.parse.urlsplit
-urlunsplit = urllib.parse.urlunsplit
-
-
-logger = logging.getLogger(__name__)
-
-
-GIT_VERSION_REGEX = re.compile(
- r"^git version " # Prefix.
- r"(\d+)" # Major.
- r"\.(\d+)" # Dot, minor.
- r"(?:\.(\d+))?" # Optional dot, patch.
- r".*$" # Suffix, including any pre- and post-release segments we don't care about.
-)
-
-HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")
-
-# SCP (Secure copy protocol) shorthand. e.g. 'git@example.com:foo/bar.git'
-SCP_REGEX = re.compile(
- r"""^
- # Optional user, e.g. 'git@'
- (\w+@)?
- # Server, e.g. 'github.com'.
- ([^/:]+):
- # The server-side path. e.g. 'user/project.git'. Must start with an
- # alphanumeric character so as not to be confusable with a Windows path
- # like 'C:/foo/bar' or 'C:\foo\bar'.
- (\w[^:]*)
- $""",
- re.VERBOSE,
-)
-
-
-def looks_like_hash(sha: str) -> bool:
- return bool(HASH_REGEX.match(sha))
-
-
-class Git(VersionControl):
- name = "git"
- dirname = ".git"
- repo_name = "clone"
- schemes = (
- "git+http",
- "git+https",
- "git+ssh",
- "git+git",
- "git+file",
- )
- # Prevent the user's environment variables from interfering with pip:
- # https://github.com/pypa/pip/issues/1130
- unset_environ = ("GIT_DIR", "GIT_WORK_TREE")
- default_arg_rev = "HEAD"
-
- @staticmethod
- def get_base_rev_args(rev: str) -> List[str]:
- return [rev]
-
- def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
- _, rev_options = self.get_url_rev_options(hide_url(url))
- if not rev_options.rev:
- return False
- if not self.is_commit_id_equal(dest, rev_options.rev):
- # the current commit is different from rev,
- # which means rev was something other than a commit hash
- return False
- # return False in the rare case rev is both a commit hash
- # and a tag or a branch; we don't want to cache in that case
- # because that branch/tag could point to something else in the future
- is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])
- return not is_tag_or_branch
-
- def get_git_version(self) -> Tuple[int, ...]:
- version = self.run_command(
- ["version"],
- command_desc="git version",
- show_stdout=False,
- stdout_only=True,
- )
- match = GIT_VERSION_REGEX.match(version)
- if not match:
- logger.warning("Can't parse git version: %s", version)
- return ()
- return (int(match.group(1)), int(match.group(2)))
-
- @classmethod
- def get_current_branch(cls, location: str) -> Optional[str]:
- """
- Return the current branch, or None if HEAD isn't at a branch
- (e.g. detached HEAD).
- """
- # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
- # HEAD rather than a symbolic ref. In addition, the -q causes the
- # command to exit with status code 1 instead of 128 in this case
- # and to suppress the message to stderr.
- args = ["symbolic-ref", "-q", "HEAD"]
- output = cls.run_command(
- args,
- extra_ok_returncodes=(1,),
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- )
- ref = output.strip()
-
- if ref.startswith("refs/heads/"):
- return ref[len("refs/heads/") :]
-
- return None
-
- @classmethod
- def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:
- """
- Return (sha_or_none, is_branch), where sha_or_none is a commit hash
- if the revision names a remote branch or tag, otherwise None.
-
- Args:
- dest: the repository directory.
- rev: the revision name.
- """
- # Pass rev to pre-filter the list.
- output = cls.run_command(
- ["show-ref", rev],
- cwd=dest,
- show_stdout=False,
- stdout_only=True,
- on_returncode="ignore",
- )
- refs = {}
- # NOTE: We do not use splitlines here since that would split on other
- # unicode separators, which can be maliciously used to install a
- # different revision.
- for line in output.strip().split("\n"):
- line = line.rstrip("\r")
- if not line:
- continue
- try:
- ref_sha, ref_name = line.split(" ", maxsplit=2)
- except ValueError:
- # Include the offending line to simplify troubleshooting if
- # this error ever occurs.
- raise ValueError(f"unexpected show-ref line: {line!r}")
-
- refs[ref_name] = ref_sha
-
- branch_ref = f"refs/remotes/origin/{rev}"
- tag_ref = f"refs/tags/{rev}"
-
- sha = refs.get(branch_ref)
- if sha is not None:
- return (sha, True)
-
- sha = refs.get(tag_ref)
-
- return (sha, False)
-
- @classmethod
- def _should_fetch(cls, dest: str, rev: str) -> bool:
- """
- Return true if rev is a ref or is a commit that we don't have locally.
-
- Branches and tags are not considered in this method because they are
- assumed to be always available locally (which is a normal outcome of
- ``git clone`` and ``git fetch --tags``).
- """
- if rev.startswith("refs/"):
- # Always fetch remote refs.
- return True
-
- if not looks_like_hash(rev):
- # Git fetch would fail with abbreviated commits.
- return False
-
- if cls.has_commit(dest, rev):
- # Don't fetch if we have the commit locally.
- return False
-
- return True
-
- @classmethod
- def resolve_revision(
- cls, dest: str, url: HiddenText, rev_options: RevOptions
- ) -> RevOptions:
- """
- Resolve a revision to a new RevOptions object with the SHA1 of the
- branch, tag, or ref if found.
-
- Args:
- rev_options: a RevOptions object.
- """
- rev = rev_options.arg_rev
- # The arg_rev property's implementation for Git ensures that the
- # rev return value is always non-None.
- assert rev is not None
-
- sha, is_branch = cls.get_revision_sha(dest, rev)
-
- if sha is not None:
- rev_options = rev_options.make_new(sha)
- rev_options.branch_name = rev if is_branch else None
-
- return rev_options
-
- # Do not show a warning for the common case of something that has
- # the form of a Git commit hash.
- if not looks_like_hash(rev):
- logger.warning(
- "Did not find branch or tag '%s', assuming revision or ref.",
- rev,
- )
-
- if not cls._should_fetch(dest, rev):
- return rev_options
-
- # fetch the requested revision
- cls.run_command(
- make_command("fetch", "-q", url, rev_options.to_args()),
- cwd=dest,
- )
- # Change the revision to the SHA of the ref we fetched
- sha = cls.get_revision(dest, rev="FETCH_HEAD")
- rev_options = rev_options.make_new(sha)
-
- return rev_options
-
- @classmethod
- def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
- """
- Return whether the current commit hash equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- if not name:
- # Then avoid an unnecessary subprocess call.
- return False
-
- return cls.get_revision(dest) == name
-
- def fetch_new(
- self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
- ) -> None:
- rev_display = rev_options.to_display()
- logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))
- if verbosity <= 0:
- flags: Tuple[str, ...] = ("--quiet",)
- elif verbosity == 1:
- flags = ()
- else:
- flags = ("--verbose", "--progress")
- if self.get_git_version() >= (2, 17):
- # Git added support for partial clone in 2.17
- # https://git-scm.com/docs/partial-clone
- # Speeds up cloning by functioning without a complete copy of the repository
- self.run_command(
- make_command(
- "clone",
- "--filter=blob:none",
- *flags,
- url,
- dest,
- )
- )
- else:
- self.run_command(make_command("clone", *flags, url, dest))
-
- if rev_options.rev:
- # Then a specific revision was requested.
- rev_options = self.resolve_revision(dest, url, rev_options)
- branch_name = getattr(rev_options, "branch_name", None)
- logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)
- if branch_name is None:
- # Only do a checkout if the current commit id doesn't match
- # the requested revision.
- if not self.is_commit_id_equal(dest, rev_options.rev):
- cmd_args = make_command(
- "checkout",
- "-q",
- rev_options.to_args(),
- )
- self.run_command(cmd_args, cwd=dest)
- elif self.get_current_branch(dest) != branch_name:
- # Then a specific branch was requested, and that branch
- # is not yet checked out.
- track_branch = f"origin/{branch_name}"
- cmd_args = [
- "checkout",
- "-b",
- branch_name,
- "--track",
- track_branch,
- ]
- self.run_command(cmd_args, cwd=dest)
- else:
- sha = self.get_revision(dest)
- rev_options = rev_options.make_new(sha)
-
- logger.info("Resolved %s to commit %s", url, rev_options.rev)
-
- #: repo may contain submodules
- self.update_submodules(dest)
-
- def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- self.run_command(
- make_command("config", "remote.origin.url", url),
- cwd=dest,
- )
- cmd_args = make_command("checkout", "-q", rev_options.to_args())
- self.run_command(cmd_args, cwd=dest)
-
- self.update_submodules(dest)
-
- def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- # First fetch changes from the default remote
- if self.get_git_version() >= (1, 9):
- # fetch tags in addition to everything else
- self.run_command(["fetch", "-q", "--tags"], cwd=dest)
- else:
- self.run_command(["fetch", "-q"], cwd=dest)
- # Then reset to wanted revision (maybe even origin/master)
- rev_options = self.resolve_revision(dest, url, rev_options)
- cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())
- self.run_command(cmd_args, cwd=dest)
- #: update submodules
- self.update_submodules(dest)
-
- @classmethod
- def get_remote_url(cls, location: str) -> str:
- """
- Return URL of the first remote encountered.
-
- Raises RemoteNotFoundError if the repository does not have a remote
- url configured.
- """
- # We need to pass 1 for extra_ok_returncodes since the command
- # exits with return code 1 if there are no matching lines.
- stdout = cls.run_command(
- ["config", "--get-regexp", r"remote\..*\.url"],
- extra_ok_returncodes=(1,),
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- )
- remotes = stdout.splitlines()
- try:
- found_remote = remotes[0]
- except IndexError:
- raise RemoteNotFoundError
-
- for remote in remotes:
- if remote.startswith("remote.origin.url "):
- found_remote = remote
- break
- url = found_remote.split(" ")[1]
- return cls._git_remote_to_pip_url(url.strip())
-
- @staticmethod
- def _git_remote_to_pip_url(url: str) -> str:
- """
- Convert a remote url from what git uses to what pip accepts.
-
- There are 3 legal forms **url** may take:
-
- 1. A fully qualified url: ssh://git@example.com/foo/bar.git
- 2. A local project.git folder: /path/to/bare/repository.git
- 3. SCP shorthand for form 1: git@example.com:foo/bar.git
-
- Form 1 is output as-is. Form 2 must be converted to URI and form 3 must
- be converted to form 1.
-
- See the corresponding test test_git_remote_url_to_pip() for examples of
- sample inputs/outputs.
- """
- if re.match(r"\w+://", url):
- # This is already valid. Pass it through as-is.
- return url
- if os.path.exists(url):
- # A local bare remote (git clone --mirror).
- # Needs a file:// prefix.
- return pathlib.PurePath(url).as_uri()
- scp_match = SCP_REGEX.match(url)
- if scp_match:
- # Add an ssh:// prefix and replace the ':' with a '/'.
- return scp_match.expand(r"ssh://\1\2/\3")
- # Otherwise, bail out.
- raise RemoteNotValidError(url)
-
- @classmethod
- def has_commit(cls, location: str, rev: str) -> bool:
- """
- Check if rev is a commit that is available in the local repository.
- """
- try:
- cls.run_command(
- ["rev-parse", "-q", "--verify", "sha^" + rev],
- cwd=location,
- log_failed_cmd=False,
- )
- except InstallationError:
- return False
- else:
- return True
-
- @classmethod
- def get_revision(cls, location: str, rev: Optional[str] = None) -> str:
- if rev is None:
- rev = "HEAD"
- current_rev = cls.run_command(
- ["rev-parse", rev],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- )
- return current_rev.strip()
-
- @classmethod
- def get_subdirectory(cls, location: str) -> Optional[str]:
- """
- Return the path to Python project root, relative to the repo root.
- Return None if the project root is in the repo root.
- """
- # find the repo root
- git_dir = cls.run_command(
- ["rev-parse", "--git-dir"],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- ).strip()
- if not os.path.isabs(git_dir):
- git_dir = os.path.join(location, git_dir)
- repo_root = os.path.abspath(os.path.join(git_dir, ".."))
- return find_path_to_project_root_from_repo_root(location, repo_root)
-
- @classmethod
- def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
- """
- Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
- That's required because although they use SSH they sometimes don't
- work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
- parsing. Hence we remove it again afterwards and return it as a stub.
- """
- # Works around an apparent Git bug
- # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
- scheme, netloc, path, query, fragment = urlsplit(url)
- if scheme.endswith("file"):
- initial_slashes = path[: -len(path.lstrip("/"))]
- newpath = initial_slashes + urllib.request.url2pathname(path).replace(
- "\\", "/"
- ).lstrip("/")
- after_plus = scheme.find("+") + 1
- url = scheme[:after_plus] + urlunsplit(
- (scheme[after_plus:], netloc, newpath, query, fragment),
- )
-
- if "://" not in url:
- assert "file:" not in url
- url = url.replace("git+", "git+ssh://")
- url, rev, user_pass = super().get_url_rev_and_auth(url)
- url = url.replace("ssh://", "")
- else:
- url, rev, user_pass = super().get_url_rev_and_auth(url)
-
- return url, rev, user_pass
-
- @classmethod
- def update_submodules(cls, location: str) -> None:
- if not os.path.exists(os.path.join(location, ".gitmodules")):
- return
- cls.run_command(
- ["submodule", "update", "--init", "--recursive", "-q"],
- cwd=location,
- )
-
- @classmethod
- def get_repository_root(cls, location: str) -> Optional[str]:
- loc = super().get_repository_root(location)
- if loc:
- return loc
- try:
- r = cls.run_command(
- ["rev-parse", "--show-toplevel"],
- cwd=location,
- show_stdout=False,
- stdout_only=True,
- on_returncode="raise",
- log_failed_cmd=False,
- )
- except BadCommand:
- logger.debug(
- "could not determine if %s is under git control "
- "because git is not available",
- location,
- )
- return None
- except InstallationError:
- return None
- return os.path.normpath(r.rstrip("\r\n"))
-
- @staticmethod
- def should_add_vcs_url_prefix(repo_url: str) -> bool:
- """In either https or ssh form, requirements must be prefixed with git+."""
- return True
-
-
-vcs.register(Git)
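
A doctest-style illustration of the three remote-URL forms handled by _git_remote_to_pip_url; the bare-repository form requires a path that exists on disk, so it is shown as a comment:

    >>> Git._git_remote_to_pip_url("ssh://git@example.com/foo/bar.git")
    'ssh://git@example.com/foo/bar.git'
    >>> Git._git_remote_to_pip_url("git@example.com:foo/bar.git")
    'ssh://git@example.com/foo/bar.git'
    >>> # '/path/to/bare/repository.git' -> 'file:///path/to/bare/repository.git'
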
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/mercurial.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/mercurial.py
deleted file mode 100644
index c183d41..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/mercurial.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import configparser
-import logging
-import os
-from typing import List, Optional, Tuple
-
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.utils.misc import HiddenText, display_path
-from pip._internal.utils.subprocess import make_command
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs.versioncontrol import (
- RevOptions,
- VersionControl,
- find_path_to_project_root_from_repo_root,
- vcs,
-)
-
-logger = logging.getLogger(__name__)
-
-
-class Mercurial(VersionControl):
- name = "hg"
- dirname = ".hg"
- repo_name = "clone"
- schemes = (
- "hg+file",
- "hg+http",
- "hg+https",
- "hg+ssh",
- "hg+static-http",
- )
-
- @staticmethod
- def get_base_rev_args(rev: str) -> List[str]:
- return [f"--rev={rev}"]
-
- def fetch_new(
- self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
- ) -> None:
- rev_display = rev_options.to_display()
- logger.info(
- "Cloning hg %s%s to %s",
- url,
- rev_display,
- display_path(dest),
- )
- if verbosity <= 0:
- flags: Tuple[str, ...] = ("--quiet",)
- elif verbosity == 1:
- flags = ()
- elif verbosity == 2:
- flags = ("--verbose",)
- else:
- flags = ("--verbose", "--debug")
- self.run_command(make_command("clone", "--noupdate", *flags, url, dest))
- self.run_command(
- make_command("update", *flags, rev_options.to_args()),
- cwd=dest,
- )
-
- def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- repo_config = os.path.join(dest, self.dirname, "hgrc")
- config = configparser.RawConfigParser()
- try:
- config.read(repo_config)
- config.set("paths", "default", url.secret)
- with open(repo_config, "w") as config_file:
- config.write(config_file)
- except (OSError, configparser.NoSectionError) as exc:
- logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)
- else:
- cmd_args = make_command("update", "-q", rev_options.to_args())
- self.run_command(cmd_args, cwd=dest)
-
- def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- self.run_command(["pull", "-q"], cwd=dest)
- cmd_args = make_command("update", "-q", rev_options.to_args())
- self.run_command(cmd_args, cwd=dest)
-
- @classmethod
- def get_remote_url(cls, location: str) -> str:
- url = cls.run_command(
- ["showconfig", "paths.default"],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- ).strip()
- if cls._is_local_repository(url):
- url = path_to_url(url)
- return url.strip()
-
- @classmethod
- def get_revision(cls, location: str) -> str:
- """
- Return the repository-local changeset revision number, as a string.
- """
- current_revision = cls.run_command(
- ["parents", "--template={rev}"],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- ).strip()
- return current_revision
-
- @classmethod
- def get_requirement_revision(cls, location: str) -> str:
- """
- Return the changeset identification hash, as a 40-character
- hexadecimal string
- """
- current_rev_hash = cls.run_command(
- ["parents", "--template={node}"],
- show_stdout=False,
- stdout_only=True,
- cwd=location,
- ).strip()
- return current_rev_hash
-
- @classmethod
- def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
- """Always assume the versions don't match"""
- return False
-
- @classmethod
- def get_subdirectory(cls, location: str) -> Optional[str]:
- """
- Return the path to Python project root, relative to the repo root.
- Return None if the project root is in the repo root.
- """
- # find the repo root
- repo_root = cls.run_command(
- ["root"], show_stdout=False, stdout_only=True, cwd=location
- ).strip()
- if not os.path.isabs(repo_root):
- repo_root = os.path.abspath(os.path.join(location, repo_root))
- return find_path_to_project_root_from_repo_root(location, repo_root)
-
- @classmethod
- def get_repository_root(cls, location: str) -> Optional[str]:
- loc = super().get_repository_root(location)
- if loc:
- return loc
- try:
- r = cls.run_command(
- ["root"],
- cwd=location,
- show_stdout=False,
- stdout_only=True,
- on_returncode="raise",
- log_failed_cmd=False,
- )
- except BadCommand:
- logger.debug(
- "could not determine if %s is under hg control "
- "because hg is not available",
- location,
- )
- return None
- except InstallationError:
- return None
- return os.path.normpath(r.rstrip("\r\n"))
-
-
-vcs.register(Mercurial)
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/subversion.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/subversion.py
deleted file mode 100644
index 16d93a6..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/subversion.py
+++ /dev/null
@@ -1,324 +0,0 @@
-import logging
-import os
-import re
-from typing import List, Optional, Tuple
-
-from pip._internal.utils.misc import (
- HiddenText,
- display_path,
- is_console_interactive,
- is_installable_dir,
- split_auth_from_netloc,
-)
-from pip._internal.utils.subprocess import CommandArgs, make_command
-from pip._internal.vcs.versioncontrol import (
- AuthInfo,
- RemoteNotFoundError,
- RevOptions,
- VersionControl,
- vcs,
-)
-
-logger = logging.getLogger(__name__)
-
-_svn_xml_url_re = re.compile('url="([^"]+)"')
-_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
-_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
-_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
-
-
-class Subversion(VersionControl):
- name = "svn"
- dirname = ".svn"
- repo_name = "checkout"
- schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")
-
- @classmethod
- def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
- return True
-
- @staticmethod
- def get_base_rev_args(rev: str) -> List[str]:
- return ["-r", rev]
-
- @classmethod
- def get_revision(cls, location: str) -> str:
- """
- Return the maximum revision for all files under a given location
- """
- # Note: taken from setuptools.command.egg_info
- revision = 0
-
- for base, dirs, _ in os.walk(location):
- if cls.dirname not in dirs:
- dirs[:] = []
- continue # no sense walking uncontrolled subdirs
- dirs.remove(cls.dirname)
- entries_fn = os.path.join(base, cls.dirname, "entries")
- if not os.path.exists(entries_fn):
- # FIXME: should we warn?
- continue
-
- dirurl, localrev = cls._get_svn_url_rev(base)
-
- if base == location:
- assert dirurl is not None
- base = dirurl + "/" # save the root url
- elif not dirurl or not dirurl.startswith(base):
- dirs[:] = []
- continue # not part of the same svn tree, skip it
- revision = max(revision, localrev)
- return str(revision)
-
- @classmethod
- def get_netloc_and_auth(
- cls, netloc: str, scheme: str
- ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
- """
- This override allows the auth information to be passed to svn via the
- --username and --password options instead of via the URL.
- """
- if scheme == "ssh":
- # The --username and --password options can't be used for
- # svn+ssh URLs, so keep the auth information in the URL.
- return super().get_netloc_and_auth(netloc, scheme)
-
- return split_auth_from_netloc(netloc)
-
- @classmethod
- def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
- # hotfix the URL scheme: the base class strips "svn+" from "svn+ssh://", so re-add it
- url, rev, user_pass = super().get_url_rev_and_auth(url)
- if url.startswith("ssh://"):
- url = "svn+" + url
- return url, rev, user_pass
-
- @staticmethod
- def make_rev_args(
- username: Optional[str], password: Optional[HiddenText]
- ) -> CommandArgs:
- extra_args: CommandArgs = []
- if username:
- extra_args += ["--username", username]
- if password:
- extra_args += ["--password", password]
-
- return extra_args
-
- @classmethod
- def get_remote_url(cls, location: str) -> str:
- # In cases where the source is in a subdirectory, we have to walk up
- # from the location until we find a valid project root.
- orig_location = location
- while not is_installable_dir(location):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding a Python project.
- logger.warning(
- "Could not find Python project for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- raise RemoteNotFoundError
-
- url, _rev = cls._get_svn_url_rev(location)
- if url is None:
- raise RemoteNotFoundError
-
- return url
-
- @classmethod
- def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
- from pip._internal.exceptions import InstallationError
-
- entries_path = os.path.join(location, cls.dirname, "entries")
- if os.path.exists(entries_path):
- with open(entries_path) as f:
- data = f.read()
- else: # subversion >= 1.7 does not have the 'entries' file
- data = ""
-
- url = None
- if data.startswith("8") or data.startswith("9") or data.startswith("10"):
- entries = list(map(str.splitlines, data.split("\n\x0c\n")))
- del entries[0][0] # get rid of the '8'
- url = entries[0][3]
- revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
- elif data.startswith("<?xml"):
- match = _svn_xml_url_re.search(data)
- if not match:
- raise ValueError(f"Badly formatted data: {data!r}")
- url = match.group(1) # get repository URL
- revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
- else:
- try:
- # subversion >= 1.7
- # Note that using get_remote_call_options is not necessary here
- # because `svn info` is being run against a local directory.
- # We don't need to worry about making sure interactive mode
- # is being used to prompt for passwords, because passwords
- # are only potentially needed for remote server requests.
- xml = cls.run_command(
- ["info", "--xml", location],
- show_stdout=False,
- stdout_only=True,
- )
- match = _svn_info_xml_url_re.search(xml)
- assert match is not None
- url = match.group(1)
- revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
- except InstallationError:
- url, revs = None, []
-
- if revs:
- rev = max(revs)
- else:
- rev = 0
-
- return url, rev
-
- @classmethod
- def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
- """Always assume the versions don't match"""
- return False
-
- def __init__(self, use_interactive: Optional[bool] = None) -> None:
- if use_interactive is None:
- use_interactive = is_console_interactive()
- self.use_interactive = use_interactive
-
- # This member is used to cache the fetched version of the current
- # ``svn`` client.
- # Special value definitions:
- # None: Not evaluated yet.
- # Empty tuple: Could not parse version.
- self._vcs_version: Optional[Tuple[int, ...]] = None
-
- super().__init__()
-
- def call_vcs_version(self) -> Tuple[int, ...]:
- """Query the version of the currently installed Subversion client.
-
- :return: A tuple containing the parts of the version information or
- ``()`` if the version returned from ``svn`` could not be parsed.
- :raises: BadCommand: If ``svn`` is not installed.
- """
- # Example versions:
- # svn, version 1.10.3 (r1842928)
- # compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
- # svn, version 1.7.14 (r1542130)
- # compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
- # svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)
- # compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2
- version_prefix = "svn, version "
- version = self.run_command(["--version"], show_stdout=False, stdout_only=True)
- if not version.startswith(version_prefix):
- return ()
-
- version = version[len(version_prefix) :].split()[0]
- version_list = version.partition("-")[0].split(".")
- try:
- parsed_version = tuple(map(int, version_list))
- except ValueError:
- return ()
-
- return parsed_version
-
- def get_vcs_version(self) -> Tuple[int, ...]:
- """Return the version of the currently installed Subversion client.
-
- If the version of the Subversion client has already been queried,
- a cached value will be used.
-
- :return: A tuple containing the parts of the version information or
- ``()`` if the version returned from ``svn`` could not be parsed.
- :raises: BadCommand: If ``svn`` is not installed.
- """
- if self._vcs_version is not None:
- # Use cached version, if available.
- # If parsing the version failed previously (empty tuple),
- # do not attempt to parse it again.
- return self._vcs_version
-
- vcs_version = self.call_vcs_version()
- self._vcs_version = vcs_version
- return vcs_version
-
- def get_remote_call_options(self) -> CommandArgs:
- """Return options to be used on calls to Subversion that contact the server.
-
- These options are applicable for the following ``svn`` subcommands used
- in this class.
-
- - checkout
- - switch
- - update
-
- :return: A list of command line arguments to pass to ``svn``.
- """
- if not self.use_interactive:
- # --non-interactive switch is available since Subversion 0.14.4.
- # Subversion < 1.8 runs in interactive mode by default.
- return ["--non-interactive"]
-
- svn_version = self.get_vcs_version()
- # By default, Subversion >= 1.8 runs in non-interactive mode if
- # stdin is not a TTY. Since that is how pip invokes SVN, in
- # call_subprocess(), pip must pass --force-interactive to ensure
- # the user can be prompted for a password, if required.
- # SVN added the --force-interactive option in SVN 1.8. Since
- # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
- # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
- # can't safely add the option if the SVN version is < 1.8 (or unknown).
- if svn_version >= (1, 8):
- return ["--force-interactive"]
-
- return []
-
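One subtlety worth noting: get_vcs_version() returns an empty tuple when the version could not be parsed, and Python compares tuples element-wise with the shorter tuple sorting first, so the `svn_version >= (1, 8)` check above also keeps unknown versions out of the --force-interactive branch. For example:

assert (1, 12, 0) >= (1, 8)        # modern client: option is safe to pass
assert not ((1, 7, 14) >= (1, 8))  # old client: option unsupported
assert not (() >= (1, 8))          # unparsable version: falls through to []
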
- def fetch_new(
- self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
- ) -> None:
- rev_display = rev_options.to_display()
- logger.info(
- "Checking out %s%s to %s",
- url,
- rev_display,
- display_path(dest),
- )
- if verbosity <= 0:
- flag = "--quiet"
- else:
- flag = ""
- cmd_args = make_command(
- "checkout",
- flag,
- self.get_remote_call_options(),
- rev_options.to_args(),
- url,
- dest,
- )
- self.run_command(cmd_args)
-
- def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- cmd_args = make_command(
- "switch",
- self.get_remote_call_options(),
- rev_options.to_args(),
- url,
- dest,
- )
- self.run_command(cmd_args)
-
- def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- cmd_args = make_command(
- "update",
- self.get_remote_call_options(),
- rev_options.to_args(),
- dest,
- )
- self.run_command(cmd_args)
-
-
-vcs.register(Subversion)
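Once registered, the backend can be retrieved from the shared `vcs` registry (a VcsSupport instance, defined in versioncontrol.py below). A minimal lookup, assuming pip's standard backends have been imported:

from pip._internal.vcs import vcs  # importing the package registers all backends

backend = vcs.get_backend("svn")   # registry keys are VersionControl.name values
assert backend is not None
assert backend.name == "svn"
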
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/vcs/versioncontrol.py b/venv/lib/python3.11/site-packages/pip/_internal/vcs/versioncontrol.py
deleted file mode 100644
index 46ca279..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/vcs/versioncontrol.py
+++ /dev/null
@@ -1,705 +0,0 @@
-"""Handles all VCS (version control) support"""
-
-import logging
-import os
-import shutil
-import sys
-import urllib.parse
-from typing import (
- TYPE_CHECKING,
- Any,
- Dict,
- Iterable,
- Iterator,
- List,
- Mapping,
- Optional,
- Tuple,
- Type,
- Union,
-)
-
-from pip._internal.cli.spinners import SpinnerInterface
-from pip._internal.exceptions import BadCommand, InstallationError
-from pip._internal.utils.misc import (
- HiddenText,
- ask_path_exists,
- backup_dir,
- display_path,
- hide_url,
- hide_value,
- is_installable_dir,
- rmtree,
-)
-from pip._internal.utils.subprocess import (
- CommandArgs,
- call_subprocess,
- format_command_args,
- make_command,
-)
-from pip._internal.utils.urls import get_url_scheme
-
-if TYPE_CHECKING:
- # Literal was introduced in Python 3.8.
- #
- # TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
- from typing import Literal
-
-
-__all__ = ["vcs"]
-
-
-logger = logging.getLogger(__name__)
-
-AuthInfo = Tuple[Optional[str], Optional[str]]
-
-
-def is_url(name: str) -> bool:
- """
- Return true if the name looks like a URL.
- """
- scheme = get_url_scheme(name)
- if scheme is None:
- return False
- return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes
-
-
-def make_vcs_requirement_url(
- repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
-) -> str:
- """
- Return the URL for a VCS requirement.
-
- Args:
- repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
- project_name: the (unescaped) project name.
- """
- egg_project_name = project_name.replace("-", "_")
- req = f"{repo_url}@{rev}#egg={egg_project_name}"
- if subdir:
- req += f"&subdirectory={subdir}"
-
- return req
-
-
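A worked example of the requirement string this helper produces (the URL, revision, and names are illustrative):

url = make_vcs_requirement_url(
    "git+https://example.com/repo.git", "abc123", "my-project", subdir="pkg"
)
# Hyphens in the project name become underscores in the egg fragment:
assert url == "git+https://example.com/repo.git@abc123#egg=my_project&subdirectory=pkg"
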
-def find_path_to_project_root_from_repo_root(
- location: str, repo_root: str
-) -> Optional[str]:
- """
- Find the Python project's root by searching up the filesystem from
- `location`. Return the path to the project root relative to `repo_root`.
- Return None if the project root is `repo_root`, or cannot be found.
- """
- # find project root.
- orig_location = location
- while not is_installable_dir(location):
- last_location = location
- location = os.path.dirname(location)
- if location == last_location:
- # We've traversed up to the root of the filesystem without
- # finding a Python project.
- logger.warning(
- "Could not find a Python project for directory %s (tried all "
- "parent directories)",
- orig_location,
- )
- return None
-
- if os.path.samefile(repo_root, location):
- return None
-
- return os.path.relpath(location, repo_root)
-
-
-class RemoteNotFoundError(Exception):
- pass
-
-
-class RemoteNotValidError(Exception):
- def __init__(self, url: str):
- super().__init__(url)
- self.url = url
-
-
-class RevOptions:
-
- """
- Encapsulates a VCS-specific revision to install, along with any VCS
- install options.
-
- Instances of this class should be treated as if immutable.
- """
-
- def __init__(
- self,
- vc_class: Type["VersionControl"],
- rev: Optional[str] = None,
- extra_args: Optional[CommandArgs] = None,
- ) -> None:
- """
- Args:
- vc_class: a VersionControl subclass.
- rev: the name of the revision to install.
- extra_args: a list of extra options.
- """
- if extra_args is None:
- extra_args = []
-
- self.extra_args = extra_args
- self.rev = rev
- self.vc_class = vc_class
- self.branch_name: Optional[str] = None
-
- def __repr__(self) -> str:
- return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"
-
- @property
- def arg_rev(self) -> Optional[str]:
- if self.rev is None:
- return self.vc_class.default_arg_rev
-
- return self.rev
-
- def to_args(self) -> CommandArgs:
- """
- Return the VCS-specific command arguments.
- """
- args: CommandArgs = []
- rev = self.arg_rev
- if rev is not None:
- args += self.vc_class.get_base_rev_args(rev)
- args += self.extra_args
-
- return args
-
- def to_display(self) -> str:
- if not self.rev:
- return ""
-
- return f" (to revision {self.rev})"
-
- def make_new(self, rev: str) -> "RevOptions":
- """
- Make a copy of the current instance, but with a new rev.
-
- Args:
- rev: the name of the revision for the new object.
- """
- return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
-
-
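A minimal sketch of how RevOptions composes its arguments, using a hypothetical stand-in for a VersionControl subclass (the real classes are defined below; actual backends map the revision to flags such as ["-r", rev]):

class FakeVcs:  # hypothetical backend, for illustration only
    name = "fake"
    default_arg_rev = None

    @staticmethod
    def get_base_rev_args(rev):
        return [rev]  # real backends turn this into VCS-specific flags

opts = RevOptions(FakeVcs, "v1.0", extra_args=["--depth", "1"])
assert opts.to_args() == ["v1.0", "--depth", "1"]
assert opts.to_display() == " (to revision v1.0)"
assert RevOptions(FakeVcs).to_display() == ""   # no revision, nothing to display
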
-class VcsSupport:
- _registry: Dict[str, "VersionControl"] = {}
- schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]
-
- def __init__(self) -> None:
- # Register more schemes with urlparse for various version control
- # systems
- urllib.parse.uses_netloc.extend(self.schemes)
- super().__init__()
-
- def __iter__(self) -> Iterator[str]:
- return self._registry.__iter__()
-
- @property
- def backends(self) -> List["VersionControl"]:
- return list(self._registry.values())
-
- @property
- def dirnames(self) -> List[str]:
- return [backend.dirname for backend in self.backends]
-
- @property
- def all_schemes(self) -> List[str]:
- schemes: List[str] = []
- for backend in self.backends:
- schemes.extend(backend.schemes)
- return schemes
-
- def register(self, cls: Type["VersionControl"]) -> None:
- if not hasattr(cls, "name"):
- logger.warning("Cannot register VCS %s", cls.__name__)
- return
- if cls.name not in self._registry:
- self._registry[cls.name] = cls()
- logger.debug("Registered VCS backend: %s", cls.name)
-
- def unregister(self, name: str) -> None:
- if name in self._registry:
- del self._registry[name]
-
- def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
- """
- Return a VersionControl object if a repository of that type is found
- at the given directory.
- """
- vcs_backends = {}
- for vcs_backend in self._registry.values():
- repo_path = vcs_backend.get_repository_root(location)
- if not repo_path:
- continue
- logger.debug("Determined that %s uses VCS: %s", location, vcs_backend.name)
- vcs_backends[repo_path] = vcs_backend
-
- if not vcs_backends:
- return None
-
- # Choose the VCS in the inner-most directory. Since all repository
- # roots found here would be either `location` or one of its
- # parents, the longest path should have the most path components,
- # i.e. the backend representing the inner-most repository.
- inner_most_repo_path = max(vcs_backends, key=len)
- return vcs_backends[inner_most_repo_path]
-
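The innermost-repository choice relies only on path length; a standalone illustration of the max(..., key=len) step, with hypothetical paths and backends:

# Every candidate root is `location` itself or one of its parents,
# so the longest path is the innermost repository root.
candidates = {"/repo": "git backend", "/repo/vendor/sub": "hg backend"}
inner_most_repo_path = max(candidates, key=len)
assert inner_most_repo_path == "/repo/vendor/sub"
assert candidates[inner_most_repo_path] == "hg backend"
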
- def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:
- """
- Return a VersionControl object or None.
- """
- for vcs_backend in self._registry.values():
- if scheme in vcs_backend.schemes:
- return vcs_backend
- return None
-
- def get_backend(self, name: str) -> Optional["VersionControl"]:
- """
- Return a VersionControl object or None.
- """
- name = name.lower()
- return self._registry.get(name)
-
-
-vcs = VcsSupport()
-
-
-class VersionControl:
- name = ""
- dirname = ""
- repo_name = ""
- # List of supported schemes for this Version Control
- schemes: Tuple[str, ...] = ()
- # Iterable of environment variable names to pass to call_subprocess().
- unset_environ: Tuple[str, ...] = ()
- default_arg_rev: Optional[str] = None
-
- @classmethod
- def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
- """
- Return whether the vcs prefix (e.g. "git+") should be added to a
- repository's remote url when used in a requirement.
- """
- return not remote_url.lower().startswith(f"{cls.name}:")
-
- @classmethod
- def get_subdirectory(cls, location: str) -> Optional[str]:
- """
- Return the path to Python project root, relative to the repo root.
- Return None if the project root is in the repo root.
- """
- return None
-
- @classmethod
- def get_requirement_revision(cls, repo_dir: str) -> str:
- """
- Return the revision string that should be used in a requirement.
- """
- return cls.get_revision(repo_dir)
-
- @classmethod
- def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:
- """
- Return the requirement string to use to redownload the files
- currently at the given repository directory.
-
- Args:
- project_name: the (unescaped) project name.
-
- The return value has a form similar to the following:
-
- {repository_url}@{revision}#egg={project_name}
- """
- repo_url = cls.get_remote_url(repo_dir)
-
- if cls.should_add_vcs_url_prefix(repo_url):
- repo_url = f"{cls.name}+{repo_url}"
-
- revision = cls.get_requirement_revision(repo_dir)
- subdir = cls.get_subdirectory(repo_dir)
- req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)
-
- return req
-
- @staticmethod
- def get_base_rev_args(rev: str) -> List[str]:
- """
- Return the base revision arguments for a vcs command.
-
- Args:
- rev: the name of a revision to install. Cannot be None.
- """
- raise NotImplementedError
-
- def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
- """
- Return true if the commit hash checked out at dest matches
- the revision in url.
-
- Always return False if the VCS does not support immutable commit
- hashes.
-
- This method does not check if there are local uncommitted changes
- in dest after checkout, as pip currently has no use case for that.
- """
- return False
-
- @classmethod
- def make_rev_options(
- cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None
- ) -> RevOptions:
- """
- Return a RevOptions object.
-
- Args:
- rev: the name of a revision to install.
- extra_args: a list of extra options.
- """
- return RevOptions(cls, rev, extra_args=extra_args)
-
- @classmethod
- def _is_local_repository(cls, repo: str) -> bool:
- """
- POSIX absolute paths start with os.path.sep;
- win32 ones start with a drive (like c:\\folder).
- """
- drive, tail = os.path.splitdrive(repo)
- return repo.startswith(os.path.sep) or bool(drive)
-
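On a POSIX system the check behaves as follows (on win32, drive-letter paths like c:\folder match through the drive component instead):

assert VersionControl._is_local_repository("/home/user/checkout")    # starts with os.path.sep
assert not VersionControl._is_local_repository("relative/checkout")  # no sep prefix, no drive
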
- @classmethod
- def get_netloc_and_auth(
- cls, netloc: str, scheme: str
- ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
- """
- Parse the repository URL's netloc, and return the new netloc to use
- along with auth information.
-
- Args:
- netloc: the original repository URL netloc.
- scheme: the repository URL's scheme without the vcs prefix.
-
- This is mainly for the Subversion class to override, so that auth
- information can be provided via the --username and --password options
- instead of through the URL. For other subclasses like Git without
- such an option, auth information must stay in the URL.
-
- Returns: (netloc, (username, password)).
- """
- return netloc, (None, None)
-
- @classmethod
- def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
- """
- Parse the repository URL to use, and return the URL, revision,
- and auth info to use.
-
- Returns: (url, rev, (username, password)).
- """
- scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
- if "+" not in scheme:
- raise ValueError(
- f"Sorry, {url!r} is a malformed VCS url. "
- "The format is <vcs>+<protocol>://<url>, "
- "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
- )
- # Remove the vcs prefix.
- scheme = scheme.split("+", 1)[1]
- netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
- rev = None
- if "@" in path:
- path, rev = path.rsplit("@", 1)
- if not rev:
- raise InstallationError(
- f"The URL {url!r} has an empty revision (after @) "
- "which is not supported. Include a revision after @ "
- "or remove @ from the URL."
- )
- url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
- return url, rev, user_pass
-
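For example, splitting a hypothetical pinned VCS URL (the base class leaves any user:password in the netloc; the Subversion override extracts it):

url, rev, user_pass = VersionControl.get_url_rev_and_auth(
    "git+https://example.com/repo.git@v1.0"
)
assert url == "https://example.com/repo.git"  # vcs prefix stripped from the scheme
assert rev == "v1.0"                          # revision taken from after the last "@"
assert user_pass == (None, None)
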
- @staticmethod
- def make_rev_args(
- username: Optional[str], password: Optional[HiddenText]
- ) -> CommandArgs:
- """
- Return the RevOptions "extra arguments" to use in obtain().
- """
- return []
-
- def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:
- """
- Return the URL and RevOptions object to use in obtain(),
- as a tuple (url, rev_options).
- """
- secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
- username, secret_password = user_pass
- password: Optional[HiddenText] = None
- if secret_password is not None:
- password = hide_value(secret_password)
- extra_args = self.make_rev_args(username, password)
- rev_options = self.make_rev_options(rev, extra_args=extra_args)
-
- return hide_url(secret_url), rev_options
-
- @staticmethod
- def normalize_url(url: str) -> str:
- """
- Normalize a URL for comparison by unquoting it and removing any
- trailing slash.
- """
- return urllib.parse.unquote(url).rstrip("/")
-
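Only percent-encoding and trailing slashes are ignored by the comparison; for example:

assert VersionControl.normalize_url("https://example.com/a%20b/") == "https://example.com/a b"
assert VersionControl.compare_urls("https://example.com/repo/", "https://example.com/repo")
assert not VersionControl.compare_urls("https://example.com/repo", "https://example.com/other")
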
- @classmethod
- def compare_urls(cls, url1: str, url2: str) -> bool:
- """
- Compare two repo URLs for identity, ignoring incidental differences.
- """
- return cls.normalize_url(url1) == cls.normalize_url(url2)
-
- def fetch_new(
- self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
- ) -> None:
- """
- Fetch a revision from a repository, in the case that this is the
- first fetch from the repository.
-
- Args:
- dest: the directory to fetch the repository to.
- rev_options: a RevOptions object.
- verbosity: verbosity level.
- """
- raise NotImplementedError
-
- def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- """
- Switch the repo at ``dest`` to point to ``url``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
- """
- Update an already-existing repo to the given ``rev_options``.
-
- Args:
- rev_options: a RevOptions object.
- """
- raise NotImplementedError
-
- @classmethod
- def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
- """
- Return whether the id of the current commit equals the given name.
-
- Args:
- dest: the repository directory.
- name: a string name.
- """
- raise NotImplementedError
-
- def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:
- """
- Install or update in editable mode the package represented by this
- VersionControl object.
-
- :param dest: the repository directory in which to install or update.
- :param url: the repository URL starting with a vcs prefix.
- :param verbosity: verbosity level.
- """
- url, rev_options = self.get_url_rev_options(url)
-
- if not os.path.exists(dest):
- self.fetch_new(dest, url, rev_options, verbosity=verbosity)
- return
-
- rev_display = rev_options.to_display()
- if self.is_repository_directory(dest):
- existing_url = self.get_remote_url(dest)
- if self.compare_urls(existing_url, url.secret):
- logger.debug(
- "%s in %s exists, and has correct URL (%s)",
- self.repo_name.title(),
- display_path(dest),
- url,
- )
- if not self.is_commit_id_equal(dest, rev_options.rev):
- logger.info(
- "Updating %s %s%s",
- display_path(dest),
- self.repo_name,
- rev_display,
- )
- self.update(dest, url, rev_options)
- else:
- logger.info("Skipping because already up-to-date.")
- return
-
- logger.warning(
- "%s %s in %s exists with URL %s",
- self.name,
- self.repo_name,
- display_path(dest),
- existing_url,
- )
- prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
- else:
- logger.warning(
- "Directory %s already exists, and is not a %s %s.",
- dest,
- self.name,
- self.repo_name,
- )
- # https://github.com/python/mypy/issues/1174
- prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore
-
- logger.warning(
- "The plan is to install the %s repository %s",
- self.name,
- url,
- )
- response = ask_path_exists(f"What to do? {prompt[0]}", prompt[1])
-
- if response == "a":
- sys.exit(-1)
-
- if response == "w":
- logger.warning("Deleting %s", display_path(dest))
- rmtree(dest)
- self.fetch_new(dest, url, rev_options, verbosity=verbosity)
- return
-
- if response == "b":
- dest_dir = backup_dir(dest)
- logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
- shutil.move(dest, dest_dir)
- self.fetch_new(dest, url, rev_options, verbosity=verbosity)
- return
-
- # Do nothing if the response is "i".
- if response == "s":
- logger.info(
- "Switching %s %s to %s%s",
- self.repo_name,
- display_path(dest),
- url,
- rev_display,
- )
- self.switch(dest, url, rev_options)
-
- def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:
- """
- Clean up the current location and download the repository at ``url``
- (including VCS metadata) into ``location``.
-
- :param url: the repository URL starting with a vcs prefix.
- :param verbosity: verbosity level.
- """
- if os.path.exists(location):
- rmtree(location)
- self.obtain(location, url=url, verbosity=verbosity)
-
- @classmethod
- def get_remote_url(cls, location: str) -> str:
- """
- Return the URL used at ``location``.
-
- Raises RemoteNotFoundError if the repository does not have a remote
- url configured.
- """
- raise NotImplementedError
-
- @classmethod
- def get_revision(cls, location: str) -> str:
- """
- Return the current commit id of the files at the given location.
- """
- raise NotImplementedError
-
- @classmethod
- def run_command(
- cls,
- cmd: Union[List[str], CommandArgs],
- show_stdout: bool = True,
- cwd: Optional[str] = None,
- on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
- extra_ok_returncodes: Optional[Iterable[int]] = None,
- command_desc: Optional[str] = None,
- extra_environ: Optional[Mapping[str, Any]] = None,
- spinner: Optional[SpinnerInterface] = None,
- log_failed_cmd: bool = True,
- stdout_only: bool = False,
- ) -> str:
- """
- Run a VCS subcommand.
- This is simply a wrapper around call_subprocess that adds the VCS
- command name and checks that the VCS is available.
- """
- cmd = make_command(cls.name, *cmd)
- if command_desc is None:
- command_desc = format_command_args(cmd)
- try:
- return call_subprocess(
- cmd,
- show_stdout,
- cwd,
- on_returncode=on_returncode,
- extra_ok_returncodes=extra_ok_returncodes,
- command_desc=command_desc,
- extra_environ=extra_environ,
- unset_environ=cls.unset_environ,
- spinner=spinner,
- log_failed_cmd=log_failed_cmd,
- stdout_only=stdout_only,
- )
- except FileNotFoundError:
- # errno.ENOENT = no such file or directory
- # In other words, the VCS executable isn't available
- raise BadCommand(
- f"Cannot find command {cls.name!r} - do you have "
- f"{cls.name!r} installed and in your PATH?"
- )
- except PermissionError:
- # errno.EACCES = Permission denied
- # This error occurs, for instance, when the command is installed
- # only for another user, so the current user doesn't have
- # permission to invoke the other user's command.
- raise BadCommand(
- f"No permission to execute {cls.name!r} - install it "
- f"locally, globally (ask admin), or check your PATH. "
- f"See possible solutions at "
- f"https://pip.pypa.io/en/latest/reference/pip_freeze/"
- f"#fixing-permission-denied."
- )
-
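Because run_command() prepends the class's name, subclasses issue VCS commands without naming the executable. A hypothetical call against pip's Git backend (repo_dir is assumed to be an existing checkout):

from pip._internal.vcs.git import Git

repo_dir = "/path/to/checkout"  # hypothetical
# Runs `git rev-parse HEAD` in repo_dir and returns the captured stdout;
# raises BadCommand if the `git` executable cannot be found on PATH.
sha = Git.run_command(
    ["rev-parse", "HEAD"], show_stdout=False, stdout_only=True, cwd=repo_dir
)
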
- @classmethod
- def is_repository_directory(cls, path: str) -> bool:
- """
- Return whether a directory path is a repository directory.
- """
- logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)
- return os.path.exists(os.path.join(path, cls.dirname))
-
- @classmethod
- def get_repository_root(cls, location: str) -> Optional[str]:
- """
- Return the "root" (top-level) directory controlled by the vcs,
- or `None` if the directory is not part of any repository.
-
- It is meant to be overridden to implement smarter detection
- mechanisms for specific vcs.
-
- This can do more than is_repository_directory() alone. For
- example, the Git override checks that Git is actually available.
- """
- if cls.is_repository_directory(location):
- return location
- return None
diff --git a/venv/lib/python3.11/site-packages/pip/_internal/wheel_builder.py b/venv/lib/python3.11/site-packages/pip/_internal/wheel_builder.py
deleted file mode 100644
index b1debe3..0000000
--- a/venv/lib/python3.11/site-packages/pip/_internal/wheel_builder.py
+++ /dev/null
@@ -1,354 +0,0 @@
-"""Orchestrator for building wheels from InstallRequirements.
-"""
-
-import logging
-import os.path
-import re
-import shutil
-from typing import Iterable, List, Optional, Tuple
-
-from pip._vendor.packaging.utils import canonicalize_name, canonicalize_version
-from pip._vendor.packaging.version import InvalidVersion, Version
-
-from pip._internal.cache import WheelCache
-from pip._internal.exceptions import InvalidWheelFilename, UnsupportedWheel
-from pip._internal.metadata import FilesystemWheel, get_wheel_distribution
-from pip._internal.models.link import Link
-from pip._internal.models.wheel import Wheel
-from pip._internal.operations.build.wheel import build_wheel_pep517
-from pip._internal.operations.build.wheel_editable import build_wheel_editable
-from pip._internal.operations.build.wheel_legacy import build_wheel_legacy
-from pip._internal.req.req_install import InstallRequirement
-from pip._internal.utils.logging import indent_log
-from pip._internal.utils.misc import ensure_dir, hash_file
-from pip._internal.utils.setuptools_build import make_setuptools_clean_args
-from pip._internal.utils.subprocess import call_subprocess
-from pip._internal.utils.temp_dir import TempDirectory
-from pip._internal.utils.urls import path_to_url
-from pip._internal.vcs import vcs
-
-logger = logging.getLogger(__name__)
-
-_egg_info_re = re.compile(r"([a-z0-9_.]+)-([a-z0-9_.!+-]+)", re.IGNORECASE)
-
-BuildResult = Tuple[List[InstallRequirement], List[InstallRequirement]]
-
-
-def _contains_egg_info(s: str) -> bool:
- """Determine whether the string looks like an egg_info.
-
- :param s: The string to parse. E.g. foo-2.1
- """
- return bool(_egg_info_re.search(s))
-
-
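The regex only requires a name-hyphen-version shape somewhere in the string, for example:

assert _contains_egg_info("foo-2.1")          # name-version: looks like an egg_info
assert _contains_egg_info("pkg/foo-2.1.zip")  # a match anywhere in the string counts
assert not _contains_egg_info("foo")          # no hyphenated version part
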
-def _should_build(
- req: InstallRequirement,
- need_wheel: bool,
-) -> bool:
- """Return whether an InstallRequirement should be built into a wheel."""
- if req.constraint:
- # never build requirements that are merely constraints
- return False
- if req.is_wheel:
- if need_wheel:
- logger.info(
- "Skipping %s, due to already being a wheel.",
- req.name,
- )
- return False
-
- if need_wheel:
- # i.e. pip wheel, not pip install
- return True
-
- # From this point, this concerns the pip install command only
- # (need_wheel=False).
-
- if not req.source_dir:
- return False
-
- if req.editable:
- # we only build PEP 660 editable requirements
- return req.supports_pyproject_editable()
-
- return True
-
-
-def should_build_for_wheel_command(
- req: InstallRequirement,
-) -> bool:
- return _should_build(req, need_wheel=True)
-
-
-def should_build_for_install_command(
- req: InstallRequirement,
-) -> bool:
- return _should_build(req, need_wheel=False)
-
-
-def _should_cache(
- req: InstallRequirement,
-) -> Optional[bool]:
- """
- Return whether a built InstallRequirement can be stored in the persistent
- wheel cache, assuming the wheel cache is available, and _should_build()
- has determined a wheel needs to be built.
- """
- if req.editable or not req.source_dir:
- # never cache editable requirements
- return False
-
- if req.link and req.link.is_vcs:
- # VCS checkout. Do not cache
- # unless it points to an immutable commit hash.
- assert not req.editable
- assert req.source_dir
- vcs_backend = vcs.get_backend_for_scheme(req.link.scheme)
- assert vcs_backend
- if vcs_backend.is_immutable_rev_checkout(req.link.url, req.source_dir):
- return True
- return False
-
- assert req.link
- base, ext = req.link.splitext()
- if _contains_egg_info(base):
- return True
-
- # Otherwise, do not cache.
- return False
-
-
-def _get_cache_dir(
- req: InstallRequirement,
- wheel_cache: WheelCache,
-) -> str:
- """Return the persistent or temporary cache directory where the built
- wheel needs to be stored.
- """
- cache_available = bool(wheel_cache.cache_dir)
- assert req.link
- if cache_available and _should_cache(req):
- cache_dir = wheel_cache.get_path_for_link(req.link)
- else:
- cache_dir = wheel_cache.get_ephem_path_for_link(req.link)
- return cache_dir
-
-
-def _verify_one(req: InstallRequirement, wheel_path: str) -> None:
- canonical_name = canonicalize_name(req.name or "")
- w = Wheel(os.path.basename(wheel_path))
- if canonicalize_name(w.name) != canonical_name:
- raise InvalidWheelFilename(
- f"Wheel has unexpected file name: expected {canonical_name!r}, "
- f"got {w.name!r}",
- )
- dist = get_wheel_distribution(FilesystemWheel(wheel_path), canonical_name)
- dist_verstr = str(dist.version)
- if canonicalize_version(dist_verstr) != canonicalize_version(w.version):
- raise InvalidWheelFilename(
- f"Wheel has unexpected file name: expected {dist_verstr!r}, "
- f"got {w.version!r}",
- )
- metadata_version_value = dist.metadata_version
- if metadata_version_value is None:
- raise UnsupportedWheel("Missing Metadata-Version")
- try:
- metadata_version = Version(metadata_version_value)
- except InvalidVersion:
- msg = f"Invalid Metadata-Version: {metadata_version_value}"
- raise UnsupportedWheel(msg)
- if metadata_version >= Version("1.2") and not isinstance(dist.version, Version):
- raise UnsupportedWheel(
- f"Metadata 1.2 mandates PEP 440 version, but {dist_verstr!r} is not"
- )
-
-
-def _build_one(
- req: InstallRequirement,
- output_dir: str,
- verify: bool,
- build_options: List[str],
- global_options: List[str],
- editable: bool,
-) -> Optional[str]:
- """Build one wheel.
-
- :return: The filename of the built wheel, or None if the build failed.
- """
- artifact = "editable" if editable else "wheel"
- try:
- ensure_dir(output_dir)
- except OSError as e:
- logger.warning(
- "Building %s for %s failed: %s",
- artifact,
- req.name,
- e,
- )
- return None
-
- # Install build deps into temporary directory (PEP 518)
- with req.build_env:
- wheel_path = _build_one_inside_env(
- req, output_dir, build_options, global_options, editable
- )
- if wheel_path and verify:
- try:
- _verify_one(req, wheel_path)
- except (InvalidWheelFilename, UnsupportedWheel) as e:
- logger.warning("Built %s for %s is invalid: %s", artifact, req.name, e)
- return None
- return wheel_path
-
-
-def _build_one_inside_env(
- req: InstallRequirement,
- output_dir: str,
- build_options: List[str],
- global_options: List[str],
- editable: bool,
-) -> Optional[str]:
- with TempDirectory(kind="wheel") as temp_dir:
- assert req.name
- if req.use_pep517:
- assert req.metadata_directory
- assert req.pep517_backend
- if global_options:
- logger.warning(
- "Ignoring --global-option when building %s using PEP 517", req.name
- )
- if build_options:
- logger.warning(
- "Ignoring --build-option when building %s using PEP 517", req.name
- )
- if editable:
- wheel_path = build_wheel_editable(
- name=req.name,
- backend=req.pep517_backend,
- metadata_directory=req.metadata_directory,
- tempd=temp_dir.path,
- )
- else:
- wheel_path = build_wheel_pep517(
- name=req.name,
- backend=req.pep517_backend,
- metadata_directory=req.metadata_directory,
- tempd=temp_dir.path,
- )
- else:
- wheel_path = build_wheel_legacy(
- name=req.name,
- setup_py_path=req.setup_py_path,
- source_dir=req.unpacked_source_directory,
- global_options=global_options,
- build_options=build_options,
- tempd=temp_dir.path,
- )
-
- if wheel_path is not None:
- wheel_name = os.path.basename(wheel_path)
- dest_path = os.path.join(output_dir, wheel_name)
- try:
- wheel_hash, length = hash_file(wheel_path)
- shutil.move(wheel_path, dest_path)
- logger.info(
- "Created wheel for %s: filename=%s size=%d sha256=%s",
- req.name,
- wheel_name,
- length,
- wheel_hash.hexdigest(),
- )
- logger.info("Stored in directory: %s", output_dir)
- return dest_path
- except Exception as e:
- logger.warning(
- "Building wheel for %s failed: %s",
- req.name,
- e,
- )
- # Ignore the return value; we can't do anything else useful.
- if not req.use_pep517:
- _clean_one_legacy(req, global_options)
- return None
-
-
-def _clean_one_legacy(req: InstallRequirement, global_options: List[str]) -> bool:
- clean_args = make_setuptools_clean_args(
- req.setup_py_path,
- global_options=global_options,
- )
-
- logger.info("Running setup.py clean for %s", req.name)
- try:
- call_subprocess(
- clean_args, command_desc="python setup.py clean", cwd=req.source_dir
- )
- return True
- except Exception:
- logger.error("Failed cleaning build dir for %s", req.name)
- return False
-
-
-def build(
- requirements: Iterable[InstallRequirement],
- wheel_cache: WheelCache,
- verify: bool,
- build_options: List[str],
- global_options: List[str],
-) -> BuildResult:
- """Build wheels.
-
- :return: The list of InstallRequirements that were built successfully and
- the list of InstallRequirements that failed to build.
- """
- if not requirements:
- return [], []
-
- # Build the wheels.
- logger.info(
- "Building wheels for collected packages: %s",
- ", ".join(req.name for req in requirements), # type: ignore
- )
-
- with indent_log():
- build_successes, build_failures = [], []
- for req in requirements:
- assert req.name
- cache_dir = _get_cache_dir(req, wheel_cache)
- wheel_file = _build_one(
- req,
- cache_dir,
- verify,
- build_options,
- global_options,
- req.editable and req.permit_editable_wheels,
- )
- if wheel_file:
- # Record the download origin in the cache
- if req.download_info is not None:
- # download_info is guaranteed to be set because when we build an
- # InstallRequirement it has been through the preparer before, but
- # let's be cautious.
- wheel_cache.record_download_origin(cache_dir, req.download_info)
- # Update the link for this.
- req.link = Link(path_to_url(wheel_file))
- req.local_file_path = req.link.file_path
- assert req.link.is_wheel
- build_successes.append(req)
- else:
- build_failures.append(req)
-
- # notify success/failure
- if build_successes:
- logger.info(
- "Successfully built %s",
- " ".join([req.name for req in build_successes]), # type: ignore
- )
- if build_failures:
- logger.info(
- "Failed to build %s",
- " ".join([req.name for req in build_failures]), # type: ignore
- )
- # Return the requirements that built successfully and those that failed.
- return build_successes, build_failures