path: root/venv/lib/python3.11/site-packages/setuptools/command
Diffstat (limited to 'venv/lib/python3.11/site-packages/setuptools/command')
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__init__.py  12
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc  bin 0 -> 643 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/alias.cpython-311.pyc  bin 0 -> 3910 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc  bin 0 -> 25592 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-311.pyc  bin 0 -> 2198 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc  bin 0 -> 7004 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_clib.cpython-311.pyc  bin 0 -> 4131 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc  bin 0 -> 22018 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc  bin 0 -> 23184 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/develop.cpython-311.pyc  bin 0 -> 10926 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/dist_info.cpython-311.pyc  bin 0 -> 7988 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/easy_install.cpython-311.pyc  bin 0 -> 119343 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-311.pyc  bin 0 -> 51421 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc  bin 0 -> 39841 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc  bin 0 -> 6823 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc  bin 0 -> 4136 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc  bin 0 -> 6418 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc  bin 0 -> 4287 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/py36compat.cpython-311.pyc  bin 0 -> 8044 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/register.cpython-311.pyc  bin 0 -> 1134 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/rotate.cpython-311.pyc  bin 0 -> 4194 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/saveopts.cpython-311.pyc  bin 0 -> 1374 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc  bin 0 -> 13446 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc  bin 0 -> 7686 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc  bin 0 -> 14627 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload.cpython-311.pyc  bin 0 -> 1098 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload_docs.cpython-311.pyc  bin 0 -> 11949 bytes
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/alias.py  78
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/bdist_egg.py  457
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/bdist_rpm.py  40
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/build.py  146
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/build_clib.py  101
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/build_ext.py  383
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/build_py.py  368
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/develop.py  193
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/dist_info.py  142
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/easy_install.py  2312
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/editable_wheel.py  844
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/egg_info.py  763
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/install.py  139
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/install_egg_info.py  63
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/install_lib.py  122
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/install_scripts.py  70
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/launcher manifest.xml  15
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/py36compat.py  134
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/register.py  18
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/rotate.py  64
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/saveopts.py  22
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/sdist.py  210
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/setopt.py  149
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/test.py  251
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/upload.py  17
-rw-r--r--  venv/lib/python3.11/site-packages/setuptools/command/upload_docs.py  213
53 files changed, 7326 insertions, 0 deletions
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__init__.py b/venv/lib/python3.11/site-packages/setuptools/command/__init__.py
new file mode 100644
index 0000000..5acd768
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__init__.py
@@ -0,0 +1,12 @@
+from distutils.command.bdist import bdist
+import sys
+
+if 'egg' not in bdist.format_commands:
+ try:
+ bdist.format_commands['egg'] = ('bdist_egg', "Python .egg file")
+ except TypeError:
+ # For backward compatibility with older distutils (stdlib)
+ bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+ bdist.format_commands.append('egg')
+
+del bdist, sys
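
The hunk above registers the 'egg' format with distutils' bdist, covering both the newer mapping-style ``format_commands`` and the older parallel dict/list API. A minimal sketch (assuming setuptools is importable in the current environment) to confirm the registration takes effect::

    # Importing setuptools.command runs the registration in __init__.py above;
    # 'egg' should then be listed among bdist's known formats.
    import setuptools.command  # noqa: F401  (imported for its side effect)
    from distutils.command.bdist import bdist

    assert 'egg' in bdist.format_commands
    print(bdist.format_commands)
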
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..41769ec
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/__init__.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/alias.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/alias.cpython-311.pyc
new file mode 100644
index 0000000..140b1e4
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/alias.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc
new file mode 100644
index 0000000..2166664
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_egg.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-311.pyc
new file mode 100644
index 0000000..6b265ea
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/bdist_rpm.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc
new file mode 100644
index 0000000..a9ddd3e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_clib.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_clib.cpython-311.pyc
new file mode 100644
index 0000000..3f8198e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_clib.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc
new file mode 100644
index 0000000..dac975a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_ext.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc
new file mode 100644
index 0000000..d277dd8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/build_py.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/develop.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/develop.cpython-311.pyc
new file mode 100644
index 0000000..e0c4acb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/develop.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/dist_info.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/dist_info.cpython-311.pyc
new file mode 100644
index 0000000..c64225e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/dist_info.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/easy_install.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/easy_install.cpython-311.pyc
new file mode 100644
index 0000000..f01f0ed
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/easy_install.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-311.pyc
new file mode 100644
index 0000000..e3137fb
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/editable_wheel.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc
new file mode 100644
index 0000000..af07bea
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/egg_info.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc
new file mode 100644
index 0000000..c45f82a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc
new file mode 100644
index 0000000..ca4124b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_egg_info.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc
new file mode 100644
index 0000000..38d55ff
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_lib.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc
new file mode 100644
index 0000000..7030f1a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/install_scripts.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/py36compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/py36compat.cpython-311.pyc
new file mode 100644
index 0000000..56aef72
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/py36compat.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/register.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/register.cpython-311.pyc
new file mode 100644
index 0000000..ca7c628
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/register.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/rotate.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/rotate.cpython-311.pyc
new file mode 100644
index 0000000..eef589f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/rotate.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/saveopts.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/saveopts.cpython-311.pyc
new file mode 100644
index 0000000..a23bd9c
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/saveopts.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc
new file mode 100644
index 0000000..844e566
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/sdist.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc
new file mode 100644
index 0000000..300ee66
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/setopt.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc
new file mode 100644
index 0000000..61abdb8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/test.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload.cpython-311.pyc
new file mode 100644
index 0000000..3844e3f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload_docs.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload_docs.cpython-311.pyc
new file mode 100644
index 0000000..0739aba
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/__pycache__/upload_docs.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/alias.py b/venv/lib/python3.11/site-packages/setuptools/command/alias.py
new file mode 100644
index 0000000..452a924
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/alias.py
@@ -0,0 +1,78 @@
+from distutils.errors import DistutilsOptionError
+
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+
+def shquote(arg):
+ """Quote an argument for later parsing by shlex.split()"""
+ for c in '"', "'", "\\", "#":
+ if c in arg:
+ return repr(arg)
+ if arg.split() != [arg]:
+ return repr(arg)
+ return arg
+
+
+class alias(option_base):
+ """Define a shortcut that invokes one or more commands"""
+
+ description = "define a shortcut to invoke one or more commands"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('remove', 'r', 'remove (unset) the alias'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.args = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.remove and len(self.args) != 1:
+ raise DistutilsOptionError(
+ "Must specify exactly one argument (the alias name) when "
+ "using --remove"
+ )
+
+ def run(self):
+ aliases = self.distribution.get_option_dict('aliases')
+
+ if not self.args:
+ print("Command Aliases")
+ print("---------------")
+ for alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+
+ elif len(self.args) == 1:
+ alias, = self.args
+ if self.remove:
+ command = None
+ elif alias in aliases:
+ print("setup.py alias", format_alias(alias, aliases))
+ return
+ else:
+ print("No alias definition found for %r" % alias)
+ return
+ else:
+ alias = self.args[0]
+ command = ' '.join(map(shquote, self.args[1:]))
+
+ edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+ source, command = aliases[name]
+ if source == config_file('global'):
+ source = '--global-config '
+ elif source == config_file('user'):
+ source = '--user-config '
+ elif source == config_file('local'):
+ source = ''
+ else:
+ source = '--filename=%r' % source
+ return source + name + ' ' + command
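
``shquote()`` above falls back to ``repr()`` whenever an argument contains quotes, backslashes, ``#`` or whitespace, so the stored alias can later be re-parsed with ``shlex.split()``. A small illustrative check (sample arguments chosen for the example)::

    import shlex
    from setuptools.command.alias import shquote

    # Each quoted form should round-trip through shlex.split().
    for arg in ["plain", "has space", 'say "hi"']:
        quoted = shquote(arg)
        assert shlex.split(quoted) == [arg]
        print(arg, "->", quoted)
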
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/bdist_egg.py b/venv/lib/python3.11/site-packages/setuptools/command/bdist_egg.py
new file mode 100644
index 0000000..11a1c6b
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/bdist_egg.py
@@ -0,0 +1,457 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+from distutils.dir_util import remove_tree, mkpath
+from distutils import log
+from types import CodeType
+import sys
+import os
+import re
+import textwrap
+import marshal
+
+from pkg_resources import get_build_platform, Distribution
+from setuptools.extension import Library
+from setuptools import Command
+from .._path import ensure_directory
+
+from sysconfig import get_path, get_python_version
+
+
+def _get_purelib():
+ return get_path("purelib")
+
+
+def strip_module(filename):
+ if '.' in filename:
+ filename = os.path.splitext(filename)[0]
+ if filename.endswith('module'):
+ filename = filename[:-6]
+ return filename
+
+
+def sorted_walk(dir):
+ """Do os.walk in a reproducible way,
+ independent of indeterministic filesystem readdir order
+ """
+ for base, dirs, files in os.walk(dir):
+ dirs.sort()
+ files.sort()
+ yield base, dirs, files
+
+
+def write_stub(resource, pyfile):
+ _stub_template = textwrap.dedent("""
+ def __bootstrap__():
+ global __bootstrap__, __loader__, __file__
+ import sys, pkg_resources, importlib.util
+ __file__ = pkg_resources.resource_filename(__name__, %r)
+ __loader__ = None; del __bootstrap__, __loader__
+ spec = importlib.util.spec_from_file_location(__name__,__file__)
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ __bootstrap__()
+ """).lstrip()
+ with open(pyfile, 'w') as f:
+ f.write(_stub_template % resource)
+
+
+class bdist_egg(Command):
+ description = "create an \"egg\" distribution"
+
+ user_options = [
+ ('bdist-dir=', 'b',
+ "temporary directory for creating the distribution"),
+ ('plat-name=', 'p', "platform name to embed in generated filenames "
+ "(default: %s)" % get_build_platform()),
+ ('exclude-source-files', None,
+ "remove all .py files from the generated egg"),
+ ('keep-temp', 'k',
+ "keep the pseudo-installation tree around after " +
+ "creating the distribution archive"),
+ ('dist-dir=', 'd',
+ "directory to put final built distributions in"),
+ ('skip-build', None,
+ "skip rebuilding everything (for testing/debugging)"),
+ ]
+
+ boolean_options = [
+ 'keep-temp', 'skip-build', 'exclude-source-files'
+ ]
+
+ def initialize_options(self):
+ self.bdist_dir = None
+ self.plat_name = None
+ self.keep_temp = 0
+ self.dist_dir = None
+ self.skip_build = 0
+ self.egg_output = None
+ self.exclude_source_files = None
+
+ def finalize_options(self):
+ ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
+ self.egg_info = ei_cmd.egg_info
+
+ if self.bdist_dir is None:
+ bdist_base = self.get_finalized_command('bdist').bdist_base
+ self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+ if self.plat_name is None:
+ self.plat_name = get_build_platform()
+
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ if self.egg_output is None:
+
+ # Compute filename of the output egg
+ basename = Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+ get_python_version(),
+ self.distribution.has_ext_modules() and self.plat_name
+ ).egg_name()
+
+ self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
+
+ def do_install_data(self):
+ # Hack for packages that install data to install's --install-lib
+ self.get_finalized_command('install').install_lib = self.bdist_dir
+
+ site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
+ old, self.distribution.data_files = self.distribution.data_files, []
+
+ for item in old:
+ if isinstance(item, tuple) and len(item) == 2:
+ if os.path.isabs(item[0]):
+ realpath = os.path.realpath(item[0])
+ normalized = os.path.normcase(realpath)
+ if normalized == site_packages or normalized.startswith(
+ site_packages + os.sep
+ ):
+ item = realpath[len(site_packages) + 1:], item[1]
+ # XXX else: raise ???
+ self.distribution.data_files.append(item)
+
+ try:
+ log.info("installing package data to %s", self.bdist_dir)
+ self.call_command('install_data', force=0, root=None)
+ finally:
+ self.distribution.data_files = old
+
+ def get_outputs(self):
+ return [self.egg_output]
+
+ def call_command(self, cmdname, **kw):
+ """Invoke reinitialized command `cmdname` with keyword args"""
+ for dirname in INSTALL_DIRECTORY_ATTRS:
+ kw.setdefault(dirname, self.bdist_dir)
+ kw.setdefault('skip_build', self.skip_build)
+ kw.setdefault('dry_run', self.dry_run)
+ cmd = self.reinitialize_command(cmdname, **kw)
+ self.run_command(cmdname)
+ return cmd
+
+ def run(self): # noqa: C901 # is too complex (14) # FIXME
+ # Generate metadata first
+ self.run_command("egg_info")
+ # We run install_lib before install_data, because some data hacks
+ # pull their data path from the install_lib command.
+ log.info("installing library code to %s", self.bdist_dir)
+ instcmd = self.get_finalized_command('install')
+ old_root = instcmd.root
+ instcmd.root = None
+ if self.distribution.has_c_libraries() and not self.skip_build:
+ self.run_command('build_clib')
+ cmd = self.call_command('install_lib', warn_dir=0)
+ instcmd.root = old_root
+
+ all_outputs, ext_outputs = self.get_ext_outputs()
+ self.stubs = []
+ to_compile = []
+ for (p, ext_name) in enumerate(ext_outputs):
+ filename, ext = os.path.splitext(ext_name)
+ pyfile = os.path.join(self.bdist_dir, strip_module(filename) +
+ '.py')
+ self.stubs.append(pyfile)
+ log.info("creating stub loader for %s", ext_name)
+ if not self.dry_run:
+ write_stub(os.path.basename(ext_name), pyfile)
+ to_compile.append(pyfile)
+ ext_outputs[p] = ext_name.replace(os.sep, '/')
+
+ if to_compile:
+ cmd.byte_compile(to_compile)
+ if self.distribution.data_files:
+ self.do_install_data()
+
+ # Make the EGG-INFO directory
+ archive_root = self.bdist_dir
+ egg_info = os.path.join(archive_root, 'EGG-INFO')
+ self.mkpath(egg_info)
+ if self.distribution.scripts:
+ script_dir = os.path.join(egg_info, 'scripts')
+ log.info("installing scripts to %s", script_dir)
+ self.call_command('install_scripts', install_dir=script_dir,
+ no_ep=1)
+
+ self.copy_metadata_to(egg_info)
+ native_libs = os.path.join(egg_info, "native_libs.txt")
+ if all_outputs:
+ log.info("writing %s", native_libs)
+ if not self.dry_run:
+ ensure_directory(native_libs)
+ libs_file = open(native_libs, 'wt')
+ libs_file.write('\n'.join(all_outputs))
+ libs_file.write('\n')
+ libs_file.close()
+ elif os.path.isfile(native_libs):
+ log.info("removing %s", native_libs)
+ if not self.dry_run:
+ os.unlink(native_libs)
+
+ write_safety_flag(
+ os.path.join(archive_root, 'EGG-INFO'), self.zip_safe()
+ )
+
+ if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
+ log.warn(
+ "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+ if self.exclude_source_files:
+ self.zap_pyfiles()
+
+ # Make the archive
+ make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+ dry_run=self.dry_run, mode=self.gen_header())
+ if not self.keep_temp:
+ remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+ # Add to 'Distribution.dist_files' so that the "upload" command works
+ getattr(self.distribution, 'dist_files', []).append(
+ ('bdist_egg', get_python_version(), self.egg_output))
+
+ def zap_pyfiles(self):
+ log.info("Removing .py files from temporary directory")
+ for base, dirs, files in walk_egg(self.bdist_dir):
+ for name in files:
+ path = os.path.join(base, name)
+
+ if name.endswith('.py'):
+ log.debug("Deleting %s", path)
+ os.unlink(path)
+
+ if base.endswith('__pycache__'):
+ path_old = path
+
+ pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
+ m = re.match(pattern, name)
+ path_new = os.path.join(
+ base, os.pardir, m.group('name') + '.pyc')
+ log.info(
+ "Renaming file from [%s] to [%s]"
+ % (path_old, path_new))
+ try:
+ os.remove(path_new)
+ except OSError:
+ pass
+ os.rename(path_old, path_new)
+
+ def zip_safe(self):
+ safe = getattr(self.distribution, 'zip_safe', None)
+ if safe is not None:
+ return safe
+ log.warn("zip_safe flag not set; analyzing archive contents...")
+ return analyze_egg(self.bdist_dir, self.stubs)
+
+ def gen_header(self):
+ return 'w'
+
+ def copy_metadata_to(self, target_dir):
+ "Copy metadata (egg info) to the target_dir"
+ # normalize the path (so that a forward-slash in egg_info will
+ # match using startswith below)
+ norm_egg_info = os.path.normpath(self.egg_info)
+ prefix = os.path.join(norm_egg_info, '')
+ for path in self.ei_cmd.filelist.files:
+ if path.startswith(prefix):
+ target = os.path.join(target_dir, path[len(prefix):])
+ ensure_directory(target)
+ self.copy_file(path, target)
+
+ def get_ext_outputs(self):
+ """Get a list of relative paths to C extensions in the output distro"""
+
+ all_outputs = []
+ ext_outputs = []
+
+ paths = {self.bdist_dir: ''}
+ for base, dirs, files in sorted_walk(self.bdist_dir):
+ for filename in files:
+ if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+ all_outputs.append(paths[base] + filename)
+ for filename in dirs:
+ paths[os.path.join(base, filename)] = (paths[base] +
+ filename + '/')
+
+ if self.distribution.has_ext_modules():
+ build_cmd = self.get_finalized_command('build_ext')
+ for ext in build_cmd.extensions:
+ if isinstance(ext, Library):
+ continue
+ fullname = build_cmd.get_ext_fullname(ext.name)
+ filename = build_cmd.get_ext_filename(fullname)
+ if not os.path.basename(filename).startswith('dl-'):
+ if os.path.exists(os.path.join(self.bdist_dir, filename)):
+ ext_outputs.append(filename)
+
+ return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+def walk_egg(egg_dir):
+ """Walk an unpacked egg's contents, skipping the metadata directory"""
+ walker = sorted_walk(egg_dir)
+ base, dirs, files = next(walker)
+ if 'EGG-INFO' in dirs:
+ dirs.remove('EGG-INFO')
+ yield base, dirs, files
+ for bdf in walker:
+ yield bdf
+
+
+def analyze_egg(egg_dir, stubs):
+ # check for existing flag in EGG-INFO
+ for flag, fn in safety_flags.items():
+ if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)):
+ return flag
+ if not can_scan():
+ return False
+ safe = True
+ for base, dirs, files in walk_egg(egg_dir):
+ for name in files:
+ if name.endswith('.py') or name.endswith('.pyw'):
+ continue
+ elif name.endswith('.pyc') or name.endswith('.pyo'):
+ # always scan, even if we already know we're not safe
+ safe = scan_module(egg_dir, base, name, stubs) and safe
+ return safe
+
+
+def write_safety_flag(egg_dir, safe):
+ # Write or remove zip safety flag file(s)
+ for flag, fn in safety_flags.items():
+ fn = os.path.join(egg_dir, fn)
+ if os.path.exists(fn):
+ if safe is None or bool(safe) != flag:
+ os.unlink(fn)
+ elif safe is not None and bool(safe) == flag:
+ f = open(fn, 'wt')
+ f.write('\n')
+ f.close()
+
+
+safety_flags = {
+ True: 'zip-safe',
+ False: 'not-zip-safe',
+}
+
+
+def scan_module(egg_dir, base, name, stubs):
+ """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+ filename = os.path.join(base, name)
+ if filename[:-1] in stubs:
+ return True # Extension module
+ pkg = base[len(egg_dir) + 1:].replace(os.sep, '.')
+ module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
+ if sys.version_info < (3, 7):
+ skip = 12 # skip magic & date & file size
+ else:
+ skip = 16 # skip magic & reserved? & date & file size
+ f = open(filename, 'rb')
+ f.read(skip)
+ code = marshal.load(f)
+ f.close()
+ safe = True
+ symbols = dict.fromkeys(iter_symbols(code))
+ for bad in ['__file__', '__path__']:
+ if bad in symbols:
+ log.warn("%s: module references %s", module, bad)
+ safe = False
+ if 'inspect' in symbols:
+ for bad in [
+ 'getsource', 'getabsfile', 'getsourcefile', 'getfile',
+ 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+ 'getinnerframes', 'getouterframes', 'stack', 'trace'
+ ]:
+ if bad in symbols:
+ log.warn("%s: module MAY be using inspect.%s", module, bad)
+ safe = False
+ return safe
+
+
+def iter_symbols(code):
+ """Yield names and strings used by `code` and its nested code objects"""
+ for name in code.co_names:
+ yield name
+ for const in code.co_consts:
+ if isinstance(const, str):
+ yield const
+ elif isinstance(const, CodeType):
+ for name in iter_symbols(const):
+ yield name
+
+
+def can_scan():
+ if not sys.platform.startswith('java') and sys.platform != 'cli':
+ # CPython, PyPy, etc.
+ return True
+ log.warn("Unable to analyze compiled code on this platform.")
+ log.warn("Please ask the author to include a 'zip_safe'"
+ " setting (either True or False) in the package's setup.py")
+
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+ 'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+
+def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True,
+ mode='w'):
+ """Create a zip file from all the files under 'base_dir'. The output
+ zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
+ Python module (if available) or the InfoZIP "zip" utility (if installed
+ and found on the default search path). If neither tool is available,
+ raises DistutilsExecError. Returns the name of the output zip file.
+ """
+ import zipfile
+
+ mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+ log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+ def visit(z, dirname, names):
+ for name in names:
+ path = os.path.normpath(os.path.join(dirname, name))
+ if os.path.isfile(path):
+ p = path[len(base_dir) + 1:]
+ if not dry_run:
+ z.write(path, p)
+ log.debug("adding '%s'", p)
+
+ compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
+ if not dry_run:
+ z = zipfile.ZipFile(zip_filename, mode, compression=compression)
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(z, dirname, files)
+ z.close()
+ else:
+ for dirname, dirs, files in sorted_walk(base_dir):
+ visit(None, dirname, files)
+ return zip_filename
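
``zip_safe()``/``analyze_egg()`` above only scan the built egg when the project does not declare the flag itself; declaring ``zip_safe`` in ``setup()`` skips the bytecode inspection done by ``scan_module()``. An illustrative snippet (project metadata is made up)::

    from setuptools import setup

    setup(
        name="example",
        version="0.1",
        py_modules=["example"],
        zip_safe=False,  # bdist_egg.zip_safe() returns this instead of scanning
    )
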
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/bdist_rpm.py b/venv/lib/python3.11/site-packages/setuptools/command/bdist_rpm.py
new file mode 100644
index 0000000..98bf5de
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/bdist_rpm.py
@@ -0,0 +1,40 @@
+import distutils.command.bdist_rpm as orig
+import warnings
+
+from setuptools import SetuptoolsDeprecationWarning
+
+
+class bdist_rpm(orig.bdist_rpm):
+ """
+ Override the default bdist_rpm behavior to do the following:
+
+ 1. Run egg_info to ensure the name and version are properly calculated.
+ 2. Always run 'install' using --single-version-externally-managed to
+ disable eggs in RPM distributions.
+ """
+
+ def run(self):
+ warnings.warn(
+ "bdist_rpm is deprecated and will be removed in a future "
+ "version. Use bdist_wheel (wheel packages) instead.",
+ SetuptoolsDeprecationWarning,
+ )
+
+ # ensure distro name is up-to-date
+ self.run_command('egg_info')
+
+ orig.bdist_rpm.run(self)
+
+ def _make_spec_file(self):
+ spec = orig.bdist_rpm._make_spec_file(self)
+ spec = [
+ line.replace(
+ "setup.py install ",
+ "setup.py install --single-version-externally-managed "
+ ).replace(
+ "%setup",
+ "%setup -n %{name}-%{unmangled_version}"
+ )
+ for line in spec
+ ]
+ return spec
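
``_make_spec_file()`` above applies two plain string substitutions to the spec lines generated by distutils. A toy run of the same transformation (the sample spec lines are invented)::

    spec = [
        "%setup",
        "python setup.py install --root=%{buildroot}",
    ]
    spec = [
        line.replace(
            "setup.py install ",
            "setup.py install --single-version-externally-managed "
        ).replace("%setup", "%setup -n %{name}-%{unmangled_version}")
        for line in spec
    ]
    print(spec)
    # ['%setup -n %{name}-%{unmangled_version}',
    #  'python setup.py install --single-version-externally-managed --root=%{buildroot}']
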
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/build.py b/venv/lib/python3.11/site-packages/setuptools/command/build.py
new file mode 100644
index 0000000..c0676d8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/build.py
@@ -0,0 +1,146 @@
+import sys
+import warnings
+from typing import TYPE_CHECKING, List, Dict
+from distutils.command.build import build as _build
+
+from setuptools import SetuptoolsDeprecationWarning
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+elif TYPE_CHECKING:
+ from typing_extensions import Protocol
+else:
+ from abc import ABC as Protocol
+
+
+_ORIGINAL_SUBCOMMANDS = {"build_py", "build_clib", "build_ext", "build_scripts"}
+
+
+class build(_build):
+ # copy to avoid sharing the object with parent class
+ sub_commands = _build.sub_commands[:]
+
+ def get_sub_commands(self):
+ subcommands = {cmd[0] for cmd in _build.sub_commands}
+ if subcommands - _ORIGINAL_SUBCOMMANDS:
+ msg = """
+ It seems that you are using `distutils.command.build` to add
+ new subcommands. Using `distutils` directly is considered deprecated,
+ please use `setuptools.command.build`.
+ """
+ warnings.warn(msg, SetuptoolsDeprecationWarning)
+ self.sub_commands = _build.sub_commands
+ return super().get_sub_commands()
+
+
+class SubCommand(Protocol):
+ """In order to support editable installations (see :pep:`660`) all
+ build subcommands **SHOULD** implement this protocol. They also **MUST** inherit
+ from ``setuptools.Command``.
+
+ When creating an :pep:`editable wheel <660>`, ``setuptools`` will try to evaluate
+ custom ``build`` subcommands using the following procedure:
+
+ 1. ``setuptools`` will set the ``editable_mode`` attribute to ``True``
+ 2. ``setuptools`` will execute the ``run()`` command.
+
+ .. important::
+ Subcommands **SHOULD** take advantage of ``editable_mode=True`` to adapt
+ their behaviour or perform optimisations.
+
+ For example, if a subcommand doesn't need to generate any extra files and
+ all it does is copy a source file into the build directory,
+ ``run()`` **SHOULD** simply "early return".
+
+ Similarly, if the subcommand creates files that would be placed alongside
+ Python files in the final distribution, during an editable install
+ the command **SHOULD** generate these files "in place" (i.e. write them to
+ the original source directory, instead of using the build directory).
+ Note that ``get_output_mapping()`` should reflect that and include mappings
+ for "in place" builds accordingly.
+
+ 3. ``setuptools`` uses any knowledge it can derive from the return values of
+ ``get_outputs()`` and ``get_output_mapping()`` to create an editable wheel.
+ When relevant ``setuptools`` **MAY** attempt to use file links based on the value
+ of ``get_output_mapping()``. Alternatively, ``setuptools`` **MAY** attempt to use
+ :doc:`import hooks <python:reference/import>` to redirect any attempt to import
+ to the directory with the original source code and other files built in place.
+
+ Please note that custom sub-commands **SHOULD NOT** rely on ``run()`` being
+ executed (or not) to provide correct return values for ``get_outputs()``,
+ ``get_output_mapping()`` or ``get_source_files()``. The ``get_*`` methods should
+ work independently of ``run()``.
+ """
+
+ editable_mode: bool = False
+ """Boolean flag that will be set to ``True`` when setuptools is used for an
+ editable installation (see :pep:`660`).
+ Implementations **SHOULD** explicitly set the default value of this attribute to
+ ``False``.
+ When subcommands run, they can use this flag to perform optimizations or change
+ their behaviour accordingly.
+ """
+
+ build_lib: str
+ """String representing the directory where the build artifacts should be stored,
+ e.g. ``build/lib``.
+ For example, if a distribution wants to provide a Python module named ``pkg.mod``,
+ then a corresponding file should be written to ``{build_lib}/package/module.py``.
+ A way of thinking about this is that the files saved under ``build_lib``
+ would be eventually copied to one of the directories in :obj:`site.PREFIXES`
+ upon installation.
+
+ A command that produces platform-independent files (e.g. compiling text templates
+ into Python functions), **CAN** initialize ``build_lib`` by copying its value from
+ the ``build_py`` command. On the other hand, a command that produces
+ platform-specific files **CAN** initialize ``build_lib`` by copying its value from
+ the ``build_ext`` command. In general this is done inside the ``finalize_options``
+ method with the help of the ``set_undefined_options`` command::
+
+ def finalize_options(self):
+ self.set_undefined_options("build_py", ("build_lib", "build_lib"))
+ ...
+ """
+
+ def initialize_options(self):
+ """(Required by the original :class:`setuptools.Command` interface)"""
+
+ def finalize_options(self):
+ """(Required by the original :class:`setuptools.Command` interface)"""
+
+ def run(self):
+ """(Required by the original :class:`setuptools.Command` interface)"""
+
+ def get_source_files(self) -> List[str]:
+ """
+ Return a list of all files that are used by the command to create the expected
+ outputs.
+ For example, if your build command transpiles Java files into Python, you should
+ list all the Java files here.
+ The primary purpose of this function is to help populate the ``sdist``
+ with all the files necessary to build the distribution.
+ All files should be strings relative to the project root directory.
+ """
+
+ def get_outputs(self) -> List[str]:
+ """
+ Return a list of files intended for distribution as they would have been
+ produced by the build.
+ These files should be strings in the form of
+ ``"{build_lib}/destination/file/path"``.
+
+ .. note::
+ The return value of ``get_outputs()`` should include all files used as keys
+ in ``get_output_mapping()`` plus files that are generated during the build
+ and don't correspond to any source file already present in the project.
+ """
+
+ def get_output_mapping(self) -> Dict[str, str]:
+ """
+ Return a mapping between destination files as they would be produced by the
+ build (dict keys) into the respective existing (source) files (dict values).
+ Existing (source) files should be represented as strings relative to the project
+ root directory.
+ Destination files should be strings in the form of
+ ``"{build_lib}/destination/file/path"``.
+ """
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/build_clib.py b/venv/lib/python3.11/site-packages/setuptools/command/build_clib.py
new file mode 100644
index 0000000..67ce244
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/build_clib.py
@@ -0,0 +1,101 @@
+import distutils.command.build_clib as orig
+from distutils.errors import DistutilsSetupError
+from distutils import log
+from setuptools.dep_util import newer_pairwise_group
+
+
+class build_clib(orig.build_clib):
+ """
+ Override the default build_clib behaviour to do the following:
+
+ 1. Implement a rudimentary timestamp-based dependency system
+ so 'compile()' doesn't run every time.
+ 2. Add more keys to the 'build_info' dictionary:
+ * obj_deps - specify dependencies for each object compiled.
+ this should be a dictionary mapping a key
+ with the source filename to a list of
+ dependencies. Use an empty string for global
+ dependencies.
+ * cflags - specify a list of additional flags to pass to
+ the compiler.
+ """
+
+ def build_libraries(self, libraries):
+ for (lib_name, build_info) in libraries:
+ sources = build_info.get('sources')
+ if sources is None or not isinstance(sources, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'sources' must be present and must be "
+ "a list of source filenames" % lib_name)
+ sources = list(sources)
+
+ log.info("building '%s' library", lib_name)
+
+ # Make sure everything is the correct type.
+ # obj_deps should be a dictionary of keys as sources
+ # and a list/tuple of files that are its dependencies.
+ obj_deps = build_info.get('obj_deps', dict())
+ if not isinstance(obj_deps, dict):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ dependencies = []
+
+ # Get the global dependencies that are specified by the '' key.
+ # These will go into every source's dependency list.
+ global_deps = obj_deps.get('', list())
+ if not isinstance(global_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+
+ # Build the list to be used by newer_pairwise_group
+ # each source will be auto-added to its dependencies.
+ for source in sources:
+ src_deps = [source]
+ src_deps.extend(global_deps)
+ extra_deps = obj_deps.get(source, list())
+ if not isinstance(extra_deps, (list, tuple)):
+ raise DistutilsSetupError(
+ "in 'libraries' option (library '%s'), "
+ "'obj_deps' must be a dictionary of "
+ "type 'source: list'" % lib_name)
+ src_deps.extend(extra_deps)
+ dependencies.append(src_deps)
+
+ expected_objects = self.compiler.object_filenames(
+ sources,
+ output_dir=self.build_temp,
+ )
+
+ if (
+ newer_pairwise_group(dependencies, expected_objects)
+ != ([], [])
+ ):
+ # First, compile the source code to object files in the library
+ # directory. (This should probably change to putting object
+ # files in a temporary build directory.)
+ macros = build_info.get('macros')
+ include_dirs = build_info.get('include_dirs')
+ cflags = build_info.get('cflags')
+ self.compiler.compile(
+ sources,
+ output_dir=self.build_temp,
+ macros=macros,
+ include_dirs=include_dirs,
+ extra_postargs=cflags,
+ debug=self.debug
+ )
+
+ # Now "link" the object files together into a static library.
+ # (On Unix at least, this isn't really linking -- it just
+ # builds an archive. Whatever.)
+ self.compiler.create_static_lib(
+ expected_objects,
+ lib_name,
+ output_dir=self.build_clib,
+ debug=self.debug
+ )
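
The extra ``build_info`` keys documented above (``obj_deps`` and ``cflags``) are supplied through the ``libraries`` argument of ``setup()``. An illustrative configuration (library and file names are made up)::

    from setuptools import setup

    setup(
        name="example",
        libraries=[
            ("foo", {
                "sources": ["src/foo.c", "src/bar.c"],
                "obj_deps": {
                    "": ["include/common.h"],        # global dependencies ('' key)
                    "src/foo.c": ["include/foo.h"],  # per-source dependencies
                },
                "cflags": ["-O2"],
            }),
        ],
    )
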
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/build_ext.py b/venv/lib/python3.11/site-packages/setuptools/command/build_ext.py
new file mode 100644
index 0000000..cbfe3ec
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/build_ext.py
@@ -0,0 +1,383 @@
+import os
+import sys
+import itertools
+from importlib.machinery import EXTENSION_SUFFIXES
+from importlib.util import cache_from_source as _compiled_file_name
+from typing import Dict, Iterator, List, Tuple
+
+from distutils.command.build_ext import build_ext as _du_build_ext
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+from distutils import log
+
+from setuptools.errors import BaseError
+from setuptools.extension import Extension, Library
+
+try:
+ # Attempt to use Cython for building extensions, if available
+ from Cython.Distutils.build_ext import build_ext as _build_ext
+ # Additionally, assert that the compiler module will load
+ # also. Ref #1229.
+ __import__('Cython.Compiler.Main')
+except ImportError:
+ _build_ext = _du_build_ext
+
+# make sure _config_vars is initialized
+get_config_var("LDSHARED")
+from distutils.sysconfig import _config_vars as _CONFIG_VARS # noqa
+
+
+def _customize_compiler_for_shlib(compiler):
+ if sys.platform == "darwin":
+ # building .dylib requires additional compiler flags on OSX; here we
+ # temporarily substitute the pyconfig.h variables so that distutils'
+ # 'customize_compiler' uses them before we build the shared libraries.
+ tmp = _CONFIG_VARS.copy()
+ try:
+ # XXX Help! I don't have any idea whether these are right...
+ _CONFIG_VARS['LDSHARED'] = (
+ "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup")
+ _CONFIG_VARS['CCSHARED'] = " -dynamiclib"
+ _CONFIG_VARS['SO'] = ".dylib"
+ customize_compiler(compiler)
+ finally:
+ _CONFIG_VARS.clear()
+ _CONFIG_VARS.update(tmp)
+ else:
+ customize_compiler(compiler)
+
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+ use_stubs = True
+elif os.name != 'nt':
+ try:
+ import dl
+ use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW')
+ except ImportError:
+ pass
+
+
+def if_dl(s):
+ return s if have_rtld else ''
+
+
+def get_abi3_suffix():
+ """Return the file extension for an abi3-compliant Extension()"""
+ for suffix in EXTENSION_SUFFIXES:
+ if '.abi3' in suffix: # Unix
+ return suffix
+ elif suffix == '.pyd': # Windows
+ return suffix
+
+
+class build_ext(_build_ext):
+ editable_mode: bool = False
+ inplace: bool = False
+
+ def run(self):
+ """Build extensions in build directory, then copy if --inplace"""
+ old_inplace, self.inplace = self.inplace, 0
+ _build_ext.run(self)
+ self.inplace = old_inplace
+ if old_inplace:
+ self.copy_extensions_to_source()
+
+ def _get_inplace_equivalent(self, build_py, ext: Extension) -> Tuple[str, str]:
+ fullname = self.get_ext_fullname(ext.name)
+ filename = self.get_ext_filename(fullname)
+ modpath = fullname.split('.')
+ package = '.'.join(modpath[:-1])
+ package_dir = build_py.get_package_dir(package)
+ inplace_file = os.path.join(package_dir, os.path.basename(filename))
+ regular_file = os.path.join(self.build_lib, filename)
+ return (inplace_file, regular_file)
+
+ def copy_extensions_to_source(self):
+ build_py = self.get_finalized_command('build_py')
+ for ext in self.extensions:
+ inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
+
+ # Always copy, even if source is older than destination, to ensure
+ # that the right extensions for the current Python/platform are
+ # used.
+ if os.path.exists(regular_file) or not ext.optional:
+ self.copy_file(regular_file, inplace_file, level=self.verbose)
+
+ if ext._needs_stub:
+ inplace_stub = self._get_equivalent_stub(ext, inplace_file)
+ self._write_stub_file(inplace_stub, ext, compile=True)
+ # Always compile stub and remove the original (leave the cache behind)
+ # (this behaviour was observed in previous iterations of the code)
+
+ def _get_equivalent_stub(self, ext: Extension, output_file: str) -> str:
+ dir_ = os.path.dirname(output_file)
+ _, _, name = ext.name.rpartition(".")
+ return f"{os.path.join(dir_, name)}.py"
+
+ def _get_output_mapping(self) -> Iterator[Tuple[str, str]]:
+ if not self.inplace:
+ return
+
+ build_py = self.get_finalized_command('build_py')
+ opt = self.get_finalized_command('install_lib').optimize or ""
+
+ for ext in self.extensions:
+ inplace_file, regular_file = self._get_inplace_equivalent(build_py, ext)
+ yield (regular_file, inplace_file)
+
+ if ext._needs_stub:
+ # This version of `build_ext` always builds artifacts in another dir,
+ # when "inplace=True" is given it just copies them back.
+ # This is done in the `copy_extensions_to_source` function, which
+ # always compile stub files via `_compile_and_remove_stub`.
+ # At the end of the process, a `.pyc` stub file is created without the
+ # corresponding `.py`.
+
+ inplace_stub = self._get_equivalent_stub(ext, inplace_file)
+ regular_stub = self._get_equivalent_stub(ext, regular_file)
+ inplace_cache = _compiled_file_name(inplace_stub, optimization=opt)
+ output_cache = _compiled_file_name(regular_stub, optimization=opt)
+ yield (output_cache, inplace_cache)
+
+ def get_ext_filename(self, fullname):
+ so_ext = os.getenv('SETUPTOOLS_EXT_SUFFIX')
+ if so_ext:
+ filename = os.path.join(*fullname.split('.')) + so_ext
+ else:
+ filename = _build_ext.get_ext_filename(self, fullname)
+ so_ext = get_config_var('EXT_SUFFIX')
+
+ if fullname in self.ext_map:
+ ext = self.ext_map[fullname]
+ use_abi3 = getattr(ext, 'py_limited_api') and get_abi3_suffix()
+ if use_abi3:
+ filename = filename[:-len(so_ext)]
+ so_ext = get_abi3_suffix()
+ filename = filename + so_ext
+ if isinstance(ext, Library):
+ fn, ext = os.path.splitext(filename)
+ return self.shlib_compiler.library_filename(fn, libtype)
+ elif use_stubs and ext._links_to_dynamic:
+ d, fn = os.path.split(filename)
+ return os.path.join(d, 'dl-' + fn)
+ return filename
+
+ def initialize_options(self):
+ _build_ext.initialize_options(self)
+ self.shlib_compiler = None
+ self.shlibs = []
+ self.ext_map = {}
+ self.editable_mode = False
+
+ def finalize_options(self):
+ _build_ext.finalize_options(self)
+ self.extensions = self.extensions or []
+ self.check_extensions_list(self.extensions)
+ self.shlibs = [ext for ext in self.extensions
+ if isinstance(ext, Library)]
+ if self.shlibs:
+ self.setup_shlib_compiler()
+ for ext in self.extensions:
+ ext._full_name = self.get_ext_fullname(ext.name)
+ for ext in self.extensions:
+ fullname = ext._full_name
+ self.ext_map[fullname] = ext
+
+ # distutils 3.1 will also ask for module names
+ # XXX what to do with conflicts?
+ self.ext_map[fullname.split('.')[-1]] = ext
+
+ ltd = self.shlibs and self.links_to_dynamic(ext) or False
+ ns = ltd and use_stubs and not isinstance(ext, Library)
+ ext._links_to_dynamic = ltd
+ ext._needs_stub = ns
+ filename = ext._file_name = self.get_ext_filename(fullname)
+ libdir = os.path.dirname(os.path.join(self.build_lib, filename))
+ if ltd and libdir not in ext.library_dirs:
+ ext.library_dirs.append(libdir)
+ if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+ ext.runtime_library_dirs.append(os.curdir)
+
+ if self.editable_mode:
+ self.inplace = True
+
+ def setup_shlib_compiler(self):
+ compiler = self.shlib_compiler = new_compiler(
+ compiler=self.compiler, dry_run=self.dry_run, force=self.force
+ )
+ _customize_compiler_for_shlib(compiler)
+
+ if self.include_dirs is not None:
+ compiler.set_include_dirs(self.include_dirs)
+ if self.define is not None:
+ # 'define' option is a list of (name,value) tuples
+ for (name, value) in self.define:
+ compiler.define_macro(name, value)
+ if self.undef is not None:
+ for macro in self.undef:
+ compiler.undefine_macro(macro)
+ if self.libraries is not None:
+ compiler.set_libraries(self.libraries)
+ if self.library_dirs is not None:
+ compiler.set_library_dirs(self.library_dirs)
+ if self.rpath is not None:
+ compiler.set_runtime_library_dirs(self.rpath)
+ if self.link_objects is not None:
+ compiler.set_link_objects(self.link_objects)
+
+ # hack so distutils' build_extension() builds a library instead
+ compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+ def get_export_symbols(self, ext):
+ if isinstance(ext, Library):
+ return ext.export_symbols
+ return _build_ext.get_export_symbols(self, ext)
+
+ def build_extension(self, ext):
+ ext._convert_pyx_sources_to_lang()
+ _compiler = self.compiler
+ try:
+ if isinstance(ext, Library):
+ self.compiler = self.shlib_compiler
+ _build_ext.build_extension(self, ext)
+ if ext._needs_stub:
+ build_lib = self.get_finalized_command('build_py').build_lib
+ self.write_stub(build_lib, ext)
+ finally:
+ self.compiler = _compiler
+
+ def links_to_dynamic(self, ext):
+ """Return true if 'ext' links to a dynamic lib in the same package"""
+ # XXX this should check to ensure the lib is actually being built
+ # XXX as dynamic, and not just using a locally-found version or a
+ # XXX static-compiled version
+ libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+ pkg = '.'.join(ext._full_name.split('.')[:-1] + [''])
+ return any(pkg + libname in libnames for libname in ext.libraries)
+
+ def get_outputs(self) -> List[str]:
+ if self.inplace:
+ return list(self.get_output_mapping().keys())
+ return sorted(_build_ext.get_outputs(self) + self.__get_stubs_outputs())
+
+ def get_output_mapping(self) -> Dict[str, str]:
+ """See :class:`setuptools.commands.build.SubCommand`"""
+ mapping = self._get_output_mapping()
+ return dict(sorted(mapping, key=lambda x: x[0]))
+
+ def __get_stubs_outputs(self):
+ # assemble the base name for each extension that needs a stub
+ ns_ext_bases = (
+ os.path.join(self.build_lib, *ext._full_name.split('.'))
+ for ext in self.extensions
+ if ext._needs_stub
+ )
+ # pair each base with the extension
+ pairs = itertools.product(ns_ext_bases, self.__get_output_extensions())
+ return list(base + fnext for base, fnext in pairs)
+
+ def __get_output_extensions(self):
+ yield '.py'
+ yield '.pyc'
+ if self.get_finalized_command('build_py').optimize:
+ yield '.pyo'
+
+ def write_stub(self, output_dir, ext, compile=False):
+ stub_file = os.path.join(output_dir, *ext._full_name.split('.')) + '.py'
+ self._write_stub_file(stub_file, ext, compile)
+
+ def _write_stub_file(self, stub_file: str, ext: Extension, compile=False):
+ log.info("writing stub loader for %s to %s", ext._full_name, stub_file)
+ if compile and os.path.exists(stub_file):
+ raise BaseError(stub_file + " already exists! Please delete.")
+ if not self.dry_run:
+ f = open(stub_file, 'w')
+ f.write(
+ '\n'.join([
+ "def __bootstrap__():",
+ " global __bootstrap__, __file__, __loader__",
+ " import sys, os, pkg_resources, importlib.util" +
+ if_dl(", dl"),
+ " __file__ = pkg_resources.resource_filename"
+ "(__name__,%r)"
+ % os.path.basename(ext._file_name),
+ " del __bootstrap__",
+ " if '__loader__' in globals():",
+ " del __loader__",
+ if_dl(" old_flags = sys.getdlopenflags()"),
+ " old_dir = os.getcwd()",
+ " try:",
+ " os.chdir(os.path.dirname(__file__))",
+ if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
+ " spec = importlib.util.spec_from_file_location(",
+ " __name__, __file__)",
+ " mod = importlib.util.module_from_spec(spec)",
+ " spec.loader.exec_module(mod)",
+ " finally:",
+ if_dl(" sys.setdlopenflags(old_flags)"),
+ " os.chdir(old_dir)",
+ "__bootstrap__()",
+ "" # terminal \n
+ ])
+ )
+ f.close()
+ if compile:
+ self._compile_and_remove_stub(stub_file)
+
+ def _compile_and_remove_stub(self, stub_file: str):
+ from distutils.util import byte_compile
+
+ byte_compile([stub_file], optimize=0,
+ force=True, dry_run=self.dry_run)
+ optimize = self.get_finalized_command('install_lib').optimize
+ if optimize > 0:
+ byte_compile([stub_file], optimize=optimize,
+ force=True, dry_run=self.dry_run)
+ if os.path.exists(stub_file) and not self.dry_run:
+ os.unlink(stub_file)
+
+
+if use_stubs or os.name == 'nt':
+ # Build shared libraries
+ #
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ self.link(
+ self.SHARED_LIBRARY, objects, output_libname,
+ output_dir, libraries, library_dirs, runtime_library_dirs,
+ export_symbols, debug, extra_preargs, extra_postargs,
+ build_temp, target_lang
+ )
+else:
+ # Build static libraries everywhere else
+ libtype = 'static'
+
+ def link_shared_object(
+ self, objects, output_libname, output_dir=None, libraries=None,
+ library_dirs=None, runtime_library_dirs=None, export_symbols=None,
+ debug=0, extra_preargs=None, extra_postargs=None, build_temp=None,
+ target_lang=None):
+ # XXX we need to either disallow these attrs on Library instances,
+ # or warn/abort here if set, or something...
+ # libraries=None, library_dirs=None, runtime_library_dirs=None,
+ # export_symbols=None, extra_preargs=None, extra_postargs=None,
+ # build_temp=None
+
+ assert output_dir is None # distutils build_ext doesn't pass this
+ output_dir, filename = os.path.split(output_libname)
+ basename, ext = os.path.splitext(filename)
+ if self.library_filename("x").startswith('lib'):
+ # strip 'lib' prefix; this is kludgy if some platform uses
+ # a different prefix
+ basename = basename[3:]
+
+ self.create_static_lib(
+ objects, basename, output_dir, debug, target_lang
+ )
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/build_py.py b/venv/lib/python3.11/site-packages/setuptools/command/build_py.py
new file mode 100644
index 0000000..ec06274
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/build_py.py
@@ -0,0 +1,368 @@
+from functools import partial
+from glob import glob
+from distutils.util import convert_path
+import distutils.command.build_py as orig
+import os
+import fnmatch
+import textwrap
+import io
+import distutils.errors
+import itertools
+import stat
+import warnings
+from pathlib import Path
+from typing import Dict, Iterable, Iterator, List, Optional, Tuple
+
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
+from setuptools.extern.more_itertools import unique_everseen
+
+
+def make_writable(target):
+ os.chmod(target, os.stat(target).st_mode | stat.S_IWRITE)
+
+
+class build_py(orig.build_py):
+ """Enhanced 'build_py' command that includes data files with packages
+
+ The data files are specified via a 'package_data' argument to 'setup()'.
+ See 'setuptools.dist.Distribution' for more details.
+
+ Also, this version of the 'build_py' command allows you to specify both
+ 'py_modules' and 'packages' in the same setup operation.
+ """
+ editable_mode: bool = False
+ existing_egg_info_dir: Optional[str] = None #: Private API, internal use only.
+
+ def finalize_options(self):
+ orig.build_py.finalize_options(self)
+ self.package_data = self.distribution.package_data
+ self.exclude_package_data = self.distribution.exclude_package_data or {}
+ if 'data_files' in self.__dict__:
+ del self.__dict__['data_files']
+ self.__updated_files = []
+
+ def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
+ link=None, level=1):
+ # Overwrite base class to allow using links
+ if link:
+ infile = str(Path(infile).resolve())
+ outfile = str(Path(outfile).resolve())
+ return super().copy_file(infile, outfile, preserve_mode, preserve_times,
+ link, level)
+
+ def run(self):
+ """Build modules, packages, and copy data files to build directory"""
+ if not (self.py_modules or self.packages) or self.editable_mode:
+ return
+
+ if self.py_modules:
+ self.build_modules()
+
+ if self.packages:
+ self.build_packages()
+ self.build_package_data()
+
+ # Only compile actual .py files, using our base class' idea of what our
+ # output files are.
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))
+
+ def __getattr__(self, attr):
+ "lazily compute data files"
+ if attr == 'data_files':
+ self.data_files = self._get_data_files()
+ return self.data_files
+ return orig.build_py.__getattr__(self, attr)
+
+ def build_module(self, module, module_file, package):
+ outfile, copied = orig.build_py.build_module(self, module, module_file, package)
+ if copied:
+ self.__updated_files.append(outfile)
+ return outfile, copied
+
+ def _get_data_files(self):
+ """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+ self.analyze_manifest()
+ return list(map(self._get_pkg_data_files, self.packages or ()))
+
+ def get_data_files_without_manifest(self):
+ """
+ Generate list of ``(package,src_dir,build_dir,filenames)`` tuples,
+ but without triggering any attempt to analyze or build the manifest.
+ """
+ # Prevent potential errors from an unset `manifest_files`
+ # (which would otherwise be set by `analyze_manifest`)
+ self.__dict__.setdefault('manifest_files', {})
+ return list(map(self._get_pkg_data_files, self.packages or ()))
+
+ def _get_pkg_data_files(self, package):
+ # Locate package source directory
+ src_dir = self.get_package_dir(package)
+
+ # Compute package build directory
+ build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+ # Strip directory from globbed filenames
+ filenames = [
+ os.path.relpath(file, src_dir)
+ for file in self.find_data_files(package, src_dir)
+ ]
+ return package, src_dir, build_dir, filenames
+
+ def find_data_files(self, package, src_dir):
+ """Return filenames for package's data files in 'src_dir'"""
+ patterns = self._get_platform_patterns(
+ self.package_data,
+ package,
+ src_dir,
+ )
+ globs_expanded = map(partial(glob, recursive=True), patterns)
+ # flatten the expanded globs into an iterable of matches
+ globs_matches = itertools.chain.from_iterable(globs_expanded)
+ glob_files = filter(os.path.isfile, globs_matches)
+ files = itertools.chain(
+ self.manifest_files.get(package, []),
+ glob_files,
+ )
+ return self.exclude_data_files(package, src_dir, files)
+
+ def get_outputs(self, include_bytecode=1) -> List[str]:
+ """See :class:`setuptools.commands.build.SubCommand`"""
+ if self.editable_mode:
+ return list(self.get_output_mapping().keys())
+ return super().get_outputs(include_bytecode)
+
+ def get_output_mapping(self) -> Dict[str, str]:
+ """See :class:`setuptools.commands.build.SubCommand`"""
+ mapping = itertools.chain(
+ self._get_package_data_output_mapping(),
+ self._get_module_mapping(),
+ )
+ return dict(sorted(mapping, key=lambda x: x[0]))
+
+ def _get_module_mapping(self) -> Iterator[Tuple[str, str]]:
+ """Iterate over all modules producing (dest, src) pairs."""
+ for (package, module, module_file) in self.find_all_modules():
+ package = package.split('.')
+ filename = self.get_module_outfile(self.build_lib, package, module)
+ yield (filename, module_file)
+
+ def _get_package_data_output_mapping(self) -> Iterator[Tuple[str, str]]:
+ """Iterate over package data producing (dest, src) pairs."""
+ for package, src_dir, build_dir, filenames in self.data_files:
+ for filename in filenames:
+ target = os.path.join(build_dir, filename)
+ srcfile = os.path.join(src_dir, filename)
+ yield (target, srcfile)
+
+ def build_package_data(self):
+ """Copy data files into build directory"""
+ for target, srcfile in self._get_package_data_output_mapping():
+ self.mkpath(os.path.dirname(target))
+ _outf, _copied = self.copy_file(srcfile, target)
+ make_writable(target)
+
+ def analyze_manifest(self):
+ self.manifest_files = mf = {}
+ if not self.distribution.include_package_data:
+ return
+ src_dirs = {}
+ for package in self.packages or ():
+ # Locate package source directory
+ src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+ if (
+ getattr(self, 'existing_egg_info_dir', None)
+ and Path(self.existing_egg_info_dir, "SOURCES.txt").exists()
+ ):
+ egg_info_dir = self.existing_egg_info_dir
+ manifest = Path(egg_info_dir, "SOURCES.txt")
+ files = manifest.read_text(encoding="utf-8").splitlines()
+ else:
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ egg_info_dir = ei_cmd.egg_info
+ files = ei_cmd.filelist.files
+
+ check = _IncludePackageDataAbuse()
+ for path in self._filter_build_files(files, egg_info_dir):
+ d, f = os.path.split(assert_relative(path))
+ prev = None
+ oldf = f
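+ # Walk up the directory tree until a known package source directory
+ # is reached (or no parent is left); 'f' accumulates the file path
+ # relative to that package directory.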
+ while d and d != prev and d not in src_dirs:
+ prev = d
+ d, df = os.path.split(d)
+ f = os.path.join(df, f)
+ if d in src_dirs:
+ if f == oldf:
+ if check.is_module(f):
+ continue # it's a module, not data
+ else:
+ importable = check.importable_subpackage(src_dirs[d], f)
+ if importable:
+ check.warn(importable)
+ mf.setdefault(src_dirs[d], []).append(path)
+
+ def _filter_build_files(self, files: Iterable[str], egg_info: str) -> Iterator[str]:
+ """
+ ``build_meta`` may try to create egg_info outside of the project directory,
+ and this can be problematic for certain plugins (reported in issue #3500).
+
+ Extensions might also list, among their sources, files created in the
+ ``build_lib`` and ``build_temp`` directories.
+
+ This function filters such invalid files out.
+ """
+ build = self.get_finalized_command("build")
+ build_dirs = (egg_info, self.build_lib, build.build_temp, build.build_base)
+ norm_dirs = [os.path.normpath(p) for p in build_dirs if p]
+
+ for file in files:
+ norm_path = os.path.normpath(file)
+ if not os.path.isabs(file) or all(d not in norm_path for d in norm_dirs):
+ yield file
+
+ def get_data_files(self):
+ pass # Lazily compute data files in _get_data_files() function.
+
+ def check_package(self, package, package_dir):
+ """Check namespace packages' __init__ for declare_namespace"""
+ try:
+ return self.packages_checked[package]
+ except KeyError:
+ pass
+
+ init_py = orig.build_py.check_package(self, package, package_dir)
+ self.packages_checked[package] = init_py
+
+ if not init_py or not self.distribution.namespace_packages:
+ return init_py
+
+ for pkg in self.distribution.namespace_packages:
+ if pkg == package or pkg.startswith(package + '.'):
+ break
+ else:
+ return init_py
+
+ with io.open(init_py, 'rb') as f:
+ contents = f.read()
+ if b'declare_namespace' not in contents:
+ raise distutils.errors.DistutilsError(
+ "Namespace package problem: %s is a namespace package, but "
+ "its\n__init__.py does not call declare_namespace()! Please "
+ 'fix it.\n(See the setuptools manual under '
+ '"Namespace Packages" for details.)\n"' % (package,)
+ )
+ return init_py
+
+ def initialize_options(self):
+ self.packages_checked = {}
+ orig.build_py.initialize_options(self)
+ self.editable_mode = False
+ self.existing_egg_info_dir = None
+
+ def get_package_dir(self, package):
+ res = orig.build_py.get_package_dir(self, package)
+ if self.distribution.src_root is not None:
+ return os.path.join(self.distribution.src_root, res)
+ return res
+
+ def exclude_data_files(self, package, src_dir, files):
+ """Filter filenames for package's data files in 'src_dir'"""
+ files = list(files)
+ patterns = self._get_platform_patterns(
+ self.exclude_package_data,
+ package,
+ src_dir,
+ )
+ match_groups = (fnmatch.filter(files, pattern) for pattern in patterns)
+ # flatten the groups of matches into an iterable of matches
+ matches = itertools.chain.from_iterable(match_groups)
+ bad = set(matches)
+ keepers = (fn for fn in files if fn not in bad)
+ # ditch dupes
+ return list(unique_everseen(keepers))
+
+ @staticmethod
+ def _get_platform_patterns(spec, package, src_dir):
+ """
+ yield platform-specific path patterns (suitable for glob
+ or fnmatch) from a glob-based spec (such as
+ self.package_data or self.exclude_package_data)
+ matching package in src_dir.
+ """
+ raw_patterns = itertools.chain(
+ spec.get('', []),
+ spec.get(package, []),
+ )
+ return (
+ # Each pattern has to be converted to a platform-specific path
+ os.path.join(src_dir, convert_path(pattern))
+ for pattern in raw_patterns
+ )
+
+
+def assert_relative(path):
+ if not os.path.isabs(path):
+ return path
+ from distutils.errors import DistutilsSetupError
+
+ msg = (
+ textwrap.dedent(
+ """
+ Error: setup script specifies an absolute path:
+
+ %s
+
+ setup() arguments must *always* be /-separated paths relative to the
+ setup.py directory, *never* absolute paths.
+ """
+ ).lstrip()
+ % path
+ )
+ raise DistutilsSetupError(msg)
+
+
+class _IncludePackageDataAbuse:
+ """Inform users that package or module is included as 'data file'"""
+
+ MESSAGE = """\
+ Installing {importable!r} as data is deprecated, please list it in `packages`.
+ !!\n\n
+ ############################
+ # Package would be ignored #
+ ############################
+ Python recognizes {importable!r} as an importable package,
+ but it is not listed in the `packages` configuration of setuptools.
+
+ {importable!r} has been automatically added to the distribution only
+ because it may contain data files, but this behavior is likely to change
+ in future versions of setuptools (and therefore is considered deprecated).
+
+ Please make sure that {importable!r} is included as a package by using
+ the `packages` configuration field or the proper discovery methods
+ (for example by using `find_namespace_packages(...)`/`find_namespace:`
+ instead of `find_packages(...)`/`find:`).
+
+ You can read more about "package discovery" and "data files" in the
+ setuptools documentation.
+ \n\n!!
+ """
+
+ def __init__(self):
+ self._already_warned = set()
+
+ def is_module(self, file):
+ return file.endswith(".py") and file[:-len(".py")].isidentifier()
+
+ def importable_subpackage(self, parent, file):
+ pkg = Path(file).parent
+ parts = list(itertools.takewhile(str.isidentifier, pkg.parts))
+ if parts:
+ return ".".join([parent, *parts])
+ return None
+
+ def warn(self, importable):
+ if importable not in self._already_warned:
+ msg = textwrap.dedent(self.MESSAGE).format(importable=importable)
+ warnings.warn(msg, SetuptoolsDeprecationWarning, stacklevel=2)
+ self._already_warned.add(importable)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/develop.py b/venv/lib/python3.11/site-packages/setuptools/command/develop.py
new file mode 100644
index 0000000..24fb0a7
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/develop.py
@@ -0,0 +1,193 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsError, DistutilsOptionError
+import os
+import glob
+import io
+
+import pkg_resources
+from setuptools.command.easy_install import easy_install
+from setuptools import namespaces
+import setuptools
+
+
+class develop(namespaces.DevelopInstaller, easy_install):
+ """Set up package for development"""
+
+ description = "install package in 'development mode'"
+
+ user_options = easy_install.user_options + [
+ ("uninstall", "u", "Uninstall this source package"),
+ ("egg-path=", None, "Set the path to be used in the .egg-link file"),
+ ]
+
+ boolean_options = easy_install.boolean_options + ['uninstall']
+
+ command_consumes_arguments = False # override base
+
+ def run(self):
+ if self.uninstall:
+ self.multi_version = True
+ self.uninstall_link()
+ self.uninstall_namespaces()
+ else:
+ self.install_for_development()
+ self.warn_deprecated_options()
+
+ def initialize_options(self):
+ self.uninstall = None
+ self.egg_path = None
+ easy_install.initialize_options(self)
+ self.setup_path = None
+ self.always_copy_from = '.' # always copy eggs installed in curdir
+
+ def finalize_options(self):
+ ei = self.get_finalized_command("egg_info")
+ if ei.broken_egg_info:
+ template = "Please rename %r to %r before using 'develop'"
+ args = ei.egg_info, ei.broken_egg_info
+ raise DistutilsError(template % args)
+ self.args = [ei.egg_name]
+
+ easy_install.finalize_options(self)
+ self.expand_basedirs()
+ self.expand_dirs()
+ # pick up setup-dir .egg files only: no .egg-info
+ self.package_index.scan(glob.glob('*.egg'))
+
+ egg_link_fn = ei.egg_name + '.egg-link'
+ self.egg_link = os.path.join(self.install_dir, egg_link_fn)
+ self.egg_base = ei.egg_base
+ if self.egg_path is None:
+ self.egg_path = os.path.abspath(ei.egg_base)
+
+ target = pkg_resources.normalize_path(self.egg_base)
+ egg_path = pkg_resources.normalize_path(
+ os.path.join(self.install_dir, self.egg_path)
+ )
+ if egg_path != target:
+ raise DistutilsOptionError(
+ "--egg-path must be a relative path from the install"
+ " directory to " + target
+ )
+
+ # Make a distribution for the package's source
+ self.dist = pkg_resources.Distribution(
+ target,
+ pkg_resources.PathMetadata(target, os.path.abspath(ei.egg_info)),
+ project_name=ei.egg_name,
+ )
+
+ self.setup_path = self._resolve_setup_path(
+ self.egg_base,
+ self.install_dir,
+ self.egg_path,
+ )
+
+ @staticmethod
+ def _resolve_setup_path(egg_base, install_dir, egg_path):
+ """
+ Generate a relative path from egg_base back to '.', where the
+ setup script resides, and ensure that this path, joined onto
+ $install_dir/$egg_path, points back at the setup directory.
+ """
+ path_to_setup = egg_base.replace(os.sep, '/').rstrip('/')
+ if path_to_setup != os.curdir:
+ path_to_setup = '../' * (path_to_setup.count('/') + 1)
+ resolved = pkg_resources.normalize_path(
+ os.path.join(install_dir, egg_path, path_to_setup)
+ )
+ if resolved != pkg_resources.normalize_path(os.curdir):
+ raise DistutilsOptionError(
+ "Can't get a consistent path to setup script from"
+ " installation directory",
+ resolved,
+ pkg_resources.normalize_path(os.curdir),
+ )
+ return path_to_setup
+
+ def install_for_development(self):
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ if setuptools.bootstrap_install_from:
+ self.easy_install(setuptools.bootstrap_install_from)
+ setuptools.bootstrap_install_from = None
+
+ self.install_namespaces()
+
+ # create an .egg-link in the installation dir, pointing to our egg
+ log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+ if not self.dry_run:
+ with open(self.egg_link, "w") as f:
+ f.write(self.egg_path + "\n" + self.setup_path)
+ # postprocess the installed distro, fixing up .pth, installing scripts,
+ # and handling requirements
+ self.process_distribution(None, self.dist, not self.no_deps)
+
+ def uninstall_link(self):
+ if os.path.exists(self.egg_link):
+ log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+ egg_link_file = open(self.egg_link)
+ contents = [line.rstrip() for line in egg_link_file]
+ egg_link_file.close()
+ if contents not in ([self.egg_path], [self.egg_path, self.setup_path]):
+ log.warn("Link points to %s: uninstall aborted", contents)
+ return
+ if not self.dry_run:
+ os.unlink(self.egg_link)
+ if not self.dry_run:
+ self.update_pth(self.dist) # remove any .pth link to us
+ if self.distribution.scripts:
+ # XXX should also check for entry point scripts!
+ log.warn("Note: you must uninstall or replace scripts manually!")
+
+ def install_egg_scripts(self, dist):
+ if dist is not self.dist:
+ # Installing a dependency, so fall back to normal behavior
+ return easy_install.install_egg_scripts(self, dist)
+
+ # create wrapper scripts in the script dir, pointing to dist.scripts
+
+ # new-style...
+ self.install_wrapper_scripts(dist)
+
+ # ...and old-style
+ for script_name in self.distribution.scripts or []:
+ script_path = os.path.abspath(convert_path(script_name))
+ script_name = os.path.basename(script_path)
+ with io.open(script_path) as strm:
+ script_text = strm.read()
+ self.install_script(dist, script_name, script_text, script_path)
+
+ def install_wrapper_scripts(self, dist):
+ dist = VersionlessRequirement(dist)
+ return easy_install.install_wrapper_scripts(self, dist)
+
+
+class VersionlessRequirement:
+ """
+ Adapt a pkg_resources.Distribution to simply return the project
+ name as the 'requirement' so that scripts will work across
+ multiple versions.
+
+ >>> from pkg_resources import Distribution
+ >>> dist = Distribution(project_name='foo', version='1.0')
+ >>> str(dist.as_requirement())
+ 'foo==1.0'
+ >>> adapted_dist = VersionlessRequirement(dist)
+ >>> str(adapted_dist.as_requirement())
+ 'foo'
+ """
+
+ def __init__(self, dist):
+ self.__dist = dist
+
+ def __getattr__(self, name):
+ return getattr(self.__dist, name)
+
+ def as_requirement(self):
+ return self.project_name
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/dist_info.py b/venv/lib/python3.11/site-packages/setuptools/command/dist_info.py
new file mode 100644
index 0000000..0685c94
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/dist_info.py
@@ -0,0 +1,142 @@
+"""
+Create a dist_info directory
+As defined in the wheel specification
+"""
+
+import os
+import re
+import shutil
+import sys
+import warnings
+from contextlib import contextmanager
+from inspect import cleandoc
+from pathlib import Path
+
+from distutils.core import Command
+from distutils import log
+from setuptools.extern import packaging
+from setuptools._deprecation_warning import SetuptoolsDeprecationWarning
+
+
+class dist_info(Command):
+
+ description = 'create a .dist-info directory'
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"
+ " DEPRECATED: use --output-dir."),
+ ('output-dir=', 'o', "directory inside of which the .dist-info will be "
+ "created (default: top of the source tree)"),
+ ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+ ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+ ('no-date', 'D', "Don't include date stamp [default]"),
+ ('keep-egg-info', None, "*TRANSITIONAL* will be removed in the future"),
+ ]
+
+ boolean_options = ['tag-date', 'keep-egg-info']
+ negative_opt = {'no-date': 'tag-date'}
+
+ def initialize_options(self):
+ self.egg_base = None
+ self.output_dir = None
+ self.name = None
+ self.dist_info_dir = None
+ self.tag_date = None
+ self.tag_build = None
+ self.keep_egg_info = False
+
+ def finalize_options(self):
+ if self.egg_base:
+ msg = "--egg-base is deprecated for dist_info command. Use --output-dir."
+ warnings.warn(msg, SetuptoolsDeprecationWarning)
+ self.output_dir = self.egg_base or self.output_dir
+
+ dist = self.distribution
+ project_dir = dist.src_root or os.curdir
+ self.output_dir = Path(self.output_dir or project_dir)
+
+ egg_info = self.reinitialize_command("egg_info")
+ egg_info.egg_base = str(self.output_dir)
+
+ if self.tag_date:
+ egg_info.tag_date = self.tag_date
+ else:
+ self.tag_date = egg_info.tag_date
+
+ if self.tag_build:
+ egg_info.tag_build = self.tag_build
+ else:
+ self.tag_build = egg_info.tag_build
+
+ egg_info.finalize_options()
+ self.egg_info = egg_info
+
+ name = _safe(dist.get_name())
+ version = _version(dist.get_version())
+ self.name = f"{name}-{version}"
+ self.dist_info_dir = os.path.join(self.output_dir, f"{self.name}.dist-info")
+
+ @contextmanager
+ def _maybe_bkp_dir(self, dir_path: str, requires_bkp: bool):
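+ # When requested, snapshot ``dir_path`` before the wrapped block runs
+ # and restore it afterwards, so the block may modify or remove it freely.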
+ if requires_bkp:
+ bkp_name = f"{dir_path}.__bkp__"
+ _rm(bkp_name, ignore_errors=True)
+ _copy(dir_path, bkp_name, dirs_exist_ok=True, symlinks=True)
+ try:
+ yield
+ finally:
+ _rm(dir_path, ignore_errors=True)
+ shutil.move(bkp_name, dir_path)
+ else:
+ yield
+
+ def run(self):
+ self.output_dir.mkdir(parents=True, exist_ok=True)
+ self.egg_info.run()
+ egg_info_dir = self.egg_info.egg_info
+ assert os.path.isdir(egg_info_dir), ".egg-info dir should have been created"
+
+ log.info("creating '{}'".format(os.path.abspath(self.dist_info_dir)))
+ bdist_wheel = self.get_finalized_command('bdist_wheel')
+
+ # TODO: if bdist_wheel is merged into setuptools, just add "keep_egg_info" there
+ with self._maybe_bkp_dir(egg_info_dir, self.keep_egg_info):
+ bdist_wheel.egg2dist(egg_info_dir, self.dist_info_dir)
+
+
+def _safe(component: str) -> str:
+ """Escape a component used to form a wheel name according to PEP 491"""
+ return re.sub(r"[^\w\d.]+", "_", component)
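+# Illustration with a hypothetical input: _safe("my pkg-name") == "my_pkg_name";
+# runs of characters other than word characters and dots become a single "_".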
+
+
+def _version(version: str) -> str:
+ """Convert an arbitrary string to a version string."""
+ v = version.replace(' ', '.')
+ try:
+ return str(packaging.version.Version(v)).replace("-", "_")
+ except packaging.version.InvalidVersion:
+ msg = f"""Invalid version: {version!r}.
+ !!\n\n
+ ###################
+ # Invalid version #
+ ###################
+ {version!r} is not valid according to PEP 440.\n
+ Please make sure to specify a valid version for your package.
+ Also note that future releases of setuptools may halt the build process
+ if an invalid version is given.
+ \n\n!!
+ """
+ warnings.warn(cleandoc(msg))
+ return _safe(v).strip("_")
+
+
+def _rm(dir_name, **opts):
+ if os.path.isdir(dir_name):
+ shutil.rmtree(dir_name, **opts)
+
+
+def _copy(src, dst, **opts):
+ if sys.version_info < (3, 8):
+ opts.pop("dirs_exist_ok", None)
+ shutil.copytree(src, dst, **opts)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/easy_install.py b/venv/lib/python3.11/site-packages/setuptools/command/easy_install.py
new file mode 100644
index 0000000..444d3b3
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/easy_install.py
@@ -0,0 +1,2312 @@
+"""
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages. For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
+
+"""
+
+from glob import glob
+from distutils.util import get_platform
+from distutils.util import convert_path, subst_vars
+from distutils.errors import (
+ DistutilsArgError, DistutilsOptionError,
+ DistutilsError, DistutilsPlatformError,
+)
+from distutils import log, dir_util
+from distutils.command.build_scripts import first_line_re
+from distutils.spawn import find_executable
+from distutils.command import install
+import sys
+import os
+import zipimport
+import shutil
+import tempfile
+import zipfile
+import re
+import stat
+import random
+import textwrap
+import warnings
+import site
+import struct
+import contextlib
+import subprocess
+import shlex
+import io
+import configparser
+import sysconfig
+
+
+from sysconfig import get_path
+
+from setuptools import SetuptoolsDeprecationWarning
+
+from setuptools import Command
+from setuptools.sandbox import run_setup
+from setuptools.command import setopt
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import (
+ PackageIndex, parse_requirement_arg, URL_SCHEME,
+)
+from setuptools.command import bdist_egg, egg_info
+from setuptools.wheel import Wheel
+from pkg_resources import (
+ normalize_path, resource_string,
+ get_distribution, find_distributions, Environment, Requirement,
+ Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
+ VersionConflict, DEVELOP_DIST,
+)
+import pkg_resources
+from .._path import ensure_directory
+from ..extern.jaraco.text import yield_lines
+
+
+# Turn on PEP440Warnings
+warnings.filterwarnings("default", category=pkg_resources.PEP440Warning)
+
+__all__ = [
+ 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+ 'get_exe_prefixes',
+]
+
+
+def is_64bit():
+ return struct.calcsize("P") == 8
+
+
+def _to_bytes(s):
+ return s.encode('utf8')
+
+
+def isascii(s):
+ try:
+ s.encode('ascii')
+ return True
+ except UnicodeError:
+ return False
+
+
+def _one_liner(text):
+ return textwrap.dedent(text).strip().replace('\n', '; ')
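+# For illustration: _one_liner("\n    import os\n    x = 1\n")
+# returns "import os; x = 1".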
+
+
+class easy_install(Command):
+ """Manage a download/build/install process"""
+ description = "Find/get/install Python packages"
+ command_consumes_arguments = True
+
+ user_options = [
+ ('prefix=', None, "installation prefix"),
+ ("zip-ok", "z", "install package as a zipfile"),
+ ("multi-version", "m", "make apps have to require() a version"),
+ ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+ ("install-dir=", "d", "install package to DIR"),
+ ("script-dir=", "s", "install scripts to DIR"),
+ ("exclude-scripts", "x", "Don't install scripts"),
+ ("always-copy", "a", "Copy all needed packages to install dir"),
+ ("index-url=", "i", "base URL of Python Package Index"),
+ ("find-links=", "f", "additional URL(s) to search for packages"),
+ ("build-directory=", "b",
+ "download/extract/build in DIR; keep the results"),
+ ('optimize=', 'O',
+ "also compile with optimization: -O1 for \"python -O\", "
+ "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+ ('record=', None,
+ "filename in which to record list of installed files"),
+ ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+ ('site-dirs=', 'S', "list of directories where .pth files work"),
+ ('editable', 'e', "Install specified packages in editable form"),
+ ('no-deps', 'N', "don't install dependencies"),
+ ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+ ('local-snapshots-ok', 'l',
+ "allow building eggs from local checkouts"),
+ ('version', None, "print version information and exit"),
+ ('no-find-links', None,
+ "Don't load find-links defined in packages being installed"),
+ ('user', None, "install in user site-package '%s'" % site.USER_SITE)
+ ]
+ boolean_options = [
+ 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+ 'editable',
+ 'no-deps', 'local-snapshots-ok', 'version',
+ 'user'
+ ]
+
+ negative_opt = {'always-unzip': 'zip-ok'}
+ create_index = PackageIndex
+
+ def initialize_options(self):
+ warnings.warn(
+ "easy_install command is deprecated. "
+ "Use build and pip and other standards-based tools.",
+ EasyInstallDeprecationWarning,
+ )
+
+ # the --user option seems to be an opt-in one,
+ # so the default should be False.
+ self.user = 0
+ self.zip_ok = self.local_snapshots_ok = None
+ self.install_dir = self.script_dir = self.exclude_scripts = None
+ self.index_url = None
+ self.find_links = None
+ self.build_directory = None
+ self.args = None
+ self.optimize = self.record = None
+ self.upgrade = self.always_copy = self.multi_version = None
+ self.editable = self.no_deps = self.allow_hosts = None
+ self.root = self.prefix = self.no_report = None
+ self.version = None
+ self.install_purelib = None # for pure module distributions
+ self.install_platlib = None # non-pure (dists w/ extensions)
+ self.install_headers = None # for C/C++ headers
+ self.install_lib = None # set to either purelib or platlib
+ self.install_scripts = None
+ self.install_data = None
+ self.install_base = None
+ self.install_platbase = None
+ self.install_userbase = site.USER_BASE
+ self.install_usersite = site.USER_SITE
+ self.no_find_links = None
+
+ # Options not specifiable via command line
+ self.package_index = None
+ self.pth_file = self.always_copy_from = None
+ self.site_dirs = None
+ self.installed_projects = {}
+ # Always read easy_install options, even if we are subclassed, or have
+ # an independent instance created. This ensures that defaults will
+ # always come from the standard configuration file(s)' "easy_install"
+ # section, even if this is a "develop" or "install" command, or some
+ # other embedding.
+ self._dry_run = None
+ self.verbose = self.distribution.verbose
+ self.distribution._set_command_options(
+ self, self.distribution.get_option_dict('easy_install')
+ )
+
+ def delete_blockers(self, blockers):
+ extant_blockers = (
+ filename for filename in blockers
+ if os.path.exists(filename) or os.path.islink(filename)
+ )
+ list(map(self._delete_path, extant_blockers))
+
+ def _delete_path(self, path):
+ log.info("Deleting %s", path)
+ if self.dry_run:
+ return
+
+ is_tree = os.path.isdir(path) and not os.path.islink(path)
+ remover = rmtree if is_tree else os.unlink
+ remover(path)
+
+ @staticmethod
+ def _render_version():
+ """
+ Render the Setuptools version and installation details, then exit.
+ """
+ ver = '{}.{}'.format(*sys.version_info)
+ dist = get_distribution('setuptools')
+ tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})'
+ print(tmpl.format(**locals()))
+ raise SystemExit()
+
+ def finalize_options(self): # noqa: C901 # is too complex (25) # FIXME
+ self.version and self._render_version()
+
+ py_version = sys.version.split()[0]
+
+ self.config_vars = dict(sysconfig.get_config_vars())
+
+ self.config_vars.update({
+ 'dist_name': self.distribution.get_name(),
+ 'dist_version': self.distribution.get_version(),
+ 'dist_fullname': self.distribution.get_fullname(),
+ 'py_version': py_version,
+ 'py_version_short': f'{sys.version_info.major}.{sys.version_info.minor}',
+ 'py_version_nodot': f'{sys.version_info.major}{sys.version_info.minor}',
+ 'sys_prefix': self.config_vars['prefix'],
+ 'sys_exec_prefix': self.config_vars['exec_prefix'],
+ # Only python 3.2+ has abiflags
+ 'abiflags': getattr(sys, 'abiflags', ''),
+ 'platlibdir': getattr(sys, 'platlibdir', 'lib'),
+ })
+ with contextlib.suppress(AttributeError):
+ # only for distutils outside stdlib
+ self.config_vars.update({
+ 'implementation_lower': install._get_implementation().lower(),
+ 'implementation': install._get_implementation(),
+ })
+
+ # pypa/distutils#113 Python 3.9 compat
+ self.config_vars.setdefault(
+ 'py_version_nodot_plat',
+ getattr(sys, 'windir', '').replace('.', ''),
+ )
+
+ self.config_vars['userbase'] = self.install_userbase
+ self.config_vars['usersite'] = self.install_usersite
+ if self.user and not site.ENABLE_USER_SITE:
+ log.warn("WARNING: The user site-packages directory is disabled.")
+
+ self._fix_install_dir_for_user_site()
+
+ self.expand_basedirs()
+ self.expand_dirs()
+
+ self._expand(
+ 'install_dir', 'script_dir', 'build_directory',
+ 'site_dirs',
+ )
+ # If a non-default installation directory was specified, default the
+ # script directory to match it.
+ if self.script_dir is None:
+ self.script_dir = self.install_dir
+
+ if self.no_find_links is None:
+ self.no_find_links = False
+
+ # Let install_dir get set by install_lib command, which in turn
+ # gets its info from the install command, and takes into account
+ # --prefix and --home and all that other crud.
+ self.set_undefined_options(
+ 'install_lib', ('install_dir', 'install_dir')
+ )
+ # Likewise, set default script_dir from 'install_scripts.install_dir'
+ self.set_undefined_options(
+ 'install_scripts', ('install_dir', 'script_dir')
+ )
+
+ if self.user and self.install_purelib:
+ self.install_dir = self.install_purelib
+ self.script_dir = self.install_scripts
+ # default --record from the install command
+ self.set_undefined_options('install', ('record', 'record'))
+ self.all_site_dirs = get_site_dirs()
+ self.all_site_dirs.extend(self._process_site_dirs(self.site_dirs))
+
+ if not self.editable:
+ self.check_site_dir()
+ default_index = os.getenv("__EASYINSTALL_INDEX", "https://pypi.org/simple/")
+ # ^ Private API for testing purposes only
+ self.index_url = self.index_url or default_index
+ self.shadow_path = self.all_site_dirs[:]
+ for path_item in self.install_dir, normalize_path(self.script_dir):
+ if path_item not in self.shadow_path:
+ self.shadow_path.insert(0, path_item)
+
+ if self.allow_hosts is not None:
+ hosts = [s.strip() for s in self.allow_hosts.split(',')]
+ else:
+ hosts = ['*']
+ if self.package_index is None:
+ self.package_index = self.create_index(
+ self.index_url, search_path=self.shadow_path, hosts=hosts,
+ )
+ self.local_index = Environment(self.shadow_path + sys.path)
+
+ if self.find_links is not None:
+ if isinstance(self.find_links, str):
+ self.find_links = self.find_links.split()
+ else:
+ self.find_links = []
+ if self.local_snapshots_ok:
+ self.package_index.scan_egg_links(self.shadow_path + sys.path)
+ if not self.no_find_links:
+ self.package_index.add_find_links(self.find_links)
+ self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+ self.optimize = self._validate_optimize(self.optimize)
+
+ if self.editable and not self.build_directory:
+ raise DistutilsArgError(
+ "Must specify a build directory (-b) when using --editable"
+ )
+ if not self.args:
+ raise DistutilsArgError(
+ "No urls, filenames, or requirements specified (see --help)")
+
+ self.outputs = []
+
+ @staticmethod
+ def _process_site_dirs(site_dirs):
+ if site_dirs is None:
+ return
+
+ normpath = map(normalize_path, sys.path)
+ site_dirs = [
+ os.path.expanduser(s.strip()) for s in
+ site_dirs.split(',')
+ ]
+ for d in site_dirs:
+ if not os.path.isdir(d):
+ log.warn("%s (in --site-dirs) does not exist", d)
+ elif normalize_path(d) not in normpath:
+ raise DistutilsOptionError(
+ d + " (in --site-dirs) is not on sys.path"
+ )
+ else:
+ yield normalize_path(d)
+
+ @staticmethod
+ def _validate_optimize(value):
+ try:
+ value = int(value)
+ if value not in range(3):
+ raise ValueError
+ except ValueError as e:
+ raise DistutilsOptionError(
+ "--optimize must be 0, 1, or 2"
+ ) from e
+
+ return value
+
+ def _fix_install_dir_for_user_site(self):
+ """
+ Fix the install_dir if "--user" was used.
+ """
+ if not self.user:
+ return
+
+ self.create_home_path()
+ if self.install_userbase is None:
+ msg = "User base directory is not specified"
+ raise DistutilsPlatformError(msg)
+ self.install_base = self.install_platbase = self.install_userbase
+ scheme_name = f'{os.name}_user'
+ self.select_scheme(scheme_name)
+
+ def _expand_attrs(self, attrs):
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ if os.name == 'posix' or os.name == 'nt':
+ val = os.path.expanduser(val)
+ val = subst_vars(val, self.config_vars)
+ setattr(self, attr, val)
+
+ def expand_basedirs(self):
+ """Calls `os.path.expanduser` on install_base, install_platbase and
+ root."""
+ self._expand_attrs(['install_base', 'install_platbase', 'root'])
+
+ def expand_dirs(self):
+ """Calls `os.path.expanduser` on install dirs."""
+ dirs = [
+ 'install_purelib',
+ 'install_platlib',
+ 'install_lib',
+ 'install_headers',
+ 'install_scripts',
+ 'install_data',
+ ]
+ self._expand_attrs(dirs)
+
+ def run(self, show_deprecation=True):
+ if show_deprecation:
+ self.announce(
+ "WARNING: The easy_install command is deprecated "
+ "and will be removed in a future version.",
+ log.WARN,
+ )
+ if self.verbose != self.distribution.verbose:
+ log.set_verbosity(self.verbose)
+ try:
+ for spec in self.args:
+ self.easy_install(spec, not self.no_deps)
+ if self.record:
+ outputs = self.outputs
+ if self.root: # strip any package prefix
+ root_len = len(self.root)
+ for counter in range(len(outputs)):
+ outputs[counter] = outputs[counter][root_len:]
+ from distutils import file_util
+
+ self.execute(
+ file_util.write_file, (self.record, outputs),
+ "writing list of installed files to '%s'" %
+ self.record
+ )
+ self.warn_deprecated_options()
+ finally:
+ log.set_verbosity(self.distribution.verbose)
+
+ def pseudo_tempname(self):
+ """Return a pseudo-tempname base in the install directory.
+ This code is intentionally naive; if a malicious party can write to
+ the target directory you're already in deep doodoo.
+ """
+ try:
+ pid = os.getpid()
+ except Exception:
+ pid = random.randint(0, sys.maxsize)
+ return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+ def warn_deprecated_options(self):
+ pass
+
+ def check_site_dir(self): # noqa: C901 # is too complex (12) # FIXME
+ """Verify that self.install_dir is .pth-capable dir, if needed"""
+
+ instdir = normalize_path(self.install_dir)
+ pth_file = os.path.join(instdir, 'easy-install.pth')
+
+ if not os.path.exists(instdir):
+ try:
+ os.makedirs(instdir)
+ except (OSError, IOError):
+ self.cant_write_to_target()
+
+ # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+ is_site_dir = instdir in self.all_site_dirs
+
+ if not is_site_dir and not self.multi_version:
+ # No? Then directly test whether it does .pth file processing
+ is_site_dir = self.check_pth_processing()
+ else:
+ # make sure we can write to target dir
+ testfile = self.pseudo_tempname() + '.write-test'
+ test_exists = os.path.exists(testfile)
+ try:
+ if test_exists:
+ os.unlink(testfile)
+ open(testfile, 'w').close()
+ os.unlink(testfile)
+ except (OSError, IOError):
+ self.cant_write_to_target()
+
+ if not is_site_dir and not self.multi_version:
+ # Can't install non-multi to non-site dir with easy_install
+ pythonpath = os.environ.get('PYTHONPATH', '')
+ log.warn(self.__no_default_msg, self.install_dir, pythonpath)
+
+ if is_site_dir:
+ if self.pth_file is None:
+ self.pth_file = PthDistributions(pth_file, self.all_site_dirs)
+ else:
+ self.pth_file = None
+
+ if self.multi_version and not os.path.exists(pth_file):
+ self.pth_file = None # don't create a .pth file
+ self.install_dir = instdir
+
+ __cant_write_msg = textwrap.dedent("""
+ can't create or remove files in install directory
+
+ The following error occurred while trying to add or remove files in the
+ installation directory:
+
+ %s
+
+ The installation directory you specified (via --install-dir, --prefix, or
+ the distutils default setting) was:
+
+ %s
+ """).lstrip() # noqa
+
+ __not_exists_id = textwrap.dedent("""
+ This directory does not currently exist. Please create it and try again, or
+ choose a different installation directory (using the -d or --install-dir
+ option).
+ """).lstrip() # noqa
+
+ __access_msg = textwrap.dedent("""
+ Perhaps your account does not have write access to this directory? If the
+ installation directory is a system-owned directory, you may need to sign in
+ as the administrator or "root" account. If you do not have administrative
+ access to this machine, you may wish to choose a different installation
+ directory, preferably one that is listed in your PYTHONPATH environment
+ variable.
+
+ For information on other options, you may wish to consult the
+ documentation at:
+
+ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html
+
+ Please make the appropriate changes for your system and try again.
+ """).lstrip() # noqa
+
+ def cant_write_to_target(self):
+ msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,)
+
+ if not os.path.exists(self.install_dir):
+ msg += '\n' + self.__not_exists_id
+ else:
+ msg += '\n' + self.__access_msg
+ raise DistutilsError(msg)
+
+ def check_pth_processing(self):
+ """Empirically verify whether .pth files are supported in inst. dir"""
+ instdir = self.install_dir
+ log.info("Checking .pth file support in %s", instdir)
+ pth_file = self.pseudo_tempname() + ".pth"
+ ok_file = pth_file + '.ok'
+ ok_exists = os.path.exists(ok_file)
+ tmpl = _one_liner("""
+ import os
+ f = open({ok_file!r}, 'w')
+ f.write('OK')
+ f.close()
+ """) + '\n'
+ try:
+ if ok_exists:
+ os.unlink(ok_file)
+ dirname = os.path.dirname(ok_file)
+ os.makedirs(dirname, exist_ok=True)
+ f = open(pth_file, 'w')
+ except (OSError, IOError):
+ self.cant_write_to_target()
+ else:
+ try:
+ f.write(tmpl.format(**locals()))
+ f.close()
+ f = None
+ executable = sys.executable
+ if os.name == 'nt':
+ dirname, basename = os.path.split(executable)
+ alt = os.path.join(dirname, 'pythonw.exe')
+ use_alt = (
+ basename.lower() == 'python.exe' and
+ os.path.exists(alt)
+ )
+ if use_alt:
+ # use pythonw.exe to avoid opening a console window
+ executable = alt
+
+ from distutils.spawn import spawn
+
+ spawn([executable, '-E', '-c', 'pass'], 0)
+
+ if os.path.exists(ok_file):
+ log.info(
+ "TEST PASSED: %s appears to support .pth files",
+ instdir
+ )
+ return True
+ finally:
+ if f:
+ f.close()
+ if os.path.exists(ok_file):
+ os.unlink(ok_file)
+ if os.path.exists(pth_file):
+ os.unlink(pth_file)
+ if not self.multi_version:
+ log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+ return False
+
+ def install_egg_scripts(self, dist):
+ """Write all the scripts for `dist`, unless scripts are excluded"""
+ if not self.exclude_scripts and dist.metadata_isdir('scripts'):
+ for script_name in dist.metadata_listdir('scripts'):
+ if dist.metadata_isdir('scripts/' + script_name):
+ # The "script" is a directory, likely a Python 3
+ # __pycache__ directory, so skip it.
+ continue
+ self.install_script(
+ dist, script_name,
+ dist.get_metadata('scripts/' + script_name)
+ )
+ self.install_wrapper_scripts(dist)
+
+ def add_output(self, path):
+ if os.path.isdir(path):
+ for base, dirs, files in os.walk(path):
+ for filename in files:
+ self.outputs.append(os.path.join(base, filename))
+ else:
+ self.outputs.append(path)
+
+ def not_editable(self, spec):
+ if self.editable:
+ raise DistutilsArgError(
+ "Invalid argument %r: you can't use filenames or URLs "
+ "with --editable (except via the --find-links option)."
+ % (spec,)
+ )
+
+ def check_editable(self, spec):
+ if not self.editable:
+ return
+
+ if os.path.exists(os.path.join(self.build_directory, spec.key)):
+ raise DistutilsArgError(
+ "%r already exists in %s; can't do a checkout there" %
+ (spec.key, self.build_directory)
+ )
+
+ @contextlib.contextmanager
+ def _tmpdir(self):
+ tmpdir = tempfile.mkdtemp(prefix=u"easy_install-")
+ try:
+ # cast to str as workaround for #709 and #710 and #712
+ yield str(tmpdir)
+ finally:
+ os.path.exists(tmpdir) and rmtree(tmpdir)
+
+ def easy_install(self, spec, deps=False):
+ with self._tmpdir() as tmpdir:
+ if not isinstance(spec, Requirement):
+ if URL_SCHEME(spec):
+ # It's a url, download it to tmpdir and process
+ self.not_editable(spec)
+ dl = self.package_index.download(spec, tmpdir)
+ return self.install_item(None, dl, tmpdir, deps, True)
+
+ elif os.path.exists(spec):
+ # Existing file or directory, just process it directly
+ self.not_editable(spec)
+ return self.install_item(None, spec, tmpdir, deps, True)
+ else:
+ spec = parse_requirement_arg(spec)
+
+ self.check_editable(spec)
+ dist = self.package_index.fetch_distribution(
+ spec, tmpdir, self.upgrade, self.editable,
+ not self.always_copy, self.local_index
+ )
+ if dist is None:
+ msg = "Could not find suitable distribution for %r" % spec
+ if self.always_copy:
+ msg += " (--always-copy skips system and development eggs)"
+ raise DistutilsError(msg)
+ elif dist.precedence == DEVELOP_DIST:
+ # .egg-info dists don't need installing, just process deps
+ self.process_distribution(spec, dist, deps, "Using")
+ return dist
+ else:
+ return self.install_item(spec, dist.location, tmpdir, deps)
+
+ def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+
+ # Installation is also needed if the file is in tmpdir or is not an egg
+ install_needed = install_needed or self.always_copy
+ install_needed = install_needed or os.path.dirname(download) == tmpdir
+ install_needed = install_needed or not download.endswith('.egg')
+ install_needed = install_needed or (
+ self.always_copy_from is not None and
+ os.path.dirname(normalize_path(download)) ==
+ normalize_path(self.always_copy_from)
+ )
+
+ if spec and not install_needed:
+ # at this point, we know it's a local .egg, we just don't know if
+ # it's already installed.
+ for dist in self.local_index[spec.project_name]:
+ if dist.location == download:
+ break
+ else:
+ install_needed = True # it's not in the local index
+
+ log.info("Processing %s", os.path.basename(download))
+
+ if install_needed:
+ dists = self.install_eggs(spec, download, tmpdir)
+ for dist in dists:
+ self.process_distribution(spec, dist, deps)
+ else:
+ dists = [self.egg_distribution(download)]
+ self.process_distribution(spec, dists[0], deps, "Using")
+
+ if spec is not None:
+ for dist in dists:
+ if dist in spec:
+ return dist
+
+ def select_scheme(self, name):
+ try:
+ install._select_scheme(self, name)
+ except AttributeError:
+ # stdlib distutils
+ install.install.select_scheme(self, name.replace('posix', 'unix'))
+
+ # FIXME: 'easy_install.process_distribution' is too complex (12)
+ def process_distribution( # noqa: C901
+ self, requirement, dist, deps=True, *info,
+ ):
+ self.update_pth(dist)
+ self.package_index.add(dist)
+ if dist in self.local_index[dist.key]:
+ self.local_index.remove(dist)
+ self.local_index.add(dist)
+ self.install_egg_scripts(dist)
+ self.installed_projects[dist.key] = dist
+ log.info(self.installation_report(requirement, dist, *info))
+ if (dist.has_metadata('dependency_links.txt') and
+ not self.no_find_links):
+ self.package_index.add_find_links(
+ dist.get_metadata_lines('dependency_links.txt')
+ )
+ if not deps and not self.always_copy:
+ return
+ elif requirement is not None and dist.key != requirement.key:
+ log.warn("Skipping dependencies for %s", dist)
+ return # XXX this is not the distribution we were looking for
+ elif requirement is None or dist not in requirement:
+ # if we wound up with a different version, resolve what we've got
+ distreq = dist.as_requirement()
+ requirement = Requirement(str(distreq))
+ log.info("Processing dependencies for %s", requirement)
+ try:
+ distros = WorkingSet([]).resolve(
+ [requirement], self.local_index, self.easy_install
+ )
+ except DistributionNotFound as e:
+ raise DistutilsError(str(e)) from e
+ except VersionConflict as e:
+ raise DistutilsError(e.report()) from e
+ if self.always_copy or self.always_copy_from:
+ # Force all the relevant distros to be copied or activated
+ for dist in distros:
+ if dist.key not in self.installed_projects:
+ self.easy_install(dist.as_requirement())
+ log.info("Finished processing dependencies for %s", requirement)
+
+ def should_unzip(self, dist):
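+ # An explicit --zip-ok / --always-unzip choice wins; otherwise unzip
+ # unless the egg declares itself zip-safe (a 'not-zip-safe' flag, or the
+ # absence of any flag, forces unzipping).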
+ if self.zip_ok is not None:
+ return not self.zip_ok
+ if dist.has_metadata('not-zip-safe'):
+ return True
+ if not dist.has_metadata('zip-safe'):
+ return True
+ return False
+
+ def maybe_move(self, spec, dist_filename, setup_base):
+ dst = os.path.join(self.build_directory, spec.key)
+ if os.path.exists(dst):
+ msg = (
+ "%r already exists in %s; build directory %s will not be kept"
+ )
+ log.warn(msg, spec.key, self.build_directory, setup_base)
+ return setup_base
+ if os.path.isdir(dist_filename):
+ setup_base = dist_filename
+ else:
+ if os.path.dirname(dist_filename) == setup_base:
+ os.unlink(dist_filename) # get it out of the tmp dir
+ contents = os.listdir(setup_base)
+ if len(contents) == 1:
+ dist_filename = os.path.join(setup_base, contents[0])
+ if os.path.isdir(dist_filename):
+ # if the only thing there is a directory, move it instead
+ setup_base = dist_filename
+ ensure_directory(dst)
+ shutil.move(setup_base, dst)
+ return dst
+
+ def install_wrapper_scripts(self, dist):
+ if self.exclude_scripts:
+ return
+ for args in ScriptWriter.best().get_args(dist):
+ self.write_script(*args)
+
+ def install_script(self, dist, script_name, script_text, dev_path=None):
+ """Generate a legacy script wrapper and install it"""
+ spec = str(dist.as_requirement())
+ is_script = is_python_script(script_text, script_name)
+
+ if is_script:
+ body = self._load_template(dev_path) % locals()
+ script_text = ScriptWriter.get_header(script_text) + body
+ self.write_script(script_name, _to_bytes(script_text), 'b')
+
+ @staticmethod
+ def _load_template(dev_path):
+ """
+ There are a couple of template scripts in the package. This
+ function loads one of them and prepares it for use.
+ """
+ # See https://github.com/pypa/setuptools/issues/134 for info
+ # on script file naming and downstream issues with SVR4
+ name = 'script.tmpl'
+ if dev_path:
+ name = name.replace('.tmpl', ' (dev).tmpl')
+
+ raw_bytes = resource_string('setuptools', name)
+ return raw_bytes.decode('utf-8')
+
+ def write_script(self, script_name, contents, mode="t", blockers=()):
+ """Write an executable file to the scripts directory"""
+ self.delete_blockers( # clean up old .py/.pyw w/o a script
+ [os.path.join(self.script_dir, x) for x in blockers]
+ )
+ log.info("Installing %s script to %s", script_name, self.script_dir)
+ target = os.path.join(self.script_dir, script_name)
+ self.add_output(target)
+
+ if self.dry_run:
+ return
+
+ mask = current_umask()
+ ensure_directory(target)
+ if os.path.exists(target):
+ os.unlink(target)
+ with open(target, "w" + mode) as f:
+ f.write(contents)
+ chmod(target, 0o777 - mask)
+
+ def install_eggs(self, spec, dist_filename, tmpdir):
+ # .egg dirs or files are already built, so just return them
+ installer_map = {
+ '.egg': self.install_egg,
+ '.exe': self.install_exe,
+ '.whl': self.install_wheel,
+ }
+ try:
+ install_dist = installer_map[
+ dist_filename.lower()[-4:]
+ ]
+ except KeyError:
+ pass
+ else:
+ return [install_dist(dist_filename, tmpdir)]
+
+ # Anything else, try to extract and build
+ setup_base = tmpdir
+ if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+ unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+ elif os.path.isdir(dist_filename):
+ setup_base = os.path.abspath(dist_filename)
+
+ if (setup_base.startswith(tmpdir) # something we downloaded
+ and self.build_directory and spec is not None):
+ setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+ # Find the setup.py file
+ setup_script = os.path.join(setup_base, 'setup.py')
+
+ if not os.path.exists(setup_script):
+ setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+ if not setups:
+ raise DistutilsError(
+ "Couldn't find a setup script in %s" %
+ os.path.abspath(dist_filename)
+ )
+ if len(setups) > 1:
+ raise DistutilsError(
+ "Multiple setup scripts in %s" %
+ os.path.abspath(dist_filename)
+ )
+ setup_script = setups[0]
+
+ # Now run it, and return the result
+ if self.editable:
+ log.info(self.report_editable(spec, setup_script))
+ return []
+ else:
+ return self.build_and_install(setup_script, setup_base)
+
+ def egg_distribution(self, egg_path):
+ if os.path.isdir(egg_path):
+ metadata = PathMetadata(egg_path, os.path.join(egg_path,
+ 'EGG-INFO'))
+ else:
+ metadata = EggMetadata(zipimport.zipimporter(egg_path))
+ return Distribution.from_filename(egg_path, metadata=metadata)
+
+ # FIXME: 'easy_install.install_egg' is too complex (11)
+ def install_egg(self, egg_path, tmpdir): # noqa: C901
+ destination = os.path.join(
+ self.install_dir,
+ os.path.basename(egg_path),
+ )
+ destination = os.path.abspath(destination)
+ if not self.dry_run:
+ ensure_directory(destination)
+
+ dist = self.egg_distribution(egg_path)
+ if not (
+ os.path.exists(destination) and os.path.samefile(egg_path, destination)
+ ):
+ if os.path.isdir(destination) and not os.path.islink(destination):
+ dir_util.remove_tree(destination, dry_run=self.dry_run)
+ elif os.path.exists(destination):
+ self.execute(
+ os.unlink,
+ (destination,),
+ "Removing " + destination,
+ )
+ try:
+ new_dist_is_zipped = False
+ if os.path.isdir(egg_path):
+ if egg_path.startswith(tmpdir):
+ f, m = shutil.move, "Moving"
+ else:
+ f, m = shutil.copytree, "Copying"
+ elif self.should_unzip(dist):
+ self.mkpath(destination)
+ f, m = self.unpack_and_compile, "Extracting"
+ else:
+ new_dist_is_zipped = True
+ if egg_path.startswith(tmpdir):
+ f, m = shutil.move, "Moving"
+ else:
+ f, m = shutil.copy2, "Copying"
+ self.execute(
+ f,
+ (egg_path, destination),
+ (m + " %s to %s") % (
+ os.path.basename(egg_path),
+ os.path.dirname(destination)
+ ),
+ )
+ update_dist_caches(
+ destination,
+ fix_zipimporter_caches=new_dist_is_zipped,
+ )
+ except Exception:
+ update_dist_caches(destination, fix_zipimporter_caches=False)
+ raise
+
+ self.add_output(destination)
+ return self.egg_distribution(destination)
+
+ def install_exe(self, dist_filename, tmpdir):
+ # See if it's valid, get data
+ cfg = extract_wininst_cfg(dist_filename)
+ if cfg is None:
+ raise DistutilsError(
+ "%s is not a valid distutils Windows .exe" % dist_filename
+ )
+ # Create a dummy distribution object until we build the real distro
+ dist = Distribution(
+ None,
+ project_name=cfg.get('metadata', 'name'),
+ version=cfg.get('metadata', 'version'), platform=get_platform(),
+ )
+
+ # Convert the .exe to an unpacked egg
+ egg_path = os.path.join(tmpdir, dist.egg_name() + '.egg')
+ dist.location = egg_path
+ egg_tmp = egg_path + '.tmp'
+ _egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+ pkg_inf = os.path.join(_egg_info, 'PKG-INFO')
+ ensure_directory(pkg_inf) # make sure EGG-INFO dir exists
+ dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX
+ self.exe_to_egg(dist_filename, egg_tmp)
+
+ # Write EGG-INFO/PKG-INFO
+ if not os.path.exists(pkg_inf):
+ f = open(pkg_inf, 'w')
+ f.write('Metadata-Version: 1.0\n')
+ for k, v in cfg.items('metadata'):
+ if k != 'target_version':
+ f.write('%s: %s\n' % (k.replace('_', '-').title(), v))
+ f.close()
+ script_dir = os.path.join(_egg_info, 'scripts')
+ # delete entry-point scripts to avoid duping
+ self.delete_blockers([
+ os.path.join(script_dir, args[0])
+ for args in ScriptWriter.get_args(dist)
+ ])
+ # Build .egg file from tmpdir
+ bdist_egg.make_zipfile(
+ egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run,
+ )
+ # install the .egg
+ return self.install_egg(egg_path, tmpdir)
+
+ # FIXME: 'easy_install.exe_to_egg' is too complex (12)
+ def exe_to_egg(self, dist_filename, egg_tmp): # noqa: C901
+ """Extract a bdist_wininst to the directories an egg would use"""
+ # Check for .pth file and set up prefix translations
+ prefixes = get_exe_prefixes(dist_filename)
+ to_compile = []
+ native_libs = []
+ top_level = {}
+
+ def process(src, dst):
+ s = src.lower()
+ for old, new in prefixes:
+ if s.startswith(old):
+ src = new + src[len(old):]
+ parts = src.split('/')
+ dst = os.path.join(egg_tmp, *parts)
+ dl = dst.lower()
+ if dl.endswith('.pyd') or dl.endswith('.dll'):
+ parts[-1] = bdist_egg.strip_module(parts[-1])
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ native_libs.append(src)
+ elif dl.endswith('.py') and old != 'SCRIPTS/':
+ top_level[os.path.splitext(parts[0])[0]] = 1
+ to_compile.append(dst)
+ return dst
+ if not src.endswith('.pth'):
+ log.warn("WARNING: can't process %s", src)
+ return None
+
+ # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+ unpack_archive(dist_filename, egg_tmp, process)
+ stubs = []
+ for res in native_libs:
+ if res.lower().endswith('.pyd'): # create stubs for .pyd's
+ parts = res.split('/')
+ resource = parts[-1]
+ parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py'
+ pyfile = os.path.join(egg_tmp, *parts)
+ to_compile.append(pyfile)
+ stubs.append(pyfile)
+ bdist_egg.write_stub(resource, pyfile)
+ self.byte_compile(to_compile) # compile .py's
+ bdist_egg.write_safety_flag(
+ os.path.join(egg_tmp, 'EGG-INFO'),
+ bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag
+
+ for name in 'top_level', 'native_libs':
+ if locals()[name]:
+ txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt')
+ if not os.path.exists(txt):
+ f = open(txt, 'w')
+ f.write('\n'.join(locals()[name]) + '\n')
+ f.close()
+
+ def install_wheel(self, wheel_path, tmpdir):
+ wheel = Wheel(wheel_path)
+ assert wheel.is_compatible()
+ destination = os.path.join(self.install_dir, wheel.egg_name())
+ destination = os.path.abspath(destination)
+ if not self.dry_run:
+ ensure_directory(destination)
+ if os.path.isdir(destination) and not os.path.islink(destination):
+ dir_util.remove_tree(destination, dry_run=self.dry_run)
+ elif os.path.exists(destination):
+ self.execute(
+ os.unlink,
+ (destination,),
+ "Removing " + destination,
+ )
+ try:
+ self.execute(
+ wheel.install_as_egg,
+ (destination,),
+ ("Installing %s to %s") % (
+ os.path.basename(wheel_path),
+ os.path.dirname(destination)
+ ),
+ )
+ finally:
+ update_dist_caches(destination, fix_zipimporter_caches=False)
+ self.add_output(destination)
+ return self.egg_distribution(destination)
+
+ __mv_warning = textwrap.dedent("""
+ Because this distribution was installed --multi-version, before you can
+ import modules from this package in an application, you will need to
+ 'import pkg_resources' and then use a 'require()' call similar to one of
+ these examples, in order to select the desired version:
+
+ pkg_resources.require("%(name)s") # latest installed version
+ pkg_resources.require("%(name)s==%(version)s") # this exact version
+ pkg_resources.require("%(name)s>=%(version)s") # this version or higher
+ """).lstrip() # noqa
+
+ __id_warning = textwrap.dedent("""
+ Note also that the installation directory must be on sys.path at runtime for
+ this to work. (e.g. by being the application's script directory, by being on
+ PYTHONPATH, or by being added to sys.path by your code.)
+ """) # noqa
+
+ def installation_report(self, req, dist, what="Installed"):
+ """Helpful installation message for display to package users"""
+ msg = "\n%(what)s %(eggloc)s%(extras)s"
+ if self.multi_version and not self.no_report:
+ msg += '\n' + self.__mv_warning
+ if self.install_dir not in map(normalize_path, sys.path):
+ msg += '\n' + self.__id_warning
+
+ eggloc = dist.location
+ name = dist.project_name
+ version = dist.version
+ extras = '' # TODO: self.report_extras(req, dist)
+ return msg % locals()
+
+ __editable_msg = textwrap.dedent("""
+ Extracted editable version of %(spec)s to %(dirname)s
+
+ If it uses setuptools in its setup script, you can activate it in
+ "development" mode by going to that directory and running::
+
+ %(python)s setup.py develop
+
+ See the setuptools documentation for the "develop" command for more info.
+ """).lstrip() # noqa
+
+ def report_editable(self, spec, setup_script):
+ dirname = os.path.dirname(setup_script)
+ python = sys.executable
+ return '\n' + self.__editable_msg % locals()
+
+ def run_setup(self, setup_script, setup_base, args):
+ sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+ sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+ args = list(args)
+ if self.verbose > 2:
+ v = 'v' * (self.verbose - 1)
+ args.insert(0, '-' + v)
+ elif self.verbose < 2:
+ args.insert(0, '-q')
+ if self.dry_run:
+ args.insert(0, '-n')
+ log.info(
+ "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args)
+ )
+ try:
+ run_setup(setup_script, args)
+ except SystemExit as v:
+ raise DistutilsError(
+ "Setup script exited with %s" % (v.args[0],)
+ ) from v
+
+ def build_and_install(self, setup_script, setup_base):
+ args = ['bdist_egg', '--dist-dir']
+
+ dist_dir = tempfile.mkdtemp(
+ prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+ )
+ try:
+ self._set_fetcher_options(os.path.dirname(setup_script))
+ args.append(dist_dir)
+
+ self.run_setup(setup_script, setup_base, args)
+ all_eggs = Environment([dist_dir])
+ eggs = []
+ for key in all_eggs:
+ for dist in all_eggs[key]:
+ eggs.append(self.install_egg(dist.location, setup_base))
+ if not eggs and not self.dry_run:
+ log.warn("No eggs found in %s (setup script problem?)",
+ dist_dir)
+ return eggs
+ finally:
+ rmtree(dist_dir)
+ log.set_verbosity(self.verbose) # restore our log verbosity
+
+ def _set_fetcher_options(self, base):
+ """
+ When easy_install is about to run bdist_egg on a source dist, that
+ source dist might have 'setup_requires' directives, requiring
+ additional fetching. Ensure the fetcher options given to easy_install
+ are available to that command as well.
+ """
+ # find the fetch options from easy_install and write them out
+ # to the setup.cfg file.
+ ei_opts = self.distribution.get_option_dict('easy_install').copy()
+ fetch_directives = (
+ 'find_links', 'site_dirs', 'index_url', 'optimize', 'allow_hosts',
+ )
+ fetch_options = {}
+ for key, val in ei_opts.items():
+ if key not in fetch_directives:
+ continue
+ fetch_options[key] = val[1]
+ # create a settings dictionary suitable for `edit_config`
+ settings = dict(easy_install=fetch_options)
+ cfg_filename = os.path.join(base, 'setup.cfg')
+ setopt.edit_config(cfg_filename, settings)
+
+ def update_pth(self, dist): # noqa: C901 # is too complex (11) # FIXME
+ if self.pth_file is None:
+ return
+
+ for d in self.pth_file[dist.key]: # drop old entries
+ if not self.multi_version and d.location == dist.location:
+ continue
+
+ log.info("Removing %s from easy-install.pth file", d)
+ self.pth_file.remove(d)
+ if d.location in self.shadow_path:
+ self.shadow_path.remove(d.location)
+
+ if not self.multi_version:
+ if dist.location in self.pth_file.paths:
+ log.info(
+ "%s is already the active version in easy-install.pth",
+ dist,
+ )
+ else:
+ log.info("Adding %s to easy-install.pth file", dist)
+ self.pth_file.add(dist) # add new entry
+ if dist.location not in self.shadow_path:
+ self.shadow_path.append(dist.location)
+
+ if self.dry_run:
+ return
+
+ self.pth_file.save()
+
+ if dist.key != 'setuptools':
+ return
+
+ # Ensure that setuptools itself never becomes unavailable!
+ # XXX should this check for latest version?
+ filename = os.path.join(self.install_dir, 'setuptools.pth')
+ if os.path.islink(filename):
+ os.unlink(filename)
+ with open(filename, 'wt') as f:
+ f.write(self.pth_file.make_relative(dist.location) + '\n')
+
+ def unpack_progress(self, src, dst):
+ # Progress filter for unpacking
+ log.debug("Unpacking %s to %s", src, dst)
+ return dst # only unpack-and-compile skips files for dry run
+
+ def unpack_and_compile(self, egg_path, destination):
+ to_compile = []
+ to_chmod = []
+
+ def pf(src, dst):
+ if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+ to_compile.append(dst)
+ elif dst.endswith('.dll') or dst.endswith('.so'):
+ to_chmod.append(dst)
+ self.unpack_progress(src, dst)
+ return not self.dry_run and dst or None
+
+ unpack_archive(egg_path, destination, pf)
+ self.byte_compile(to_compile)
+ if not self.dry_run:
+ for f in to_chmod:
+ mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755
+ chmod(f, mode)
+
+ def byte_compile(self, to_compile):
+ if sys.dont_write_bytecode:
+ return
+
+ from distutils.util import byte_compile
+
+ try:
+ # try to make the byte compile messages quieter
+ log.set_verbosity(self.verbose - 1)
+
+ byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+ if self.optimize:
+ byte_compile(
+ to_compile, optimize=self.optimize, force=1,
+ dry_run=self.dry_run,
+ )
+ finally:
+ log.set_verbosity(self.verbose) # restore original verbosity
+
+ __no_default_msg = textwrap.dedent("""
+ bad install directory or PYTHONPATH
+
+ You are attempting to install a package to a directory that is not
+ on PYTHONPATH and which Python does not read ".pth" files from. The
+ installation directory you specified (via --install-dir, --prefix, or
+ the distutils default setting) was:
+
+ %s
+
+ and your PYTHONPATH environment variable currently contains:
+
+ %r
+
+ Here are some of your options for correcting the problem:
+
+ * You can choose a different installation directory, i.e., one that is
+ on PYTHONPATH or supports .pth files
+
+ * You can add the installation directory to the PYTHONPATH environment
+ variable. (It must then also be on PYTHONPATH whenever you run
+ Python and want to use the package(s) you are installing.)
+
+ * You can set up the installation directory to support ".pth" files by
+ using one of the approaches described here:
+
+ https://setuptools.pypa.io/en/latest/deprecated/easy_install.html#custom-installation-locations
+
+
+ Please make the appropriate changes for your system and try again.
+ """).strip()
+
+ def create_home_path(self):
+ """Create directories under ~."""
+ if not self.user:
+ return
+ home = convert_path(os.path.expanduser("~"))
+ for path in only_strs(self.config_vars.values()):
+ if path.startswith(home) and not os.path.isdir(path):
+ self.debug_print("os.makedirs('%s', 0o700)" % path)
+ os.makedirs(path, 0o700)
+
+ INSTALL_SCHEMES = dict(
+ posix=dict(
+ install_dir='$base/lib/python$py_version_short/site-packages',
+ script_dir='$base/bin',
+ ),
+ )
+
+ DEFAULT_SCHEME = dict(
+ install_dir='$base/Lib/site-packages',
+ script_dir='$base/Scripts',
+ )
+
+ def _expand(self, *attrs):
+ config_vars = self.get_finalized_command('install').config_vars
+
+ if self.prefix:
+ # Set default install_dir/scripts from --prefix
+ config_vars = dict(config_vars)
+ config_vars['base'] = self.prefix
+ scheme = self.INSTALL_SCHEMES.get(os.name, self.DEFAULT_SCHEME)
+ for attr, val in scheme.items():
+ if getattr(self, attr, None) is None:
+ setattr(self, attr, val)
+
+ from distutils.util import subst_vars
+
+ for attr in attrs:
+ val = getattr(self, attr)
+ if val is not None:
+ val = subst_vars(val, config_vars)
+ if os.name == 'posix':
+ val = os.path.expanduser(val)
+ setattr(self, attr, val)
+
+
+def _pythonpath():
+ items = os.environ.get('PYTHONPATH', '').split(os.pathsep)
+ return filter(None, items)
+
+
+def get_site_dirs():
+ """
+ Return a list of 'site' dirs
+ """
+
+ sitedirs = []
+
+ # start with PYTHONPATH
+ sitedirs.extend(_pythonpath())
+
+ prefixes = [sys.prefix]
+ if sys.exec_prefix != sys.prefix:
+ prefixes.append(sys.exec_prefix)
+ for prefix in prefixes:
+ if not prefix:
+ continue
+
+ if sys.platform in ('os2emx', 'riscos'):
+ sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+ elif os.sep == '/':
+ sitedirs.extend([
+ os.path.join(
+ prefix,
+ "lib",
+ "python{}.{}".format(*sys.version_info),
+ "site-packages",
+ ),
+ os.path.join(prefix, "lib", "site-python"),
+ ])
+ else:
+ sitedirs.extend([
+ prefix,
+ os.path.join(prefix, "lib", "site-packages"),
+ ])
+ if sys.platform != 'darwin':
+ continue
+
+ # for framework builds *only* we add the standard Apple
+ # locations. Currently only per-user, but /Library and
+ # /Network/Library could be added too
+ if 'Python.framework' not in prefix:
+ continue
+
+ home = os.environ.get('HOME')
+ if not home:
+ continue
+
+ home_sp = os.path.join(
+ home,
+ 'Library',
+ 'Python',
+ '{}.{}'.format(*sys.version_info),
+ 'site-packages',
+ )
+ sitedirs.append(home_sp)
+ lib_paths = get_path('purelib'), get_path('platlib')
+
+ sitedirs.extend(s for s in lib_paths if s not in sitedirs)
+
+ if site.ENABLE_USER_SITE:
+ sitedirs.append(site.USER_SITE)
+
+ with contextlib.suppress(AttributeError):
+ sitedirs.extend(site.getsitepackages())
+
+ sitedirs = list(map(normalize_path, sitedirs))
+
+ return sitedirs
+
+
+def expand_paths(inputs): # noqa: C901 # is too complex (11) # FIXME
+ """Yield sys.path directories that might contain "old-style" packages"""
+
+ seen = {}
+
+ for dirname in inputs:
+ dirname = normalize_path(dirname)
+ if dirname in seen:
+ continue
+
+ seen[dirname] = 1
+ if not os.path.isdir(dirname):
+ continue
+
+ files = os.listdir(dirname)
+ yield dirname, files
+
+ for name in files:
+ if not name.endswith('.pth'):
+ # We only care about the .pth files
+ continue
+ if name in ('easy-install.pth', 'setuptools.pth'):
+ # Ignore .pth files that we control
+ continue
+
+ # Read the .pth file
+ f = open(os.path.join(dirname, name))
+ lines = list(yield_lines(f))
+ f.close()
+
+ # Yield existing non-dupe, non-import directory lines from it
+ for line in lines:
+ if line.startswith("import"):
+ continue
+
+ line = normalize_path(line.rstrip())
+ if line in seen:
+ continue
+
+ seen[line] = 1
+ if not os.path.isdir(line):
+ continue
+
+ yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+ """Extract configuration data from a bdist_wininst .exe
+
+ Returns a configparser.RawConfigParser, or None
+ """
+ f = open(dist_filename, 'rb')
+ try:
+ endrec = zipfile._EndRecData(f)
+ if endrec is None:
+ return None
+
+ prepended = (endrec[9] - endrec[5]) - endrec[6]
+ if prepended < 12: # no wininst data here
+ return None
+ f.seek(prepended - 12)
+
+ tag, cfglen, bmlen = struct.unpack("<iii", f.read(12))
+ if tag not in (0x1234567A, 0x1234567B):
+ return None # not a valid tag
+
+ f.seek(prepended - (12 + cfglen))
+ init = {'version': '', 'target_version': ''}
+ cfg = configparser.RawConfigParser(init)
+ try:
+ part = f.read(cfglen)
+ # Read up to the first null byte.
+ config = part.split(b'\0', 1)[0]
+ # Now the config is in bytes, but for RawConfigParser, it should
+ # be text, so decode it.
+ config = config.decode(sys.getfilesystemencoding())
+ cfg.read_file(io.StringIO(config))
+ except configparser.Error:
+ return None
+ if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+ return None
+ return cfg
+
+ finally:
+ f.close()
+
+
+def get_exe_prefixes(exe_filename):
+ """Get exe->egg path translations for a given .exe file"""
+
+ prefixes = [
+ ('PURELIB/', ''),
+ ('PLATLIB/pywin32_system32', ''),
+ ('PLATLIB/', ''),
+ ('SCRIPTS/', 'EGG-INFO/scripts/'),
+ ('DATA/lib/site-packages', ''),
+ ]
+ z = zipfile.ZipFile(exe_filename)
+ try:
+ for info in z.infolist():
+ name = info.filename
+ parts = name.split('/')
+ if len(parts) == 3 and parts[2] == 'PKG-INFO':
+ if parts[1].endswith('.egg-info'):
+ prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/'))
+ break
+ if len(parts) != 2 or not name.endswith('.pth'):
+ continue
+ if name.endswith('-nspkg.pth'):
+ continue
+ if parts[0].upper() in ('PURELIB', 'PLATLIB'):
+ contents = z.read(name).decode()
+ for pth in yield_lines(contents):
+ pth = pth.strip().replace('\\', '/')
+ if not pth.startswith('import'):
+ prefixes.append((('%s/%s/' % (parts[0], pth)), ''))
+ finally:
+ z.close()
+ prefixes = [(x.lower(), y) for x, y in prefixes]
+ prefixes.sort()
+ prefixes.reverse()
+ return prefixes
+
+
+class PthDistributions(Environment):
+ """A .pth file with Distribution paths in it"""
+
+ dirty = False
+
+ def __init__(self, filename, sitedirs=()):
+ self.filename = filename
+ self.sitedirs = list(map(normalize_path, sitedirs))
+ self.basedir = normalize_path(os.path.dirname(self.filename))
+ self._load()
+ super().__init__([], None, None)
+ for path in yield_lines(self.paths):
+ list(map(self.add, find_distributions(path, True)))
+
+ def _load(self):
+ self.paths = []
+ saw_import = False
+ seen = dict.fromkeys(self.sitedirs)
+ if os.path.isfile(self.filename):
+ f = open(self.filename, 'rt')
+ for line in f:
+ if line.startswith('import'):
+ saw_import = True
+ continue
+ path = line.rstrip()
+ self.paths.append(path)
+ if not path.strip() or path.strip().startswith('#'):
+ continue
+ # skip non-existent paths, in case somebody deleted a package
+ # manually, and duplicate paths as well
+ path = self.paths[-1] = normalize_path(
+ os.path.join(self.basedir, path)
+ )
+ if not os.path.exists(path) or path in seen:
+ self.paths.pop() # skip it
+ self.dirty = True # we cleaned up, so we're dirty now :)
+ continue
+ seen[path] = 1
+ f.close()
+
+ if self.paths and not saw_import:
+ self.dirty = True # ensure anything we touch has import wrappers
+ while self.paths and not self.paths[-1].strip():
+ self.paths.pop()
+
+ def save(self):
+ """Write changed .pth file back to disk"""
+ if not self.dirty:
+ return
+
+ rel_paths = list(map(self.make_relative, self.paths))
+ if rel_paths:
+ log.debug("Saving %s", self.filename)
+ lines = self._wrap_lines(rel_paths)
+ data = '\n'.join(lines) + '\n'
+
+ if os.path.islink(self.filename):
+ os.unlink(self.filename)
+ with open(self.filename, 'wt') as f:
+ f.write(data)
+
+ elif os.path.exists(self.filename):
+ log.debug("Deleting empty %s", self.filename)
+ os.unlink(self.filename)
+
+ self.dirty = False
+
+ @staticmethod
+ def _wrap_lines(lines):
+ return lines
+
+ def add(self, dist):
+ """Add `dist` to the distribution map"""
+ new_path = (
+ dist.location not in self.paths and (
+ dist.location not in self.sitedirs or
+ # account for '.' being in PYTHONPATH
+ dist.location == os.getcwd()
+ )
+ )
+ if new_path:
+ self.paths.append(dist.location)
+ self.dirty = True
+ super().add(dist)
+
+ def remove(self, dist):
+ """Remove `dist` from the distribution map"""
+ while dist.location in self.paths:
+ self.paths.remove(dist.location)
+ self.dirty = True
+ super().remove(dist)
+
+ def make_relative(self, path):
+ npath, last = os.path.split(normalize_path(path))
+ baselen = len(self.basedir)
+ parts = [last]
+ sep = os.altsep == '/' and '/' or os.sep
+ while len(npath) >= baselen:
+ if npath == self.basedir:
+ parts.append(os.curdir)
+ parts.reverse()
+ return sep.join(parts)
+ npath, last = os.path.split(npath)
+ parts.append(last)
+ else:
+ return path
+
+
+class RewritePthDistributions(PthDistributions):
+ @classmethod
+ def _wrap_lines(cls, lines):
+ yield cls.prelude
+ for line in lines:
+ yield line
+ yield cls.postlude
+
+ prelude = _one_liner("""
+ import sys
+ sys.__plen = len(sys.path)
+ """)
+ postlude = _one_liner("""
+ import sys
+ new = sys.path[sys.__plen:]
+ del sys.path[sys.__plen:]
+ p = getattr(sys, '__egginsert', 0)
+ sys.path[p:p] = new
+ sys.__egginsert = p + len(new)
+ """)
+
+
+if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'raw') == 'rewrite':
+ PthDistributions = RewritePthDistributions
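+# Illustrative sketch (hypothetical egg path; exact joining of the prelude and
+# postlude lines may differ): with SETUPTOOLS_SYS_PATH_TECHNIQUE=rewrite, a saved
+# easy-install.pth is wrapped by RewritePthDistributions roughly like:
+#
+#     import sys; sys.__plen = len(sys.path)
+#     ./some_pkg-1.0-py3.11.egg
+#     import sys; new = sys.path[sys.__plen:]; del sys.path[sys.__plen:]; ...
+#
+# so the listed entries end up at a controlled insertion point (sys.__egginsert)
+# instead of simply staying appended at the end of sys.path.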
+
+
+def _first_line_re():
+ """
+ Return a regular expression based on first_line_re suitable for matching
+ strings.
+ """
+ if isinstance(first_line_re.pattern, str):
+ return first_line_re
+
+ # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern.
+ return re.compile(first_line_re.pattern.decode())
+
+
+def auto_chmod(func, arg, exc):
+ if func in [os.unlink, os.remove] and os.name == 'nt':
+ chmod(arg, stat.S_IWRITE)
+ return func(arg)
+ et, ev, _ = sys.exc_info()
+ # TODO: This code doesn't make sense. What is it trying to do?
+ raise (ev[0], ev[1] + (" %s %s" % (func, arg)))
+
+
+def update_dist_caches(dist_path, fix_zipimporter_caches):
+ """
+ Fix any globally cached `dist_path` related data
+
+ `dist_path` should be a path of a newly installed egg distribution (zipped
+ or unzipped).
+
+ sys.path_importer_cache contains finder objects that have been cached when
+ importing data from the original distribution. Any such finders need to be
+ cleared since the replacement distribution might be packaged differently,
+ e.g. a zipped egg distribution might get replaced with an unzipped egg
+ folder or vice versa. Having the old finders cached may then cause Python
+ to attempt loading modules from the replacement distribution using an
+ incorrect loader.
+
+ zipimport.zipimporter objects are Python loaders charged with importing
+ data packaged inside zip archives. If stale loaders referencing the
+ original distribution are left behind, they can fail to load modules from
+ the replacement distribution. E.g. if an old zipimport.zipimporter instance
+ is used to load data from a new zipped egg archive, it may cause the
+ operation to attempt to locate the requested data in the wrong location -
+ one indicated by the original distribution's zip archive directory
+ information. Such an operation may then fail outright, e.g. report having
+ read a 'bad local file header' or, even worse, it may fail silently and
+ return invalid data.
+
+ zipimport._zip_directory_cache contains cached zip archive directory
+ information for all existing zipimport.zipimporter instances and all such
+ instances connected to the same archive share the same cached directory
+ information.
+
+ If asked, and the underlying Python implementation allows it, we can fix
+ all existing zipimport.zipimporter instances instead of having to track
+ them down and remove them one by one, by updating their shared cached zip
+ archive directory information. This, of course, assumes that the
+ replacement distribution is packaged as a zipped egg.
+
+ If not asked to fix existing zipimport.zipimporter instances, we still do
+ our best to clear any remaining zipimport.zipimporter related cached data
+ that might somehow later get used when attempting to load data from the new
+ distribution and thus cause such load operations to fail. Note that when
+ tracking down such remaining stale data, we cannot catch every conceivable
+ usage from here, and we clear only those that we know of and have found to
+ cause problems if left alive. Any remaining caches should be updated by
+ whoever is in charge of maintaining them, i.e. they should be ready to
+ handle us replacing their zip archives with new distributions at runtime.
+
+ """
+ # There are several other known sources of stale zipimport.zipimporter
+ # instances that we do not clear here, but might if ever given a reason to
+ # do so:
+ # * Global setuptools pkg_resources.working_set (a.k.a. 'master working
+ # set') may contain distributions which may in turn contain their
+ # zipimport.zipimporter loaders.
+ # * Several zipimport.zipimporter loaders held by local variables further
+ # up the function call stack when running the setuptools installation.
+ # * Already loaded modules may have their __loader__ attribute set to the
+ # exact loader instance used when importing them. Python 3.4 docs state
+ # that this information is intended mostly for introspection and so is
+ # not expected to cause us problems.
+ normalized_path = normalize_path(dist_path)
+ _uncache(normalized_path, sys.path_importer_cache)
+ if fix_zipimporter_caches:
+ _replace_zip_directory_cache_data(normalized_path)
+ else:
+ # Here, even though we do not want to fix existing and now stale
+ # zipimporter cache information, we still want to remove it. Related to
+ # Python's zip archive directory information cache, we clear each of
+ # its stale entries in two phases:
+ # 1. Clear the entry so attempting to access zip archive information
+ # via any existing stale zipimport.zipimporter instances fails.
+ # 2. Remove the entry from the cache so any newly constructed
+ # zipimport.zipimporter instances do not end up using old stale
+ # zip archive directory information.
+ # This whole stale data removal step does not seem strictly necessary,
+ # but has been left in because it was done before we started replacing
+ # the zip archive directory information cache content if possible, and
+ # there are no relevant unit tests that we can depend on to tell us if
+ # this is really needed.
+ _remove_and_clear_zip_directory_cache_data(normalized_path)
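+# Usage sketch, mirroring the calls made from easy_install.install_egg above
+# (the destination path is hypothetical):
+#
+#     destination = '/path/to/site-packages/Example-1.0-py3.11.egg'
+#     update_dist_caches(destination, fix_zipimporter_caches=new_dist_is_zipped)
+#     # ...and if the copy/move fails, clear (but do not fix) the caches:
+#     update_dist_caches(destination, fix_zipimporter_caches=False)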
+
+
+def _collect_zipimporter_cache_entries(normalized_path, cache):
+ """
+ Return zipimporter cache entry keys related to a given normalized path.
+
+ Alternative path spellings (e.g. those using different character case or
+ those using alternative path separators) related to the same path are
+ included. Any sub-path entries are included as well, i.e. those
+ corresponding to zip archives embedded in other zip archives.
+
+ """
+ result = []
+ prefix_len = len(normalized_path)
+ for p in cache:
+ np = normalize_path(p)
+ if (np.startswith(normalized_path) and
+ np[prefix_len:prefix_len + 1] in (os.sep, '')):
+ result.append(p)
+ return result
+
+
+def _update_zipimporter_cache(normalized_path, cache, updater=None):
+ """
+ Update zipimporter cache data for a given normalized path.
+
+ Any sub-path entries are processed as well, i.e. those corresponding to zip
+ archives embedded in other zip archives.
+
+ The given updater is a callable taking a cache entry key and the original entry
+ (after the entry has already been removed from the cache); it is expected to
+ update the entry and may return a new one to be inserted in its place.
+ Returning None indicates that the entry should not be replaced with a new
+ one. If no updater is given, the cache entries are simply removed without
+ any additional processing, as if the updater had returned None.
+
+ """
+ for p in _collect_zipimporter_cache_entries(normalized_path, cache):
+ # N.B. pypy's custom zipimport._zip_directory_cache implementation does
+ # not support the complete dict interface:
+ # * Does not support item assignment, so on PyPy this function can only
+ # be used to remove existing cache entries, not to replace them.
+ # * Does not support the dict.pop() method, forcing us to use the
+ # get/del patterns instead. For more detailed information see the
+ # following links:
+ # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420
+ # http://bit.ly/2h9itJX
+ old_entry = cache[p]
+ del cache[p]
+ new_entry = updater and updater(p, old_entry)
+ if new_entry is not None:
+ cache[p] = new_entry
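+# Sketch of the updater contract (hypothetical helpers; the real updaters used
+# by this module are defined below):
+#
+#     def _drop_entry(path, old_entry):
+#         return None          # entry stays removed from the cache
+#
+#     def _keep_entry(path, old_entry):
+#         return old_entry     # entry is re-inserted unchanged
+#
+#     _update_zipimporter_cache(normalized_path, sys.path_importer_cache,
+#                               updater=_drop_entry)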
+
+
+def _uncache(normalized_path, cache):
+ _update_zipimporter_cache(normalized_path, cache)
+
+
+def _remove_and_clear_zip_directory_cache_data(normalized_path):
+ def clear_and_remove_cached_zip_archive_directory_data(path, old_entry):
+ old_entry.clear()
+
+ _update_zipimporter_cache(
+ normalized_path, zipimport._zip_directory_cache,
+ updater=clear_and_remove_cached_zip_archive_directory_data)
+
+
+# PyPy Python implementation does not allow directly writing to the
+# zipimport._zip_directory_cache and so prevents us from attempting to correct
+# its content. The best we can do there is clear the problematic cache content
+# and have PyPy repopulate it as needed. The downside is that if there are any
+ # stale zipimport.zipimporter instances lying around, attempting to use them
+ # will fail due to not having their zip archive directory information available
+# instead of being automatically corrected to use the new correct zip archive
+# directory information.
+if '__pypy__' in sys.builtin_module_names:
+ _replace_zip_directory_cache_data = \
+ _remove_and_clear_zip_directory_cache_data
+else:
+
+ def _replace_zip_directory_cache_data(normalized_path):
+ def replace_cached_zip_archive_directory_data(path, old_entry):
+ # N.B. In theory, we could load the zip directory information just
+ # once for all updated path spellings, and then copy it locally and
+ # update its contained path strings to contain the correct
+ # spelling, but that seems like a way too invasive move (this cache
+ # structure is not officially documented anywhere and could in
+ # theory change with new Python releases) for no significant
+ # benefit.
+ old_entry.clear()
+ zipimport.zipimporter(path)
+ old_entry.update(zipimport._zip_directory_cache[path])
+ return old_entry
+
+ _update_zipimporter_cache(
+ normalized_path, zipimport._zip_directory_cache,
+ updater=replace_cached_zip_archive_directory_data)
+
+
+def is_python(text, filename='<string>'):
+ "Is this string a valid Python script?"
+ try:
+ compile(text, filename, 'exec')
+ except (SyntaxError, TypeError):
+ return False
+ else:
+ return True
+
+
+def is_sh(executable):
+ """Determine if the specified executable is a .sh (contains a #! line)"""
+ try:
+ with io.open(executable, encoding='latin-1') as fp:
+ magic = fp.read(2)
+ except (OSError, IOError):
+ return executable
+ return magic == '#!'
+
+
+def nt_quote_arg(arg):
+ """Quote a command line argument according to Windows parsing rules"""
+ return subprocess.list2cmdline([arg])
+
+
+def is_python_script(script_text, filename):
+ """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc.
+ """
+ if filename.endswith('.py') or filename.endswith('.pyw'):
+ return True # extension says it's Python
+ if is_python(script_text, filename):
+ return True # it's syntactically valid Python
+ if script_text.startswith('#!'):
+ # It begins with a '#!' line, so check if 'python' is in it somewhere
+ return 'python' in script_text.splitlines()[0].lower()
+
+ return False # Not any Python I can recognize
+
+
+try:
+ from os import chmod as _chmod
+except ImportError:
+ # Jython compatibility
+ def _chmod(*args):
+ pass
+
+
+def chmod(path, mode):
+ log.debug("changing mode of %s to %o", path, mode)
+ try:
+ _chmod(path, mode)
+ except os.error as e:
+ log.debug("chmod failed: %s", e)
+
+
+class CommandSpec(list):
+ """
+ A command spec for a #! header, specified as a list of arguments akin to
+ those passed to Popen.
+ """
+
+ options = []
+ split_args = dict()
+
+ @classmethod
+ def best(cls):
+ """
+ Choose the best CommandSpec class based on environmental conditions.
+ """
+ return cls
+
+ @classmethod
+ def _sys_executable(cls):
+ _default = os.path.normpath(sys.executable)
+ return os.environ.get('__PYVENV_LAUNCHER__', _default)
+
+ @classmethod
+ def from_param(cls, param):
+ """
+ Construct a CommandSpec from a parameter to build_scripts, which may
+ be None.
+ """
+ if isinstance(param, cls):
+ return param
+ if isinstance(param, list):
+ return cls(param)
+ if param is None:
+ return cls.from_environment()
+ # otherwise, assume it's a string.
+ return cls.from_string(param)
+
+ @classmethod
+ def from_environment(cls):
+ return cls([cls._sys_executable()])
+
+ @classmethod
+ def from_string(cls, string):
+ """
+ Construct a command spec from a simple string representing a command
+ line parseable by shlex.split.
+ """
+ items = shlex.split(string, **cls.split_args)
+ return cls(items)
+
+ def install_options(self, script_text):
+ self.options = shlex.split(self._extract_options(script_text))
+ cmdline = subprocess.list2cmdline(self)
+ if not isascii(cmdline):
+ self.options[:0] = ['-x']
+
+ @staticmethod
+ def _extract_options(orig_script):
+ """
+ Extract any options from the first line of the script.
+ """
+ first = (orig_script + '\n').splitlines()[0]
+ match = _first_line_re().match(first)
+ options = match.group(1) or '' if match else ''
+ return options.strip()
+
+ def as_header(self):
+ return self._render(self + list(self.options))
+
+ @staticmethod
+ def _strip_quotes(item):
+ _QUOTES = '"\''
+ for q in _QUOTES:
+ if item.startswith(q) and item.endswith(q):
+ return item[1:-1]
+ return item
+
+ @staticmethod
+ def _render(items):
+ cmdline = subprocess.list2cmdline(
+ CommandSpec._strip_quotes(item.strip()) for item in items)
+ return '#!' + cmdline + '\n'
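+ # Illustrative use (hypothetical interpreter path):
+ #
+ #     spec = CommandSpec.from_string('/usr/bin/python3 -O')
+ #     spec.as_header()   # -> '#!/usr/bin/python3 -O\n'
+ #
+ # from_param(None) falls back to from_environment(), i.e. the current
+ # sys.executable (or __PYVENV_LAUNCHER__ when set).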
+
+
+# For pbr compat; will be removed in a future version.
+sys_executable = CommandSpec._sys_executable()
+
+
+class WindowsCommandSpec(CommandSpec):
+ split_args = dict(posix=False)
+
+
+class ScriptWriter:
+ """
+ Encapsulates behavior around writing entry point scripts for console and
+ gui apps.
+ """
+
+ template = textwrap.dedent(r"""
+ # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+ import re
+ import sys
+
+ # for compatibility with easy_install; see #2198
+ __requires__ = %(spec)r
+
+ try:
+ from importlib.metadata import distribution
+ except ImportError:
+ try:
+ from importlib_metadata import distribution
+ except ImportError:
+ from pkg_resources import load_entry_point
+
+
+ def importlib_load_entry_point(spec, group, name):
+ dist_name, _, _ = spec.partition('==')
+ matches = (
+ entry_point
+ for entry_point in distribution(dist_name).entry_points
+ if entry_point.group == group and entry_point.name == name
+ )
+ return next(matches).load()
+
+
+ globals().setdefault('load_entry_point', importlib_load_entry_point)
+
+
+ if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+ sys.exit(load_entry_point(%(spec)r, %(group)r, %(name)r)())
+ """).lstrip()
+
+ command_spec_class = CommandSpec
+
+ @classmethod
+ def get_script_args(cls, dist, executable=None, wininst=False):
+ # for backward compatibility
+ warnings.warn("Use get_args", EasyInstallDeprecationWarning)
+ writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
+ header = cls.get_script_header("", executable, wininst)
+ return writer.get_args(dist, header)
+
+ @classmethod
+ def get_script_header(cls, script_text, executable=None, wininst=False):
+ # for backward compatibility
+ warnings.warn(
+ "Use get_header", EasyInstallDeprecationWarning, stacklevel=2)
+ if wininst:
+ executable = "python.exe"
+ return cls.get_header(script_text, executable)
+
+ @classmethod
+ def get_args(cls, dist, header=None):
+ """
+ Yield write_script() argument tuples for a distribution's
+ console_scripts and gui_scripts entry points.
+ """
+ if header is None:
+ header = cls.get_header()
+ spec = str(dist.as_requirement())
+ for type_ in 'console', 'gui':
+ group = type_ + '_scripts'
+ for name, ep in dist.get_entry_map(group).items():
+ cls._ensure_safe_name(name)
+ script_text = cls.template % locals()
+ args = cls._get_script_args(type_, name, header, script_text)
+ for res in args:
+ yield res
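+ # Sketch of consuming get_args(); `dist` is assumed to be a pkg_resources
+ # Distribution exposing console_scripts/gui_scripts entry points:
+ #
+ #     writer = ScriptWriter.best()
+ #     for name, text, *extra in writer.get_args(dist):
+ #         # extra is empty here, or holds (mode, blockers) for Windows writers
+ #         write_script_somehow(name, text)   # hypothetical sink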
+
+ @staticmethod
+ def _ensure_safe_name(name):
+ """
+ Prevent paths in *_scripts entry point names.
+ """
+ has_path_sep = re.search(r'[\\/]', name)
+ if has_path_sep:
+ raise ValueError("Path separators not allowed in script names")
+
+ @classmethod
+ def get_writer(cls, force_windows):
+ # for backward compatibility
+ warnings.warn("Use best", EasyInstallDeprecationWarning)
+ return WindowsScriptWriter.best() if force_windows else cls.best()
+
+ @classmethod
+ def best(cls):
+ """
+ Select the best ScriptWriter for this environment.
+ """
+ if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
+ return WindowsScriptWriter.best()
+ else:
+ return cls
+
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ # Simply write the stub with no extension.
+ yield (name, header + script_text)
+
+ @classmethod
+ def get_header(cls, script_text="", executable=None):
+ """Create a #! line, getting options (if any) from script_text"""
+ cmd = cls.command_spec_class.best().from_param(executable)
+ cmd.install_options(script_text)
+ return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+ command_spec_class = WindowsCommandSpec
+
+ @classmethod
+ def get_writer(cls):
+ # for backward compatibility
+ warnings.warn("Use best", EasyInstallDeprecationWarning)
+ return cls.best()
+
+ @classmethod
+ def best(cls):
+ """
+ Select the best ScriptWriter suitable for Windows
+ """
+ writer_lookup = dict(
+ executable=WindowsExecutableLauncherWriter,
+ natural=cls,
+ )
+ # for compatibility, use the executable launcher by default
+ launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+ return writer_lookup[launcher]
+
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ "For Windows, add a .py extension"
+ ext = dict(console='.pya', gui='.pyw')[type_]
+ if ext not in os.environ['PATHEXT'].lower().split(';'):
+ msg = (
+ "{ext} not listed in PATHEXT; scripts will not be "
+ "recognized as executables."
+ ).format(**locals())
+ warnings.warn(msg, UserWarning)
+ old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+ old.remove(ext)
+ header = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield name + ext, header + script_text, 't', blockers
+
+ @classmethod
+ def _adjust_header(cls, type_, orig_header):
+ """
+ Make sure 'pythonw' is used for gui and 'python' is used for
+ console (regardless of what sys.executable is).
+ """
+ pattern = 'pythonw.exe'
+ repl = 'python.exe'
+ if type_ == 'gui':
+ pattern, repl = repl, pattern
+ pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+ new_header = pattern_ob.sub(string=orig_header, repl=repl)
+ return new_header if cls._use_header(new_header) else orig_header
+
+ @staticmethod
+ def _use_header(new_header):
+ """
+ Should _adjust_header use the replaced header?
+
+ On non-windows systems, always use. On
+ Windows systems, only use the replaced header if it resolves
+ to an executable on the system.
+ """
+ clean_header = new_header[2:-1].strip('"')
+ return sys.platform != 'win32' or find_executable(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+ @classmethod
+ def _get_script_args(cls, type_, name, header, script_text):
+ """
+ For Windows, add a .py extension and an .exe launcher
+ """
+ if type_ == 'gui':
+ launcher_type = 'gui'
+ ext = '-script.pyw'
+ old = ['.pyw']
+ else:
+ launcher_type = 'cli'
+ ext = '-script.py'
+ old = ['.py', '.pyc', '.pyo']
+ hdr = cls._adjust_header(type_, header)
+ blockers = [name + x for x in old]
+ yield (name + ext, hdr + script_text, 't', blockers)
+ yield (
+ name + '.exe', get_win_launcher(launcher_type),
+ 'b' # write in binary mode
+ )
+ if not is_64bit():
+ # install a manifest for the launcher to prevent Windows
+ # from detecting it as an installer (which it will for
+ # launchers like easy_install.exe). Consider only
+ # adding a manifest for launchers detected as installers.
+ # See Distribute #143 for details.
+ m_name = name + '.exe.manifest'
+ yield (m_name, load_launcher_manifest(name), 't')
+
+
+# for backward-compatibility
+get_script_args = ScriptWriter.get_script_args
+get_script_header = ScriptWriter.get_script_header
+
+
+def get_win_launcher(type):
+ """
+ Load the Windows launcher (executable) suitable for launching a script.
+
+ `type` should be either 'cli' or 'gui'
+
+ Returns the executable as a byte string.
+ """
+ launcher_fn = '%s.exe' % type
+ if is_64bit():
+ if get_platform() == "win-arm64":
+ launcher_fn = launcher_fn.replace(".", "-arm64.")
+ else:
+ launcher_fn = launcher_fn.replace(".", "-64.")
+ else:
+ launcher_fn = launcher_fn.replace(".", "-32.")
+ return resource_string('setuptools', launcher_fn)
+
+
+def load_launcher_manifest(name):
+ manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
+ return manifest.decode('utf-8') % vars()
+
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+ return shutil.rmtree(path, ignore_errors, onerror)
+
+
+def current_umask():
+ tmp = os.umask(0o022)
+ os.umask(tmp)
+ return tmp
+
+
+def only_strs(values):
+ """
+ Exclude non-str values. Ref #3063.
+ """
+ return filter(lambda val: isinstance(val, str), values)
+
+
+class EasyInstallDeprecationWarning(SetuptoolsDeprecationWarning):
+ """
+ Warning for EasyInstall deprecations, bypassing suppression.
+ """
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/editable_wheel.py b/venv/lib/python3.11/site-packages/setuptools/command/editable_wheel.py
new file mode 100644
index 0000000..d60cfbe
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/editable_wheel.py
@@ -0,0 +1,844 @@
+"""
+Create a wheel that, when installed, will make the source package 'editable'
+(add it to the interpreter's path, including metadata) per PEP 660. Replaces
+'setup.py develop'.
+
+.. note::
+ One of the mechanisms briefly mentioned in PEP 660 to implement editable installs is
+ to create a separate directory inside ``build`` and use a .pth file to point to that
+ directory. In the context of this file, such a directory is referred to as
+ *auxiliary build directory* or ``auxiliary_dir``.
+"""
+
+import logging
+import os
+import re
+import shutil
+import sys
+import traceback
+import warnings
+from contextlib import suppress
+from enum import Enum
+from inspect import cleandoc
+from itertools import chain
+from pathlib import Path
+from tempfile import TemporaryDirectory
+from typing import (
+ TYPE_CHECKING,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Mapping,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+from setuptools import Command, SetuptoolsDeprecationWarning, errors, namespaces
+from setuptools.command.build_py import build_py as build_py_cls
+from setuptools.discovery import find_package_path
+from setuptools.dist import Distribution
+
+if TYPE_CHECKING:
+ from wheel.wheelfile import WheelFile # noqa
+
+if sys.version_info >= (3, 8):
+ from typing import Protocol
+elif TYPE_CHECKING:
+ from typing_extensions import Protocol
+else:
+ from abc import ABC as Protocol
+
+_Path = Union[str, Path]
+_P = TypeVar("_P", bound=_Path)
+_logger = logging.getLogger(__name__)
+
+
+class _EditableMode(Enum):
+ """
+ Possible editable installation modes:
+ `lenient` (new files automatically added to the package - DEFAULT);
+ `strict` (requires a new installation when files are added/removed); or
+ `compat` (attempts to emulate `python setup.py develop` - DEPRECATED).
+ """
+
+ STRICT = "strict"
+ LENIENT = "lenient"
+ COMPAT = "compat" # TODO: Remove `compat` after Dec/2022.
+
+ @classmethod
+ def convert(cls, mode: Optional[str]) -> "_EditableMode":
+ if not mode:
+ return _EditableMode.LENIENT # default
+
+ _mode = mode.upper()
+ if _mode not in _EditableMode.__members__:
+ raise errors.OptionError(f"Invalid editable mode: {mode!r}. Try: 'strict'.")
+
+ if _mode == "COMPAT":
+ msg = """
+ The 'compat' editable mode is transitional and will be removed
+ in future versions of `setuptools`.
+ Please adapt your code accordingly to use either the 'strict' or the
+ 'lenient' modes.
+
+ For more information, please check:
+ https://setuptools.pypa.io/en/latest/userguide/development_mode.html
+ """
+ warnings.warn(msg, SetuptoolsDeprecationWarning)
+
+ return _EditableMode[_mode]
+
+
+_STRICT_WARNING = """
+New or renamed files may not be automatically picked up without a new installation.
+"""
+
+_LENIENT_WARNING = """
+Options like `package-data`, `include/exclude-package-data` or
+`packages.find.exclude/include` may have no effect.
+"""
+
+
+class editable_wheel(Command):
+ """Build 'editable' wheel for development.
+ (This command is reserved for internal use of setuptools).
+ """
+
+ description = "create a PEP 660 'editable' wheel"
+
+ user_options = [
+ ("dist-dir=", "d", "directory to put final built distributions in"),
+ ("dist-info-dir=", "I", "path to a pre-build .dist-info directory"),
+ ("mode=", None, cleandoc(_EditableMode.__doc__ or "")),
+ ]
+
+ def initialize_options(self):
+ self.dist_dir = None
+ self.dist_info_dir = None
+ self.project_dir = None
+ self.mode = None
+
+ def finalize_options(self):
+ dist = self.distribution
+ self.project_dir = dist.src_root or os.curdir
+ self.package_dir = dist.package_dir or {}
+ self.dist_dir = Path(self.dist_dir or os.path.join(self.project_dir, "dist"))
+
+ def run(self):
+ try:
+ self.dist_dir.mkdir(exist_ok=True)
+ self._ensure_dist_info()
+
+ # Add missing dist_info files
+ self.reinitialize_command("bdist_wheel")
+ bdist_wheel = self.get_finalized_command("bdist_wheel")
+ bdist_wheel.write_wheelfile(self.dist_info_dir)
+
+ self._create_wheel_file(bdist_wheel)
+ except Exception as ex:
+ traceback.print_exc()
+ msg = """
+ Support for editable installs via PEP 660 was recently introduced
+ in `setuptools`. If you are seeing this error, please report to:
+
+ https://github.com/pypa/setuptools/issues
+
+ Meanwhile you can try the legacy behavior by setting an
+ environment variable and trying to install again:
+
+ SETUPTOOLS_ENABLE_FEATURES="legacy-editable"
+ """
+ raise errors.InternalError(cleandoc(msg)) from ex
+
+ def _ensure_dist_info(self):
+ if self.dist_info_dir is None:
+ dist_info = self.reinitialize_command("dist_info")
+ dist_info.output_dir = self.dist_dir
+ dist_info.ensure_finalized()
+ dist_info.run()
+ self.dist_info_dir = dist_info.dist_info_dir
+ else:
+ assert str(self.dist_info_dir).endswith(".dist-info")
+ assert Path(self.dist_info_dir, "METADATA").exists()
+
+ def _install_namespaces(self, installation_dir, pth_prefix):
+ # XXX: Only required to support the deprecated namespace practice
+ dist = self.distribution
+ if not dist.namespace_packages:
+ return
+
+ src_root = Path(self.project_dir, self.package_dir.get("", ".")).resolve()
+ installer = _NamespaceInstaller(dist, installation_dir, pth_prefix, src_root)
+ installer.install_namespaces()
+
+ def _find_egg_info_dir(self) -> Optional[str]:
+ parent_dir = Path(self.dist_info_dir).parent if self.dist_info_dir else Path()
+ candidates = map(str, parent_dir.glob("*.egg-info"))
+ return next(candidates, None)
+
+ def _configure_build(
+ self, name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
+ ):
+ """Configure commands to behave in the following ways:
+
+ - Build commands can write to ``build_lib`` if they really want to...
+ (but this folder is expected to be ignored and modules are expected to live
+ in the project directory...)
+ - Binary extensions should be built in-place (editable_mode = True)
+ - Data/header/script files are not part of the "editable" specification
+ so they are written directly to the unpacked_wheel directory.
+ """
+ # Non-editable files (data, headers, scripts) are written directly to the
+ # unpacked_wheel
+
+ dist = self.distribution
+ wheel = str(unpacked_wheel)
+ build_lib = str(build_lib)
+ data = str(Path(unpacked_wheel, f"{name}.data", "data"))
+ headers = str(Path(unpacked_wheel, f"{name}.data", "headers"))
+ scripts = str(Path(unpacked_wheel, f"{name}.data", "scripts"))
+
+ # egg-info may be generated again to create a manifest (used for package data)
+ egg_info = dist.reinitialize_command("egg_info", reinit_subcommands=True)
+ egg_info.egg_base = str(tmp_dir)
+ egg_info.ignore_egg_info_in_manifest = True
+
+ build = dist.reinitialize_command("build", reinit_subcommands=True)
+ install = dist.reinitialize_command("install", reinit_subcommands=True)
+
+ build.build_platlib = build.build_purelib = build.build_lib = build_lib
+ install.install_purelib = install.install_platlib = install.install_lib = wheel
+ install.install_scripts = build.build_scripts = scripts
+ install.install_headers = headers
+ install.install_data = data
+
+ install_scripts = dist.get_command_obj("install_scripts")
+ install_scripts.no_ep = True
+
+ build.build_temp = str(tmp_dir)
+
+ build_py = dist.get_command_obj("build_py")
+ build_py.compile = False
+ build_py.existing_egg_info_dir = self._find_egg_info_dir()
+
+ self._set_editable_mode()
+
+ build.ensure_finalized()
+ install.ensure_finalized()
+
+ def _set_editable_mode(self):
+ """Set the ``editable_mode`` flag in the build sub-commands"""
+ dist = self.distribution
+ build = dist.get_command_obj("build")
+ for cmd_name in build.get_sub_commands():
+ cmd = dist.get_command_obj(cmd_name)
+ if hasattr(cmd, "editable_mode"):
+ cmd.editable_mode = True
+ elif hasattr(cmd, "inplace"):
+ cmd.inplace = True # backward compatibility with distutils
+
+ def _collect_build_outputs(self) -> Tuple[List[str], Dict[str, str]]:
+ files: List[str] = []
+ mapping: Dict[str, str] = {}
+ build = self.get_finalized_command("build")
+
+ for cmd_name in build.get_sub_commands():
+ cmd = self.get_finalized_command(cmd_name)
+ if hasattr(cmd, "get_outputs"):
+ files.extend(cmd.get_outputs() or [])
+ if hasattr(cmd, "get_output_mapping"):
+ mapping.update(cmd.get_output_mapping() or {})
+
+ return files, mapping
+
+ def _run_build_commands(
+ self, dist_name: str, unpacked_wheel: _Path, build_lib: _Path, tmp_dir: _Path
+ ) -> Tuple[List[str], Dict[str, str]]:
+ self._configure_build(dist_name, unpacked_wheel, build_lib, tmp_dir)
+ self._run_build_subcommands()
+ files, mapping = self._collect_build_outputs()
+ self._run_install("headers")
+ self._run_install("scripts")
+ self._run_install("data")
+ return files, mapping
+
+ def _run_build_subcommands(self):
+ """
+ Issue #3501 indicates that some plugins/customizations might rely on:
+
+ 1. ``build_py`` not running
+ 2. ``build_py`` always copying files to ``build_lib``
+
+ However, both of these assumptions may be false in editable_wheel.
+ This method implements a temporary workaround to support the ecosystem
+ while the implementations catch up.
+ """
+ # TODO: Once plugins/customisations had the chance to catch up, replace
+ # `self._run_build_subcommands()` with `self.run_command("build")`.
+ # Also remove _safely_run, TestCustomBuildPy. Suggested date: Aug/2023.
+ build: Command = self.get_finalized_command("build")
+ for name in build.get_sub_commands():
+ cmd = self.get_finalized_command(name)
+ if name == "build_py" and type(cmd) != build_py_cls:
+ self._safely_run(name)
+ else:
+ self.run_command(name)
+
+ def _safely_run(self, cmd_name: str):
+ try:
+ return self.run_command(cmd_name)
+ except Exception:
+ msg = f"""{traceback.format_exc()}\n
+ If you are seeing this warning it is very likely that a setuptools
+ plugin or customization overrides the `{cmd_name}` command, without
+ taking into consideration how editable installs run build steps
+ starting from v64.0.0.
+
+ Plugin authors and developers relying on custom build steps are encouraged
+ to update their `{cmd_name}` implementation considering the information in
+ https://setuptools.pypa.io/en/latest/userguide/extension.html
+ about editable installs.
+
+ For the time being `setuptools` will silence this error and ignore
+ the faulty command, but this behaviour will change in future versions.\n
+ """
+ warnings.warn(msg, SetuptoolsDeprecationWarning, stacklevel=2)
+
+ def _create_wheel_file(self, bdist_wheel):
+ from wheel.wheelfile import WheelFile
+
+ dist_info = self.get_finalized_command("dist_info")
+ dist_name = dist_info.name
+ tag = "-".join(bdist_wheel.get_tag())
+ build_tag = "0.editable" # According to PEP 427 needs to start with digit
+ archive_name = f"{dist_name}-{build_tag}-{tag}.whl"
+ wheel_path = Path(self.dist_dir, archive_name)
+ if wheel_path.exists():
+ wheel_path.unlink()
+
+ unpacked_wheel = TemporaryDirectory(suffix=archive_name)
+ build_lib = TemporaryDirectory(suffix=".build-lib")
+ build_tmp = TemporaryDirectory(suffix=".build-temp")
+
+ with unpacked_wheel as unpacked, build_lib as lib, build_tmp as tmp:
+ unpacked_dist_info = Path(unpacked, Path(self.dist_info_dir).name)
+ shutil.copytree(self.dist_info_dir, unpacked_dist_info)
+ self._install_namespaces(unpacked, dist_info.name)
+ files, mapping = self._run_build_commands(dist_name, unpacked, lib, tmp)
+ strategy = self._select_strategy(dist_name, tag, lib)
+ with strategy, WheelFile(wheel_path, "w") as wheel_obj:
+ strategy(wheel_obj, files, mapping)
+ wheel_obj.write_files(unpacked)
+
+ return wheel_path
+
+ def _run_install(self, category: str):
+ has_category = getattr(self.distribution, f"has_{category}", None)
+ if has_category and has_category():
+ _logger.info(f"Installing {category} as non editable")
+ self.run_command(f"install_{category}")
+
+ def _select_strategy(
+ self,
+ name: str,
+ tag: str,
+ build_lib: _Path,
+ ) -> "EditableStrategy":
+ """Decides which strategy to use to implement an editable installation."""
+ build_name = f"__editable__.{name}-{tag}"
+ project_dir = Path(self.project_dir)
+ mode = _EditableMode.convert(self.mode)
+
+ if mode is _EditableMode.STRICT:
+ auxiliary_dir = _empty_dir(Path(self.project_dir, "build", build_name))
+ return _LinkTree(self.distribution, name, auxiliary_dir, build_lib)
+
+ packages = _find_packages(self.distribution)
+ has_simple_layout = _simple_layout(packages, self.package_dir, project_dir)
+ is_compat_mode = mode is _EditableMode.COMPAT
+ if set(self.package_dir) == {""} and has_simple_layout or is_compat_mode:
+ # src-layout(ish) is relatively safe for a simple pth file
+ src_dir = self.package_dir.get("", ".")
+ return _StaticPth(self.distribution, name, [Path(project_dir, src_dir)])
+
+ # Use a MetaPathFinder to avoid adding accidental top-level packages/modules
+ return _TopLevelFinder(self.distribution, name)
+
+
+class EditableStrategy(Protocol):
+ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ ...
+
+ def __enter__(self):
+ ...
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ ...
+
+
+class _StaticPth:
+ def __init__(self, dist: Distribution, name: str, path_entries: List[Path]):
+ self.dist = dist
+ self.name = name
+ self.path_entries = path_entries
+
+ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ entries = "\n".join((str(p.resolve()) for p in self.path_entries))
+ contents = bytes(f"{entries}\n", "utf-8")
+ wheel.writestr(f"__editable__.{self.name}.pth", contents)
+
+ def __enter__(self):
+ msg = f"""
+ Editable install will be performed using a .pth file to extend `sys.path` with:
+ {list(map(os.fspath, self.path_entries))!r}
+ """
+ _logger.warning(msg + _LENIENT_WARNING)
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ ...
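+ # For a hypothetical project with sources under /home/user/proj/src, this
+ # strategy writes a single wheel entry roughly like:
+ #
+ #     __editable__.mypkg-1.0.pth
+ #         /home/user/proj/src
+ #
+ # i.e. one absolute path per line, which site.py adds to sys.path at startup.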
+
+
+class _LinkTree(_StaticPth):
+ """
+ Creates a ``.pth`` file that points to a link tree in the ``auxiliary_dir``.
+
+ This strategy will only link files (not dirs), so it can be implemented on
+ any OS, even if that means using hardlinks instead of symlinks.
+
+ By collocating ``auxiliary_dir`` and the original source code, limitations
+ with hardlinks should be avoided.
+ """
+ def __init__(
+ self, dist: Distribution,
+ name: str,
+ auxiliary_dir: _Path,
+ build_lib: _Path,
+ ):
+ self.auxiliary_dir = Path(auxiliary_dir)
+ self.build_lib = Path(build_lib).resolve()
+ self._file = dist.get_command_obj("build_py").copy_file
+ super().__init__(dist, name, [self.auxiliary_dir])
+
+ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ self._create_links(files, mapping)
+ super().__call__(wheel, files, mapping)
+
+ def _normalize_output(self, file: str) -> Optional[str]:
+ # Files relative to build_lib will be normalized to None
+ with suppress(ValueError):
+ path = Path(file).resolve().relative_to(self.build_lib)
+ return str(path).replace(os.sep, '/')
+ return None
+
+ def _create_file(self, relative_output: str, src_file: str, link=None):
+ dest = self.auxiliary_dir / relative_output
+ if not dest.parent.is_dir():
+ dest.parent.mkdir(parents=True)
+ self._file(src_file, dest, link=link)
+
+ def _create_links(self, outputs, output_mapping):
+ self.auxiliary_dir.mkdir(parents=True, exist_ok=True)
+ link_type = "sym" if _can_symlink_files(self.auxiliary_dir) else "hard"
+ mappings = {
+ self._normalize_output(k): v
+ for k, v in output_mapping.items()
+ }
+ mappings.pop(None, None) # remove files that are not relative to build_lib
+
+ for output in outputs:
+ relative = self._normalize_output(output)
+ if relative and relative not in mappings:
+ self._create_file(relative, output)
+
+ for relative, src in mappings.items():
+ self._create_file(relative, src, link=link_type)
+
+ def __enter__(self):
+ msg = "Strict editable install will be performed using a link tree.\n"
+ _logger.warning(msg + _STRICT_WARNING)
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ msg = f"""\n
+ Strict editable installation performed using the auxiliary directory:
+ {self.auxiliary_dir}
+
+ Please be careful to not remove this directory, otherwise you might not be able
+ to import/use your package.
+ """
+ warnings.warn(msg, InformationOnly)
+
+
+class _TopLevelFinder:
+ def __init__(self, dist: Distribution, name: str):
+ self.dist = dist
+ self.name = name
+
+ def __call__(self, wheel: "WheelFile", files: List[str], mapping: Dict[str, str]):
+ src_root = self.dist.src_root or os.curdir
+ top_level = chain(_find_packages(self.dist), _find_top_level_modules(self.dist))
+ package_dir = self.dist.package_dir or {}
+ roots = _find_package_roots(top_level, package_dir, src_root)
+
+ namespaces_: Dict[str, List[str]] = dict(chain(
+ _find_namespaces(self.dist.packages or [], roots),
+ ((ns, []) for ns in _find_virtual_namespaces(roots)),
+ ))
+
+ name = f"__editable__.{self.name}.finder"
+ finder = _make_identifier(name)
+ content = bytes(_finder_template(name, roots, namespaces_), "utf-8")
+ wheel.writestr(f"{finder}.py", content)
+
+ content = bytes(f"import {finder}; {finder}.install()", "utf-8")
+ wheel.writestr(f"__editable__.{self.name}.pth", content)
+
+ def __enter__(self):
+ msg = "Editable install will be performed using a meta path finder.\n"
+ _logger.warning(msg + _LENIENT_WARNING)
+ return self
+
+ def __exit__(self, _exc_type, _exc_value, _traceback):
+ msg = """\n
+ Please be careful with folders in your working directory with the same
+ name as your package as they may take precedence during imports.
+ """
+ warnings.warn(msg, InformationOnly)
+
+
+def _can_symlink_files(base_dir: Path) -> bool:
+ with TemporaryDirectory(dir=str(base_dir.resolve())) as tmp:
+ path1, path2 = Path(tmp, "file1.txt"), Path(tmp, "file2.txt")
+ path1.write_text("file1", encoding="utf-8")
+ with suppress(AttributeError, NotImplementedError, OSError):
+ os.symlink(path1, path2)
+ if path2.is_symlink() and path2.read_text(encoding="utf-8") == "file1":
+ return True
+
+ try:
+ os.link(path1, path2) # Ensure hard links can be created
+ except Exception as ex:
+ msg = (
+ "File system does not seem to support either symlinks or hard links. "
+ "Strict editable installs require one of them to be supported."
+ )
+ raise LinksNotSupported(msg) from ex
+ return False
+
+
+def _simple_layout(
+ packages: Iterable[str], package_dir: Dict[str, str], project_dir: Path
+) -> bool:
+ """Return ``True`` if:
+ - all packages are contained by the same parent directory, **and**
+ - all packages become importable if the parent directory is added to ``sys.path``.
+
+ >>> _simple_layout(['a'], {"": "src"}, "/tmp/myproj")
+ True
+ >>> _simple_layout(['a', 'a.b'], {"": "src"}, "/tmp/myproj")
+ True
+ >>> _simple_layout(['a', 'a.b'], {}, "/tmp/myproj")
+ True
+ >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"": "src"}, "/tmp/myproj")
+ True
+ >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "a", "b": "b"}, ".")
+ True
+ >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a", "b": "_b"}, ".")
+ False
+ >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a": "_a"}, "/tmp/myproj")
+ False
+ >>> _simple_layout(['a', 'a.a1', 'a.a1.a2', 'b'], {"a.a1.a2": "_a2"}, ".")
+ False
+ >>> _simple_layout(['a', 'a.b'], {"": "src", "a.b": "_ab"}, "/tmp/myproj")
+ False
+ >>> # Special cases, no packages yet:
+ >>> _simple_layout([], {"": "src"}, "/tmp/myproj")
+ True
+ >>> _simple_layout([], {"a": "_a", "": "src"}, "/tmp/myproj")
+ False
+ """
+ layout = {
+ pkg: find_package_path(pkg, package_dir, project_dir)
+ for pkg in packages
+ }
+ if not layout:
+ return set(package_dir) in ({}, {""})
+ parent = os.path.commonpath([_parent_path(k, v) for k, v in layout.items()])
+ return all(
+ _normalize_path(Path(parent, *key.split('.'))) == _normalize_path(value)
+ for key, value in layout.items()
+ )
+
+
+def _parent_path(pkg, pkg_path):
+    """Infer the parent path containing a package, which, if added to ``sys.path``,
+    would allow importing that package.
+ When ``pkg`` is directly mapped into a directory with a different name, return its
+ own path.
+ >>> _parent_path("a", "src/a")
+ 'src'
+ >>> _parent_path("b", "src/c")
+ 'src/c'
+ """
+ parent = pkg_path[:-len(pkg)] if pkg_path.endswith(pkg) else pkg_path
+ return parent.rstrip("/" + os.sep)
+
+
+def _find_packages(dist: Distribution) -> Iterator[str]:
+ yield from iter(dist.packages or [])
+
+ py_modules = dist.py_modules or []
+ nested_modules = [mod for mod in py_modules if "." in mod]
+ if dist.ext_package:
+ yield dist.ext_package
+ else:
+ ext_modules = dist.ext_modules or []
+ nested_modules += [x.name for x in ext_modules if "." in x.name]
+
+ for module in nested_modules:
+ package, _, _ = module.rpartition(".")
+ yield package
+
+
+def _find_top_level_modules(dist: Distribution) -> Iterator[str]:
+ py_modules = dist.py_modules or []
+ yield from (mod for mod in py_modules if "." not in mod)
+
+ if not dist.ext_package:
+ ext_modules = dist.ext_modules or []
+ yield from (x.name for x in ext_modules if "." not in x.name)
+
+
+def _find_package_roots(
+ packages: Iterable[str],
+ package_dir: Mapping[str, str],
+ src_root: _Path,
+) -> Dict[str, str]:
+ pkg_roots: Dict[str, str] = {
+ pkg: _absolute_root(find_package_path(pkg, package_dir, src_root))
+ for pkg in sorted(packages)
+ }
+
+ return _remove_nested(pkg_roots)
+
+
+def _absolute_root(path: _Path) -> str:
+ """Works for packages and top-level modules"""
+ path_ = Path(path)
+ parent = path_.parent
+
+ if path_.exists():
+ return str(path_.resolve())
+ else:
+ return str(parent.resolve() / path_.name)
+
+
+def _find_virtual_namespaces(pkg_roots: Dict[str, str]) -> Iterator[str]:
+ """By carefully designing ``package_dir``, it is possible to implement the logical
+ structure of PEP 420 in a package without the corresponding directories.
+
+    Moreover, a parent package can be purposefully/accidentally skipped in the discovery
+    phase (e.g. ``find_packages(include=["mypkg.*"])``, where ``mypkg.foo`` is included
+    but ``mypkg`` itself is not).
+ We consider this case to also be a virtual namespace (ignoring the original
+ directory) to emulate a non-editable installation.
+
+ This function will try to find these kinds of namespaces.
+ """
+ for pkg in pkg_roots:
+ if "." not in pkg:
+ continue
+ parts = pkg.split(".")
+ for i in range(len(parts) - 1, 0, -1):
+ partial_name = ".".join(parts[:i])
+ path = Path(find_package_path(partial_name, pkg_roots, ""))
+ if not path.exists() or partial_name not in pkg_roots:
+ # partial_name not in pkg_roots ==> purposefully/accidentally skipped
+ yield partial_name
+
+
+def _find_namespaces(
+ packages: List[str], pkg_roots: Dict[str, str]
+) -> Iterator[Tuple[str, List[str]]]:
+ for pkg in packages:
+ path = find_package_path(pkg, pkg_roots, "")
+ if Path(path).exists() and not Path(path, "__init__.py").exists():
+ yield (pkg, [path])
+
+
+def _remove_nested(pkg_roots: Dict[str, str]) -> Dict[str, str]:
+ output = dict(pkg_roots.copy())
+
+ for pkg, path in reversed(list(pkg_roots.items())):
+ if any(
+ pkg != other and _is_nested(pkg, path, other, other_path)
+ for other, other_path in pkg_roots.items()
+ ):
+ output.pop(pkg)
+
+ return output
+
+
+def _is_nested(pkg: str, pkg_path: str, parent: str, parent_path: str) -> bool:
+ """
+ Return ``True`` if ``pkg`` is nested inside ``parent`` both logically and in the
+ file system.
+ >>> _is_nested("a.b", "path/a/b", "a", "path/a")
+ True
+ >>> _is_nested("a.b", "path/a/b", "a", "otherpath/a")
+ False
+ >>> _is_nested("a.b", "path/a/b", "c", "path/c")
+ False
+ >>> _is_nested("a.a", "path/a/a", "a", "path/a")
+ True
+ >>> _is_nested("b.a", "path/b/a", "a", "path/a")
+ False
+ """
+ norm_pkg_path = _normalize_path(pkg_path)
+ rest = pkg.replace(parent, "", 1).strip(".").split(".")
+ return (
+ pkg.startswith(parent)
+ and norm_pkg_path == _normalize_path(Path(parent_path, *rest))
+ )
+
+
+def _normalize_path(filename: _Path) -> str:
+ """Normalize a file/dir name for comparison purposes"""
+ # See pkg_resources.normalize_path
+ file = os.path.abspath(filename) if sys.platform == 'cygwin' else filename
+ return os.path.normcase(os.path.realpath(os.path.normpath(file)))
+
+
+def _empty_dir(dir_: _P) -> _P:
+ """Create a directory ensured to be empty. Existing files may be removed."""
+ shutil.rmtree(dir_, ignore_errors=True)
+ os.makedirs(dir_)
+ return dir_
+
+
+def _make_identifier(name: str) -> str:
+ """Make a string safe to be used as Python identifier.
+ >>> _make_identifier("12abc")
+ '_12abc'
+ >>> _make_identifier("__editable__.myns.pkg-78.9.3_local")
+ '__editable___myns_pkg_78_9_3_local'
+ """
+ safe = re.sub(r'\W|^(?=\d)', '_', name)
+ assert safe.isidentifier()
+ return safe
+
+
+class _NamespaceInstaller(namespaces.Installer):
+ def __init__(self, distribution, installation_dir, editable_name, src_root):
+ self.distribution = distribution
+ self.src_root = src_root
+ self.installation_dir = installation_dir
+ self.editable_name = editable_name
+ self.outputs = []
+ self.dry_run = False
+
+ def _get_target(self):
+ """Installation target."""
+ return os.path.join(self.installation_dir, self.editable_name)
+
+ def _get_root(self):
+ """Where the modules/packages should be loaded from."""
+ return repr(str(self.src_root))
+
+
+_FINDER_TEMPLATE = """\
+import sys
+from importlib.machinery import ModuleSpec
+from importlib.machinery import all_suffixes as module_suffixes
+from importlib.util import spec_from_file_location
+from itertools import chain
+from pathlib import Path
+
+MAPPING = {mapping!r}
+NAMESPACES = {namespaces!r}
+PATH_PLACEHOLDER = {name!r} + ".__path_hook__"
+
+
+class _EditableFinder: # MetaPathFinder
+ @classmethod
+ def find_spec(cls, fullname, path=None, target=None):
+ for pkg, pkg_path in reversed(list(MAPPING.items())):
+ if fullname == pkg or fullname.startswith(f"{{pkg}}."):
+ rest = fullname.replace(pkg, "", 1).strip(".").split(".")
+ return cls._find_spec(fullname, Path(pkg_path, *rest))
+
+ return None
+
+ @classmethod
+ def _find_spec(cls, fullname, candidate_path):
+ init = candidate_path / "__init__.py"
+ candidates = (candidate_path.with_suffix(x) for x in module_suffixes())
+ for candidate in chain([init], candidates):
+ if candidate.exists():
+ return spec_from_file_location(fullname, candidate)
+
+
+class _EditableNamespaceFinder: # PathEntryFinder
+ @classmethod
+ def _path_hook(cls, path):
+ if path == PATH_PLACEHOLDER:
+ return cls
+ raise ImportError
+
+ @classmethod
+ def _paths(cls, fullname):
+ # Ensure __path__ is not empty for the spec to be considered a namespace.
+ return NAMESPACES[fullname] or MAPPING.get(fullname) or [PATH_PLACEHOLDER]
+
+ @classmethod
+ def find_spec(cls, fullname, target=None):
+ if fullname in NAMESPACES:
+ spec = ModuleSpec(fullname, None, is_package=True)
+ spec.submodule_search_locations = cls._paths(fullname)
+ return spec
+ return None
+
+ @classmethod
+ def find_module(cls, fullname):
+ return None
+
+
+def install():
+ if not any(finder == _EditableFinder for finder in sys.meta_path):
+ sys.meta_path.append(_EditableFinder)
+
+ if not NAMESPACES:
+ return
+
+ if not any(hook == _EditableNamespaceFinder._path_hook for hook in sys.path_hooks):
+        # PathEntryFinder is needed to create NamespaceSpec without private APIs
+ sys.path_hooks.append(_EditableNamespaceFinder._path_hook)
+ if PATH_PLACEHOLDER not in sys.path:
+ sys.path.append(PATH_PLACEHOLDER) # Used just to trigger the path hook
+"""
+
+
+def _finder_template(
+ name: str, mapping: Mapping[str, str], namespaces: Dict[str, List[str]]
+) -> str:
+    """Create a string containing the code for the ``MetaPathFinder`` and
+ ``PathEntryFinder``.
+ """
+ mapping = dict(sorted(mapping.items(), key=lambda p: p[0]))
+ return _FINDER_TEMPLATE.format(name=name, mapping=mapping, namespaces=namespaces)
+
+
+class InformationOnly(UserWarning):
+ """Currently there is no clear way of displaying messages to the users
+ that use the setuptools backend directly via ``pip``.
+ The only thing that might work is a warning, although it is not the
+ most appropriate tool for the job...
+ """
+
+
+class LinksNotSupported(errors.FileError):
+ """File system does not seem to support either symlinks or hard links."""
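+# --- Editor's note: illustrative sketch, not part of the original file. ---
+# Assuming a hypothetical editable name "mypkg-1.0", _TopLevelFinder would add
+# two files to the wheel:
+#
+#   name = "__editable__.mypkg-1.0.finder"
+#   _make_identifier(name)  # -> '__editable___mypkg_1_0_finder'
+#
+#   __editable___mypkg_1_0_finder.py   (rendered from _FINDER_TEMPLATE)
+#   __editable__.mypkg-1.0.pth         containing:
+#       import __editable___mypkg_1_0_finder; __editable___mypkg_1_0_finder.install()
+#
+# At interpreter start-up the .pth line is executed, which appends the
+# generated finder to sys.meta_path so imports resolve to the source tree.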
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/egg_info.py b/venv/lib/python3.11/site-packages/setuptools/command/egg_info.py
new file mode 100644
index 0000000..25888ed
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/egg_info.py
@@ -0,0 +1,763 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+from distutils.filelist import FileList as _FileList
+from distutils.errors import DistutilsInternalError
+from distutils.util import convert_path
+from distutils import log
+import distutils.errors
+import distutils.filelist
+import functools
+import os
+import re
+import sys
+import io
+import warnings
+import time
+import collections
+
+from .._importlib import metadata
+from .. import _entry_points
+
+from setuptools import Command
+from setuptools.command.sdist import sdist
+from setuptools.command.sdist import walk_revctrl
+from setuptools.command.setopt import edit_config
+from setuptools.command import bdist_egg
+from pkg_resources import (
+ Requirement, safe_name, parse_version,
+ safe_version, to_filename)
+import setuptools.unicode_utils as unicode_utils
+from setuptools.glob import glob
+
+from setuptools.extern import packaging
+from setuptools.extern.jaraco.text import yield_lines
+from setuptools import SetuptoolsDeprecationWarning
+
+
+def translate_pattern(glob): # noqa: C901 # is too complex (14) # FIXME
+ """
+    Translate a file path glob like '*.txt' into a regular expression.
+    This differs from fnmatch.translate, which allows wildcards to match
+    directory separators. It also knows about '**/', which matches any number of
+ directories.
+ """
+ pat = ''
+
+ # This will split on '/' within [character classes]. This is deliberate.
+ chunks = glob.split(os.path.sep)
+
+ sep = re.escape(os.sep)
+ valid_char = '[^%s]' % (sep,)
+
+ for c, chunk in enumerate(chunks):
+ last_chunk = c == len(chunks) - 1
+
+ # Chunks that are a literal ** are globstars. They match anything.
+ if chunk == '**':
+ if last_chunk:
+ # Match anything if this is the last component
+ pat += '.*'
+ else:
+ # Match '(name/)*'
+ pat += '(?:%s+%s)*' % (valid_char, sep)
+            continue  # The whole path component has been handled; move on
+
+ # Find any special characters in the remainder
+ i = 0
+ chunk_len = len(chunk)
+ while i < chunk_len:
+ char = chunk[i]
+ if char == '*':
+ # Match any number of name characters
+ pat += valid_char + '*'
+ elif char == '?':
+ # Match a name character
+ pat += valid_char
+ elif char == '[':
+ # Character class
+ inner_i = i + 1
+ # Skip initial !/] chars
+ if inner_i < chunk_len and chunk[inner_i] == '!':
+ inner_i = inner_i + 1
+ if inner_i < chunk_len and chunk[inner_i] == ']':
+ inner_i = inner_i + 1
+
+ # Loop till the closing ] is found
+ while inner_i < chunk_len and chunk[inner_i] != ']':
+ inner_i = inner_i + 1
+
+ if inner_i >= chunk_len:
+ # Got to the end of the string without finding a closing ]
+ # Do not treat this as a matching group, but as a literal [
+ pat += re.escape(char)
+ else:
+ # Grab the insides of the [brackets]
+ inner = chunk[i + 1:inner_i]
+ char_class = ''
+
+ # Class negation
+ if inner[0] == '!':
+ char_class = '^'
+ inner = inner[1:]
+
+ char_class += re.escape(inner)
+ pat += '[%s]' % (char_class,)
+
+ # Skip to the end ]
+ i = inner_i
+ else:
+ pat += re.escape(char)
+ i += 1
+
+ # Join each chunk with the dir separator
+ if not last_chunk:
+ pat += sep
+
+ pat += r'\Z'
+ return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
+
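+# --- Editor's note: illustrative sketch, not part of the original file. ---
+# Expected behaviour of translate_pattern(), assuming os.sep == '/':
+#
+#   translate_pattern('*.txt').match('notes.txt')                    # matches
+#   translate_pattern('*.txt').match('docs/notes.txt')               # None: '*' stops at '/'
+#   translate_pattern('docs/**/*.txt').match('docs/a/b/notes.txt')   # matches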
+
+class InfoCommon:
+ tag_build = None
+ tag_date = None
+
+ @property
+ def name(self):
+ return safe_name(self.distribution.get_name())
+
+ def tagged_version(self):
+ return safe_version(self._maybe_tag(self.distribution.get_version()))
+
+ def _maybe_tag(self, version):
+ """
+ egg_info may be called more than once for a distribution,
+ in which case the version string already contains all tags.
+ """
+ return (
+ version if self.vtags and self._already_tagged(version)
+ else version + self.vtags
+ )
+
+ def _already_tagged(self, version: str) -> bool:
+ # Depending on their format, tags may change with version normalization.
+        # So in addition to the regular tags, we also have to search for the normalized ones.
+ return version.endswith(self.vtags) or version.endswith(self._safe_tags())
+
+ def _safe_tags(self) -> str:
+        # To implement this we can rely on `safe_version`: normalize "0" + tags (a fake
+        # version 0 followed by the tags), then simply discard the leading 0 (the fake version number).
+ return safe_version(f"0{self.vtags}")[1:]
+
+ def tags(self) -> str:
+ version = ''
+ if self.tag_build:
+ version += self.tag_build
+ if self.tag_date:
+ version += time.strftime("-%Y%m%d")
+ return version
+ vtags = property(tags)
+
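+# --- Editor's note: illustrative sketch, not part of the original file. ---
+# Assuming tag_build = ".dev1", tag_date = 0 and a base version of "1.0":
+#
+#   self.vtags             # -> ".dev1"
+#   self.tagged_version()  # -> safe_version("1.0.dev1") == "1.0.dev1"
+#
+# With tag_date set, time.strftime("-%Y%m%d") is appended to the tag as well,
+# and safe_version() normalizes the combined string as far as it can.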
+
+class egg_info(InfoCommon, Command):
+ description = "create a distribution's .egg-info directory"
+
+ user_options = [
+ ('egg-base=', 'e', "directory containing .egg-info directories"
+ " (default: top of the source tree)"),
+ ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+ ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+ ('no-date', 'D', "Don't include date stamp [default]"),
+ ]
+
+ boolean_options = ['tag-date']
+ negative_opt = {
+ 'no-date': 'tag-date',
+ }
+
+ def initialize_options(self):
+ self.egg_base = None
+ self.egg_name = None
+ self.egg_info = None
+ self.egg_version = None
+ self.broken_egg_info = False
+ self.ignore_egg_info_in_manifest = False
+
+ ####################################
+ # allow the 'tag_svn_revision' to be detected and
+ # set, supporting sdists built on older Setuptools.
+ @property
+ def tag_svn_revision(self):
+ pass
+
+ @tag_svn_revision.setter
+ def tag_svn_revision(self, value):
+ pass
+ ####################################
+
+ def save_version_info(self, filename):
+ """
+ Materialize the value of date into the
+ build tag. Install build keys in a deterministic order
+ to avoid arbitrary reordering on subsequent builds.
+ """
+ egg_info = collections.OrderedDict()
+ # follow the order these keys would have been added
+ # when PYTHONHASHSEED=0
+ egg_info['tag_build'] = self.tags()
+ egg_info['tag_date'] = 0
+ edit_config(filename, dict(egg_info=egg_info))
+
+ def finalize_options(self):
+ # Note: we need to capture the current value returned
+ # by `self.tagged_version()`, so we can later update
+ # `self.distribution.metadata.version` without
+ # repercussions.
+ self.egg_name = self.name
+ self.egg_version = self.tagged_version()
+ parsed_version = parse_version(self.egg_version)
+
+ try:
+ is_version = isinstance(parsed_version, packaging.version.Version)
+ spec = "%s==%s" if is_version else "%s===%s"
+ Requirement(spec % (self.egg_name, self.egg_version))
+ except ValueError as e:
+ raise distutils.errors.DistutilsOptionError(
+ "Invalid distribution name or version syntax: %s-%s" %
+ (self.egg_name, self.egg_version)
+ ) from e
+
+ if self.egg_base is None:
+ dirs = self.distribution.package_dir
+ self.egg_base = (dirs or {}).get('', os.curdir)
+
+ self.ensure_dirname('egg_base')
+ self.egg_info = to_filename(self.egg_name) + '.egg-info'
+ if self.egg_base != os.curdir:
+ self.egg_info = os.path.join(self.egg_base, self.egg_info)
+ if '-' in self.egg_name:
+ self.check_broken_egg_info()
+
+ # Set package version for the benefit of dumber commands
+ # (e.g. sdist, bdist_wininst, etc.)
+ #
+ self.distribution.metadata.version = self.egg_version
+
+ # If we bootstrapped around the lack of a PKG-INFO, as might be the
+ # case in a fresh checkout, make sure that any special tags get added
+ # to the version info
+ #
+ pd = self.distribution._patched_dist
+ if pd is not None and pd.key == self.egg_name.lower():
+ pd._version = self.egg_version
+ pd._parsed_version = parse_version(self.egg_version)
+ self.distribution._patched_dist = None
+
+ def write_or_delete_file(self, what, filename, data, force=False):
+ """Write `data` to `filename` or delete if empty
+
+ If `data` is non-empty, this routine is the same as ``write_file()``.
+ If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
+ unless `filename` exists, in which case a warning is issued about the
+ orphaned file (if `force` is false), or deleted (if `force` is true).
+ """
+ if data:
+ self.write_file(what, filename, data)
+ elif os.path.exists(filename):
+ if data is None and not force:
+ log.warn(
+ "%s not set in setup(), but %s exists", what, filename
+ )
+ return
+ else:
+ self.delete_file(filename)
+
+ def write_file(self, what, filename, data):
+ """Write `data` to `filename` (if not a dry run) after announcing it
+
+ `what` is used in a log message to identify what is being written
+ to the file.
+ """
+ log.info("writing %s to %s", what, filename)
+ data = data.encode("utf-8")
+ if not self.dry_run:
+ f = open(filename, 'wb')
+ f.write(data)
+ f.close()
+
+ def delete_file(self, filename):
+ """Delete `filename` (if not a dry run) after announcing it"""
+ log.info("deleting %s", filename)
+ if not self.dry_run:
+ os.unlink(filename)
+
+ def run(self):
+ self.mkpath(self.egg_info)
+ os.utime(self.egg_info, None)
+ for ep in metadata.entry_points(group='egg_info.writers'):
+ writer = ep.load()
+ writer(self, ep.name, os.path.join(self.egg_info, ep.name))
+
+ # Get rid of native_libs.txt if it was put there by older bdist_egg
+ nl = os.path.join(self.egg_info, "native_libs.txt")
+ if os.path.exists(nl):
+ self.delete_file(nl)
+
+ self.find_sources()
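+    # --- Editor's note: illustrative sketch, not part of the original file. ---
+    # run() dispatches to plugins registered in the 'egg_info.writers' entry-point
+    # group; a hypothetical third-party writer would follow the same signature as
+    # the write_* helpers at the bottom of this module:
+    #
+    #   def write_build_stamp(cmd, basename, filename):
+    #       cmd.write_or_delete_file("build stamp", filename, "built by mypkg\n")
+    #
+    #   # registered e.g. in the plugin's setup.cfg:
+    #   #   [options.entry_points]
+    #   #   egg_info.writers =
+    #   #       build_stamp.txt = mypkg.writers:write_build_stamp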
+
+ def find_sources(self):
+ """Generate SOURCES.txt manifest file"""
+ manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+ mm = manifest_maker(self.distribution)
+ mm.ignore_egg_info_dir = self.ignore_egg_info_in_manifest
+ mm.manifest = manifest_filename
+ mm.run()
+ self.filelist = mm.filelist
+
+ def check_broken_egg_info(self):
+ bei = self.egg_name + '.egg-info'
+ if self.egg_base != os.curdir:
+ bei = os.path.join(self.egg_base, bei)
+ if os.path.exists(bei):
+ log.warn(
+ "-" * 78 + '\n'
+ "Note: Your current .egg-info directory has a '-' in its name;"
+ '\nthis will not work correctly with "setup.py develop".\n\n'
+ 'Please rename %s to %s to correct this problem.\n' + '-' * 78,
+ bei, self.egg_info
+ )
+ self.broken_egg_info = self.egg_info
+ self.egg_info = bei # make it work for now
+
+
+class FileList(_FileList):
+ # Implementations of the various MANIFEST.in commands
+
+ def __init__(self, warn=None, debug_print=None, ignore_egg_info_dir=False):
+ super().__init__(warn, debug_print)
+ self.ignore_egg_info_dir = ignore_egg_info_dir
+
+ def process_template_line(self, line):
+ # Parse the line: split it up, make sure the right number of words
+ # is there, and return the relevant words. 'action' is always
+ # defined: it's the first word of the line. Which of the other
+ # three are defined depends on the action; it'll be either
+ # patterns, (dir and patterns), or (dir_pattern).
+ (action, patterns, dir, dir_pattern) = self._parse_template_line(line)
+
+ action_map = {
+ 'include': self.include,
+ 'exclude': self.exclude,
+ 'global-include': self.global_include,
+ 'global-exclude': self.global_exclude,
+ 'recursive-include': functools.partial(
+ self.recursive_include, dir,
+ ),
+ 'recursive-exclude': functools.partial(
+ self.recursive_exclude, dir,
+ ),
+ 'graft': self.graft,
+ 'prune': self.prune,
+ }
+ log_map = {
+ 'include': "warning: no files found matching '%s'",
+ 'exclude': (
+ "warning: no previously-included files found "
+ "matching '%s'"
+ ),
+ 'global-include': (
+ "warning: no files found matching '%s' "
+ "anywhere in distribution"
+ ),
+ 'global-exclude': (
+ "warning: no previously-included files matching "
+ "'%s' found anywhere in distribution"
+ ),
+ 'recursive-include': (
+ "warning: no files found matching '%s' "
+ "under directory '%s'"
+ ),
+ 'recursive-exclude': (
+ "warning: no previously-included files matching "
+ "'%s' found under directory '%s'"
+ ),
+ 'graft': "warning: no directories found matching '%s'",
+ 'prune': "no previously-included directories found matching '%s'",
+ }
+
+ try:
+ process_action = action_map[action]
+ except KeyError:
+ raise DistutilsInternalError(
+ "this cannot happen: invalid action '{action!s}'".
+ format(action=action),
+ )
+
+ # OK, now we know that the action is valid and we have the
+ # right number of words on the line for that action -- so we
+ # can proceed with minimal error-checking.
+
+ action_is_recursive = action.startswith('recursive-')
+ if action in {'graft', 'prune'}:
+ patterns = [dir_pattern]
+ extra_log_args = (dir, ) if action_is_recursive else ()
+ log_tmpl = log_map[action]
+
+ self.debug_print(
+ ' '.join(
+ [action] +
+ ([dir] if action_is_recursive else []) +
+ patterns,
+ )
+ )
+ for pattern in patterns:
+ if not process_action(pattern):
+ log.warn(log_tmpl, pattern, *extra_log_args)
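+    # --- Editor's note: illustrative sketch, not part of the original file. ---
+    # How typical MANIFEST.in lines map onto the methods below:
+    #
+    #   include README.rst            -> self.include('README.rst')
+    #   recursive-include docs *.rst  -> self.recursive_include('docs', '*.rst')
+    #   graft examples                -> self.graft('examples')
+    #   global-exclude *.py[cod]      -> self.global_exclude('*.py[cod]')
+    #   prune build                   -> self.prune('build')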
+
+ def _remove_files(self, predicate):
+ """
+ Remove all files from the file list that match the predicate.
+ Return True if any matching files were removed
+ """
+ found = False
+ for i in range(len(self.files) - 1, -1, -1):
+ if predicate(self.files[i]):
+ self.debug_print(" removing " + self.files[i])
+ del self.files[i]
+ found = True
+ return found
+
+ def include(self, pattern):
+ """Include files that match 'pattern'."""
+ found = [f for f in glob(pattern) if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def exclude(self, pattern):
+ """Exclude files that match 'pattern'."""
+ match = translate_pattern(pattern)
+ return self._remove_files(match.match)
+
+ def recursive_include(self, dir, pattern):
+ """
+ Include all files anywhere in 'dir/' that match the pattern.
+ """
+ full_pattern = os.path.join(dir, '**', pattern)
+ found = [f for f in glob(full_pattern, recursive=True)
+ if not os.path.isdir(f)]
+ self.extend(found)
+ return bool(found)
+
+ def recursive_exclude(self, dir, pattern):
+ """
+        Exclude any files anywhere in 'dir/' that match the pattern.
+ """
+ match = translate_pattern(os.path.join(dir, '**', pattern))
+ return self._remove_files(match.match)
+
+ def graft(self, dir):
+ """Include all files from 'dir/'."""
+ found = [
+ item
+ for match_dir in glob(dir)
+ for item in distutils.filelist.findall(match_dir)
+ ]
+ self.extend(found)
+ return bool(found)
+
+ def prune(self, dir):
+ """Filter out files from 'dir/'."""
+ match = translate_pattern(os.path.join(dir, '**'))
+ return self._remove_files(match.match)
+
+ def global_include(self, pattern):
+ """
+ Include all files anywhere in the current directory that match the
+ pattern. This is very inefficient on large file trees.
+ """
+ if self.allfiles is None:
+ self.findall()
+ match = translate_pattern(os.path.join('**', pattern))
+ found = [f for f in self.allfiles if match.match(f)]
+ self.extend(found)
+ return bool(found)
+
+ def global_exclude(self, pattern):
+ """
+ Exclude all files anywhere that match the pattern.
+ """
+ match = translate_pattern(os.path.join('**', pattern))
+ return self._remove_files(match.match)
+
+ def append(self, item):
+ if item.endswith('\r'): # Fix older sdists built on Windows
+ item = item[:-1]
+ path = convert_path(item)
+
+ if self._safe_path(path):
+ self.files.append(path)
+
+ def extend(self, paths):
+ self.files.extend(filter(self._safe_path, paths))
+
+ def _repair(self):
+ """
+ Replace self.files with only safe paths
+
+ Because some owners of FileList manipulate the underlying
+ ``files`` attribute directly, this method must be called to
+ repair those paths.
+ """
+ self.files = list(filter(self._safe_path, self.files))
+
+ def _safe_path(self, path):
+ enc_warn = "'%s' not %s encodable -- skipping"
+
+        # To avoid accidental transcoding errors, decode to unicode first
+ u_path = unicode_utils.filesys_decode(path)
+ if u_path is None:
+ log.warn("'%s' in unexpected encoding -- skipping" % path)
+ return False
+
+ # Must ensure utf-8 encodability
+ utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+ if utf8_path is None:
+ log.warn(enc_warn, path, 'utf-8')
+ return False
+
+ try:
+ # ignore egg-info paths
+ is_egg_info = ".egg-info" in u_path or b".egg-info" in utf8_path
+ if self.ignore_egg_info_dir and is_egg_info:
+ return False
+            # accept if either way checks out
+ if os.path.exists(u_path) or os.path.exists(utf8_path):
+ return True
+ # this will catch any encode errors decoding u_path
+ except UnicodeEncodeError:
+ log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
+class manifest_maker(sdist):
+ template = "MANIFEST.in"
+
+ def initialize_options(self):
+ self.use_defaults = 1
+ self.prune = 1
+ self.manifest_only = 1
+ self.force_manifest = 1
+ self.ignore_egg_info_dir = False
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ self.filelist = FileList(ignore_egg_info_dir=self.ignore_egg_info_dir)
+ if not os.path.exists(self.manifest):
+ self.write_manifest() # it must exist so it'll get in the list
+ self.add_defaults()
+ if os.path.exists(self.template):
+ self.read_template()
+ self.add_license_files()
+ self.prune_file_list()
+ self.filelist.sort()
+ self.filelist.remove_duplicates()
+ self.write_manifest()
+
+ def _manifest_normalize(self, path):
+ path = unicode_utils.filesys_decode(path)
+ return path.replace(os.sep, '/')
+
+ def write_manifest(self):
+ """
+ Write the file list in 'self.filelist' to the manifest file
+ named by 'self.manifest'.
+ """
+ self.filelist._repair()
+
+        # _repair() has ensured encodability; normalize paths to unicode with '/' separators
+ files = [self._manifest_normalize(f) for f in self.filelist.files]
+ msg = "writing manifest file '%s'" % self.manifest
+ self.execute(write_file, (self.manifest, files), msg)
+
+ def warn(self, msg):
+ if not self._should_suppress_warning(msg):
+ sdist.warn(self, msg)
+
+ @staticmethod
+ def _should_suppress_warning(msg):
+ """
+ suppress missing-file warnings from sdist
+ """
+ return re.match(r"standard file .*not found", msg)
+
+ def add_defaults(self):
+ sdist.add_defaults(self)
+ self.filelist.append(self.template)
+ self.filelist.append(self.manifest)
+ rcfiles = list(walk_revctrl())
+ if rcfiles:
+ self.filelist.extend(rcfiles)
+ elif os.path.exists(self.manifest):
+ self.read_manifest()
+
+ if os.path.exists("setup.py"):
+ # setup.py should be included by default, even if it's not
+ # the script called to create the sdist
+ self.filelist.append("setup.py")
+
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist.graft(ei_cmd.egg_info)
+
+ def add_license_files(self):
+ license_files = self.distribution.metadata.license_files or []
+ for lf in license_files:
+ log.info("adding license file '%s'", lf)
+ self.filelist.extend(license_files)
+
+ def prune_file_list(self):
+ build = self.get_finalized_command('build')
+ base_dir = self.distribution.get_fullname()
+ self.filelist.prune(build.build_base)
+ self.filelist.prune(base_dir)
+ sep = re.escape(os.sep)
+ self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+ is_regex=1)
+
+ def _safe_data_files(self, build_py):
+ """
+ The parent class implementation of this method
+ (``sdist``) will try to include data files, which
+ might cause recursion problems when
+ ``include_package_data=True``.
+
+ Therefore, avoid triggering any attempt of
+ analyzing/building the manifest again.
+ """
+ if hasattr(build_py, 'get_data_files_without_manifest'):
+ return build_py.get_data_files_without_manifest()
+
+ warnings.warn(
+ "Custom 'build_py' does not implement "
+ "'get_data_files_without_manifest'.\nPlease extend command classes"
+ " from setuptools instead of distutils.",
+ SetuptoolsDeprecationWarning
+ )
+ return build_py.get_data_files()
+
+
+def write_file(filename, contents):
+ """Create a file with the specified name and write 'contents' (a
+ sequence of strings without line terminators) to it.
+ """
+ contents = "\n".join(contents)
+
+    # assuming the contents have been vetted for utf-8 encoding
+ contents = contents.encode("utf-8")
+
+ with open(filename, "wb") as f: # always write POSIX-style manifest
+ f.write(contents)
+
+
+def write_pkg_info(cmd, basename, filename):
+ log.info("writing %s", filename)
+ if not cmd.dry_run:
+ metadata = cmd.distribution.metadata
+ metadata.version, oldver = cmd.egg_version, metadata.version
+ metadata.name, oldname = cmd.egg_name, metadata.name
+
+ try:
+ # write unescaped data to PKG-INFO, so older pkg_resources
+ # can still parse it
+ metadata.write_pkg_info(cmd.egg_info)
+ finally:
+ metadata.name, metadata.version = oldname, oldver
+
+ safe = getattr(cmd.distribution, 'zip_safe', None)
+
+ bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+
+def warn_depends_obsolete(cmd, basename, filename):
+ if os.path.exists(filename):
+ log.warn(
+ "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+ "Use the install_requires/extras_require setup() args instead."
+ )
+
+
+def _write_requirements(stream, reqs):
+ lines = yield_lines(reqs or ())
+
+ def append_cr(line):
+ return line + '\n'
+ lines = map(append_cr, lines)
+ stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+ dist = cmd.distribution
+ data = io.StringIO()
+ _write_requirements(data, dist.install_requires)
+ extras_require = dist.extras_require or {}
+ for extra in sorted(extras_require):
+ data.write('\n[{extra}]\n'.format(**vars()))
+ _write_requirements(data, extras_require[extra])
+ cmd.write_or_delete_file("requirements", filename, data.getvalue())
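+# --- Editor's note: illustrative sketch, not part of the original file. ---
+# Assuming install_requires=["requests"] and extras_require={"dev": ["pytest"]},
+# the requirements file written above (conventionally requires.txt) would read:
+#
+#   requests
+#
+#   [dev]
+#   pytest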
+
+
+def write_setup_requirements(cmd, basename, filename):
+ data = io.StringIO()
+ _write_requirements(data, cmd.distribution.setup_requires)
+ cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+ pkgs = dict.fromkeys(
+ [
+ k.split('.', 1)[0]
+ for k in cmd.distribution.iter_distribution_names()
+ ]
+ )
+ cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
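+# --- Editor's note: illustrative sketch, not part of the original file. ---
+# For a distribution providing packages "mypkg" and "mypkg.sub" plus a module
+# "helper", the file written above (conventionally top_level.txt) would contain:
+#
+#   helper
+#   mypkg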
+
+
+def overwrite_arg(cmd, basename, filename):
+ write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+ argname = os.path.splitext(basename)[0]
+ value = getattr(cmd.distribution, argname, None)
+ if value is not None:
+ value = '\n'.join(value) + '\n'
+ cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+ eps = _entry_points.load(cmd.distribution.entry_points)
+ defn = _entry_points.render(eps)
+ cmd.write_or_delete_file('entry points', filename, defn, True)
+
+
+def get_pkg_info_revision():
+ """
+ Get a -r### off of PKG-INFO Version in case this is an sdist of
+ a subversion revision.
+ """
+ warnings.warn(
+ "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
+ if os.path.exists('PKG-INFO'):
+ with io.open('PKG-INFO') as f:
+ for line in f:
+ match = re.match(r"Version:.*-r(\d+)\s*$", line)
+ if match:
+ return int(match.group(1))
+ return 0
+
+
+class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
+ """Deprecated behavior warning for EggInfo, bypassing suppression."""
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/install.py b/venv/lib/python3.11/site-packages/setuptools/command/install.py
new file mode 100644
index 0000000..55fdb12
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/install.py
@@ -0,0 +1,139 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relied on the '_install' name, so provide it for
+# now. See https://github.com/pypa/setuptools/issues/199/
+_install = orig.install
+
+
+class install(orig.install):
+ """Use easy_install to install the package, w/dependencies"""
+
+ user_options = orig.install.user_options + [
+ ('old-and-unmanageable', None, "Try not to use this!"),
+ ('single-version-externally-managed', None,
+ "used by system package builders to create 'flat' eggs"),
+ ]
+ boolean_options = orig.install.boolean_options + [
+ 'old-and-unmanageable', 'single-version-externally-managed',
+ ]
+ new_commands = [
+ ('install_egg_info', lambda self: True),
+ ('install_scripts', lambda self: True),
+ ]
+ _nc = dict(new_commands)
+
+ def initialize_options(self):
+
+ warnings.warn(
+ "setup.py install is deprecated. "
+ "Use build and pip and other standards-based tools.",
+ setuptools.SetuptoolsDeprecationWarning,
+ )
+
+ orig.install.initialize_options(self)
+ self.old_and_unmanageable = None
+ self.single_version_externally_managed = None
+
+ def finalize_options(self):
+ orig.install.finalize_options(self)
+ if self.root:
+ self.single_version_externally_managed = True
+ elif self.single_version_externally_managed:
+ if not self.root and not self.record:
+ raise DistutilsArgError(
+ "You must specify --record or --root when building system"
+ " packages"
+ )
+
+ def handle_extra_path(self):
+ if self.root or self.single_version_externally_managed:
+ # explicit backward-compatibility mode, allow extra_path to work
+ return orig.install.handle_extra_path(self)
+
+ # Ignore extra_path when installing an egg (or being run by another
+        # command without --root or --single-version-externally-managed)
+ self.path_file = None
+ self.extra_dirs = ''
+
+ def run(self):
+ # Explicit request for old-style install? Just do it
+ if self.old_and_unmanageable or self.single_version_externally_managed:
+ return orig.install.run(self)
+
+ if not self._called_from_setup(inspect.currentframe()):
+ # Run in backward-compatibility mode to support bdist_* commands.
+ orig.install.run(self)
+ else:
+ self.do_egg_install()
+
+ @staticmethod
+ def _called_from_setup(run_frame):
+ """
+ Attempt to detect whether run() was called from setup() or by another
+ command. If called by setup(), the parent caller will be the
+ 'run_command' method in 'distutils.dist', and *its* caller will be
+ the 'run_commands' method. If called any other way, the
+ immediate caller *might* be 'run_command', but it won't have been
+        called by 'run_commands'. Return True when called from setup() or when the
+        call stack is unavailable. Return False otherwise.
+ """
+ if run_frame is None:
+ msg = "Call stack not available. bdist_* commands may fail."
+ warnings.warn(msg)
+ if platform.python_implementation() == 'IronPython':
+ msg = "For best results, pass -X:Frames to enable call stack."
+ warnings.warn(msg)
+ return True
+
+ frames = inspect.getouterframes(run_frame)
+ for frame in frames[2:4]:
+ caller, = frame[:1]
+ info = inspect.getframeinfo(caller)
+ caller_module = caller.f_globals.get('__name__', '')
+
+ if caller_module == "setuptools.dist" and info.function == "run_command":
+ # Starting from v61.0.0 setuptools overwrites dist.run_command
+ continue
+
+ return (
+ caller_module == 'distutils.dist'
+ and info.function == 'run_commands'
+ )
+
+ def do_egg_install(self):
+
+ easy_install = self.distribution.get_command_class('easy_install')
+
+ cmd = easy_install(
+ self.distribution, args="x", root=self.root, record=self.record,
+ )
+ cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
+ cmd.always_copy_from = '.' # make sure local-dir eggs get installed
+
+ # pick up setup-dir .egg files only: no .egg-info
+ cmd.package_index.scan(glob.glob('*.egg'))
+
+ self.run_command('bdist_egg')
+ args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+ if setuptools.bootstrap_install_from:
+ # Bootstrap self-installation of setuptools
+ args.insert(0, setuptools.bootstrap_install_from)
+
+ cmd.args = args
+ cmd.run(show_deprecation=False)
+ setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+ [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
+ install.new_commands
+)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/install_egg_info.py b/venv/lib/python3.11/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000..65ede40
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,63 @@
+from distutils import log, dir_util
+import os
+
+from setuptools import Command
+from setuptools import namespaces
+from setuptools.archive_util import unpack_archive
+from .._path import ensure_directory
+import pkg_resources
+
+
+class install_egg_info(namespaces.Installer, Command):
+ """Install an .egg-info directory for the package"""
+
+ description = "Install an .egg-info directory for the package"
+
+ user_options = [
+ ('install-dir=', 'd', "directory to install to"),
+ ]
+
+ def initialize_options(self):
+ self.install_dir = None
+
+ def finalize_options(self):
+ self.set_undefined_options('install_lib',
+ ('install_dir', 'install_dir'))
+ ei_cmd = self.get_finalized_command("egg_info")
+ basename = pkg_resources.Distribution(
+ None, None, ei_cmd.egg_name, ei_cmd.egg_version
+ ).egg_name() + '.egg-info'
+ self.source = ei_cmd.egg_info
+ self.target = os.path.join(self.install_dir, basename)
+ self.outputs = []
+
+ def run(self):
+ self.run_command('egg_info')
+ if os.path.isdir(self.target) and not os.path.islink(self.target):
+ dir_util.remove_tree(self.target, dry_run=self.dry_run)
+ elif os.path.exists(self.target):
+ self.execute(os.unlink, (self.target,), "Removing " + self.target)
+ if not self.dry_run:
+ ensure_directory(self.target)
+ self.execute(
+ self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+ )
+ self.install_namespaces()
+
+ def get_outputs(self):
+ return self.outputs
+
+ def copytree(self):
+ # Copy the .egg-info tree to site-packages
+ def skimmer(src, dst):
+ # filter out source-control directories; note that 'src' is always
+ # a '/'-separated path, regardless of platform. 'dst' is a
+ # platform-specific path.
+ for skip in '.svn/', 'CVS/':
+ if src.startswith(skip) or '/' + skip in src:
+ return None
+ self.outputs.append(dst)
+ log.debug("Copying %s to %s", src, dst)
+ return dst
+
+ unpack_archive(self.source, self.target, skimmer)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/install_lib.py b/venv/lib/python3.11/site-packages/setuptools/command/install_lib.py
new file mode 100644
index 0000000..2e9d875
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/install_lib.py
@@ -0,0 +1,122 @@
+import os
+import sys
+from itertools import product, starmap
+import distutils.command.install_lib as orig
+
+
+class install_lib(orig.install_lib):
+ """Don't add compiled flags to filenames of non-Python files"""
+
+ def run(self):
+ self.build()
+ outfiles = self.install()
+ if outfiles is not None:
+ # always compile, in case we have any extension stubs to deal with
+ self.byte_compile(outfiles)
+
+ def get_exclusions(self):
+ """
+ Return a collections.Sized collections.Container of paths to be
+ excluded for single_version_externally_managed installations.
+ """
+ all_packages = (
+ pkg
+ for ns_pkg in self._get_SVEM_NSPs()
+ for pkg in self._all_packages(ns_pkg)
+ )
+
+ excl_specs = product(all_packages, self._gen_exclusion_paths())
+ return set(starmap(self._exclude_pkg_path, excl_specs))
+
+ def _exclude_pkg_path(self, pkg, exclusion_path):
+ """
+ Given a package name and exclusion path within that package,
+ compute the full exclusion path.
+ """
+ parts = pkg.split('.') + [exclusion_path]
+ return os.path.join(self.install_dir, *parts)
+
+ @staticmethod
+ def _all_packages(pkg_name):
+ """
+ >>> list(install_lib._all_packages('foo.bar.baz'))
+ ['foo.bar.baz', 'foo.bar', 'foo']
+ """
+ while pkg_name:
+ yield pkg_name
+ pkg_name, sep, child = pkg_name.rpartition('.')
+
+ def _get_SVEM_NSPs(self):
+ """
+ Get namespace packages (list) but only for
+ single_version_externally_managed installations and empty otherwise.
+ """
+ # TODO: is it necessary to short-circuit here? i.e. what's the cost
+ # if get_finalized_command is called even when namespace_packages is
+ # False?
+ if not self.distribution.namespace_packages:
+ return []
+
+ install_cmd = self.get_finalized_command('install')
+ svem = install_cmd.single_version_externally_managed
+
+ return self.distribution.namespace_packages if svem else []
+
+ @staticmethod
+ def _gen_exclusion_paths():
+ """
+ Generate file paths to be excluded for namespace packages (bytecode
+ cache files).
+ """
+ # always exclude the package module itself
+ yield '__init__.py'
+
+ yield '__init__.pyc'
+ yield '__init__.pyo'
+
+ if not hasattr(sys, 'implementation'):
+ return
+
+ base = os.path.join(
+ '__pycache__', '__init__.' + sys.implementation.cache_tag)
+ yield base + '.pyc'
+ yield base + '.pyo'
+ yield base + '.opt-1.pyc'
+ yield base + '.opt-2.pyc'
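+    # --- Editor's note: illustrative sketch, not part of the original file. ---
+    # On CPython 3.11 (cache_tag "cpython-311") with a POSIX path separator,
+    # _gen_exclusion_paths() yields:
+    #
+    #   __init__.py
+    #   __init__.pyc
+    #   __init__.pyo
+    #   __pycache__/__init__.cpython-311.pyc
+    #   __pycache__/__init__.cpython-311.pyo
+    #   __pycache__/__init__.cpython-311.opt-1.pyc
+    #   __pycache__/__init__.cpython-311.opt-2.pyc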
+
+ def copy_tree(
+ self, infile, outfile,
+ preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+ ):
+ assert preserve_mode and preserve_times and not preserve_symlinks
+ exclude = self.get_exclusions()
+
+ if not exclude:
+ return orig.install_lib.copy_tree(self, infile, outfile)
+
+ # Exclude namespace package __init__.py* files from the output
+
+ from setuptools.archive_util import unpack_directory
+ from distutils import log
+
+ outfiles = []
+
+ def pf(src, dst):
+ if dst in exclude:
+ log.warn("Skipping installation of %s (namespace package)",
+ dst)
+ return False
+
+ log.info("copying %s -> %s", src, os.path.dirname(dst))
+ outfiles.append(dst)
+ return dst
+
+ unpack_directory(infile, outfile, pf)
+ return outfiles
+
+ def get_outputs(self):
+ outputs = orig.install_lib.get_outputs(self)
+ exclude = self.get_exclusions()
+ if exclude:
+ return [f for f in outputs if f not in exclude]
+ return outputs
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/install_scripts.py b/venv/lib/python3.11/site-packages/setuptools/command/install_scripts.py
new file mode 100644
index 0000000..aeb0e42
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/install_scripts.py
@@ -0,0 +1,70 @@
+from distutils import log
+import distutils.command.install_scripts as orig
+from distutils.errors import DistutilsModuleError
+import os
+import sys
+
+from pkg_resources import Distribution, PathMetadata
+from .._path import ensure_directory
+
+
+class install_scripts(orig.install_scripts):
+ """Do normal script install, plus any egg_info wrapper scripts"""
+
+ def initialize_options(self):
+ orig.install_scripts.initialize_options(self)
+ self.no_ep = False
+
+ def run(self):
+ import setuptools.command.easy_install as ei
+
+ self.run_command("egg_info")
+ if self.distribution.scripts:
+ orig.install_scripts.run(self) # run first to set up self.outfiles
+ else:
+ self.outfiles = []
+ if self.no_ep:
+ # don't install entry point scripts into .egg file!
+ return
+
+ ei_cmd = self.get_finalized_command("egg_info")
+ dist = Distribution(
+ ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+ ei_cmd.egg_name, ei_cmd.egg_version,
+ )
+ bs_cmd = self.get_finalized_command('build_scripts')
+ exec_param = getattr(bs_cmd, 'executable', None)
+ try:
+ bw_cmd = self.get_finalized_command("bdist_wininst")
+ is_wininst = getattr(bw_cmd, '_is_running', False)
+ except (ImportError, DistutilsModuleError):
+ is_wininst = False
+ writer = ei.ScriptWriter
+ if is_wininst:
+ exec_param = "python.exe"
+ writer = ei.WindowsScriptWriter
+ if exec_param == sys.executable:
+ # In case the path to the Python executable contains a space, wrap
+ # it so it's not split up.
+ exec_param = [exec_param]
+ # resolve the writer to the environment
+ writer = writer.best()
+ cmd = writer.command_spec_class.best().from_param(exec_param)
+ for args in writer.get_args(dist, cmd.as_header()):
+ self.write_script(*args)
+
+ def write_script(self, script_name, contents, mode="t", *ignored):
+ """Write an executable file to the scripts directory"""
+ from setuptools.command.easy_install import chmod, current_umask
+
+ log.info("Installing %s script to %s", script_name, self.install_dir)
+ target = os.path.join(self.install_dir, script_name)
+ self.outfiles.append(target)
+
+ mask = current_umask()
+ if not self.dry_run:
+ ensure_directory(target)
+ f = open(target, "w" + mode)
+ f.write(contents)
+ f.close()
+ chmod(target, 0o777 - mask)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/launcher manifest.xml b/venv/lib/python3.11/site-packages/setuptools/command/launcher manifest.xml
new file mode 100644
index 0000000..5972a96
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/launcher manifest.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+ <assemblyIdentity version="1.0.0.0"
+ processorArchitecture="X86"
+ name="%(name)s"
+ type="win32"/>
+ <!-- Identify the application security requirements. -->
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
+ <security>
+ <requestedPrivileges>
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
+ </requestedPrivileges>
+ </security>
+ </trustInfo>
+</assembly>
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/py36compat.py b/venv/lib/python3.11/site-packages/setuptools/command/py36compat.py
new file mode 100644
index 0000000..343547a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/py36compat.py
@@ -0,0 +1,134 @@
+import os
+from glob import glob
+from distutils.util import convert_path
+from distutils.command import sdist
+
+
+class sdist_add_defaults:
+ """
+ Mix-in providing forward-compatibility for functionality as found in
+ distutils on Python 3.7.
+
+ Do not edit the code in this class except to update functionality
+ as implemented in distutils. Instead, override in the subclass.
+ """
+
+ def add_defaults(self):
+ """Add all the default files to self.filelist:
+ - README or README.txt
+ - setup.py
+ - test/test*.py
+ - all pure Python modules mentioned in setup script
+ - all files pointed by package_data (build_py)
+ - all files defined in data_files.
+ - all files defined as scripts.
+ - all C sources listed as part of extensions or C libraries
+ in the setup script (doesn't catch C headers!)
+ Warns if (README or README.txt) or setup.py are missing; everything
+ else is optional.
+ """
+ self._add_defaults_standards()
+ self._add_defaults_optional()
+ self._add_defaults_python()
+ self._add_defaults_data_files()
+ self._add_defaults_ext()
+ self._add_defaults_c_libs()
+ self._add_defaults_scripts()
+
+ @staticmethod
+ def _cs_path_exists(fspath):
+ """
+ Case-sensitive path existence check
+
+ >>> sdist_add_defaults._cs_path_exists(__file__)
+ True
+ >>> sdist_add_defaults._cs_path_exists(__file__.upper())
+ False
+ """
+ if not os.path.exists(fspath):
+ return False
+ # make absolute so we always have a directory
+ abspath = os.path.abspath(fspath)
+ directory, filename = os.path.split(abspath)
+ return filename in os.listdir(directory)
+
+ def _add_defaults_standards(self):
+ standards = [self.READMES, self.distribution.script_name]
+ for fn in standards:
+ if isinstance(fn, tuple):
+ alts = fn
+ got_it = False
+ for fn in alts:
+ if self._cs_path_exists(fn):
+ got_it = True
+ self.filelist.append(fn)
+ break
+
+ if not got_it:
+ self.warn("standard file not found: should have one of " +
+ ', '.join(alts))
+ else:
+ if self._cs_path_exists(fn):
+ self.filelist.append(fn)
+ else:
+ self.warn("standard file '%s' not found" % fn)
+
+ def _add_defaults_optional(self):
+ optional = ['test/test*.py', 'setup.cfg']
+ for pattern in optional:
+ files = filter(os.path.isfile, glob(pattern))
+ self.filelist.extend(files)
+
+ def _add_defaults_python(self):
+ # build_py is used to get:
+ # - python modules
+ # - files defined in package_data
+ build_py = self.get_finalized_command('build_py')
+
+ # getting python files
+ if self.distribution.has_pure_modules():
+ self.filelist.extend(build_py.get_source_files())
+
+ # getting package_data files
+ # (computed in build_py.data_files by build_py.finalize_options)
+ for pkg, src_dir, build_dir, filenames in build_py.data_files:
+ for filename in filenames:
+ self.filelist.append(os.path.join(src_dir, filename))
+
+ def _add_defaults_data_files(self):
+ # getting distribution.data_files
+ if self.distribution.has_data_files():
+ for item in self.distribution.data_files:
+ if isinstance(item, str):
+ # plain file
+ item = convert_path(item)
+ if os.path.isfile(item):
+ self.filelist.append(item)
+ else:
+ # a (dirname, filenames) tuple
+ dirname, filenames = item
+ for f in filenames:
+ f = convert_path(f)
+ if os.path.isfile(f):
+ self.filelist.append(f)
+
+ def _add_defaults_ext(self):
+ if self.distribution.has_ext_modules():
+ build_ext = self.get_finalized_command('build_ext')
+ self.filelist.extend(build_ext.get_source_files())
+
+ def _add_defaults_c_libs(self):
+ if self.distribution.has_c_libraries():
+ build_clib = self.get_finalized_command('build_clib')
+ self.filelist.extend(build_clib.get_source_files())
+
+ def _add_defaults_scripts(self):
+ if self.distribution.has_scripts():
+ build_scripts = self.get_finalized_command('build_scripts')
+ self.filelist.extend(build_scripts.get_source_files())
+
+
+if hasattr(sdist.sdist, '_add_defaults_standards'):
+ # disable the functionality already available upstream
+ class sdist_add_defaults: # noqa
+ pass
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/register.py b/venv/lib/python3.11/site-packages/setuptools/command/register.py
new file mode 100644
index 0000000..b8266b9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/register.py
@@ -0,0 +1,18 @@
+from distutils import log
+import distutils.command.register as orig
+
+from setuptools.errors import RemovedCommandError
+
+
+class register(orig.register):
+ """Formerly used to register packages on PyPI."""
+
+ def run(self):
+ msg = (
+ "The register command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
+ )
+
+ self.announce("ERROR: " + msg, log.ERROR)
+
+ raise RemovedCommandError(msg)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/rotate.py b/venv/lib/python3.11/site-packages/setuptools/command/rotate.py
new file mode 100644
index 0000000..74795ba
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/rotate.py
@@ -0,0 +1,64 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import shutil
+
+from setuptools import Command
+
+
+class rotate(Command):
+ """Delete older distributions"""
+
+ description = "delete older distributions, keeping N newest files"
+ user_options = [
+ ('match=', 'm', "patterns to match (required)"),
+ ('dist-dir=', 'd', "directory where the distributions are"),
+ ('keep=', 'k', "number of matching distributions to keep"),
+ ]
+
+ boolean_options = []
+
+ def initialize_options(self):
+ self.match = None
+ self.dist_dir = None
+ self.keep = None
+
+ def finalize_options(self):
+ if self.match is None:
+ raise DistutilsOptionError(
+ "Must specify one or more (comma-separated) match patterns "
+ "(e.g. '.zip' or '.egg')"
+ )
+ if self.keep is None:
+ raise DistutilsOptionError("Must specify number of files to keep")
+ try:
+ self.keep = int(self.keep)
+ except ValueError as e:
+ raise DistutilsOptionError("--keep must be an integer") from e
+ if isinstance(self.match, str):
+ self.match = [
+ convert_path(p.strip()) for p in self.match.split(',')
+ ]
+ self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+
+ def run(self):
+ self.run_command("egg_info")
+ from glob import glob
+
+ for pattern in self.match:
+ pattern = self.distribution.get_name() + '*' + pattern
+ files = glob(os.path.join(self.dist_dir, pattern))
+ files = [(os.path.getmtime(f), f) for f in files]
+ files.sort()
+ files.reverse()
+
+ log.info("%d file(s) matching %s", len(files), pattern)
+ files = files[self.keep:]
+ for (t, f) in files:
+ log.info("Deleting %s", f)
+ if not self.dry_run:
+ if os.path.isdir(f):
+ shutil.rmtree(f)
+ else:
+ os.unlink(f)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/saveopts.py b/venv/lib/python3.11/site-packages/setuptools/command/saveopts.py
new file mode 100644
index 0000000..611cec5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/saveopts.py
@@ -0,0 +1,22 @@
+from setuptools.command.setopt import edit_config, option_base
+
+
+class saveopts(option_base):
+ """Save command-line options to a file"""
+
+ description = "save supplied options to setup.cfg or other config file"
+
+ def run(self):
+ dist = self.distribution
+ settings = {}
+
+ for cmd in dist.command_options:
+
+ if cmd == 'saveopts':
+ continue # don't save our own options!
+
+ for opt, (src, val) in dist.get_option_dict(cmd).items():
+ if src == "command line":
+ settings.setdefault(cmd, {})[opt] = val
+
+ edit_config(self.filename, settings, self.dry_run)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/sdist.py b/venv/lib/python3.11/site-packages/setuptools/command/sdist.py
new file mode 100644
index 0000000..4a8cde7
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/sdist.py
@@ -0,0 +1,210 @@
+from distutils import log
+import distutils.command.sdist as orig
+import os
+import sys
+import io
+import contextlib
+from itertools import chain
+
+from .py36compat import sdist_add_defaults
+
+from .._importlib import metadata
+from .build import _ORIGINAL_SUBCOMMANDS
+
+_default_revctrl = list
+
+
+def walk_revctrl(dirname=''):
+ """Find all files under revision control"""
+ for ep in metadata.entry_points(group='setuptools.file_finders'):
+ for item in ep.load()(dirname):
+ yield item
+
+
+class sdist(sdist_add_defaults, orig.sdist):
+ """Smart sdist that finds anything supported by revision control"""
+
+ user_options = [
+ ('formats=', None,
+ "formats for source distribution (comma-separated list)"),
+ ('keep-temp', 'k',
+ "keep the distribution tree around after creating " +
+ "archive file(s)"),
+ ('dist-dir=', 'd',
+ "directory to put the source distribution archive(s) in "
+ "[default: dist]"),
+ ('owner=', 'u',
+ "Owner name used when creating a tar file [default: current user]"),
+ ('group=', 'g',
+ "Group name used when creating a tar file [default: current group]"),
+ ]
+
+ negative_opt = {}
+
+ README_EXTENSIONS = ['', '.rst', '.txt', '.md']
+ READMES = tuple('README{0}'.format(ext) for ext in README_EXTENSIONS)
+
+ def run(self):
+ self.run_command('egg_info')
+ ei_cmd = self.get_finalized_command('egg_info')
+ self.filelist = ei_cmd.filelist
+ self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
+ self.check_readme()
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ self.make_distribution()
+
+ dist_files = getattr(self.distribution, 'dist_files', [])
+ for file in self.archive_files:
+ data = ('sdist', '', file)
+ if data not in dist_files:
+ dist_files.append(data)
+
+ def initialize_options(self):
+ orig.sdist.initialize_options(self)
+
+ self._default_to_gztar()
+
+ def _default_to_gztar(self):
+ # only needed on Python prior to 3.6.
+ if sys.version_info >= (3, 6, 0, 'beta', 1):
+ return
+ self.formats = ['gztar']
+
+ def make_distribution(self):
+ """
+ Workaround for #516
+ """
+ with self._remove_os_link():
+ orig.sdist.make_distribution(self)
+
+ @staticmethod
+ @contextlib.contextmanager
+ def _remove_os_link():
+ """
+ In a context, remove and restore os.link if it exists
+ """
+
+ class NoValue:
+ pass
+
+ orig_val = getattr(os, 'link', NoValue)
+ try:
+ del os.link
+ except Exception:
+ pass
+ try:
+ yield
+ finally:
+ if orig_val is not NoValue:
+ setattr(os, 'link', orig_val)
+
+ def add_defaults(self):
+ super().add_defaults()
+ self._add_defaults_build_sub_commands()
+
+ def _add_defaults_optional(self):
+ super()._add_defaults_optional()
+ if os.path.isfile('pyproject.toml'):
+ self.filelist.append('pyproject.toml')
+
+ def _add_defaults_python(self):
+ """getting python files"""
+ if self.distribution.has_pure_modules():
+ build_py = self.get_finalized_command('build_py')
+ self.filelist.extend(build_py.get_source_files())
+ self._add_data_files(self._safe_data_files(build_py))
+
+ def _add_defaults_build_sub_commands(self):
+ build = self.get_finalized_command("build")
+ missing_cmds = set(build.get_sub_commands()) - _ORIGINAL_SUBCOMMANDS
+ # ^-- the original built-in sub-commands are already handled by default.
+ cmds = (self.get_finalized_command(c) for c in missing_cmds)
+ files = (c.get_source_files() for c in cmds if hasattr(c, "get_source_files"))
+ self.filelist.extend(chain.from_iterable(files))
+
+ def _safe_data_files(self, build_py):
+ """
+ Since the ``sdist`` class is also used to compute the MANIFEST
+ (via :obj:`setuptools.command.egg_info.manifest_maker`),
+ there might be recursion problems when trying to obtain the list of
+ data_files and ``include_package_data=True`` (which in turn depends on
+ the files included in the MANIFEST).
+
+ To avoid that, ``manifest_maker`` should be able to overwrite this
+ method and avoid recursive attempts to build/analyze the MANIFEST.
+ """
+ return build_py.data_files
+
+ def _add_data_files(self, data_files):
+ """
+ Add data files as found in build_py.data_files.
+ """
+ self.filelist.extend(
+ os.path.join(src_dir, name)
+ for _, src_dir, _, filenames in data_files
+ for name in filenames
+ )
+
+ def _add_defaults_data_files(self):
+ try:
+ super()._add_defaults_data_files()
+ except TypeError:
+ log.warn("data_files contains unexpected objects")
+
+ def check_readme(self):
+ for f in self.READMES:
+ if os.path.exists(f):
+ return
+ else:
+ self.warn(
+ "standard file not found: should have one of " +
+ ', '.join(self.READMES)
+ )
+
+ def make_release_tree(self, base_dir, files):
+ orig.sdist.make_release_tree(self, base_dir, files)
+
+ # Save any egg_info command line options used to create this sdist
+ dest = os.path.join(base_dir, 'setup.cfg')
+ if hasattr(os, 'link') and os.path.exists(dest):
+ # unlink and re-copy, since it might be hard-linked, and
+ # we don't want to change the source version
+ os.unlink(dest)
+ self.copy_file('setup.cfg', dest)
+
+ self.get_finalized_command('egg_info').save_version_info(dest)
+
+ def _manifest_is_not_generated(self):
+ # check for special comment used in 2.7.1 and higher
+ if not os.path.isfile(self.manifest):
+ return False
+
+ with io.open(self.manifest, 'rb') as fp:
+ first_line = fp.readline()
+ return (first_line !=
+ '# file GENERATED by distutils, do NOT edit\n'.encode())
+
+ def read_manifest(self):
+ """Read the manifest file (named by 'self.manifest') and use it to
+ fill in 'self.filelist', the list of files to include in the source
+ distribution.
+ """
+ log.info("reading manifest file '%s'", self.manifest)
+ manifest = open(self.manifest, 'rb')
+ for line in manifest:
+ # The manifest must contain UTF-8. See #303.
+ try:
+ line = line.decode('UTF-8')
+ except UnicodeDecodeError:
+ log.warn("%r not UTF-8 decodable -- skipping" % line)
+ continue
+ # ignore comments and blank lines
+ line = line.strip()
+ if line.startswith('#') or not line:
+ continue
+ self.filelist.append(line)
+ manifest.close()
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/setopt.py b/venv/lib/python3.11/site-packages/setuptools/command/setopt.py
new file mode 100644
index 0000000..6358c04
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/setopt.py
@@ -0,0 +1,149 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import distutils
+import os
+import configparser
+
+from setuptools import Command
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+ """Get the filename of the distutils, local, global, or per-user config
+
+ `kind` must be one of "local", "global", or "user"
+ """
+ if kind == 'local':
+ return 'setup.cfg'
+ if kind == 'global':
+ return os.path.join(
+ os.path.dirname(distutils.__file__), 'distutils.cfg'
+ )
+ if kind == 'user':
+ dot = '.' if os.name == 'posix' else ''
+ return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+ raise ValueError(
+ "config_file() type must be 'local', 'global', or 'user'", kind
+ )
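+
+# Illustrative return values (they follow directly from the branches above):
+#
+#     config_file('local')    # -> 'setup.cfg'
+#     config_file('global')   # -> <distutils package dir>/distutils.cfg
+#     config_file('user')     # -> '~/.pydistutils.cfg' on POSIX (expanded),
+#                             #    '~/pydistutils.cfg' elsewhere
+#     config_file('other')    # -> raises ValueError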
+
+
+def edit_config(filename, settings, dry_run=False):
+ """Edit a configuration file to include `settings`
+
+ `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+ command/section name. A ``None`` value means to delete the entire section,
+ while a dictionary lists settings to be changed or deleted in that section.
+ A setting of ``None`` means to delete that setting.
+ """
+ log.debug("Reading configuration from %s", filename)
+ opts = configparser.RawConfigParser()
+ opts.optionxform = lambda x: x
+ opts.read([filename])
+ for section, options in settings.items():
+ if options is None:
+ log.info("Deleting section [%s] from %s", section, filename)
+ opts.remove_section(section)
+ else:
+ if not opts.has_section(section):
+ log.debug("Adding new section [%s] to %s", section, filename)
+ opts.add_section(section)
+ for option, value in options.items():
+ if value is None:
+ log.debug(
+ "Deleting %s.%s from %s",
+ section, option, filename
+ )
+ opts.remove_option(section, option)
+ if not opts.options(section):
+ log.info("Deleting empty [%s] section from %s",
+ section, filename)
+ opts.remove_section(section)
+ else:
+ log.debug(
+ "Setting %s.%s to %r in %s",
+ section, option, value, filename
+ )
+ opts.set(section, option, value)
+
+ log.info("Writing %s", filename)
+ if not dry_run:
+ with open(filename, 'w') as f:
+ opts.write(f)
+
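+# Illustrative call (the section names and values are hypothetical): set one
+# option and drop a stale section in a single edit of setup.cfg:
+#
+#     edit_config('setup.cfg', {
+#         'bdist_wheel': {'universal': '1'},  # add or overwrite an option
+#         'easy_install': None,               # delete this whole section
+#     })
+#
+# A None value inside a section dict removes just that option, and the
+# section is dropped once it becomes empty.
+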
+
+class option_base(Command):
+ """Abstract base class for commands that mess with config files"""
+
+ user_options = [
+ ('global-config', 'g',
+ "save options to the site-wide distutils.cfg file"),
+ ('user-config', 'u',
+ "save options to the current user's pydistutils.cfg file"),
+ ('filename=', 'f',
+ "configuration file to use (default=setup.cfg)"),
+ ]
+
+ boolean_options = [
+ 'global-config', 'user-config',
+ ]
+
+ def initialize_options(self):
+ self.global_config = None
+ self.user_config = None
+ self.filename = None
+
+ def finalize_options(self):
+ filenames = []
+ if self.global_config:
+ filenames.append(config_file('global'))
+ if self.user_config:
+ filenames.append(config_file('user'))
+ if self.filename is not None:
+ filenames.append(self.filename)
+ if not filenames:
+ filenames.append(config_file('local'))
+ if len(filenames) > 1:
+ raise DistutilsOptionError(
+ "Must specify only one configuration file option",
+ filenames
+ )
+ self.filename, = filenames
+
+
+class setopt(option_base):
+ """Save command-line options to a file"""
+
+ description = "set an option in setup.cfg or another config file"
+
+ user_options = [
+ ('command=', 'c', 'command to set an option for'),
+ ('option=', 'o', 'option to set'),
+ ('set-value=', 's', 'value of the option'),
+ ('remove', 'r', 'remove (unset) the value'),
+ ] + option_base.user_options
+
+ boolean_options = option_base.boolean_options + ['remove']
+
+ def initialize_options(self):
+ option_base.initialize_options(self)
+ self.command = None
+ self.option = None
+ self.set_value = None
+ self.remove = None
+
+ def finalize_options(self):
+ option_base.finalize_options(self)
+ if self.command is None or self.option is None:
+ raise DistutilsOptionError("Must specify --command *and* --option")
+ if self.set_value is None and not self.remove:
+ raise DistutilsOptionError("Must specify --set-value or --remove")
+
+ def run(self):
+ edit_config(
+ self.filename, {
+ self.command: {self.option.replace('-', '_'): self.set_value}
+ },
+ self.dry_run
+ )
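+
+# Illustrative command line (the path is hypothetical): persist a build_ext
+# option so later builds pick it up from setup.cfg:
+#
+#     python setup.py setopt --command build_ext \
+#         --option include-dirs --set-value /usr/local/include
+#
+# run() turns this into edit_config('setup.cfg',
+#     {'build_ext': {'include_dirs': '/usr/local/include'}}).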
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/test.py b/venv/lib/python3.11/site-packages/setuptools/command/test.py
new file mode 100644
index 0000000..8dde513
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/test.py
@@ -0,0 +1,251 @@
+import os
+import operator
+import sys
+import contextlib
+import itertools
+import unittest
+from distutils.errors import DistutilsError, DistutilsOptionError
+from distutils import log
+from unittest import TestLoader
+
+from pkg_resources import (
+ resource_listdir,
+ resource_exists,
+ normalize_path,
+ working_set,
+ evaluate_marker,
+ add_activation_listener,
+ require,
+)
+from .._importlib import metadata
+from setuptools import Command
+from setuptools.extern.more_itertools import unique_everseen
+from setuptools.extern.jaraco.functools import pass_none
+
+
+class ScanningLoader(TestLoader):
+ def __init__(self):
+ TestLoader.__init__(self)
+ self._visited = set()
+
+ def loadTestsFromModule(self, module, pattern=None):
+ """Return a suite of all tests cases contained in the given module
+
+ If the module is a package, load tests from all the modules in it.
+ If the module has an ``additional_tests`` function, call it and add
+ the return value to the tests.
+ """
+ if module in self._visited:
+ return None
+ self._visited.add(module)
+
+ tests = []
+ tests.append(TestLoader.loadTestsFromModule(self, module))
+
+ if hasattr(module, "additional_tests"):
+ tests.append(module.additional_tests())
+
+ if hasattr(module, '__path__'):
+ for file in resource_listdir(module.__name__, ''):
+ if file.endswith('.py') and file != '__init__.py':
+ submodule = module.__name__ + '.' + file[:-3]
+ else:
+ if resource_exists(module.__name__, file + '/__init__.py'):
+ submodule = module.__name__ + '.' + file
+ else:
+ continue
+ tests.append(self.loadTestsFromName(submodule))
+
+ if len(tests) != 1:
+ return self.suiteClass(tests)
+ else:
+ return tests[0] # don't create a nested suite for only one return
+
+
+# adapted from jaraco.classes.properties:NonDataProperty
+class NonDataProperty:
+ def __init__(self, fget):
+ self.fget = fget
+
+ def __get__(self, obj, objtype=None):
+ if obj is None:
+ return self
+ return self.fget(obj)
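+
+# Illustrative behaviour (a sketch, assuming it matches the jaraco original):
+# only __get__ is defined, so assigning to the attribute on an instance
+# shadows the computed value:
+#
+#     class Example:
+#         @NonDataProperty
+#         def value(self):
+#             return 42
+#
+#     obj = Example()
+#     obj.value        # -> 42, computed by the descriptor
+#     obj.value = 7    # a plain instance attribute now takes precedence
+#     obj.value        # -> 7
+#
+# test.test_args below relies on this, so an explicitly assigned value
+# overrides the computed default.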
+
+
+class test(Command):
+ """Command to run unit tests after in-place build"""
+
+ description = "run unit tests after in-place build (deprecated)"
+
+ user_options = [
+ ('test-module=', 'm', "Run 'test_suite' in specified module"),
+ (
+ 'test-suite=',
+ 's',
+ "Run single test, case or suite (e.g. 'module.test_suite')",
+ ),
+ ('test-runner=', 'r', "Test runner to use"),
+ ]
+
+ def initialize_options(self):
+ self.test_suite = None
+ self.test_module = None
+ self.test_loader = None
+ self.test_runner = None
+
+ def finalize_options(self):
+
+ if self.test_suite and self.test_module:
+ msg = "You may specify a module or a suite, but not both"
+ raise DistutilsOptionError(msg)
+
+ if self.test_suite is None:
+ if self.test_module is None:
+ self.test_suite = self.distribution.test_suite
+ else:
+ self.test_suite = self.test_module + ".test_suite"
+
+ if self.test_loader is None:
+ self.test_loader = getattr(self.distribution, 'test_loader', None)
+ if self.test_loader is None:
+ self.test_loader = "setuptools.command.test:ScanningLoader"
+ if self.test_runner is None:
+ self.test_runner = getattr(self.distribution, 'test_runner', None)
+
+ @NonDataProperty
+ def test_args(self):
+ return list(self._test_args())
+
+ def _test_args(self):
+ if not self.test_suite:
+ yield 'discover'
+ if self.verbose:
+ yield '--verbose'
+ if self.test_suite:
+ yield self.test_suite
+
+ def with_project_on_sys_path(self, func):
+ """
+ Backward compatibility for project_on_sys_path context.
+ """
+ with self.project_on_sys_path():
+ func()
+
+ @contextlib.contextmanager
+ def project_on_sys_path(self, include_dists=[]):
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ self.reinitialize_command('build_ext', inplace=1)
+ self.run_command('build_ext')
+
+ ei_cmd = self.get_finalized_command("egg_info")
+
+ old_path = sys.path[:]
+ old_modules = sys.modules.copy()
+
+ try:
+ project_path = normalize_path(ei_cmd.egg_base)
+ sys.path.insert(0, project_path)
+ working_set.__init__()
+ add_activation_listener(lambda dist: dist.activate())
+ require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+ with self.paths_on_pythonpath([project_path]):
+ yield
+ finally:
+ sys.path[:] = old_path
+ sys.modules.clear()
+ sys.modules.update(old_modules)
+ working_set.__init__()
+
+ @staticmethod
+ @contextlib.contextmanager
+ def paths_on_pythonpath(paths):
+ """
+ Add the indicated paths to the head of the PYTHONPATH environment
+ variable so that subprocesses will also see the packages at
+ these paths.
+
+ Do this in a context that restores the value on exit.
+ """
+ nothing = object()
+ orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
+ current_pythonpath = os.environ.get('PYTHONPATH', '')
+ try:
+ prefix = os.pathsep.join(unique_everseen(paths))
+ to_join = filter(None, [prefix, current_pythonpath])
+ new_path = os.pathsep.join(to_join)
+ if new_path:
+ os.environ['PYTHONPATH'] = new_path
+ yield
+ finally:
+ if orig_pythonpath is nothing:
+ os.environ.pop('PYTHONPATH', None)
+ else:
+ os.environ['PYTHONPATH'] = orig_pythonpath
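+
+ # Illustrative use (the path and package name are hypothetical): make an
+ # in-place build importable by child processes for the duration of a block:
+ #
+ #     import subprocess, sys
+ #     with test.paths_on_pythonpath(['/tmp/proj/build/lib']):
+ #         subprocess.run([sys.executable, '-c', 'import mypkg'])
+ #
+ # On exit, PYTHONPATH is restored exactly, even when it was not set at all
+ # beforehand.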
+
+ @staticmethod
+ def install_dists(dist):
+ """
+ Install the requirements indicated by ``dist`` and
+ return an iterable of the dists that were built.
+ """
+ ir_d = dist.fetch_build_eggs(dist.install_requires)
+ tr_d = dist.fetch_build_eggs(dist.tests_require or [])
+ er_d = dist.fetch_build_eggs(
+ v
+ for k, v in dist.extras_require.items()
+ if k.startswith(':') and evaluate_marker(k[1:])
+ )
+ return itertools.chain(ir_d, tr_d, er_d)
+
+ def run(self):
+ self.announce(
+ "WARNING: Testing via this command is deprecated and will be "
+ "removed in a future version. Users looking for a generic test "
+ "entry point independent of test runner are encouraged to use "
+ "tox.",
+ log.WARN,
+ )
+
+ installed_dists = self.install_dists(self.distribution)
+
+ cmd = ' '.join(self._argv)
+ if self.dry_run:
+ self.announce('skipping "%s" (dry run)' % cmd)
+ return
+
+ self.announce('running "%s"' % cmd)
+
+ paths = map(operator.attrgetter('location'), installed_dists)
+ with self.paths_on_pythonpath(paths):
+ with self.project_on_sys_path():
+ self.run_tests()
+
+ def run_tests(self):
+ test = unittest.main(
+ None,
+ None,
+ self._argv,
+ testLoader=self._resolve_as_ep(self.test_loader),
+ testRunner=self._resolve_as_ep(self.test_runner),
+ exit=False,
+ )
+ if not test.result.wasSuccessful():
+ msg = 'Test failed: %s' % test.result
+ self.announce(msg, log.ERROR)
+ raise DistutilsError(msg)
+
+ @property
+ def _argv(self):
+ return ['unittest'] + self.test_args
+
+ @staticmethod
+ @pass_none
+ def _resolve_as_ep(val):
+ """
+ Load the indicated attribute value and call it, as if it had been
+ specified as an entry point.
+ """
+ return metadata.EntryPoint(value=val, name=None, group=None).load()()
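+
+# Illustrative resolution (the string follows the entry-point "value" syntax):
+#
+#     test._resolve_as_ep("setuptools.command.test:ScanningLoader")
+#
+# loads ScanningLoader and then calls it, yielding a loader instance; passing
+# None returns None because of the pass_none wrapper.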
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/upload.py b/venv/lib/python3.11/site-packages/setuptools/command/upload.py
new file mode 100644
index 0000000..ec7f81e
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/upload.py
@@ -0,0 +1,17 @@
+from distutils import log
+from distutils.command import upload as orig
+
+from setuptools.errors import RemovedCommandError
+
+
+class upload(orig.upload):
+ """Formerly used to upload packages to PyPI."""
+
+ def run(self):
+ msg = (
+ "The upload command has been removed, use twine to upload "
+ + "instead (https://pypi.org/p/twine)"
+ )
+
+ self.announce("ERROR: " + msg, log.ERROR)
+ raise RemovedCommandError(msg)
diff --git a/venv/lib/python3.11/site-packages/setuptools/command/upload_docs.py b/venv/lib/python3.11/site-packages/setuptools/command/upload_docs.py
new file mode 100644
index 0000000..3263f07
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/setuptools/command/upload_docs.py
@@ -0,0 +1,213 @@
+# -*- coding: utf-8 -*-
+"""upload_docs
+
+Implements a Distutils 'upload_docs' subcommand (upload documentation to
+sites other than PyPI, such as devpi).
+"""
+
+from base64 import standard_b64encode
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+import socket
+import zipfile
+import tempfile
+import shutil
+import itertools
+import functools
+import http.client
+import urllib.parse
+import warnings
+
+from .._importlib import metadata
+from .. import SetuptoolsDeprecationWarning
+
+from .upload import upload
+
+
+def _encode(s):
+ return s.encode('utf-8', 'surrogateescape')
+
+
+class upload_docs(upload):
+ # override the default repository as upload_docs isn't
+ # supported by Warehouse (and won't be).
+ DEFAULT_REPOSITORY = 'https://pypi.python.org/pypi/'
+
+ description = 'Upload documentation to sites other than PyPI, such as devpi'
+
+ user_options = [
+ ('repository=', 'r',
+ "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY),
+ ('show-response', None,
+ 'display full response text from server'),
+ ('upload-dir=', None, 'directory to upload'),
+ ]
+ boolean_options = upload.boolean_options
+
+ def has_sphinx(self):
+ return bool(
+ self.upload_dir is None
+ and metadata.entry_points(group='distutils.commands', name='build_sphinx')
+ )
+
+ sub_commands = [('build_sphinx', has_sphinx)]
+
+ def initialize_options(self):
+ upload.initialize_options(self)
+ self.upload_dir = None
+ self.target_dir = None
+
+ def finalize_options(self):
+ log.warn(
+ "Upload_docs command is deprecated. Use Read the Docs "
+ "(https://readthedocs.org) instead.")
+ upload.finalize_options(self)
+ if self.upload_dir is None:
+ if self.has_sphinx():
+ build_sphinx = self.get_finalized_command('build_sphinx')
+ self.target_dir = dict(build_sphinx.builder_target_dirs)['html']
+ else:
+ build = self.get_finalized_command('build')
+ self.target_dir = os.path.join(build.build_base, 'docs')
+ else:
+ self.ensure_dirname('upload_dir')
+ self.target_dir = self.upload_dir
+ self.announce('Using upload directory %s' % self.target_dir)
+
+ def create_zipfile(self, filename):
+ zip_file = zipfile.ZipFile(filename, "w")
+ try:
+ self.mkpath(self.target_dir) # just in case
+ for root, dirs, files in os.walk(self.target_dir):
+ if root == self.target_dir and not files:
+ tmpl = "no files found in upload directory '%s'"
+ raise DistutilsOptionError(tmpl % self.target_dir)
+ for name in files:
+ full = os.path.join(root, name)
+ relative = root[len(self.target_dir):].lstrip(os.path.sep)
+ dest = os.path.join(relative, name)
+ zip_file.write(full, dest)
+ finally:
+ zip_file.close()
+
+ def run(self):
+ warnings.warn(
+ "upload_docs is deprecated and will be removed in a future "
+ "version. Use tools like httpie or curl instead.",
+ SetuptoolsDeprecationWarning,
+ )
+
+ # Run sub commands
+ for cmd_name in self.get_sub_commands():
+ self.run_command(cmd_name)
+
+ tmp_dir = tempfile.mkdtemp()
+ name = self.distribution.metadata.get_name()
+ zip_file = os.path.join(tmp_dir, "%s.zip" % name)
+ try:
+ self.create_zipfile(zip_file)
+ self.upload_file(zip_file)
+ finally:
+ shutil.rmtree(tmp_dir)
+
+ @staticmethod
+ def _build_part(item, sep_boundary):
+ key, values = item
+ title = '\nContent-Disposition: form-data; name="%s"' % key
+ # handle multiple entries for the same name
+ if not isinstance(values, list):
+ values = [values]
+ for value in values:
+ if isinstance(value, tuple):
+ title += '; filename="%s"' % value[0]
+ value = value[1]
+ else:
+ value = _encode(value)
+ yield sep_boundary
+ yield _encode(title)
+ yield b"\n\n"
+ yield value
+ if value and value[-1:] == b'\r':
+ yield b'\n' # write an extra newline (lurve Macs)
+
+ @classmethod
+ def _build_multipart(cls, data):
+ """
+ Build up the MIME payload for the POST data
+ """
+ boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+ sep_boundary = b'\n--' + boundary.encode('ascii')
+ end_boundary = sep_boundary + b'--'
+ end_items = end_boundary, b"\n",
+ builder = functools.partial(
+ cls._build_part,
+ sep_boundary=sep_boundary,
+ )
+ part_groups = map(builder, data.items())
+ parts = itertools.chain.from_iterable(part_groups)
+ body_items = itertools.chain(parts, end_items)
+ content_type = 'multipart/form-data; boundary=%s' % boundary
+ return b''.join(body_items), content_type
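+
+ # Illustrative call (field values are hypothetical): plain string fields
+ # and (filename, bytes) tuples are both supported:
+ #
+ #     body, ctype = upload_docs._build_multipart({
+ #         ':action': 'doc_upload',
+ #         'content': ('docs.zip', b'...zip bytes...'),
+ #     })
+ #     # ctype is 'multipart/form-data; boundary=...'
+ #
+ # _build_part() encodes str values with _encode() and unpacks tuples into a
+ # filename plus raw bytes.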
+
+ def upload_file(self, filename):
+ with open(filename, 'rb') as f:
+ content = f.read()
+ meta = self.distribution.metadata
+ data = {
+ ':action': 'doc_upload',
+ 'name': meta.get_name(),
+ 'content': (os.path.basename(filename), content),
+ }
+ # set up the authentication
+ credentials = _encode(self.username + ':' + self.password)
+ credentials = standard_b64encode(credentials).decode('ascii')
+ auth = "Basic " + credentials
+
+ body, ct = self._build_multipart(data)
+
+ msg = "Submitting documentation to %s" % (self.repository)
+ self.announce(msg, log.INFO)
+
+ # build the Request
+ # We can't use urllib2 since we need to send the Basic
+ # auth right with the first request
+ schema, netloc, url, params, query, fragments = \
+ urllib.parse.urlparse(self.repository)
+ assert not params and not query and not fragments
+ if schema == 'http':
+ conn = http.client.HTTPConnection(netloc)
+ elif schema == 'https':
+ conn = http.client.HTTPSConnection(netloc)
+ else:
+ raise AssertionError("unsupported schema " + schema)
+
+ data = ''
+ try:
+ conn.connect()
+ conn.putrequest("POST", url)
+ content_type = ct
+ conn.putheader('Content-type', content_type)
+ conn.putheader('Content-length', str(len(body)))
+ conn.putheader('Authorization', auth)
+ conn.endheaders()
+ conn.send(body)
+ except socket.error as e:
+ self.announce(str(e), log.ERROR)
+ return
+
+ r = conn.getresponse()
+ if r.status == 200:
+ msg = 'Server response (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.INFO)
+ elif r.status == 301:
+ location = r.getheader('Location')
+ if location is None:
+ location = 'https://pythonhosted.org/%s/' % meta.get_name()
+ msg = 'Upload successful. Visit %s' % location
+ self.announce(msg, log.INFO)
+ else:
+ msg = 'Upload failed (%s): %s' % (r.status, r.reason)
+ self.announce(msg, log.ERROR)
+ if self.show_response:
+ print('-' * 75, r.read(), '-' * 75)