| author | cyfraeviolae <cyfraeviolae> | 2024-04-03 03:10:44 -0400 | 
|---|---|---|
| committer | cyfraeviolae <cyfraeviolae> | 2024-04-03 03:10:44 -0400 | 
| commit | 6d7ba58f880be618ade07f8ea080fe8c4bf8a896 (patch) | |
| tree | b1c931051ffcebd2bd9d61d98d6233ffa289bbce /venv/lib/python3.11/site-packages/setuptools/_distutils | |
| parent | 4f884c9abc32990b4061a1bb6997b4b37e58ea0b (diff) | |
venv
Diffstat (limited to 'venv/lib/python3.11/site-packages/setuptools/_distutils')
112 files changed, 16702 insertions, 0 deletions
| diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__init__.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/__init__.py new file mode 100644 index 0000000..b3ac014 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__init__.py @@ -0,0 +1,24 @@ +"""distutils + +The main package for the Python Module Distribution Utilities.  Normally +used from a setup script as + +   from distutils.core import setup + +   setup (...) +""" + +import sys +import importlib + +__version__ = sys.version[: sys.version.index(' ')] + + +try: +    # Allow Debian and pkgsrc (only) to customize system +    # behavior. Ref pypa/distutils#2 and pypa/distutils#16. +    # This hook is deprecated and no other environments +    # should use it. +    importlib.import_module('_distutils_system_mod') +except ImportError: +    pass diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pycBinary files differ new file mode 100644 index 0000000..b1fe4ca --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/__init__.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pycBinary files differ new file mode 100644 index 0000000..9105a67 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_collections.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f56fef1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_functools.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_macos_compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_macos_compat.cpython-311.pycBinary files differ new file mode 100644 index 0000000..fc81154 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_macos_compat.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..2e1e788 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/_msvccompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pycBinary files differ new file mode 100644 index 0000000..6710a07 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/archive_util.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f8c23ab --- /dev/null +++ 
b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/bcppcompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..05125dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/ccompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pycBinary files differ new file mode 100644 index 0000000..388d891 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cmd.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pycBinary files differ new file mode 100644 index 0000000..a699370 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/config.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pycBinary files differ new file mode 100644 index 0000000..90782e3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/core.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..e75efd5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/cygwinccompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pycBinary files differ new file mode 100644 index 0000000..5362cb5 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/debug.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-311.pycBinary files differ new file mode 100644 index 0000000..fab5163 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dep_util.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pycBinary files differ new file mode 100644 index 0000000..da0f262 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dir_util.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pycBinary files differ new file mode 100644 index 0000000..b9a533a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/dist.cpython-311.pyc diff --git 
a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pycBinary files differ new file mode 100644 index 0000000..007f728 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/errors.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pycBinary files differ new file mode 100644 index 0000000..e6fafff --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/extension.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pycBinary files differ new file mode 100644 index 0000000..d97e439 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/fancy_getopt.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pycBinary files differ new file mode 100644 index 0000000..def7e4f --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/file_util.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pycBinary files differ new file mode 100644 index 0000000..cb8b7c1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/filelist.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pycBinary files differ new file mode 100644 index 0000000..5d4759d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/log.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..b1cec06 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvc9compiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..ce6d9d8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/msvccompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-311.pycBinary files differ new file mode 100644 index 0000000..4b1f0ba --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py38compat.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pycBinary files differ new file mode 100644 index 0000000..4cf3d55 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/py39compat.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pycBinary files differ new file mode 100644 index 0000000..884f306 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/spawn.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f65282e --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/sysconfig.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pycBinary files differ new file mode 100644 index 0000000..237a51f --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/text_file.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-311.pycBinary files differ new file mode 100644 index 0000000..1567bc2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/unixccompiler.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pycBinary files differ new file mode 100644 index 0000000..3114ccf --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/util.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/version.cpython-311.pycBinary files differ new file mode 100644 index 0000000..80e4030 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/version.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-311.pycBinary files differ new file mode 100644 index 0000000..1fa50ac --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/__pycache__/versionpredicate.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/_collections.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/_collections.py new file mode 100644 index 0000000..98fce80 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/_collections.py @@ -0,0 +1,56 @@ +import collections +import itertools + + +# from jaraco.collections 3.5.1 +class DictStack(list, collections.abc.Mapping): +    """ +    A stack of dictionaries that behaves as a view on those dictionaries, +    giving preference to the last. 
+ +    >>> stack = DictStack([dict(a=1, c=2), dict(b=2, a=2)]) +    >>> stack['a'] +    2 +    >>> stack['b'] +    2 +    >>> stack['c'] +    2 +    >>> len(stack) +    3 +    >>> stack.push(dict(a=3)) +    >>> stack['a'] +    3 +    >>> set(stack.keys()) == set(['a', 'b', 'c']) +    True +    >>> set(stack.items()) == set([('a', 3), ('b', 2), ('c', 2)]) +    True +    >>> dict(**stack) == dict(stack) == dict(a=3, c=2, b=2) +    True +    >>> d = stack.pop() +    >>> stack['a'] +    2 +    >>> d = stack.pop() +    >>> stack['a'] +    1 +    >>> stack.get('b', None) +    >>> 'c' in stack +    True +    """ + +    def __iter__(self): +        dicts = list.__iter__(self) +        return iter(set(itertools.chain.from_iterable(c.keys() for c in dicts))) + +    def __getitem__(self, key): +        for scope in reversed(tuple(list.__iter__(self))): +            if key in scope: +                return scope[key] +        raise KeyError(key) + +    push = list.append + +    def __contains__(self, other): +        return collections.abc.Mapping.__contains__(self, other) + +    def __len__(self): +        return len(list(iter(self))) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/_functools.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/_functools.py new file mode 100644 index 0000000..e7053ba --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/_functools.py @@ -0,0 +1,20 @@ +import functools + + +# from jaraco.functools 3.5 +def pass_none(func): +    """ +    Wrap func so it's not called if its first param is None + +    >>> print_text = pass_none(print) +    >>> print_text('text') +    text +    >>> print_text(None) +    """ + +    @functools.wraps(func) +    def wrapper(param, *args, **kwargs): +        if param is not None: +            return func(param, *args, **kwargs) + +    return wrapper diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/_macos_compat.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/_macos_compat.py new file mode 100644 index 0000000..17769e9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/_macos_compat.py @@ -0,0 +1,12 @@ +import sys +import importlib + + +def bypass_compiler_fixup(cmd, args): +    return cmd + + +if sys.platform == 'darwin': +    compiler_fixup = importlib.import_module('_osx_support').compiler_fixup +else: +    compiler_fixup = bypass_compiler_fixup diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/_msvccompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/_msvccompiler.py new file mode 100644 index 0000000..729c2dd --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/_msvccompiler.py @@ -0,0 +1,572 @@ +"""distutils._msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for Microsoft Visual Studio 2015. + +The module is compatible with VS 2015 and later. You can find legacy support +for older versions in distutils.msvc9compiler and distutils.msvccompiler. 
+""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +#   finding DevStudio (through the registry) +# ported to VS 2005 and VS 2008 by Christian Heimes +# ported to VS 2015 by Steve Dower + +import os +import subprocess +import contextlib +import warnings +import unittest.mock as mock + +with contextlib.suppress(ImportError): +    import winreg + +from distutils.errors import ( +    DistutilsExecError, +    DistutilsPlatformError, +    CompileError, +    LibError, +    LinkError, +) +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +from itertools import count + + +def _find_vc2015(): +    try: +        key = winreg.OpenKeyEx( +            winreg.HKEY_LOCAL_MACHINE, +            r"Software\Microsoft\VisualStudio\SxS\VC7", +            access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY, +        ) +    except OSError: +        log.debug("Visual C++ is not registered") +        return None, None + +    best_version = 0 +    best_dir = None +    with key: +        for i in count(): +            try: +                v, vc_dir, vt = winreg.EnumValue(key, i) +            except OSError: +                break +            if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir): +                try: +                    version = int(float(v)) +                except (ValueError, TypeError): +                    continue +                if version >= 14 and version > best_version: +                    best_version, best_dir = version, vc_dir +    return best_version, best_dir + + +def _find_vc2017(): +    """Returns "15, path" based on the result of invoking vswhere.exe +    If no install is found, returns "None, None" + +    The version is returned to avoid unnecessarily changing the function +    result. It may be ignored when the path is not None. + +    If vswhere.exe is not available, by definition, VS 2017 is not +    installed. 
+    """ +    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles") +    if not root: +        return None, None + +    try: +        path = subprocess.check_output( +            [ +                os.path.join( +                    root, "Microsoft Visual Studio", "Installer", "vswhere.exe" +                ), +                "-latest", +                "-prerelease", +                "-requires", +                "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", +                "-property", +                "installationPath", +                "-products", +                "*", +            ], +            encoding="mbcs", +            errors="strict", +        ).strip() +    except (subprocess.CalledProcessError, OSError, UnicodeDecodeError): +        return None, None + +    path = os.path.join(path, "VC", "Auxiliary", "Build") +    if os.path.isdir(path): +        return 15, path + +    return None, None + + +PLAT_SPEC_TO_RUNTIME = { +    'x86': 'x86', +    'x86_amd64': 'x64', +    'x86_arm': 'arm', +    'x86_arm64': 'arm64', +} + + +def _find_vcvarsall(plat_spec): +    # bpo-38597: Removed vcruntime return value +    _, best_dir = _find_vc2017() + +    if not best_dir: +        best_version, best_dir = _find_vc2015() + +    if not best_dir: +        log.debug("No suitable Visual C++ version found") +        return None, None + +    vcvarsall = os.path.join(best_dir, "vcvarsall.bat") +    if not os.path.isfile(vcvarsall): +        log.debug("%s cannot be found", vcvarsall) +        return None, None + +    return vcvarsall, None + + +def _get_vc_env(plat_spec): +    if os.getenv("DISTUTILS_USE_SDK"): +        return {key.lower(): value for key, value in os.environ.items()} + +    vcvarsall, _ = _find_vcvarsall(plat_spec) +    if not vcvarsall: +        raise DistutilsPlatformError("Unable to find vcvarsall.bat") + +    try: +        out = subprocess.check_output( +            f'cmd /u /c "{vcvarsall}" {plat_spec} && set', +            stderr=subprocess.STDOUT, +        ).decode('utf-16le', errors='replace') +    except subprocess.CalledProcessError as exc: +        log.error(exc.output) +        raise DistutilsPlatformError(f"Error executing {exc.cmd}") + +    env = { +        key.lower(): value +        for key, _, value in (line.partition('=') for line in out.splitlines()) +        if key and value +    } + +    return env + + +def _find_exe(exe, paths=None): +    """Return path to an MSVC executable program. + +    Tries to find the program in several places: first, one of the +    MSVC program search paths from the registry; next, the directories +    in the PATH environment variable.  If any of those work, return an +    absolute path that is known to exist.  If none of them work, just +    return the original program name, 'exe'. +    """ +    if not paths: +        paths = os.getenv('path').split(os.pathsep) +    for p in paths: +        fn = os.path.join(os.path.abspath(p), exe) +        if os.path.isfile(fn): +            return fn +    return exe + + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'. Always cross-compile from x86 to work with the +# lighter-weight MSVC installs that do not include native 64-bit tools. 
+PLAT_TO_VCVARS = { +    'win32': 'x86', +    'win-amd64': 'x86_amd64', +    'win-arm32': 'x86_arm', +    'win-arm64': 'x86_arm64', +} + + +class MSVCCompiler(CCompiler): +    """Concrete class that implements an interface to Microsoft Visual C++, +    as defined by the CCompiler abstract class.""" + +    compiler_type = 'msvc' + +    # Just set this so CCompiler's constructor doesn't barf.  We currently +    # don't use the 'set_executables()' bureaucracy provided by CCompiler, +    # as it really isn't necessary for this sort of single-compiler class. +    # Would be nice to have a consistent interface with UnixCCompiler, +    # though, so it's worth thinking about. +    executables = {} + +    # Private class data (need to distinguish C from C++ source for compiler) +    _c_extensions = ['.c'] +    _cpp_extensions = ['.cc', '.cpp', '.cxx'] +    _rc_extensions = ['.rc'] +    _mc_extensions = ['.mc'] + +    # Needed for the filename generation methods provided by the +    # base class, CCompiler. +    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions +    res_extension = '.res' +    obj_extension = '.obj' +    static_lib_extension = '.lib' +    shared_lib_extension = '.dll' +    static_lib_format = shared_lib_format = '%s%s' +    exe_extension = '.exe' + +    def __init__(self, verbose=0, dry_run=0, force=0): +        super().__init__(verbose, dry_run, force) +        # target platform (.plat_name is consistent with 'bdist') +        self.plat_name = None +        self.initialized = False + +    @classmethod +    def _configure(cls, vc_env): +        """ +        Set class-level include/lib dirs. +        """ +        cls.include_dirs = cls._parse_path(vc_env.get('include', '')) +        cls.library_dirs = cls._parse_path(vc_env.get('lib', '')) + +    @staticmethod +    def _parse_path(val): +        return [dir.rstrip(os.sep) for dir in val.split(os.pathsep) if dir] + +    def initialize(self, plat_name=None): +        # multi-init means we would need to check platform same each time... +        assert not self.initialized, "don't init multiple times" +        if plat_name is None: +            plat_name = get_platform() +        # sanity check for platforms to prevent obscure errors later. +        if plat_name not in PLAT_TO_VCVARS: +            raise DistutilsPlatformError( +                f"--plat-name must be one of {tuple(PLAT_TO_VCVARS)}" +            ) + +        # Get the vcvarsall.bat spec for the requested platform. +        plat_spec = PLAT_TO_VCVARS[plat_name] + +        vc_env = _get_vc_env(plat_spec) +        if not vc_env: +            raise DistutilsPlatformError( +                "Unable to find a compatible " "Visual Studio installation." +            ) +        self._configure(vc_env) + +        self._paths = vc_env.get('path', '') +        paths = self._paths.split(os.pathsep) +        self.cc = _find_exe("cl.exe", paths) +        self.linker = _find_exe("link.exe", paths) +        self.lib = _find_exe("lib.exe", paths) +        self.rc = _find_exe("rc.exe", paths)  # resource compiler +        self.mc = _find_exe("mc.exe", paths)  # message compiler +        self.mt = _find_exe("mt.exe", paths)  # message compiler + +        self.preprocess_options = None +        # bpo-38597: Always compile with dynamic linking +        # Future releases of Python 3.x will include all past +        # versions of vcruntime*.dll for compatibility. 
+        self.compile_options = ['/nologo', '/O2', '/W3', '/GL', '/DNDEBUG', '/MD'] + +        self.compile_options_debug = [ +            '/nologo', +            '/Od', +            '/MDd', +            '/Zi', +            '/W3', +            '/D_DEBUG', +        ] + +        ldflags = ['/nologo', '/INCREMENTAL:NO', '/LTCG'] + +        ldflags_debug = ['/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'] + +        self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1'] +        self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1'] +        self.ldflags_shared = [ +            *ldflags, +            '/DLL', +            '/MANIFEST:EMBED,ID=2', +            '/MANIFESTUAC:NO', +        ] +        self.ldflags_shared_debug = [ +            *ldflags_debug, +            '/DLL', +            '/MANIFEST:EMBED,ID=2', +            '/MANIFESTUAC:NO', +        ] +        self.ldflags_static = [*ldflags] +        self.ldflags_static_debug = [*ldflags_debug] + +        self._ldflags = { +            (CCompiler.EXECUTABLE, None): self.ldflags_exe, +            (CCompiler.EXECUTABLE, False): self.ldflags_exe, +            (CCompiler.EXECUTABLE, True): self.ldflags_exe_debug, +            (CCompiler.SHARED_OBJECT, None): self.ldflags_shared, +            (CCompiler.SHARED_OBJECT, False): self.ldflags_shared, +            (CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug, +            (CCompiler.SHARED_LIBRARY, None): self.ldflags_static, +            (CCompiler.SHARED_LIBRARY, False): self.ldflags_static, +            (CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug, +        } + +        self.initialized = True + +    # -- Worker methods ------------------------------------------------ + +    @property +    def out_extensions(self): +        return { +            **super().out_extensions, +            **{ +                ext: self.res_extension +                for ext in self._rc_extensions + self._mc_extensions +            }, +        } + +    def compile(  # noqa: C901 +        self, +        sources, +        output_dir=None, +        macros=None, +        include_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        depends=None, +    ): + +        if not self.initialized: +            self.initialize() +        compile_info = self._setup_compile( +            output_dir, macros, include_dirs, sources, depends, extra_postargs +        ) +        macros, objects, extra_postargs, pp_opts, build = compile_info + +        compile_opts = extra_preargs or [] +        compile_opts.append('/c') +        if debug: +            compile_opts.extend(self.compile_options_debug) +        else: +            compile_opts.extend(self.compile_options) + +        add_cpp_opts = False + +        for obj in objects: +            try: +                src, ext = build[obj] +            except KeyError: +                continue +            if debug: +                # pass the full pathname to MSVC in debug mode, +                # this allows the debugger to find the source file +                # without asking the user to browse for it +                src = os.path.abspath(src) + +            if ext in self._c_extensions: +                input_opt = "/Tc" + src +            elif ext in self._cpp_extensions: +                input_opt = "/Tp" + src +                add_cpp_opts = True +            elif ext in self._rc_extensions: +                # compile .RC to .RES file +                input_opt = src +                output_opt = "/fo" 
+ obj +                try: +                    self.spawn([self.rc] + pp_opts + [output_opt, input_opt]) +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            elif ext in self._mc_extensions: +                # Compile .MC to .RC file to .RES file. +                #   * '-h dir' specifies the directory for the +                #     generated include file +                #   * '-r dir' specifies the target directory of the +                #     generated RC file and the binary message resource +                #     it includes +                # +                # For now (since there are no options to change this), +                # we use the source-directory for the include file and +                # the build directory for the RC file and message +                # resources. This works at least for win32all. +                h_dir = os.path.dirname(src) +                rc_dir = os.path.dirname(obj) +                try: +                    # first compile .MC to .RC and .H file +                    self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src]) +                    base, _ = os.path.splitext(os.path.basename(src)) +                    rc_file = os.path.join(rc_dir, base + '.rc') +                    # then compile .RC to .RES file +                    self.spawn([self.rc, "/fo" + obj, rc_file]) + +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            else: +                # how to handle this file? +                raise CompileError(f"Don't know how to compile {src} to {obj}") + +            args = [self.cc] + compile_opts + pp_opts +            if add_cpp_opts: +                args.append('/EHsc') +            args.append(input_opt) +            args.append("/Fo" + obj) +            args.extend(extra_postargs) + +            try: +                self.spawn(args) +            except DistutilsExecError as msg: +                raise CompileError(msg) + +        return objects + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): + +        if not self.initialized: +            self.initialize() +        objects, output_dir = self._fix_object_args(objects, output_dir) +        output_filename = self.library_filename(output_libname, output_dir=output_dir) + +        if self._need_link(objects, output_filename): +            lib_args = objects + ['/OUT:' + output_filename] +            if debug: +                pass  # XXX what goes here? 
+            try: +                log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args)) +                self.spawn([self.lib] + lib_args) +            except DistutilsExecError as msg: +                raise LibError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def link( +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): + +        if not self.initialized: +            self.initialize() +        objects, output_dir = self._fix_object_args(objects, output_dir) +        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) +        libraries, library_dirs, runtime_library_dirs = fixed_args + +        if runtime_library_dirs: +            self.warn( +                "I don't know what to do with 'runtime_library_dirs': " +                + str(runtime_library_dirs) +            ) + +        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) +        if output_dir is not None: +            output_filename = os.path.join(output_dir, output_filename) + +        if self._need_link(objects, output_filename): +            ldflags = self._ldflags[target_desc, debug] + +            export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])] + +            ld_args = ( +                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] +            ) + +            # The MSVC linker generates .lib and .exp files, which cannot be +            # suppressed by any linker switches. The .lib files may even be +            # needed! Make sure they are generated in the temporary build +            # directory. Since they have different names for debug and release +            # builds, they can go into the same directory. +            build_temp = os.path.dirname(objects[0]) +            if export_symbols is not None: +                (dll_name, dll_ext) = os.path.splitext( +                    os.path.basename(output_filename) +                ) +                implib_file = os.path.join(build_temp, self.library_filename(dll_name)) +                ld_args.append('/IMPLIB:' + implib_file) + +            if extra_preargs: +                ld_args[:0] = extra_preargs +            if extra_postargs: +                ld_args.extend(extra_postargs) + +            output_dir = os.path.dirname(os.path.abspath(output_filename)) +            self.mkpath(output_dir) +            try: +                log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args)) +                self.spawn([self.linker] + ld_args) +            except DistutilsExecError as msg: +                raise LinkError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def spawn(self, cmd): +        env = dict(os.environ, PATH=self._paths) +        with self._fallback_spawn(cmd, env) as fallback: +            return super().spawn(cmd, env=env) +        return fallback.value + +    @contextlib.contextmanager +    def _fallback_spawn(self, cmd, env): +        """ +        Discovered in pypa/distutils#15, some tools monkeypatch the compiler, +        so the 'env' kwarg causes a TypeError. 
Detect this condition and +        restore the legacy, unsafe behavior. +        """ +        bag = type('Bag', (), {})() +        try: +            yield bag +        except TypeError as exc: +            if "unexpected keyword argument 'env'" not in str(exc): +                raise +        else: +            return +        warnings.warn("Fallback spawn triggered. Please update distutils monkeypatch.") +        with mock.patch.dict('os.environ', env): +            bag.value = super().spawn(cmd) + +    # -- Miscellaneous methods ----------------------------------------- +    # These are all used by the 'gen_lib_options() function, in +    # ccompiler.py. + +    def library_dir_option(self, dir): +        return "/LIBPATH:" + dir + +    def runtime_library_dir_option(self, dir): +        raise DistutilsPlatformError( +            "don't know how to set runtime library search path for MSVC" +        ) + +    def library_option(self, lib): +        return self.library_filename(lib) + +    def find_library_file(self, dirs, lib, debug=0): +        # Prefer a debugging library if found (and requested), but deal +        # with it if we don't have one. +        if debug: +            try_names = [lib + "_d", lib] +        else: +            try_names = [lib] +        for dir in dirs: +            for name in try_names: +                libfile = os.path.join(dir, self.library_filename(name)) +                if os.path.isfile(libfile): +                    return libfile +        else: +            # Oops, didn't find it in *any* of 'dirs' +            return None diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/archive_util.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/archive_util.py new file mode 100644 index 0000000..5dfe2a1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/archive_util.py @@ -0,0 +1,280 @@ +"""distutils.archive_util + +Utility functions for creating archive files (tarballs, zip files, +that sort of thing).""" + +import os +from warnings import warn +import sys + +try: +    import zipfile +except ImportError: +    zipfile = None + + +from distutils.errors import DistutilsExecError +from distutils.spawn import spawn +from distutils.dir_util import mkpath +from distutils import log + +try: +    from pwd import getpwnam +except ImportError: +    getpwnam = None + +try: +    from grp import getgrnam +except ImportError: +    getgrnam = None + + +def _get_gid(name): +    """Returns a gid, given a group name.""" +    if getgrnam is None or name is None: +        return None +    try: +        result = getgrnam(name) +    except KeyError: +        result = None +    if result is not None: +        return result[2] +    return None + + +def _get_uid(name): +    """Returns an uid, given a user name.""" +    if getpwnam is None or name is None: +        return None +    try: +        result = getpwnam(name) +    except KeyError: +        result = None +    if result is not None: +        return result[2] +    return None + + +def make_tarball( +    base_name, base_dir, compress="gzip", verbose=0, dry_run=0, owner=None, group=None +): +    """Create a (possibly compressed) tar file from all the files under +    'base_dir'. + +    'compress' must be "gzip" (the default), "bzip2", "xz", "compress", or +    None.  ("compress" will be deprecated in Python 3.2) + +    'owner' and 'group' can be used to define an owner and a group for the +    archive that is being built. 
If not provided, the current owner and group +    will be used. + +    The output tar file will be named 'base_dir' +  ".tar", possibly plus +    the appropriate compression extension (".gz", ".bz2", ".xz" or ".Z"). + +    Returns the output filename. +    """ +    tar_compression = { +        'gzip': 'gz', +        'bzip2': 'bz2', +        'xz': 'xz', +        None: '', +        'compress': '', +    } +    compress_ext = {'gzip': '.gz', 'bzip2': '.bz2', 'xz': '.xz', 'compress': '.Z'} + +    # flags for compression program, each element of list will be an argument +    if compress is not None and compress not in compress_ext.keys(): +        raise ValueError( +            "bad value for 'compress': must be None, 'gzip', 'bzip2', " +            "'xz' or 'compress'" +        ) + +    archive_name = base_name + '.tar' +    if compress != 'compress': +        archive_name += compress_ext.get(compress, '') + +    mkpath(os.path.dirname(archive_name), dry_run=dry_run) + +    # creating the tarball +    import tarfile  # late import so Python build itself doesn't break + +    log.info('Creating tar archive') + +    uid = _get_uid(owner) +    gid = _get_gid(group) + +    def _set_uid_gid(tarinfo): +        if gid is not None: +            tarinfo.gid = gid +            tarinfo.gname = group +        if uid is not None: +            tarinfo.uid = uid +            tarinfo.uname = owner +        return tarinfo + +    if not dry_run: +        tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) +        try: +            tar.add(base_dir, filter=_set_uid_gid) +        finally: +            tar.close() + +    # compression using `compress` +    if compress == 'compress': +        warn("'compress' is deprecated.", DeprecationWarning) +        # the option varies depending on the platform +        compressed_name = archive_name + compress_ext[compress] +        if sys.platform == 'win32': +            cmd = [compress, archive_name, compressed_name] +        else: +            cmd = [compress, '-f', archive_name] +        spawn(cmd, dry_run=dry_run) +        return compressed_name + +    return archive_name + + +def make_zipfile(base_name, base_dir, verbose=0, dry_run=0):  # noqa: C901 +    """Create a zip file from all the files under 'base_dir'. + +    The output zip file will be named 'base_name' + ".zip".  Uses either the +    "zipfile" Python module (if available) or the InfoZIP "zip" utility +    (if installed and found on the default search path).  If neither tool is +    available, raises DistutilsExecError.  Returns the name of the output zip +    file. +    """ +    zip_filename = base_name + ".zip" +    mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + +    # If zipfile module is not available, try spawning an external +    # 'zip' command. +    if zipfile is None: +        if verbose: +            zipoptions = "-r" +        else: +            zipoptions = "-rq" + +        try: +            spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) +        except DistutilsExecError: +            # XXX really should distinguish between "couldn't find +            # external 'zip' command" and "zip failed". 
+            raise DistutilsExecError( +                ( +                    "unable to create zip file '%s': " +                    "could neither import the 'zipfile' module nor " +                    "find a standalone zip utility" +                ) +                % zip_filename +            ) + +    else: +        log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) + +        if not dry_run: +            try: +                zip = zipfile.ZipFile( +                    zip_filename, "w", compression=zipfile.ZIP_DEFLATED +                ) +            except RuntimeError: +                zip = zipfile.ZipFile(zip_filename, "w", compression=zipfile.ZIP_STORED) + +            with zip: +                if base_dir != os.curdir: +                    path = os.path.normpath(os.path.join(base_dir, '')) +                    zip.write(path, path) +                    log.info("adding '%s'", path) +                for dirpath, dirnames, filenames in os.walk(base_dir): +                    for name in dirnames: +                        path = os.path.normpath(os.path.join(dirpath, name, '')) +                        zip.write(path, path) +                        log.info("adding '%s'", path) +                    for name in filenames: +                        path = os.path.normpath(os.path.join(dirpath, name)) +                        if os.path.isfile(path): +                            zip.write(path, path) +                            log.info("adding '%s'", path) + +    return zip_filename + + +ARCHIVE_FORMATS = { +    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), +    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), +    'xztar': (make_tarball, [('compress', 'xz')], "xz'ed tar-file"), +    'ztar': (make_tarball, [('compress', 'compress')], "compressed tar file"), +    'tar': (make_tarball, [('compress', None)], "uncompressed tar file"), +    'zip': (make_zipfile, [], "ZIP file"), +} + + +def check_archive_formats(formats): +    """Returns the first format from the 'format' list that is unknown. + +    If all formats are known, returns None +    """ +    for format in formats: +        if format not in ARCHIVE_FORMATS: +            return format +    return None + + +def make_archive( +    base_name, +    format, +    root_dir=None, +    base_dir=None, +    verbose=0, +    dry_run=0, +    owner=None, +    group=None, +): +    """Create an archive file (eg. zip or tar). + +    'base_name' is the name of the file to create, minus any format-specific +    extension; 'format' is the archive format: one of "zip", "tar", "gztar", +    "bztar", "xztar", or "ztar". + +    'root_dir' is a directory that will be the root directory of the +    archive; ie. we typically chdir into 'root_dir' before creating the +    archive.  'base_dir' is the directory where we start archiving from; +    ie. 'base_dir' will be the common prefix of all files and +    directories in the archive.  'root_dir' and 'base_dir' both default +    to the current directory.  Returns the name of the archive file. + +    'owner' and 'group' are used when creating a tar archive. By default, +    uses the current owner and group. 
+    """ +    save_cwd = os.getcwd() +    if root_dir is not None: +        log.debug("changing into '%s'", root_dir) +        base_name = os.path.abspath(base_name) +        if not dry_run: +            os.chdir(root_dir) + +    if base_dir is None: +        base_dir = os.curdir + +    kwargs = {'dry_run': dry_run} + +    try: +        format_info = ARCHIVE_FORMATS[format] +    except KeyError: +        raise ValueError("unknown archive format '%s'" % format) + +    func = format_info[0] +    for arg, val in format_info[1]: +        kwargs[arg] = val + +    if format != 'zip': +        kwargs['owner'] = owner +        kwargs['group'] = group + +    try: +        filename = func(base_name, base_dir, **kwargs) +    finally: +        if root_dir is not None: +            log.debug("changing back to '%s'", save_cwd) +            os.chdir(save_cwd) + +    return filename diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/bcppcompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/bcppcompiler.py new file mode 100644 index 0000000..80b6bd8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/bcppcompiler.py @@ -0,0 +1,408 @@ +"""distutils.bcppcompiler + +Contains BorlandCCompiler, an implementation of the abstract CCompiler class +for the Borland C++ compiler. +""" + +# This implementation by Lyle Johnson, based on the original msvccompiler.py +# module and using the directions originally published by Gordon Williams. + +# XXX looks like there's a LOT of overlap between these two classes: +# someone should sit down and factor out the common code as +# WindowsCCompiler!  --GPW + + +import os +import warnings + +from distutils.errors import ( +    DistutilsExecError, +    CompileError, +    LibError, +    LinkError, +    UnknownFileError, +) +from distutils.ccompiler import CCompiler, gen_preprocess_options +from distutils.file_util import write_file +from distutils.dep_util import newer +from distutils import log + + +warnings.warn( +    "bcppcompiler is deprecated and slated to be removed " +    "in the future. Please discontinue use or file an issue " +    "with pypa/distutils describing your use case.", +    DeprecationWarning, +) + + +class BCPPCompiler(CCompiler): +    """Concrete class that implements an interface to the Borland C/C++ +    compiler, as defined by the CCompiler abstract class. +    """ + +    compiler_type = 'bcpp' + +    # Just set this so CCompiler's constructor doesn't barf.  We currently +    # don't use the 'set_executables()' bureaucracy provided by CCompiler, +    # as it really isn't necessary for this sort of single-compiler class. +    # Would be nice to have a consistent interface with UnixCCompiler, +    # though, so it's worth thinking about. +    executables = {} + +    # Private class data (need to distinguish C from C++ source for compiler) +    _c_extensions = ['.c'] +    _cpp_extensions = ['.cc', '.cpp', '.cxx'] + +    # Needed for the filename generation methods provided by the +    # base class, CCompiler. +    src_extensions = _c_extensions + _cpp_extensions +    obj_extension = '.obj' +    static_lib_extension = '.lib' +    shared_lib_extension = '.dll' +    static_lib_format = shared_lib_format = '%s%s' +    exe_extension = '.exe' + +    def __init__(self, verbose=0, dry_run=0, force=0): + +        super().__init__(verbose, dry_run, force) + +        # These executables are assumed to all be in the path. 
+        # Borland doesn't seem to use any special registry settings to +        # indicate their installation locations. + +        self.cc = "bcc32.exe" +        self.linker = "ilink32.exe" +        self.lib = "tlib.exe" + +        self.preprocess_options = None +        self.compile_options = ['/tWM', '/O2', '/q', '/g0'] +        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] + +        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] +        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] +        self.ldflags_static = [] +        self.ldflags_exe = ['/Gn', '/q', '/x'] +        self.ldflags_exe_debug = ['/Gn', '/q', '/x', '/r'] + +    # -- Worker methods ------------------------------------------------ + +    def compile(  # noqa: C901 +        self, +        sources, +        output_dir=None, +        macros=None, +        include_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        depends=None, +    ): + +        macros, objects, extra_postargs, pp_opts, build = self._setup_compile( +            output_dir, macros, include_dirs, sources, depends, extra_postargs +        ) +        compile_opts = extra_preargs or [] +        compile_opts.append('-c') +        if debug: +            compile_opts.extend(self.compile_options_debug) +        else: +            compile_opts.extend(self.compile_options) + +        for obj in objects: +            try: +                src, ext = build[obj] +            except KeyError: +                continue +            # XXX why do the normpath here? +            src = os.path.normpath(src) +            obj = os.path.normpath(obj) +            # XXX _setup_compile() did a mkpath() too but before the normpath. +            # Is it possible to skip the normpath? +            self.mkpath(os.path.dirname(obj)) + +            if ext == '.res': +                # This is already a binary file -- skip it. +                continue  # the 'for' loop +            if ext == '.rc': +                # This needs to be compiled to a .res file -- do it now. +                try: +                    self.spawn(["brcc32", "-fo", obj, src]) +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue  # the 'for' loop + +            # The next two are both for the real compiler. +            if ext in self._c_extensions: +                input_opt = "" +            elif ext in self._cpp_extensions: +                input_opt = "-P" +            else: +                # Unknown file type -- no extra options.  The compiler +                # will probably fail, but let it just in case this is a +                # file the compiler recognizes even if we don't. +                input_opt = "" + +            output_opt = "-o" + obj + +            # Compiler command line syntax is: "bcc32 [options] file(s)". +            # Note that the source file names must appear at the end of +            # the command line. 
+            try: +                self.spawn( +                    [self.cc] +                    + compile_opts +                    + pp_opts +                    + [input_opt, output_opt] +                    + extra_postargs +                    + [src] +                ) +            except DistutilsExecError as msg: +                raise CompileError(msg) + +        return objects + +    # compile () + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): + +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        output_filename = self.library_filename(output_libname, output_dir=output_dir) + +        if self._need_link(objects, output_filename): +            lib_args = [output_filename, '/u'] + objects +            if debug: +                pass  # XXX what goes here? +            try: +                self.spawn([self.lib] + lib_args) +            except DistutilsExecError as msg: +                raise LibError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    # create_static_lib () + +    def link(  # noqa: C901 +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): + +        # XXX this ignores 'build_temp'!  should follow the lead of +        # msvccompiler.py + +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        (libraries, library_dirs, runtime_library_dirs) = self._fix_lib_args( +            libraries, library_dirs, runtime_library_dirs +        ) + +        if runtime_library_dirs: +            log.warn( +                "I don't know what to do with 'runtime_library_dirs': %s", +                str(runtime_library_dirs), +            ) + +        if output_dir is not None: +            output_filename = os.path.join(output_dir, output_filename) + +        if self._need_link(objects, output_filename): + +            # Figure out linker args based on type of target. 
+            if target_desc == CCompiler.EXECUTABLE: +                startup_obj = 'c0w32' +                if debug: +                    ld_args = self.ldflags_exe_debug[:] +                else: +                    ld_args = self.ldflags_exe[:] +            else: +                startup_obj = 'c0d32' +                if debug: +                    ld_args = self.ldflags_shared_debug[:] +                else: +                    ld_args = self.ldflags_shared[:] + +            # Create a temporary exports file for use by the linker +            if export_symbols is None: +                def_file = '' +            else: +                head, tail = os.path.split(output_filename) +                modname, ext = os.path.splitext(tail) +                temp_dir = os.path.dirname(objects[0])  # preserve tree structure +                def_file = os.path.join(temp_dir, '%s.def' % modname) +                contents = ['EXPORTS'] +                for sym in export_symbols or []: +                    contents.append('  {}=_{}'.format(sym, sym)) +                self.execute(write_file, (def_file, contents), "writing %s" % def_file) + +            # Borland C++ has problems with '/' in paths +            objects2 = map(os.path.normpath, objects) +            # split objects in .obj and .res files +            # Borland C++ needs them at different positions in the command line +            objects = [startup_obj] +            resources = [] +            for file in objects2: +                (base, ext) = os.path.splitext(os.path.normcase(file)) +                if ext == '.res': +                    resources.append(file) +                else: +                    objects.append(file) + +            for ell in library_dirs: +                ld_args.append("/L%s" % os.path.normpath(ell)) +            ld_args.append("/L.")  # we sometimes use relative paths + +            # list of object files +            ld_args.extend(objects) + +            # XXX the command-line syntax for Borland C++ is a bit wonky; +            # certain filenames are jammed together in one big string, but +            # comma-delimited.  This doesn't mesh too well with the +            # Unix-centric attitude (with a DOS/Windows quoting hack) of +            # 'spawn()', so constructing the argument list is a bit +            # awkward.  Note that doing the obvious thing and jamming all +            # the filenames and commas into one argument would be wrong, +            # because 'spawn()' would quote any filenames with spaces in +            # them.  Arghghh!.  Apparently it works fine as coded... 
+ +            # name of dll/exe file +            ld_args.extend([',', output_filename]) +            # no map file and start libraries +            ld_args.append(',,') + +            for lib in libraries: +                # see if we find it and if there is a bcpp specific lib +                # (xxx_bcpp.lib) +                libfile = self.find_library_file(library_dirs, lib, debug) +                if libfile is None: +                    ld_args.append(lib) +                    # probably a BCPP internal library -- don't warn +                else: +                    # full name which prefers bcpp_xxx.lib over xxx.lib +                    ld_args.append(libfile) + +            # some default libraries +            ld_args.append('import32') +            ld_args.append('cw32mt') + +            # def file for export symbols +            ld_args.extend([',', def_file]) +            # add resource files +            ld_args.append(',') +            ld_args.extend(resources) + +            if extra_preargs: +                ld_args[:0] = extra_preargs +            if extra_postargs: +                ld_args.extend(extra_postargs) + +            self.mkpath(os.path.dirname(output_filename)) +            try: +                self.spawn([self.linker] + ld_args) +            except DistutilsExecError as msg: +                raise LinkError(msg) + +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    # link () + +    # -- Miscellaneous methods ----------------------------------------- + +    def find_library_file(self, dirs, lib, debug=0): +        # List of effective library names to try, in order of preference: +        # xxx_bcpp.lib is better than xxx.lib +        # and xxx_d.lib is better than xxx.lib if debug is set +        # +        # The "_bcpp" suffix is to handle a Python installation for people +        # with multiple compilers (primarily Distutils hackers, I suspect +        # ;-).  The idea is they'd have one static library for each +        # compiler they care about, since (almost?) every Windows compiler +        # seems to have a different format for static libraries. 
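# [Editorial worked example] With the preference order implemented just below,
# find_library_file(dirs, "foo", debug=1) probes each directory for, in turn,
#     foo_d_bcpp.lib, foo_bcpp.lib, foo_d.lib, foo.lib
# and with debug=0 only for
#     foo_bcpp.lib, foo.lib
# (assuming the class's usual '.lib' static-library naming), returning the
# first path that exists, or None.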
+        if debug: +            dlib = lib + "_d" +            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) +        else: +            try_names = (lib + "_bcpp", lib) + +        for dir in dirs: +            for name in try_names: +                libfile = os.path.join(dir, self.library_filename(name)) +                if os.path.exists(libfile): +                    return libfile +        else: +            # Oops, didn't find it in *any* of 'dirs' +            return None + +    # overwrite the one from CCompiler to support rc and res-files +    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): +        if output_dir is None: +            output_dir = '' +        obj_names = [] +        for src_name in source_filenames: +            # use normcase to make sure '.rc' is really '.rc' and not '.RC' +            (base, ext) = os.path.splitext(os.path.normcase(src_name)) +            if ext not in (self.src_extensions + ['.rc', '.res']): +                raise UnknownFileError( +                    "unknown file type '{}' (from '{}')".format(ext, src_name) +                ) +            if strip_dir: +                base = os.path.basename(base) +            if ext == '.res': +                # these can go unchanged +                obj_names.append(os.path.join(output_dir, base + ext)) +            elif ext == '.rc': +                # these need to be compiled to .res-files +                obj_names.append(os.path.join(output_dir, base + '.res')) +            else: +                obj_names.append(os.path.join(output_dir, base + self.obj_extension)) +        return obj_names + +    # object_filenames () + +    def preprocess( +        self, +        source, +        output_file=None, +        macros=None, +        include_dirs=None, +        extra_preargs=None, +        extra_postargs=None, +    ): + +        (_, macros, include_dirs) = self._fix_compile_args(None, macros, include_dirs) +        pp_opts = gen_preprocess_options(macros, include_dirs) +        pp_args = ['cpp32.exe'] + pp_opts +        if output_file is not None: +            pp_args.append('-o' + output_file) +        if extra_preargs: +            pp_args[:0] = extra_preargs +        if extra_postargs: +            pp_args.extend(extra_postargs) +        pp_args.append(source) + +        # We need to preprocess: either we're being forced to, or the +        # source file is newer than the target (or the target doesn't +        # exist). 
+        if self.force or output_file is None or newer(source, output_file): +            if output_file: +                self.mkpath(os.path.dirname(output_file)) +            try: +                self.spawn(pp_args) +            except DistutilsExecError as msg: +                print(msg) +                raise CompileError(msg) + +    # preprocess() diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/ccompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/ccompiler.py new file mode 100644 index 0000000..97551c9 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/ccompiler.py @@ -0,0 +1,1220 @@ +"""distutils.ccompiler + +Contains CCompiler, an abstract base class that defines the interface +for the Distutils compiler abstraction model.""" + +import sys +import os +import re + +from distutils.errors import ( +    CompileError, +    LinkError, +    UnknownFileError, +    DistutilsPlatformError, +    DistutilsModuleError, +) +from distutils.spawn import spawn +from distutils.file_util import move_file +from distutils.dir_util import mkpath +from distutils.dep_util import newer_group +from distutils.util import split_quoted, execute +from distutils import log + + +class CCompiler: +    """Abstract base class to define the interface that must be implemented +    by real compiler classes.  Also has some utility methods used by +    several compiler classes. + +    The basic idea behind a compiler abstraction class is that each +    instance can be used for all the compile/link steps in building a +    single project.  Thus, attributes common to all of those compile and +    link steps -- include directories, macros to define, libraries to link +    against, etc. -- are attributes of the compiler instance.  To allow for +    variability in how individual files are treated, most of those +    attributes may be varied on a per-compilation or per-link basis. +    """ + +    # 'compiler_type' is a class attribute that identifies this class.  It +    # keeps code that wants to know what kind of compiler it's dealing with +    # from having to import all possible compiler classes just to do an +    # 'isinstance'.  In concrete CCompiler subclasses, 'compiler_type' +    # should really, really be one of the keys of the 'compiler_class' +    # dictionary (see below -- used by the 'new_compiler()' factory +    # function) -- authors of new compiler interface classes are +    # responsible for updating 'compiler_class'! +    compiler_type = None + +    # XXX things not handled by this compiler abstraction model: +    #   * client can't provide additional options for a compiler, +    #     e.g. warning, optimization, debugging flags.  Perhaps this +    #     should be the domain of concrete compiler abstraction classes +    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base +    #     class should have methods for the common ones. +    #   * can't completely override the include or library searchg +    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2". +    #     I'm not sure how widely supported this is even by Unix +    #     compilers, much less on other platforms.  And I'm even less +    #     sure how useful it is; maybe for cross-compiling, but +    #     support for that is a ways off.  (And anyways, cross +    #     compilers probably have a dedicated binary with the +    #     right paths compiled in.  I hope.) +    #   * can't do really freaky things with the library list/library +    #     dirs, e.g. 
"-Ldir1 -lfoo -Ldir2 -lfoo" to link against +    #     different versions of libfoo.a in different locations.  I +    #     think this is useless without the ability to null out the +    #     library search path anyways. + +    # Subclasses that rely on the standard filename generation methods +    # implemented below should override these; see the comment near +    # those methods ('object_filenames()' et. al.) for details: +    src_extensions = None  # list of strings +    obj_extension = None  # string +    static_lib_extension = None +    shared_lib_extension = None  # string +    static_lib_format = None  # format string +    shared_lib_format = None  # prob. same as static_lib_format +    exe_extension = None  # string + +    # Default language settings. language_map is used to detect a source +    # file or Extension target language, checking source filenames. +    # language_order is used to detect the language precedence, when deciding +    # what language to use when mixing source types. For example, if some +    # extension has two files with ".c" extension, and one with ".cpp", it +    # is still linked as c++. +    language_map = { +        ".c": "c", +        ".cc": "c++", +        ".cpp": "c++", +        ".cxx": "c++", +        ".m": "objc", +    } +    language_order = ["c++", "objc", "c"] + +    include_dirs = [] +    """ +    include dirs specific to this compiler class +    """ + +    library_dirs = [] +    """ +    library dirs specific to this compiler class +    """ + +    def __init__(self, verbose=0, dry_run=0, force=0): +        self.dry_run = dry_run +        self.force = force +        self.verbose = verbose + +        # 'output_dir': a common output directory for object, library, +        # shared object, and shared library files +        self.output_dir = None + +        # 'macros': a list of macro definitions (or undefinitions).  A +        # macro definition is a 2-tuple (name, value), where the value is +        # either a string or None (no explicit value).  A macro +        # undefinition is a 1-tuple (name,). +        self.macros = [] + +        # 'include_dirs': a list of directories to search for include files +        self.include_dirs = [] + +        # 'libraries': a list of libraries to include in any link +        # (library names, not filenames: eg. "foo" not "libfoo.a") +        self.libraries = [] + +        # 'library_dirs': a list of directories to search for libraries +        self.library_dirs = [] + +        # 'runtime_library_dirs': a list of directories to search for +        # shared libraries/objects at runtime +        self.runtime_library_dirs = [] + +        # 'objects': a list of object files (or similar, such as explicitly +        # named library files) to include on any link +        self.objects = [] + +        for key in self.executables.keys(): +            self.set_executable(key, self.executables[key]) + +    def set_executables(self, **kwargs): +        """Define the executables (and options for them) that will be run +        to perform the various stages of compilation.  
The exact set of +        executables that may be specified here depends on the compiler +        class (via the 'executables' class attribute), but most will have: +          compiler      the C/C++ compiler +          linker_so     linker used to create shared objects and libraries +          linker_exe    linker used to create binary executables +          archiver      static library creator + +        On platforms with a command-line (Unix, DOS/Windows), each of these +        is a string that will be split into executable name and (optional) +        list of arguments.  (Splitting the string is done similarly to how +        Unix shells operate: words are delimited by spaces, but quotes and +        backslashes can override this.  See +        'distutils.util.split_quoted()'.) +        """ + +        # Note that some CCompiler implementation classes will define class +        # attributes 'cpp', 'cc', etc. with hard-coded executable names; +        # this is appropriate when a compiler class is for exactly one +        # compiler/OS combination (eg. MSVCCompiler).  Other compiler +        # classes (UnixCCompiler, in particular) are driven by information +        # discovered at run-time, since there are many different ways to do +        # basically the same things with Unix C compilers. + +        for key in kwargs: +            if key not in self.executables: +                raise ValueError( +                    "unknown executable '%s' for class %s" +                    % (key, self.__class__.__name__) +                ) +            self.set_executable(key, kwargs[key]) + +    def set_executable(self, key, value): +        if isinstance(value, str): +            setattr(self, key, split_quoted(value)) +        else: +            setattr(self, key, value) + +    def _find_macro(self, name): +        i = 0 +        for defn in self.macros: +            if defn[0] == name: +                return i +            i += 1 +        return None + +    def _check_macro_definitions(self, definitions): +        """Ensures that every element of 'definitions' is a valid macro +        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do +        nothing if all definitions are OK, raise TypeError otherwise. +        """ +        for defn in definitions: +            if not ( +                isinstance(defn, tuple) +                and ( +                    len(defn) in (1, 2) +                    and (isinstance(defn[1], str) or defn[1] is None) +                ) +                and isinstance(defn[0], str) +            ): +                raise TypeError( +                    ("invalid macro definition '%s': " % defn) +                    + "must be tuple (string,), (string, string), or " +                    + "(string, None)" +                ) + +    # -- Bookkeeping methods ------------------------------------------- + +    def define_macro(self, name, value=None): +        """Define a preprocessor macro for all compilations driven by this +        compiler object.  The optional parameter 'value' should be a +        string; if it is not supplied, then the macro will be defined +        without an explicit value and the exact outcome depends on the +        compiler used (XXX true? does ANSI say anything about this?) +        """ +        # Delete from the list of macro definitions/undefinitions if +        # already there (so that this one will take precedence). 
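# [Editorial usage sketch; names are made up] Macro bookkeeping on a compiler
# instance accumulates, with later calls taking precedence:
#
#     cc.define_macro("VERSION", "2")   # appends ("VERSION", "2")
#     cc.define_macro("NDEBUG")         # appends ("NDEBUG", None)
#     cc.undefine_macro("NDEBUG")       # drops the earlier entry, appends ("NDEBUG",)
#
# gen_preprocess_options() later renders these as -DVERSION=2 and -UNDEBUG.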
+        i = self._find_macro(name) +        if i is not None: +            del self.macros[i] + +        self.macros.append((name, value)) + +    def undefine_macro(self, name): +        """Undefine a preprocessor macro for all compilations driven by +        this compiler object.  If the same macro is defined by +        'define_macro()' and undefined by 'undefine_macro()' the last call +        takes precedence (including multiple redefinitions or +        undefinitions).  If the macro is redefined/undefined on a +        per-compilation basis (ie. in the call to 'compile()'), then that +        takes precedence. +        """ +        # Delete from the list of macro definitions/undefinitions if +        # already there (so that this one will take precedence). +        i = self._find_macro(name) +        if i is not None: +            del self.macros[i] + +        undefn = (name,) +        self.macros.append(undefn) + +    def add_include_dir(self, dir): +        """Add 'dir' to the list of directories that will be searched for +        header files.  The compiler is instructed to search directories in +        the order in which they are supplied by successive calls to +        'add_include_dir()'. +        """ +        self.include_dirs.append(dir) + +    def set_include_dirs(self, dirs): +        """Set the list of directories that will be searched to 'dirs' (a +        list of strings).  Overrides any preceding calls to +        'add_include_dir()'; subsequence calls to 'add_include_dir()' add +        to the list passed to 'set_include_dirs()'.  This does not affect +        any list of standard include directories that the compiler may +        search by default. +        """ +        self.include_dirs = dirs[:] + +    def add_library(self, libname): +        """Add 'libname' to the list of libraries that will be included in +        all links driven by this compiler object.  Note that 'libname' +        should *not* be the name of a file containing a library, but the +        name of the library itself: the actual filename will be inferred by +        the linker, the compiler, or the compiler class (depending on the +        platform). + +        The linker will be instructed to link against libraries in the +        order they were supplied to 'add_library()' and/or +        'set_libraries()'.  It is perfectly valid to duplicate library +        names; the linker will be instructed to link against libraries as +        many times as they are mentioned. +        """ +        self.libraries.append(libname) + +    def set_libraries(self, libnames): +        """Set the list of libraries to be included in all links driven by +        this compiler object to 'libnames' (a list of strings).  This does +        not affect any standard system libraries that the linker may +        include by default. +        """ +        self.libraries = libnames[:] + +    def add_library_dir(self, dir): +        """Add 'dir' to the list of directories that will be searched for +        libraries specified to 'add_library()' and 'set_libraries()'.  The +        linker will be instructed to search for libraries in the order they +        are supplied to 'add_library_dir()' and/or 'set_library_dirs()'. +        """ +        self.library_dirs.append(dir) + +    def set_library_dirs(self, dirs): +        """Set the list of library search directories to 'dirs' (a list of +        strings).  This does not affect any standard library search path +        that the linker may search by default. 
+        """ +        self.library_dirs = dirs[:] + +    def add_runtime_library_dir(self, dir): +        """Add 'dir' to the list of directories that will be searched for +        shared libraries at runtime. +        """ +        self.runtime_library_dirs.append(dir) + +    def set_runtime_library_dirs(self, dirs): +        """Set the list of directories to search for shared libraries at +        runtime to 'dirs' (a list of strings).  This does not affect any +        standard search path that the runtime linker may search by +        default. +        """ +        self.runtime_library_dirs = dirs[:] + +    def add_link_object(self, object): +        """Add 'object' to the list of object files (or analogues, such as +        explicitly named library files or the output of "resource +        compilers") to be included in every link driven by this compiler +        object. +        """ +        self.objects.append(object) + +    def set_link_objects(self, objects): +        """Set the list of object files (or analogues) to be included in +        every link to 'objects'.  This does not affect any standard object +        files that the linker may include by default (such as system +        libraries). +        """ +        self.objects = objects[:] + +    # -- Private utility methods -------------------------------------- +    # (here for the convenience of subclasses) + +    # Helper method to prep compiler in subclass compile() methods + +    def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra): +        """Process arguments and decide which source files to compile.""" +        outdir, macros, incdirs = self._fix_compile_args(outdir, macros, incdirs) + +        if extra is None: +            extra = [] + +        # Get the list of expected output (object) files +        objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir) +        assert len(objects) == len(sources) + +        pp_opts = gen_preprocess_options(macros, incdirs) + +        build = {} +        for i in range(len(sources)): +            src = sources[i] +            obj = objects[i] +            ext = os.path.splitext(src)[1] +            self.mkpath(os.path.dirname(obj)) +            build[obj] = (src, ext) + +        return macros, objects, extra, pp_opts, build + +    def _get_cc_args(self, pp_opts, debug, before): +        # works for unixccompiler, cygwinccompiler +        cc_args = pp_opts + ['-c'] +        if debug: +            cc_args[:0] = ['-g'] +        if before: +            cc_args[:0] = before +        return cc_args + +    def _fix_compile_args(self, output_dir, macros, include_dirs): +        """Typecheck and fix-up some of the arguments to the 'compile()' +        method, and return fixed-up values.  Specifically: if 'output_dir' +        is None, replaces it with 'self.output_dir'; ensures that 'macros' +        is a list, and augments it with 'self.macros'; ensures that +        'include_dirs' is a list, and augments it with 'self.include_dirs'. +        Guarantees that the returned values are of the correct type, +        i.e. for 'output_dir' either string or None, and for 'macros' and +        'include_dirs' either list or None. 
+        """ +        if output_dir is None: +            output_dir = self.output_dir +        elif not isinstance(output_dir, str): +            raise TypeError("'output_dir' must be a string or None") + +        if macros is None: +            macros = self.macros +        elif isinstance(macros, list): +            macros = macros + (self.macros or []) +        else: +            raise TypeError("'macros' (if supplied) must be a list of tuples") + +        if include_dirs is None: +            include_dirs = self.include_dirs +        elif isinstance(include_dirs, (list, tuple)): +            include_dirs = list(include_dirs) + (self.include_dirs or []) +        else: +            raise TypeError("'include_dirs' (if supplied) must be a list of strings") + +        # add include dirs for class +        include_dirs += self.__class__.include_dirs + +        return output_dir, macros, include_dirs + +    def _prep_compile(self, sources, output_dir, depends=None): +        """Decide which source files must be recompiled. + +        Determine the list of object files corresponding to 'sources', +        and figure out which ones really need to be recompiled. +        Return a list of all object files and a dictionary telling +        which source files can be skipped. +        """ +        # Get the list of expected output (object) files +        objects = self.object_filenames(sources, output_dir=output_dir) +        assert len(objects) == len(sources) + +        # Return an empty dict for the "which source files can be skipped" +        # return value to preserve API compatibility. +        return objects, {} + +    def _fix_object_args(self, objects, output_dir): +        """Typecheck and fix up some arguments supplied to various methods. +        Specifically: ensure that 'objects' is a list; if output_dir is +        None, replace with self.output_dir.  Return fixed versions of +        'objects' and 'output_dir'. +        """ +        if not isinstance(objects, (list, tuple)): +            raise TypeError("'objects' must be a list or tuple of strings") +        objects = list(objects) + +        if output_dir is None: +            output_dir = self.output_dir +        elif not isinstance(output_dir, str): +            raise TypeError("'output_dir' must be a string or None") + +        return (objects, output_dir) + +    def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs): +        """Typecheck and fix up some of the arguments supplied to the +        'link_*' methods.  Specifically: ensure that all arguments are +        lists, and augment them with their permanent versions +        (eg. 'self.libraries' augments 'libraries').  Return a tuple with +        fixed versions of all arguments. 
+        """ +        if libraries is None: +            libraries = self.libraries +        elif isinstance(libraries, (list, tuple)): +            libraries = list(libraries) + (self.libraries or []) +        else: +            raise TypeError("'libraries' (if supplied) must be a list of strings") + +        if library_dirs is None: +            library_dirs = self.library_dirs +        elif isinstance(library_dirs, (list, tuple)): +            library_dirs = list(library_dirs) + (self.library_dirs or []) +        else: +            raise TypeError("'library_dirs' (if supplied) must be a list of strings") + +        # add library dirs for class +        library_dirs += self.__class__.library_dirs + +        if runtime_library_dirs is None: +            runtime_library_dirs = self.runtime_library_dirs +        elif isinstance(runtime_library_dirs, (list, tuple)): +            runtime_library_dirs = list(runtime_library_dirs) + ( +                self.runtime_library_dirs or [] +            ) +        else: +            raise TypeError( +                "'runtime_library_dirs' (if supplied) " "must be a list of strings" +            ) + +        return (libraries, library_dirs, runtime_library_dirs) + +    def _need_link(self, objects, output_file): +        """Return true if we need to relink the files listed in 'objects' +        to recreate 'output_file'. +        """ +        if self.force: +            return True +        else: +            if self.dry_run: +                newer = newer_group(objects, output_file, missing='newer') +            else: +                newer = newer_group(objects, output_file) +            return newer + +    def detect_language(self, sources): +        """Detect the language of a given file, or list of files. Uses +        language_map, and language_order to do the job. +        """ +        if not isinstance(sources, list): +            sources = [sources] +        lang = None +        index = len(self.language_order) +        for source in sources: +            base, ext = os.path.splitext(source) +            extlang = self.language_map.get(ext) +            try: +                extindex = self.language_order.index(extlang) +                if extindex < index: +                    lang = extlang +                    index = extindex +            except ValueError: +                pass +        return lang + +    # -- Worker methods ------------------------------------------------ +    # (must be implemented by subclasses) + +    def preprocess( +        self, +        source, +        output_file=None, +        macros=None, +        include_dirs=None, +        extra_preargs=None, +        extra_postargs=None, +    ): +        """Preprocess a single C/C++ source file, named in 'source'. +        Output will be written to file named 'output_file', or stdout if +        'output_file' not supplied.  'macros' is a list of macro +        definitions as for 'compile()', which will augment the macros set +        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a +        list of directory names that will be added to the default list. + +        Raises PreprocessError on failure. +        """ +        pass + +    def compile( +        self, +        sources, +        output_dir=None, +        macros=None, +        include_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        depends=None, +    ): +        """Compile one or more source files. 
+ +        'sources' must be a list of filenames, most likely C/C++ +        files, but in reality anything that can be handled by a +        particular compiler and compiler class (eg. MSVCCompiler can +        handle resource files in 'sources').  Return a list of object +        filenames, one per source filename in 'sources'.  Depending on +        the implementation, not all source files will necessarily be +        compiled, but all corresponding object filenames will be +        returned. + +        If 'output_dir' is given, object files will be put under it, while +        retaining their original path component.  That is, "foo/bar.c" +        normally compiles to "foo/bar.o" (for a Unix implementation); if +        'output_dir' is "build", then it would compile to +        "build/foo/bar.o". + +        'macros', if given, must be a list of macro definitions.  A macro +        definition is either a (name, value) 2-tuple or a (name,) 1-tuple. +        The former defines a macro; if the value is None, the macro is +        defined without an explicit value.  The 1-tuple case undefines a +        macro.  Later definitions/redefinitions/ undefinitions take +        precedence. + +        'include_dirs', if given, must be a list of strings, the +        directories to add to the default include file search path for this +        compilation only. + +        'debug' is a boolean; if true, the compiler will be instructed to +        output debug symbols in (or alongside) the object file(s). + +        'extra_preargs' and 'extra_postargs' are implementation- dependent. +        On platforms that have the notion of a command-line (e.g. Unix, +        DOS/Windows), they are most likely lists of strings: extra +        command-line arguments to prepend/append to the compiler command +        line.  On other platforms, consult the implementation class +        documentation.  In any event, they are intended as an escape hatch +        for those occasions when the abstract compiler framework doesn't +        cut the mustard. + +        'depends', if given, is a list of filenames that all targets +        depend on.  If a source file is older than any file in +        depends, then the source file will be recompiled.  This +        supports dependency tracking, but only at a coarse +        granularity. + +        Raises CompileError on failure. +        """ +        # A concrete compiler class can either override this method +        # entirely or implement _compile(). +        macros, objects, extra_postargs, pp_opts, build = self._setup_compile( +            output_dir, macros, include_dirs, sources, depends, extra_postargs +        ) +        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs) + +        for obj in objects: +            try: +                src, ext = build[obj] +            except KeyError: +                continue +            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) + +        # Return *all* object filenames, not just the ones we just built. +        return objects + +    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): +        """Compile 'src' to product 'obj'.""" +        # A concrete compiler class that does not override compile() +        # should implement _compile(). +        pass + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): +        """Link a bunch of stuff together to create a static library file. 
+        The "bunch of stuff" consists of the list of object files supplied +        as 'objects', the extra object files supplied to +        'add_link_object()' and/or 'set_link_objects()', the libraries +        supplied to 'add_library()' and/or 'set_libraries()', and the +        libraries supplied as 'libraries' (if any). + +        'output_libname' should be a library name, not a filename; the +        filename will be inferred from the library name.  'output_dir' is +        the directory where the library file will be put. + +        'debug' is a boolean; if true, debugging information will be +        included in the library (note that on most platforms, it is the +        compile step where this matters: the 'debug' flag is included here +        just for consistency). + +        'target_lang' is the target language for which the given objects +        are being compiled. This allows specific linkage time treatment of +        certain languages. + +        Raises LibError on failure. +        """ +        pass + +    # values for target_desc parameter in link() +    SHARED_OBJECT = "shared_object" +    SHARED_LIBRARY = "shared_library" +    EXECUTABLE = "executable" + +    def link( +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): +        """Link a bunch of stuff together to create an executable or +        shared library file. + +        The "bunch of stuff" consists of the list of object files supplied +        as 'objects'.  'output_filename' should be a filename.  If +        'output_dir' is supplied, 'output_filename' is relative to it +        (i.e. 'output_filename' can provide directory components if +        needed). + +        'libraries' is a list of libraries to link against.  These are +        library names, not filenames, since they're translated into +        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a" +        on Unix and "foo.lib" on DOS/Windows).  However, they can include a +        directory component, which means the linker will look in that +        specific directory rather than searching all the normal locations. + +        'library_dirs', if supplied, should be a list of directories to +        search for libraries that were specified as bare library names +        (ie. no directory component).  These are on top of the system +        default and those supplied to 'add_library_dir()' and/or +        'set_library_dirs()'.  'runtime_library_dirs' is a list of +        directories that will be embedded into the shared library and used +        to search for other shared libraries that *it* depends on at +        run-time.  (This may only be relevant on Unix.) + +        'export_symbols' is a list of symbols that the shared library will +        export.  (This appears to be relevant only on Windows.) + +        'debug' is as for 'compile()' and 'create_static_lib()', with the +        slight distinction that it actually matters on most platforms (as +        opposed to 'create_static_lib()', which includes a 'debug' flag +        mostly for form's sake). 
+ +        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except +        of course that they supply command-line arguments for the +        particular linker being used). + +        'target_lang' is the target language for which the given objects +        are being compiled. This allows specific linkage time treatment of +        certain languages. + +        Raises LinkError on failure. +        """ +        raise NotImplementedError + +    # Old 'link_*()' methods, rewritten to use the new 'link()' method. + +    def link_shared_lib( +        self, +        objects, +        output_libname, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): +        self.link( +            CCompiler.SHARED_LIBRARY, +            objects, +            self.library_filename(output_libname, lib_type='shared'), +            output_dir, +            libraries, +            library_dirs, +            runtime_library_dirs, +            export_symbols, +            debug, +            extra_preargs, +            extra_postargs, +            build_temp, +            target_lang, +        ) + +    def link_shared_object( +        self, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): +        self.link( +            CCompiler.SHARED_OBJECT, +            objects, +            output_filename, +            output_dir, +            libraries, +            library_dirs, +            runtime_library_dirs, +            export_symbols, +            debug, +            extra_preargs, +            extra_postargs, +            build_temp, +            target_lang, +        ) + +    def link_executable( +        self, +        objects, +        output_progname, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        target_lang=None, +    ): +        self.link( +            CCompiler.EXECUTABLE, +            objects, +            self.executable_filename(output_progname), +            output_dir, +            libraries, +            library_dirs, +            runtime_library_dirs, +            None, +            debug, +            extra_preargs, +            extra_postargs, +            None, +            target_lang, +        ) + +    # -- Miscellaneous methods ----------------------------------------- +    # These are all used by the 'gen_lib_options() function; there is +    # no appropriate default implementation so subclasses should +    # implement all of these. + +    def library_dir_option(self, dir): +        """Return the compiler option to add 'dir' to the list of +        directories searched for libraries. +        """ +        raise NotImplementedError + +    def runtime_library_dir_option(self, dir): +        """Return the compiler option to add 'dir' to the list of +        directories searched for runtime libraries. 
+        """ +        raise NotImplementedError + +    def library_option(self, lib): +        """Return the compiler option to add 'lib' to the list of libraries +        linked into the shared library or executable. +        """ +        raise NotImplementedError + +    def has_function(  # noqa: C901 +        self, +        funcname, +        includes=None, +        include_dirs=None, +        libraries=None, +        library_dirs=None, +    ): +        """Return a boolean indicating whether funcname is supported on +        the current platform.  The optional arguments can be used to +        augment the compilation environment. +        """ +        # this can't be included at module scope because it tries to +        # import math which might not be available at that point - maybe +        # the necessary logic should just be inlined? +        import tempfile + +        if includes is None: +            includes = [] +        if include_dirs is None: +            include_dirs = [] +        if libraries is None: +            libraries = [] +        if library_dirs is None: +            library_dirs = [] +        fd, fname = tempfile.mkstemp(".c", funcname, text=True) +        f = os.fdopen(fd, "w") +        try: +            for incl in includes: +                f.write("""#include "%s"\n""" % incl) +            f.write( +                """\ +int main (int argc, char **argv) { +    %s(); +    return 0; +} +""" +                % funcname +            ) +        finally: +            f.close() +        try: +            objects = self.compile([fname], include_dirs=include_dirs) +        except CompileError: +            return False +        finally: +            os.remove(fname) + +        try: +            self.link_executable( +                objects, "a.out", libraries=libraries, library_dirs=library_dirs +            ) +        except (LinkError, TypeError): +            return False +        else: +            os.remove(os.path.join(self.output_dir or '', "a.out")) +        finally: +            for fn in objects: +                os.remove(fn) +        return True + +    def find_library_file(self, dirs, lib, debug=0): +        """Search the specified list of directories for a static or shared +        library file 'lib' and return the full path to that file.  If +        'debug' true, look for a debugging version (if that makes sense on +        the current platform).  Return None if 'lib' wasn't found in any of +        the specified directories. +        """ +        raise NotImplementedError + +    # -- Filename generation methods ----------------------------------- + +    # The default implementation of the filename generating methods are +    # prejudiced towards the Unix/DOS/Windows view of the world: +    #   * object files are named by replacing the source file extension +    #     (eg. .c/.cpp -> .o/.obj) +    #   * library files (shared or static) are named by plugging the +    #     library name and extension into a format string, eg. +    #     "lib%s.%s" % (lib_name, ".a") for Unix static libraries +    #   * executables are named by appending an extension (possibly +    #     empty) to the program name: eg. progname + ".exe" for +    #     Windows +    # +    # To reduce redundant code, these methods expect to find +    # several attributes in the current object (presumably defined +    # as class attributes): +    #   * src_extensions - +    #     list of C/C++ source file extensions, eg. 
['.c', '.cpp'] +    #   * obj_extension - +    #     object file extension, eg. '.o' or '.obj' +    #   * static_lib_extension - +    #     extension for static library files, eg. '.a' or '.lib' +    #   * shared_lib_extension - +    #     extension for shared library/object files, eg. '.so', '.dll' +    #   * static_lib_format - +    #     format string for generating static library filenames, +    #     eg. 'lib%s.%s' or '%s.%s' +    #   * shared_lib_format +    #     format string for generating shared library filenames +    #     (probably same as static_lib_format, since the extension +    #     is one of the intended parameters to the format string) +    #   * exe_extension - +    #     extension for executable files, eg. '' or '.exe' + +    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): +        if output_dir is None: +            output_dir = '' +        return list( +            self._make_out_path(output_dir, strip_dir, src_name) +            for src_name in source_filenames +        ) + +    @property +    def out_extensions(self): +        return dict.fromkeys(self.src_extensions, self.obj_extension) + +    def _make_out_path(self, output_dir, strip_dir, src_name): +        base, ext = os.path.splitext(src_name) +        base = self._make_relative(base) +        try: +            new_ext = self.out_extensions[ext] +        except LookupError: +            raise UnknownFileError( +                "unknown file type '{}' (from '{}')".format(ext, src_name) +            ) +        if strip_dir: +            base = os.path.basename(base) +        return os.path.join(output_dir, base + new_ext) + +    @staticmethod +    def _make_relative(base): +        """ +        In order to ensure that a filename always honors the +        indicated output_dir, make sure it's relative. +        Ref python/cpython#37775. 
+        """ +        # Chop off the drive +        no_drive = os.path.splitdrive(base)[1] +        # If abs, chop off leading / +        return no_drive[os.path.isabs(no_drive) :] + +    def shared_object_filename(self, basename, strip_dir=0, output_dir=''): +        assert output_dir is not None +        if strip_dir: +            basename = os.path.basename(basename) +        return os.path.join(output_dir, basename + self.shared_lib_extension) + +    def executable_filename(self, basename, strip_dir=0, output_dir=''): +        assert output_dir is not None +        if strip_dir: +            basename = os.path.basename(basename) +        return os.path.join(output_dir, basename + (self.exe_extension or '')) + +    def library_filename( +        self, libname, lib_type='static', strip_dir=0, output_dir=''  # or 'shared' +    ): +        assert output_dir is not None +        expected = '"static", "shared", "dylib", "xcode_stub"' +        if lib_type not in eval(expected): +            raise ValueError(f"'lib_type' must be {expected}") +        fmt = getattr(self, lib_type + "_lib_format") +        ext = getattr(self, lib_type + "_lib_extension") + +        dir, base = os.path.split(libname) +        filename = fmt % (base, ext) +        if strip_dir: +            dir = '' + +        return os.path.join(output_dir, dir, filename) + +    # -- Utility methods ----------------------------------------------- + +    def announce(self, msg, level=1): +        log.debug(msg) + +    def debug_print(self, msg): +        from distutils.debug import DEBUG + +        if DEBUG: +            print(msg) + +    def warn(self, msg): +        sys.stderr.write("warning: %s\n" % msg) + +    def execute(self, func, args, msg=None, level=1): +        execute(func, args, msg, self.dry_run) + +    def spawn(self, cmd, **kwargs): +        spawn(cmd, dry_run=self.dry_run, **kwargs) + +    def move_file(self, src, dst): +        return move_file(src, dst, dry_run=self.dry_run) + +    def mkpath(self, name, mode=0o777): +        mkpath(name, mode, dry_run=self.dry_run) + + +# Map a sys.platform/os.name ('posix', 'nt') to the default compiler +# type for that platform. Keys are interpreted as re match +# patterns. Order is important; platform mappings are preferred over +# OS names. +_default_compilers = ( +    # Platform string mappings +    # on a cygwin built python we can use gcc like an ordinary UNIXish +    # compiler +    ('cygwin.*', 'unix'), +    # OS name mappings +    ('posix', 'unix'), +    ('nt', 'msvc'), +) + + +def get_default_compiler(osname=None, platform=None): +    """Determine the default compiler to use for the given platform. + +    osname should be one of the standard Python OS names (i.e. the +    ones returned by os.name) and platform the common value +    returned by sys.platform for the platform in question. + +    The default values are os.name and sys.platform in case the +    parameters are not given. +    """ +    if osname is None: +        osname = os.name +    if platform is None: +        platform = sys.platform +    for pattern, compiler in _default_compilers: +        if ( +            re.match(pattern, platform) is not None +            or re.match(pattern, osname) is not None +        ): +            return compiler +    # Default to Unix compiler +    return 'unix' + + +# Map compiler types to (module_name, class_name) pairs -- ie. where to +# find the code that implements an interface to this compiler.  (The module +# is assumed to be in the 'distutils' package.) 
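# [Editorial usage sketch, not part of the committed file] The table below is
# normally consumed through new_compiler() further down; a minimal direct use
# on a POSIX host might look like this (the file names and macro are
# placeholders, and real builds go through the build_ext/build_clib commands):
#
#     from distutils.ccompiler import new_compiler
#     cc = new_compiler()                  # picks 'unix' via get_default_compiler()
#     cc.define_macro("EXAMPLE_MACRO", "1")
#     objs = cc.compile(["hello.c"], output_dir="build")
#     cc.link_executable(objs, "hello", output_dir="build")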
+compiler_class = { +    'unix': ('unixccompiler', 'UnixCCompiler', "standard UNIX-style compiler"), +    'msvc': ('_msvccompiler', 'MSVCCompiler', "Microsoft Visual C++"), +    'cygwin': ( +        'cygwinccompiler', +        'CygwinCCompiler', +        "Cygwin port of GNU C Compiler for Win32", +    ), +    'mingw32': ( +        'cygwinccompiler', +        'Mingw32CCompiler', +        "Mingw32 port of GNU C Compiler for Win32", +    ), +    'bcpp': ('bcppcompiler', 'BCPPCompiler', "Borland C++ Compiler"), +} + + +def show_compilers(): +    """Print list of available compilers (used by the "--help-compiler" +    options to "build", "build_ext", "build_clib"). +    """ +    # XXX this "knows" that the compiler option it's describing is +    # "--compiler", which just happens to be the case for the three +    # commands that use it. +    from distutils.fancy_getopt import FancyGetopt + +    compilers = [] +    for compiler in compiler_class.keys(): +        compilers.append(("compiler=" + compiler, None, compiler_class[compiler][2])) +    compilers.sort() +    pretty_printer = FancyGetopt(compilers) +    pretty_printer.print_help("List of available compilers:") + + +def new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0): +    """Generate an instance of some CCompiler subclass for the supplied +    platform/compiler combination.  'plat' defaults to 'os.name' +    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler +    for that platform.  Currently only 'posix' and 'nt' are supported, and +    the default compilers are "traditional Unix interface" (UnixCCompiler +    class) and Visual C++ (MSVCCompiler class).  Note that it's perfectly +    possible to ask for a Unix compiler object under Windows, and a +    Microsoft compiler object under Unix -- if you supply a value for +    'compiler', 'plat' is ignored. +    """ +    if plat is None: +        plat = os.name + +    try: +        if compiler is None: +            compiler = get_default_compiler(plat) + +        (module_name, class_name, long_description) = compiler_class[compiler] +    except KeyError: +        msg = "don't know how to compile C/C++ code on platform '%s'" % plat +        if compiler is not None: +            msg = msg + " with '%s' compiler" % compiler +        raise DistutilsPlatformError(msg) + +    try: +        module_name = "distutils." + module_name +        __import__(module_name) +        module = sys.modules[module_name] +        klass = vars(module)[class_name] +    except ImportError: +        raise DistutilsModuleError( +            "can't compile C/C++ code: unable to load module '%s'" % module_name +        ) +    except KeyError: +        raise DistutilsModuleError( +            "can't compile C/C++ code: unable to find class '%s' " +            "in module '%s'" % (class_name, module_name) +        ) + +    # XXX The None is necessary to preserve backwards compatibility +    # with classes that expect verbose to be the first positional +    # argument. +    return klass(None, dry_run, force) + + +def gen_preprocess_options(macros, include_dirs): +    """Generate C pre-processor options (-D, -U, -I) as used by at least +    two types of compilers: the typical Unix compiler and Visual C++. +    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,) +    means undefine (-U) macro 'name', and (name,value) means define (-D) +    macro 'name' to 'value'.  'include_dirs' is just a list of directory +    names to be added to the header file search path (-I).  
Returns a list +    of command-line options suitable for either Unix compilers or Visual +    C++. +    """ +    # XXX it would be nice (mainly aesthetic, and so we don't generate +    # stupid-looking command lines) to go over 'macros' and eliminate +    # redundant definitions/undefinitions (ie. ensure that only the +    # latest mention of a particular macro winds up on the command +    # line).  I don't think it's essential, though, since most (all?) +    # Unix C compilers only pay attention to the latest -D or -U +    # mention of a macro on their command line.  Similar situation for +    # 'include_dirs'.  I'm punting on both for now.  Anyways, weeding out +    # redundancies like this should probably be the province of +    # CCompiler, since the data structures used are inherited from it +    # and therefore common to all CCompiler classes. +    pp_opts = [] +    for macro in macros: +        if not (isinstance(macro, tuple) and 1 <= len(macro) <= 2): +            raise TypeError( +                "bad macro definition '%s': " +                "each element of 'macros' list must be a 1- or 2-tuple" % macro +            ) + +        if len(macro) == 1:  # undefine this macro +            pp_opts.append("-U%s" % macro[0]) +        elif len(macro) == 2: +            if macro[1] is None:  # define with no explicit value +                pp_opts.append("-D%s" % macro[0]) +            else: +                # XXX *don't* need to be clever about quoting the +                # macro value here, because we're going to avoid the +                # shell at all costs when we spawn the command! +                pp_opts.append("-D%s=%s" % macro) + +    for dir in include_dirs: +        pp_opts.append("-I%s" % dir) +    return pp_opts + + +def gen_lib_options(compiler, library_dirs, runtime_library_dirs, libraries): +    """Generate linker options for searching library directories and +    linking with specific libraries.  'libraries' and 'library_dirs' are, +    respectively, lists of library names (not filenames!) and search +    directories.  Returns a list of command-line options suitable for use +    with some compiler (depending on the two format strings passed in). +    """ +    lib_opts = [] + +    for dir in library_dirs: +        lib_opts.append(compiler.library_dir_option(dir)) + +    for dir in runtime_library_dirs: +        opt = compiler.runtime_library_dir_option(dir) +        if isinstance(opt, list): +            lib_opts = lib_opts + opt +        else: +            lib_opts.append(opt) + +    # XXX it's important that we *not* remove redundant library mentions! +    # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to +    # resolve all symbols.  I just hope we never have to say "-lfoo obj.o +    # -lbar" to get things to work -- that's certainly a possibility, but a +    # pretty nasty way to arrange your C code. 
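# [Editorial aside] For a Unix-style compiler object the result is the familiar
# -L/-l spelling, e.g. (paths and names hypothetical):
#
#     gen_lib_options(cc, ["/opt/foo/lib"], [], ["m", "foo"])
#     # -> ['-L/opt/foo/lib', '-lm', '-lfoo']
#
# The exact option strings come from the compiler's library_dir_option(),
# runtime_library_dir_option() and library_option() hooks, so other compiler
# classes produce their own spellings.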
+ +    for lib in libraries: +        (lib_dir, lib_name) = os.path.split(lib) +        if lib_dir: +            lib_file = compiler.find_library_file([lib_dir], lib_name) +            if lib_file: +                lib_opts.append(lib_file) +            else: +                compiler.warn( +                    "no library file corresponding to " "'%s' found (skipping)" % lib +                ) +        else: +            lib_opts.append(compiler.library_option(lib)) +    return lib_opts diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/cmd.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/cmd.py new file mode 100644 index 0000000..68a9267 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/cmd.py @@ -0,0 +1,436 @@ +"""distutils.cmd + +Provides the Command class, the base class for the command classes +in the distutils.command package. +""" + +import sys +import os +import re +from distutils.errors import DistutilsOptionError +from distutils import util, dir_util, file_util, archive_util, dep_util +from distutils import log + + +class Command: +    """Abstract base class for defining command classes, the "worker bees" +    of the Distutils.  A useful analogy for command classes is to think of +    them as subroutines with local variables called "options".  The options +    are "declared" in 'initialize_options()' and "defined" (given their +    final values, aka "finalized") in 'finalize_options()', both of which +    must be defined by every command class.  The distinction between the +    two is necessary because option values might come from the outside +    world (command line, config file, ...), and any options dependent on +    other options must be computed *after* these outside influences have +    been processed -- hence 'finalize_options()'.  The "body" of the +    subroutine, where it does all its work based on the values of its +    options, is the 'run()' method, which must also be implemented by every +    command class. +    """ + +    # 'sub_commands' formalizes the notion of a "family" of commands, +    # eg. "install" as the parent with sub-commands "install_lib", +    # "install_headers", etc.  The parent of a family of commands +    # defines 'sub_commands' as a class attribute; it's a list of +    #    (command_name : string, predicate : unbound_method | string | None) +    # tuples, where 'predicate' is a method of the parent command that +    # determines whether the corresponding command is applicable in the +    # current situation.  (Eg. we "install_headers" is only applicable if +    # we have any C header files to install.)  If 'predicate' is None, +    # that command is always applicable. +    # +    # 'sub_commands' is usually defined at the *end* of a class, because +    # predicates can be unbound methods, so they must already have been +    # defined.  The canonical example is the "install" command. +    sub_commands = [] + +    # -- Creation/initialization methods ------------------------------- + +    def __init__(self, dist): +        """Create and initialize a new Command object.  Most importantly, +        invokes the 'initialize_options()' method, which is the real +        initializer and depends on the actual command being +        instantiated. 
+        """ +        # late import because of mutual dependence between these classes +        from distutils.dist import Distribution + +        if not isinstance(dist, Distribution): +            raise TypeError("dist must be a Distribution instance") +        if self.__class__ is Command: +            raise RuntimeError("Command is an abstract class") + +        self.distribution = dist +        self.initialize_options() + +        # Per-command versions of the global flags, so that the user can +        # customize Distutils' behaviour command-by-command and let some +        # commands fall back on the Distribution's behaviour.  None means +        # "not defined, check self.distribution's copy", while 0 or 1 mean +        # false and true (duh).  Note that this means figuring out the real +        # value of each flag is a touch complicated -- hence "self._dry_run" +        # will be handled by __getattr__, below. +        # XXX This needs to be fixed. +        self._dry_run = None + +        # verbose is largely ignored, but needs to be set for +        # backwards compatibility (I think)? +        self.verbose = dist.verbose + +        # Some commands define a 'self.force' option to ignore file +        # timestamps, but methods defined *here* assume that +        # 'self.force' exists for all commands.  So define it here +        # just to be safe. +        self.force = None + +        # The 'help' flag is just used for command-line parsing, so +        # none of that complicated bureaucracy is needed. +        self.help = 0 + +        # 'finalized' records whether or not 'finalize_options()' has been +        # called.  'finalize_options()' itself should not pay attention to +        # this flag: it is the business of 'ensure_finalized()', which +        # always calls 'finalize_options()', to respect/update it. +        self.finalized = 0 + +    # XXX A more explicit way to customize dry_run would be better. +    def __getattr__(self, attr): +        if attr == 'dry_run': +            myval = getattr(self, "_" + attr) +            if myval is None: +                return getattr(self.distribution, attr) +            else: +                return myval +        else: +            raise AttributeError(attr) + +    def ensure_finalized(self): +        if not self.finalized: +            self.finalize_options() +        self.finalized = 1 + +    # Subclasses must define: +    #   initialize_options() +    #     provide default values for all options; may be customized by +    #     setup script, by options from config file(s), or by command-line +    #     options +    #   finalize_options() +    #     decide on the final values for all options; this is called +    #     after all possible intervention from the outside world +    #     (command-line, option file, etc.) has been processed +    #   run() +    #     run the command: do whatever it is we're here to do, +    #     controlled by the command's various option values + +    def initialize_options(self): +        """Set default values for all the options that this command +        supports.  Note that these defaults may be overridden by other +        commands, by the setup script, by config files, or by the +        command-line.  Thus, this is not the place to code dependencies +        between options; generally, 'initialize_options()' implementations +        are just a bunch of "self.foo = None" assignments. + +        This method must be implemented by all command classes. 
+        """ +        raise RuntimeError( +            "abstract method -- subclass %s must override" % self.__class__ +        ) + +    def finalize_options(self): +        """Set final values for all the options that this command supports. +        This is always called as late as possible, ie.  after any option +        assignments from the command-line or from other commands have been +        done.  Thus, this is the place to code option dependencies: if +        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as +        long as 'foo' still has the same value it was assigned in +        'initialize_options()'. + +        This method must be implemented by all command classes. +        """ +        raise RuntimeError( +            "abstract method -- subclass %s must override" % self.__class__ +        ) + +    def dump_options(self, header=None, indent=""): +        from distutils.fancy_getopt import longopt_xlate + +        if header is None: +            header = "command options for '%s':" % self.get_command_name() +        self.announce(indent + header, level=log.INFO) +        indent = indent + "  " +        for (option, _, _) in self.user_options: +            option = option.translate(longopt_xlate) +            if option[-1] == "=": +                option = option[:-1] +            value = getattr(self, option) +            self.announce(indent + "{} = {}".format(option, value), level=log.INFO) + +    def run(self): +        """A command's raison d'etre: carry out the action it exists to +        perform, controlled by the options initialized in +        'initialize_options()', customized by other commands, the setup +        script, the command-line, and config files, and finalized in +        'finalize_options()'.  All terminal output and filesystem +        interaction should be done by 'run()'. + +        This method must be implemented by all command classes. +        """ +        raise RuntimeError( +            "abstract method -- subclass %s must override" % self.__class__ +        ) + +    def announce(self, msg, level=1): +        """If the current verbosity level is of greater than or equal to +        'level' print 'msg' to stdout. +        """ +        log.log(level, msg) + +    def debug_print(self, msg): +        """Print 'msg' to stdout if the global DEBUG (taken from the +        DISTUTILS_DEBUG environment variable) flag is true. +        """ +        from distutils.debug import DEBUG + +        if DEBUG: +            print(msg) +            sys.stdout.flush() + +    # -- Option validation methods ------------------------------------- +    # (these are very handy in writing the 'finalize_options()' method) +    # +    # NB. the general philosophy here is to ensure that a particular option +    # value meets certain type and value constraints.  If not, we try to +    # force it into conformance (eg. if we expect a list but have a string, +    # split the string on comma and/or whitespace).  If we can't force the +    # option into conformance, raise DistutilsOptionError.  Thus, command +    # classes need do nothing more than (eg.) +    #   self.ensure_string_list('foo') +    # and they can be guaranteed that thereafter, self.foo will be +    # a list of strings. 
+ +    def _ensure_stringlike(self, option, what, default=None): +        val = getattr(self, option) +        if val is None: +            setattr(self, option, default) +            return default +        elif not isinstance(val, str): +            raise DistutilsOptionError( +                "'{}' must be a {} (got `{}`)".format(option, what, val) +            ) +        return val + +    def ensure_string(self, option, default=None): +        """Ensure that 'option' is a string; if not defined, set it to +        'default'. +        """ +        self._ensure_stringlike(option, "string", default) + +    def ensure_string_list(self, option): +        r"""Ensure that 'option' is a list of strings.  If 'option' is +        currently a string, we split it either on /,\s*/ or /\s+/, so +        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become +        ["foo", "bar", "baz"]. +        """ +        val = getattr(self, option) +        if val is None: +            return +        elif isinstance(val, str): +            setattr(self, option, re.split(r',\s*|\s+', val)) +        else: +            if isinstance(val, list): +                ok = all(isinstance(v, str) for v in val) +            else: +                ok = False +            if not ok: +                raise DistutilsOptionError( +                    "'{}' must be a list of strings (got {!r})".format(option, val) +                ) + +    def _ensure_tested_string(self, option, tester, what, error_fmt, default=None): +        val = self._ensure_stringlike(option, what, default) +        if val is not None and not tester(val): +            raise DistutilsOptionError( +                ("error in '%s' option: " + error_fmt) % (option, val) +            ) + +    def ensure_filename(self, option): +        """Ensure that 'option' is the name of an existing file.""" +        self._ensure_tested_string( +            option, os.path.isfile, "filename", "'%s' does not exist or is not a file" +        ) + +    def ensure_dirname(self, option): +        self._ensure_tested_string( +            option, +            os.path.isdir, +            "directory name", +            "'%s' does not exist or is not a directory", +        ) + +    # -- Convenience methods for commands ------------------------------ + +    def get_command_name(self): +        if hasattr(self, 'command_name'): +            return self.command_name +        else: +            return self.__class__.__name__ + +    def set_undefined_options(self, src_cmd, *option_pairs): +        """Set the values of any "undefined" options from corresponding +        option values in some other command object.  "Undefined" here means +        "is None", which is the convention used to indicate that an option +        has not been changed between 'initialize_options()' and +        'finalize_options()'.  Usually called from 'finalize_options()' for +        options that depend on some other command rather than another +        option of the same command.  'src_cmd' is the other command from +        which option values will be taken (a command object will be created +        for it if necessary); the remaining arguments are +        '(src_option,dst_option)' tuples which mean "take the value of +        'src_option' in the 'src_cmd' command object, and copy it to +        'dst_option' in the current command object". 
+        """ +        # Option_pairs: list of (src_option, dst_option) tuples +        src_cmd_obj = self.distribution.get_command_obj(src_cmd) +        src_cmd_obj.ensure_finalized() +        for (src_option, dst_option) in option_pairs: +            if getattr(self, dst_option) is None: +                setattr(self, dst_option, getattr(src_cmd_obj, src_option)) + +    def get_finalized_command(self, command, create=1): +        """Wrapper around Distribution's 'get_command_obj()' method: find +        (create if necessary and 'create' is true) the command object for +        'command', call its 'ensure_finalized()' method, and return the +        finalized command object. +        """ +        cmd_obj = self.distribution.get_command_obj(command, create) +        cmd_obj.ensure_finalized() +        return cmd_obj + +    # XXX rename to 'get_reinitialized_command()'? (should do the +    # same in dist.py, if so) +    def reinitialize_command(self, command, reinit_subcommands=0): +        return self.distribution.reinitialize_command(command, reinit_subcommands) + +    def run_command(self, command): +        """Run some other command: uses the 'run_command()' method of +        Distribution, which creates and finalizes the command object if +        necessary and then invokes its 'run()' method. +        """ +        self.distribution.run_command(command) + +    def get_sub_commands(self): +        """Determine the sub-commands that are relevant in the current +        distribution (ie., that need to be run).  This is based on the +        'sub_commands' class attribute: each tuple in that list may include +        a method that we call to determine if the subcommand needs to be +        run for the current distribution.  Return a list of command names. +        """ +        commands = [] +        for (cmd_name, method) in self.sub_commands: +            if method is None or method(self): +                commands.append(cmd_name) +        return commands + +    # -- External world manipulation ----------------------------------- + +    def warn(self, msg): +        log.warn("warning: %s: %s\n", self.get_command_name(), msg) + +    def execute(self, func, args, msg=None, level=1): +        util.execute(func, args, msg, dry_run=self.dry_run) + +    def mkpath(self, name, mode=0o777): +        dir_util.mkpath(name, mode, dry_run=self.dry_run) + +    def copy_file( +        self, infile, outfile, preserve_mode=1, preserve_times=1, link=None, level=1 +    ): +        """Copy a file respecting verbose, dry-run and force flags.  (The +        former two default to whatever is in the Distribution object, and +        the latter defaults to false for commands that don't define it.)""" +        return file_util.copy_file( +            infile, +            outfile, +            preserve_mode, +            preserve_times, +            not self.force, +            link, +            dry_run=self.dry_run, +        ) + +    def copy_tree( +        self, +        infile, +        outfile, +        preserve_mode=1, +        preserve_times=1, +        preserve_symlinks=0, +        level=1, +    ): +        """Copy an entire directory tree respecting verbose, dry-run, +        and force flags. 
+        """ +        return dir_util.copy_tree( +            infile, +            outfile, +            preserve_mode, +            preserve_times, +            preserve_symlinks, +            not self.force, +            dry_run=self.dry_run, +        ) + +    def move_file(self, src, dst, level=1): +        """Move a file respecting dry-run flag.""" +        return file_util.move_file(src, dst, dry_run=self.dry_run) + +    def spawn(self, cmd, search_path=1, level=1): +        """Spawn an external command respecting dry-run flag.""" +        from distutils.spawn import spawn + +        spawn(cmd, search_path, dry_run=self.dry_run) + +    def make_archive( +        self, base_name, format, root_dir=None, base_dir=None, owner=None, group=None +    ): +        return archive_util.make_archive( +            base_name, +            format, +            root_dir, +            base_dir, +            dry_run=self.dry_run, +            owner=owner, +            group=group, +        ) + +    def make_file( +        self, infiles, outfile, func, args, exec_msg=None, skip_msg=None, level=1 +    ): +        """Special case of 'execute()' for operations that process one or +        more input files and generate one output file.  Works just like +        'execute()', except the operation is skipped and a different +        message printed if 'outfile' already exists and is newer than all +        files listed in 'infiles'.  If the command defined 'self.force', +        and it is true, then the command is unconditionally run -- does no +        timestamp checks. +        """ +        if skip_msg is None: +            skip_msg = "skipping %s (inputs unchanged)" % outfile + +        # Allow 'infiles' to be a single string +        if isinstance(infiles, str): +            infiles = (infiles,) +        elif not isinstance(infiles, (list, tuple)): +            raise TypeError("'infiles' must be a string, or a list or tuple of strings") + +        if exec_msg is None: +            exec_msg = "generating {} from {}".format(outfile, ', '.join(infiles)) + +        # If 'outfile' must be regenerated (either because it doesn't +        # exist, is out-of-date, or the 'force' flag is true) then +        # perform the action that presumably regenerates it +        if self.force or dep_util.newer_group(infiles, outfile): +            self.execute(func, args, exec_msg, level) +        # Otherwise, print the "skip" message +        else: +            log.debug(skip_msg) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__init__.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__init__.py new file mode 100644 index 0000000..028dcfa --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__init__.py @@ -0,0 +1,25 @@ +"""distutils.command + +Package containing implementation of all the standard Distutils +commands.""" + +__all__ = [  # noqa: F822 +    'build', +    'build_py', +    'build_ext', +    'build_clib', +    'build_scripts', +    'clean', +    'install', +    'install_lib', +    'install_headers', +    'install_scripts', +    'install_data', +    'sdist', +    'register', +    'bdist', +    'bdist_dumb', +    'bdist_rpm', +    'check', +    'upload', +] diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pycBinary files differ new file mode 100644 index 0000000..75bdb3b --- 
/dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/__init__.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pycBinary files differ new file mode 100644 index 0000000..5fb53f8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/_framework_compat.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f39d795 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-311.pycBinary files differ new file mode 100644 index 0000000..4970e89 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_dumb.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-311.pycBinary files differ new file mode 100644 index 0000000..43f1b4e --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/bdist_rpm.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pycBinary files differ new file mode 100644 index 0000000..ff6ed8a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-311.pycBinary files differ new file mode 100644 index 0000000..246bc58 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_clib.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pycBinary files differ new file mode 100644 index 0000000..a412461 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_ext.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pycBinary files differ new file mode 100644 index 0000000..2ccbf4a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_py.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pycBinary files differ new file mode 100644 index 0000000..d97306d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/build_scripts.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f02926d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/check.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-311.pycBinary files differ new file mode 100644 index 0000000..e9404cf --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/clean.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-311.pycBinary files differ new file mode 100644 index 0000000..a320222 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/config.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f503c1e --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-311.pycBinary files differ new file mode 100644 index 0000000..80eb5d0 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_data.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-311.pycBinary files differ new file mode 100644 index 0000000..b854d29 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_egg_info.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-311.pycBinary files differ new file mode 100644 index 0000000..9d57b22 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_headers.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pycBinary files differ new file mode 100644 index 0000000..90c0710 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_lib.cpython-311.pyc diff --git 
a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pycBinary files differ new file mode 100644 index 0000000..49e4891 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/install_scripts.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-311.pycBinary files differ new file mode 100644 index 0000000..e180f89 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/py37compat.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-311.pycBinary files differ new file mode 100644 index 0000000..12f6979 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/register.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pycBinary files differ new file mode 100644 index 0000000..6eaba69 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/sdist.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-311.pycBinary files differ new file mode 100644 index 0000000..785172d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/__pycache__/upload.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/_framework_compat.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/_framework_compat.py new file mode 100644 index 0000000..cffa27c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/_framework_compat.py @@ -0,0 +1,55 @@ +""" +Backward compatibility for homebrew builds on macOS. +""" + + +import sys +import os +import functools +import subprocess +import sysconfig + + +@functools.lru_cache() +def enabled(): +    """ +    Only enabled for Python 3.9 framework homebrew builds +    except ensurepip and venv. 
+    """ +    PY39 = (3, 9) < sys.version_info < (3, 10) +    framework = sys.platform == 'darwin' and sys._framework +    homebrew = "Cellar" in sysconfig.get_config_var('projectbase') +    venv = sys.prefix != sys.base_prefix +    ensurepip = os.environ.get("ENSUREPIP_OPTIONS") +    return PY39 and framework and homebrew and not venv and not ensurepip + + +schemes = dict( +    osx_framework_library=dict( +        stdlib='{installed_base}/{platlibdir}/python{py_version_short}', +        platstdlib='{platbase}/{platlibdir}/python{py_version_short}', +        purelib='{homebrew_prefix}/lib/python{py_version_short}/site-packages', +        platlib='{homebrew_prefix}/{platlibdir}/python{py_version_short}/site-packages', +        include='{installed_base}/include/python{py_version_short}{abiflags}', +        platinclude='{installed_platbase}/include/python{py_version_short}{abiflags}', +        scripts='{homebrew_prefix}/bin', +        data='{homebrew_prefix}', +    ) +) + + +@functools.lru_cache() +def vars(): +    if not enabled(): +        return {} +    homebrew_prefix = subprocess.check_output(['brew', '--prefix'], text=True).strip() +    return locals() + + +def scheme(name): +    """ +    Override the selected scheme for posix_prefix. +    """ +    if not enabled() or not name.endswith('_prefix'): +        return name +    return 'osx_framework_library' diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist.py new file mode 100644 index 0000000..de37dae --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist.py @@ -0,0 +1,157 @@ +"""distutils.command.bdist + +Implements the Distutils 'bdist' command (create a built [binary] +distribution).""" + +import os +import warnings + +from distutils.core import Command +from distutils.errors import DistutilsPlatformError, DistutilsOptionError +from distutils.util import get_platform + + +def show_formats(): +    """Print list of available formats (arguments to "--format" option).""" +    from distutils.fancy_getopt import FancyGetopt + +    formats = [] +    for format in bdist.format_commands: +        formats.append(("formats=" + format, None, bdist.format_commands[format][1])) +    pretty_printer = FancyGetopt(formats) +    pretty_printer.print_help("List of available distribution formats:") + + +class ListCompat(dict): +    # adapter to allow for Setuptools compatibility in format_commands +    def append(self, item): +        warnings.warn( +            """format_commands is now a dict. 
append is deprecated.""", +            DeprecationWarning, +            stacklevel=2, +        ) + + +class bdist(Command): + +    description = "create a built (binary) distribution" + +    user_options = [ +        ('bdist-base=', 'b', "temporary directory for creating built distributions"), +        ( +            'plat-name=', +            'p', +            "platform name to embed in generated filenames " +            "(default: %s)" % get_platform(), +        ), +        ('formats=', None, "formats for distribution (comma-separated list)"), +        ( +            'dist-dir=', +            'd', +            "directory to put final built distributions in " "[default: dist]", +        ), +        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), +        ( +            'owner=', +            'u', +            "Owner name used when creating a tar file" " [default: current user]", +        ), +        ( +            'group=', +            'g', +            "Group name used when creating a tar file" " [default: current group]", +        ), +    ] + +    boolean_options = ['skip-build'] + +    help_options = [ +        ('help-formats', None, "lists available distribution formats", show_formats), +    ] + +    # The following commands do not take a format option from bdist +    no_format_option = ('bdist_rpm',) + +    # This won't do in reality: will need to distinguish RPM-ish Linux, +    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS. +    default_format = {'posix': 'gztar', 'nt': 'zip'} + +    # Define commands in preferred order for the --help-formats option +    format_commands = ListCompat( +        { +            'rpm': ('bdist_rpm', "RPM distribution"), +            'gztar': ('bdist_dumb', "gzip'ed tar file"), +            'bztar': ('bdist_dumb', "bzip2'ed tar file"), +            'xztar': ('bdist_dumb', "xz'ed tar file"), +            'ztar': ('bdist_dumb', "compressed tar file"), +            'tar': ('bdist_dumb', "tar file"), +            'zip': ('bdist_dumb', "ZIP file"), +        } +    ) + +    # for compatibility until consumers only reference format_commands +    format_command = format_commands + +    def initialize_options(self): +        self.bdist_base = None +        self.plat_name = None +        self.formats = None +        self.dist_dir = None +        self.skip_build = 0 +        self.group = None +        self.owner = None + +    def finalize_options(self): +        # have to finalize 'plat_name' before 'bdist_base' +        if self.plat_name is None: +            if self.skip_build: +                self.plat_name = get_platform() +            else: +                self.plat_name = self.get_finalized_command('build').plat_name + +        # 'bdist_base' -- parent of per-built-distribution-format +        # temporary directories (eg. we'll probably have +        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.) +        if self.bdist_base is None: +            build_base = self.get_finalized_command('build').build_base +            self.bdist_base = os.path.join(build_base, 'bdist.' 
+ self.plat_name) + +        self.ensure_string_list('formats') +        if self.formats is None: +            try: +                self.formats = [self.default_format[os.name]] +            except KeyError: +                raise DistutilsPlatformError( +                    "don't know how to create built distributions " +                    "on platform %s" % os.name +                ) + +        if self.dist_dir is None: +            self.dist_dir = "dist" + +    def run(self): +        # Figure out which sub-commands we need to run. +        commands = [] +        for format in self.formats: +            try: +                commands.append(self.format_commands[format][0]) +            except KeyError: +                raise DistutilsOptionError("invalid format '%s'" % format) + +        # Reinitialize and run each command. +        for i in range(len(self.formats)): +            cmd_name = commands[i] +            sub_cmd = self.reinitialize_command(cmd_name) +            if cmd_name not in self.no_format_option: +                sub_cmd.format = self.formats[i] + +            # passing the owner and group names for tar archiving +            if cmd_name == 'bdist_dumb': +                sub_cmd.owner = self.owner +                sub_cmd.group = self.group + +            # If we're going to need to run this command again, tell it to +            # keep its temporary files around so subsequent runs go faster. +            if cmd_name in commands[i + 1 :]: +                sub_cmd.keep_temp = 1 +            self.run_command(cmd_name) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_dumb.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_dumb.py new file mode 100644 index 0000000..0f52330 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_dumb.py @@ -0,0 +1,144 @@ +"""distutils.command.bdist_dumb + +Implements the Distutils 'bdist_dumb' command (create a "dumb" built +distribution -- i.e., just an archive to be unpacked under $prefix or +$exec_prefix).""" + +import os +from distutils.core import Command +from distutils.util import get_platform +from distutils.dir_util import remove_tree, ensure_relative +from distutils.errors import DistutilsPlatformError +from distutils.sysconfig import get_python_version +from distutils import log + + +class bdist_dumb(Command): + +    description = "create a \"dumb\" built distribution" + +    user_options = [ +        ('bdist-dir=', 'd', "temporary directory for creating the distribution"), +        ( +            'plat-name=', +            'p', +            "platform name to embed in generated filenames " +            "(default: %s)" % get_platform(), +        ), +        ( +            'format=', +            'f', +            "archive format to create (tar, gztar, bztar, xztar, " "ztar, zip)", +        ), +        ( +            'keep-temp', +            'k', +            "keep the pseudo-installation tree around after " +            + "creating the distribution archive", +        ), +        ('dist-dir=', 'd', "directory to put final built distributions in"), +        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), +        ( +            'relative', +            None, +            "build the archive using relative paths " "(default: false)", +        ), +        ( +            'owner=', +            'u', +            "Owner name used when creating a tar file" " [default: current user]", +        ), +        ( +          
  'group=', +            'g', +            "Group name used when creating a tar file" " [default: current group]", +        ), +    ] + +    boolean_options = ['keep-temp', 'skip-build', 'relative'] + +    default_format = {'posix': 'gztar', 'nt': 'zip'} + +    def initialize_options(self): +        self.bdist_dir = None +        self.plat_name = None +        self.format = None +        self.keep_temp = 0 +        self.dist_dir = None +        self.skip_build = None +        self.relative = 0 +        self.owner = None +        self.group = None + +    def finalize_options(self): +        if self.bdist_dir is None: +            bdist_base = self.get_finalized_command('bdist').bdist_base +            self.bdist_dir = os.path.join(bdist_base, 'dumb') + +        if self.format is None: +            try: +                self.format = self.default_format[os.name] +            except KeyError: +                raise DistutilsPlatformError( +                    "don't know how to create dumb built distributions " +                    "on platform %s" % os.name +                ) + +        self.set_undefined_options( +            'bdist', +            ('dist_dir', 'dist_dir'), +            ('plat_name', 'plat_name'), +            ('skip_build', 'skip_build'), +        ) + +    def run(self): +        if not self.skip_build: +            self.run_command('build') + +        install = self.reinitialize_command('install', reinit_subcommands=1) +        install.root = self.bdist_dir +        install.skip_build = self.skip_build +        install.warn_dir = 0 + +        log.info("installing to %s", self.bdist_dir) +        self.run_command('install') + +        # And make an archive relative to the root of the +        # pseudo-installation tree. +        archive_basename = "{}.{}".format( +            self.distribution.get_fullname(), self.plat_name +        ) + +        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename) +        if not self.relative: +            archive_root = self.bdist_dir +        else: +            if self.distribution.has_ext_modules() and ( +                install.install_base != install.install_platbase +            ): +                raise DistutilsPlatformError( +                    "can't make a dumb built distribution where " +                    "base and platbase are different (%s, %s)" +                    % (repr(install.install_base), repr(install.install_platbase)) +                ) +            else: +                archive_root = os.path.join( +                    self.bdist_dir, ensure_relative(install.install_base) +                ) + +        # Make the archive +        filename = self.make_archive( +            pseudoinstall_root, +            self.format, +            root_dir=archive_root, +            owner=self.owner, +            group=self.group, +        ) +        if self.distribution.has_ext_modules(): +            pyversion = get_python_version() +        else: +            pyversion = 'any' +        self.distribution.dist_files.append(('bdist_dumb', pyversion, filename)) + +        if not self.keep_temp: +            remove_tree(self.bdist_dir, dry_run=self.dry_run) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_rpm.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_rpm.py new file mode 100644 index 0000000..6a50ef3 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/bdist_rpm.py @@ -0,0 +1,615 @@ +"""distutils.command.bdist_rpm + 
+Implements the Distutils 'bdist_rpm' command (create RPM source and binary +distributions).""" + +import subprocess +import sys +import os + +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.file_util import write_file +from distutils.errors import ( +    DistutilsOptionError, +    DistutilsPlatformError, +    DistutilsFileError, +    DistutilsExecError, +) +from distutils.sysconfig import get_python_version +from distutils import log + + +class bdist_rpm(Command): + +    description = "create an RPM distribution" + +    user_options = [ +        ('bdist-base=', None, "base directory for creating built distributions"), +        ( +            'rpm-base=', +            None, +            "base directory for creating RPMs (defaults to \"rpm\" under " +            "--bdist-base; must be specified for RPM 2)", +        ), +        ( +            'dist-dir=', +            'd', +            "directory to put final RPM files in " "(and .spec files if --spec-only)", +        ), +        ( +            'python=', +            None, +            "path to Python interpreter to hard-code in the .spec file " +            "(default: \"python\")", +        ), +        ( +            'fix-python', +            None, +            "hard-code the exact path to the current Python interpreter in " +            "the .spec file", +        ), +        ('spec-only', None, "only regenerate spec file"), +        ('source-only', None, "only generate source RPM"), +        ('binary-only', None, "only generate binary RPM"), +        ('use-bzip2', None, "use bzip2 instead of gzip to create source distribution"), +        # More meta-data: too RPM-specific to put in the setup script, +        # but needs to go in the .spec file -- so we make these options +        # to "bdist_rpm".  The idea is that packagers would put this +        # info in setup.cfg, although they are of course free to +        # supply it on the command line. +        ( +            'distribution-name=', +            None, +            "name of the (Linux) distribution to which this " +            "RPM applies (*not* the name of the module distribution!)", +        ), +        ('group=', None, "package classification [default: \"Development/Libraries\"]"), +        ('release=', None, "RPM release number"), +        ('serial=', None, "RPM serial number"), +        ( +            'vendor=', +            None, +            "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") " +            "[default: maintainer or author from setup script]", +        ), +        ( +            'packager=', +            None, +            "RPM packager (eg. 
\"Jane Doe <jane@example.net>\") " "[default: vendor]", +        ), +        ('doc-files=', None, "list of documentation files (space or comma-separated)"), +        ('changelog=', None, "RPM changelog"), +        ('icon=', None, "name of icon file"), +        ('provides=', None, "capabilities provided by this package"), +        ('requires=', None, "capabilities required by this package"), +        ('conflicts=', None, "capabilities which conflict with this package"), +        ('build-requires=', None, "capabilities required to build this package"), +        ('obsoletes=', None, "capabilities made obsolete by this package"), +        ('no-autoreq', None, "do not automatically calculate dependencies"), +        # Actions to take when building RPM +        ('keep-temp', 'k', "don't clean up RPM build directory"), +        ('no-keep-temp', None, "clean up RPM build directory [default]"), +        ( +            'use-rpm-opt-flags', +            None, +            "compile with RPM_OPT_FLAGS when building from source RPM", +        ), +        ('no-rpm-opt-flags', None, "do not pass any RPM CFLAGS to compiler"), +        ('rpm3-mode', None, "RPM 3 compatibility mode (default)"), +        ('rpm2-mode', None, "RPM 2 compatibility mode"), +        # Add the hooks necessary for specifying custom scripts +        ('prep-script=', None, "Specify a script for the PREP phase of RPM building"), +        ('build-script=', None, "Specify a script for the BUILD phase of RPM building"), +        ( +            'pre-install=', +            None, +            "Specify a script for the pre-INSTALL phase of RPM building", +        ), +        ( +            'install-script=', +            None, +            "Specify a script for the INSTALL phase of RPM building", +        ), +        ( +            'post-install=', +            None, +            "Specify a script for the post-INSTALL phase of RPM building", +        ), +        ( +            'pre-uninstall=', +            None, +            "Specify a script for the pre-UNINSTALL phase of RPM building", +        ), +        ( +            'post-uninstall=', +            None, +            "Specify a script for the post-UNINSTALL phase of RPM building", +        ), +        ('clean-script=', None, "Specify a script for the CLEAN phase of RPM building"), +        ( +            'verify-script=', +            None, +            "Specify a script for the VERIFY phase of the RPM build", +        ), +        # Allow a packager to explicitly force an architecture +        ('force-arch=', None, "Force an architecture onto the RPM build process"), +        ('quiet', 'q', "Run the INSTALL phase of RPM building in quiet mode"), +    ] + +    boolean_options = [ +        'keep-temp', +        'use-rpm-opt-flags', +        'rpm3-mode', +        'no-autoreq', +        'quiet', +    ] + +    negative_opt = { +        'no-keep-temp': 'keep-temp', +        'no-rpm-opt-flags': 'use-rpm-opt-flags', +        'rpm2-mode': 'rpm3-mode', +    } + +    def initialize_options(self): +        self.bdist_base = None +        self.rpm_base = None +        self.dist_dir = None +        self.python = None +        self.fix_python = None +        self.spec_only = None +        self.binary_only = None +        self.source_only = None +        self.use_bzip2 = None + +        self.distribution_name = None +        self.group = None +        self.release = None +        self.serial = None +        self.vendor = None +        self.packager = None +        self.doc_files = None +        
self.changelog = None +        self.icon = None + +        self.prep_script = None +        self.build_script = None +        self.install_script = None +        self.clean_script = None +        self.verify_script = None +        self.pre_install = None +        self.post_install = None +        self.pre_uninstall = None +        self.post_uninstall = None +        self.prep = None +        self.provides = None +        self.requires = None +        self.conflicts = None +        self.build_requires = None +        self.obsoletes = None + +        self.keep_temp = 0 +        self.use_rpm_opt_flags = 1 +        self.rpm3_mode = 1 +        self.no_autoreq = 0 + +        self.force_arch = None +        self.quiet = 0 + +    def finalize_options(self): +        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) +        if self.rpm_base is None: +            if not self.rpm3_mode: +                raise DistutilsOptionError("you must specify --rpm-base in RPM 2 mode") +            self.rpm_base = os.path.join(self.bdist_base, "rpm") + +        if self.python is None: +            if self.fix_python: +                self.python = sys.executable +            else: +                self.python = "python3" +        elif self.fix_python: +            raise DistutilsOptionError( +                "--python and --fix-python are mutually exclusive options" +            ) + +        if os.name != 'posix': +            raise DistutilsPlatformError( +                "don't know how to create RPM " "distributions on platform %s" % os.name +            ) +        if self.binary_only and self.source_only: +            raise DistutilsOptionError( +                "cannot supply both '--source-only' and '--binary-only'" +            ) + +        # don't pass CFLAGS to pure python distributions +        if not self.distribution.has_ext_modules(): +            self.use_rpm_opt_flags = 0 + +        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) +        self.finalize_package_data() + +    def finalize_package_data(self): +        self.ensure_string('group', "Development/Libraries") +        self.ensure_string( +            'vendor', +            "%s <%s>" +            % (self.distribution.get_contact(), self.distribution.get_contact_email()), +        ) +        self.ensure_string('packager') +        self.ensure_string_list('doc_files') +        if isinstance(self.doc_files, list): +            for readme in ('README', 'README.txt'): +                if os.path.exists(readme) and readme not in self.doc_files: +                    self.doc_files.append(readme) + +        self.ensure_string('release', "1") +        self.ensure_string('serial')  # should it be an int? + +        self.ensure_string('distribution_name') + +        self.ensure_string('changelog') +        # Format changelog correctly +        self.changelog = self._format_changelog(self.changelog) + +        self.ensure_filename('icon') + +        self.ensure_filename('prep_script') +        self.ensure_filename('build_script') +        self.ensure_filename('install_script') +        self.ensure_filename('clean_script') +        self.ensure_filename('verify_script') +        self.ensure_filename('pre_install') +        self.ensure_filename('post_install') +        self.ensure_filename('pre_uninstall') +        self.ensure_filename('post_uninstall') + +        # XXX don't forget we punted on summaries and descriptions -- they +        # should be handled here eventually! 
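For a sense of what the ensure_* normalisation above does to packager-supplied metadata, a small self-contained sketch (the changelog text is made up):

    from distutils.dist import Distribution
    from distutils.command.bdist_rpm import bdist_rpm

    cmd = bdist_rpm(Distribution())
    cmd._format_changelog(
        "* Tue Feb 20 2024 Jane Doe <jane@example.net>\n- fixed a bug"
    )
    # -> ['* Tue Feb 20 2024 Jane Doe <jane@example.net>', '- fixed a bug']

    # ensure_string_list() splits on /,\s*/ or /\s+/, so a doc-files value of
    # "CHANGES AUTHORS" becomes ['CHANGES', 'AUTHORS']; finalize_package_data()
    # then appends 'README' / 'README.txt' when such files exist.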
+ +        # Now *this* is some meta-data that belongs in the setup script... +        self.ensure_string_list('provides') +        self.ensure_string_list('requires') +        self.ensure_string_list('conflicts') +        self.ensure_string_list('build_requires') +        self.ensure_string_list('obsoletes') + +        self.ensure_string('force_arch') + +    def run(self):  # noqa: C901 +        if DEBUG: +            print("before _get_package_data():") +            print("vendor =", self.vendor) +            print("packager =", self.packager) +            print("doc_files =", self.doc_files) +            print("changelog =", self.changelog) + +        # make directories +        if self.spec_only: +            spec_dir = self.dist_dir +            self.mkpath(spec_dir) +        else: +            rpm_dir = {} +            for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'): +                rpm_dir[d] = os.path.join(self.rpm_base, d) +                self.mkpath(rpm_dir[d]) +            spec_dir = rpm_dir['SPECS'] + +        # Spec file goes into 'dist_dir' if '--spec-only specified', +        # build/rpm.<plat> otherwise. +        spec_path = os.path.join(spec_dir, "%s.spec" % self.distribution.get_name()) +        self.execute( +            write_file, (spec_path, self._make_spec_file()), "writing '%s'" % spec_path +        ) + +        if self.spec_only:  # stop if requested +            return + +        # Make a source distribution and copy to SOURCES directory with +        # optional icon. +        saved_dist_files = self.distribution.dist_files[:] +        sdist = self.reinitialize_command('sdist') +        if self.use_bzip2: +            sdist.formats = ['bztar'] +        else: +            sdist.formats = ['gztar'] +        self.run_command('sdist') +        self.distribution.dist_files = saved_dist_files + +        source = sdist.get_archive_files()[0] +        source_dir = rpm_dir['SOURCES'] +        self.copy_file(source, source_dir) + +        if self.icon: +            if os.path.exists(self.icon): +                self.copy_file(self.icon, source_dir) +            else: +                raise DistutilsFileError("icon file '%s' does not exist" % self.icon) + +        # build package +        log.info("building RPMs") +        rpm_cmd = ['rpmbuild'] + +        if self.source_only:  # what kind of RPMs? 
+            rpm_cmd.append('-bs') +        elif self.binary_only: +            rpm_cmd.append('-bb') +        else: +            rpm_cmd.append('-ba') +        rpm_cmd.extend(['--define', '__python %s' % self.python]) +        if self.rpm3_mode: +            rpm_cmd.extend(['--define', '_topdir %s' % os.path.abspath(self.rpm_base)]) +        if not self.keep_temp: +            rpm_cmd.append('--clean') + +        if self.quiet: +            rpm_cmd.append('--quiet') + +        rpm_cmd.append(spec_path) +        # Determine the binary rpm names that should be built out of this spec +        # file +        # Note that some of these may not be really built (if the file +        # list is empty) +        nvr_string = "%{name}-%{version}-%{release}" +        src_rpm = nvr_string + ".src.rpm" +        non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm" +        q_cmd = r"rpm -q --qf '{} {}\n' --specfile '{}'".format( +            src_rpm, +            non_src_rpm, +            spec_path, +        ) + +        out = os.popen(q_cmd) +        try: +            binary_rpms = [] +            source_rpm = None +            while True: +                line = out.readline() +                if not line: +                    break +                ell = line.strip().split() +                assert len(ell) == 2 +                binary_rpms.append(ell[1]) +                # The source rpm is named after the first entry in the spec file +                if source_rpm is None: +                    source_rpm = ell[0] + +            status = out.close() +            if status: +                raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd)) + +        finally: +            out.close() + +        self.spawn(rpm_cmd) + +        if not self.dry_run: +            if self.distribution.has_ext_modules(): +                pyversion = get_python_version() +            else: +                pyversion = 'any' + +            if not self.binary_only: +                srpm = os.path.join(rpm_dir['SRPMS'], source_rpm) +                assert os.path.exists(srpm) +                self.move_file(srpm, self.dist_dir) +                filename = os.path.join(self.dist_dir, source_rpm) +                self.distribution.dist_files.append(('bdist_rpm', pyversion, filename)) + +            if not self.source_only: +                for rpm in binary_rpms: +                    rpm = os.path.join(rpm_dir['RPMS'], rpm) +                    if os.path.exists(rpm): +                        self.move_file(rpm, self.dist_dir) +                        filename = os.path.join(self.dist_dir, os.path.basename(rpm)) +                        self.distribution.dist_files.append( +                            ('bdist_rpm', pyversion, filename) +                        ) + +    def _dist_path(self, path): +        return os.path.join(self.dist_dir, os.path.basename(path)) + +    def _make_spec_file(self):  # noqa: C901 +        """Generate the text of an RPM spec file and return it as a +        list of strings (one per line). 
+        """ +        # definitions and headers +        spec_file = [ +            '%define name ' + self.distribution.get_name(), +            '%define version ' + self.distribution.get_version().replace('-', '_'), +            '%define unmangled_version ' + self.distribution.get_version(), +            '%define release ' + self.release.replace('-', '_'), +            '', +            'Summary: ' + (self.distribution.get_description() or "UNKNOWN"), +        ] + +        # Workaround for #14443 which affects some RPM based systems such as +        # RHEL6 (and probably derivatives) +        vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}') +        # Generate a potential replacement value for __os_install_post (whilst +        # normalizing the whitespace to simplify the test for whether the +        # invocation of brp-python-bytecompile passes in __python): +        vendor_hook = '\n'.join( +            ['  %s \\' % line.strip() for line in vendor_hook.splitlines()] +        ) +        problem = "brp-python-bytecompile \\\n" +        fixed = "brp-python-bytecompile %{__python} \\\n" +        fixed_hook = vendor_hook.replace(problem, fixed) +        if fixed_hook != vendor_hook: +            spec_file.append('# Workaround for http://bugs.python.org/issue14443') +            spec_file.append('%define __os_install_post ' + fixed_hook + '\n') + +        # put locale summaries into spec file +        # XXX not supported for now (hard to put a dictionary +        # in a config file -- arg!) +        # for locale in self.summaries.keys(): +        #    spec_file.append('Summary(%s): %s' % (locale, +        #                                          self.summaries[locale])) + +        spec_file.extend( +            [ +                'Name: %{name}', +                'Version: %{version}', +                'Release: %{release}', +            ] +        ) + +        # XXX yuck! this filename is available from the "sdist" command, +        # but only after it has run: and we create the spec file before +        # running "sdist", in case of --spec-only. 
+        if self.use_bzip2: +            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2') +        else: +            spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz') + +        spec_file.extend( +            [ +                'License: ' + (self.distribution.get_license() or "UNKNOWN"), +                'Group: ' + self.group, +                'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', +                'Prefix: %{_prefix}', +            ] +        ) + +        if not self.force_arch: +            # noarch if no extension modules +            if not self.distribution.has_ext_modules(): +                spec_file.append('BuildArch: noarch') +        else: +            spec_file.append('BuildArch: %s' % self.force_arch) + +        for field in ( +            'Vendor', +            'Packager', +            'Provides', +            'Requires', +            'Conflicts', +            'Obsoletes', +        ): +            val = getattr(self, field.lower()) +            if isinstance(val, list): +                spec_file.append('{}: {}'.format(field, ' '.join(val))) +            elif val is not None: +                spec_file.append('{}: {}'.format(field, val)) + +        if self.distribution.get_url(): +            spec_file.append('Url: ' + self.distribution.get_url()) + +        if self.distribution_name: +            spec_file.append('Distribution: ' + self.distribution_name) + +        if self.build_requires: +            spec_file.append('BuildRequires: ' + ' '.join(self.build_requires)) + +        if self.icon: +            spec_file.append('Icon: ' + os.path.basename(self.icon)) + +        if self.no_autoreq: +            spec_file.append('AutoReq: 0') + +        spec_file.extend( +            [ +                '', +                '%description', +                self.distribution.get_long_description() or "", +            ] +        ) + +        # put locale descriptions into spec file +        # XXX again, suppressed because config file syntax doesn't +        # easily support this ;-( +        # for locale in self.descriptions.keys(): +        #    spec_file.extend([ +        #        '', +        #        '%description -l ' + locale, +        #        self.descriptions[locale], +        #        ]) + +        # rpm scripts +        # figure out default build script +        def_setup_call = "{} {}".format(self.python, os.path.basename(sys.argv[0])) +        def_build = "%s build" % def_setup_call +        if self.use_rpm_opt_flags: +            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build + +        # insert contents of files + +        # XXX this is kind of misleading: user-supplied options are files +        # that we open and interpolate into the spec file, but the defaults +        # are just text that we drop in as-is.  Hmmm. 
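With the defaults, self.python resolves to "python3" and the setup script is usually setup.py, so the strings assembled above work out to roughly the following (illustrative, assuming use_rpm_opt_flags is enabled):

    def_setup_call = "python3 setup.py"
    def_build = 'env CFLAGS="$RPM_OPT_FLAGS" python3 setup.py build'
    # the %install default assembled just below is then:
    # 'python3 setup.py install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES'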
+ +        install_cmd = ( +            '%s install -O1 --root=$RPM_BUILD_ROOT ' '--record=INSTALLED_FILES' +        ) % def_setup_call + +        script_options = [ +            ('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"), +            ('build', 'build_script', def_build), +            ('install', 'install_script', install_cmd), +            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"), +            ('verifyscript', 'verify_script', None), +            ('pre', 'pre_install', None), +            ('post', 'post_install', None), +            ('preun', 'pre_uninstall', None), +            ('postun', 'post_uninstall', None), +        ] + +        for (rpm_opt, attr, default) in script_options: +            # Insert contents of file referred to, if no file is referred to +            # use 'default' as contents of script +            val = getattr(self, attr) +            if val or default: +                spec_file.extend( +                    [ +                        '', +                        '%' + rpm_opt, +                    ] +                ) +                if val: +                    with open(val) as f: +                        spec_file.extend(f.read().split('\n')) +                else: +                    spec_file.append(default) + +        # files section +        spec_file.extend( +            [ +                '', +                '%files -f INSTALLED_FILES', +                '%defattr(-,root,root)', +            ] +        ) + +        if self.doc_files: +            spec_file.append('%doc ' + ' '.join(self.doc_files)) + +        if self.changelog: +            spec_file.extend( +                [ +                    '', +                    '%changelog', +                ] +            ) +            spec_file.extend(self.changelog) + +        return spec_file + +    def _format_changelog(self, changelog): +        """Format the changelog correctly and convert it to a list of strings""" +        if not changelog: +            return changelog +        new_changelog = [] +        for line in changelog.strip().split('\n'): +            line = line.strip() +            if line[0] == '*': +                new_changelog.extend(['', line]) +            elif line[0] == '-': +                new_changelog.append(line) +            else: +                new_changelog.append('  ' + line) + +        # strip trailing newline inserted by first changelog entry +        if not new_changelog[0]: +            del new_changelog[0] + +        return new_changelog diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build.py new file mode 100644 index 0000000..6d45341 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build.py @@ -0,0 +1,153 @@ +"""distutils.command.build + +Implements the Distutils 'build' command.""" + +import sys +import os +from distutils.core import Command +from distutils.errors import DistutilsOptionError +from distutils.util import get_platform + + +def show_compilers(): +    from distutils.ccompiler import show_compilers + +    show_compilers() + + +class build(Command): + +    description = "build everything needed to install" + +    user_options = [ +        ('build-base=', 'b', "base directory for build library"), +        ('build-purelib=', None, "build directory for platform-neutral distributions"), +        ('build-platlib=', None, "build directory for platform-specific distributions"), +      
  ( +            'build-lib=', +            None, +            "build directory for all distribution (defaults to either " +            + "build-purelib or build-platlib", +        ), +        ('build-scripts=', None, "build directory for scripts"), +        ('build-temp=', 't', "temporary build directory"), +        ( +            'plat-name=', +            'p', +            "platform name to build for, if supported " +            "(default: %s)" % get_platform(), +        ), +        ('compiler=', 'c', "specify the compiler type"), +        ('parallel=', 'j', "number of parallel build jobs"), +        ('debug', 'g', "compile extensions and libraries with debugging information"), +        ('force', 'f', "forcibly build everything (ignore file timestamps)"), +        ('executable=', 'e', "specify final destination interpreter path (build.py)"), +    ] + +    boolean_options = ['debug', 'force'] + +    help_options = [ +        ('help-compiler', None, "list available compilers", show_compilers), +    ] + +    def initialize_options(self): +        self.build_base = 'build' +        # these are decided only after 'build_base' has its final value +        # (unless overridden by the user or client) +        self.build_purelib = None +        self.build_platlib = None +        self.build_lib = None +        self.build_temp = None +        self.build_scripts = None +        self.compiler = None +        self.plat_name = None +        self.debug = None +        self.force = 0 +        self.executable = None +        self.parallel = None + +    def finalize_options(self):  # noqa: C901 +        if self.plat_name is None: +            self.plat_name = get_platform() +        else: +            # plat-name only supported for windows (other platforms are +            # supported via ./configure flags, if at all).  Avoid misleading +            # other platforms. +            if os.name != 'nt': +                raise DistutilsOptionError( +                    "--plat-name only supported on Windows (try " +                    "using './configure --help' on your platform)" +                ) + +        plat_specifier = ".{}-{}".format(self.plat_name, sys.implementation.cache_tag) + +        # Make it so Python 2.x and Python 2.x with --with-pydebug don't +        # share the same build directories. Doing so confuses the build +        # process for C modules +        if hasattr(sys, 'gettotalrefcount'): +            plat_specifier += '-pydebug' + +        # 'build_purelib' and 'build_platlib' just default to 'lib' and +        # 'lib.<plat>' under the base build directory.  We only use one of +        # them for a given distribution, though -- +        if self.build_purelib is None: +            self.build_purelib = os.path.join(self.build_base, 'lib') +        if self.build_platlib is None: +            self.build_platlib = os.path.join(self.build_base, 'lib' + plat_specifier) + +        # 'build_lib' is the actual directory that we will use for this +        # particular module distribution -- if user didn't supply it, pick +        # one of 'build_purelib' or 'build_platlib'. 
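The comment above describes how 'build_lib' falls back to either the pure-Python or the platform-specific build directory, and the code that follows makes that choice. A minimal standalone sketch of the naming scheme, with an invented 'has_ext_modules' flag standing in for the distribution query:

    import os
    import sys
    from distutils.util import get_platform

    def pick_build_lib(build_base, has_ext_modules):
        # Mirrors build.finalize_options(): pure distributions land in
        # build/lib, platform-specific ones in build/lib.<plat>-<cache_tag>.
        plat_specifier = ".{}-{}".format(get_platform(), sys.implementation.cache_tag)
        build_purelib = os.path.join(build_base, 'lib')
        build_platlib = os.path.join(build_base, 'lib' + plat_specifier)
        return build_platlib if has_ext_modules else build_purelib

    print(pick_build_lib('build', has_ext_modules=False))  # build/lib
    print(pick_build_lib('build', has_ext_modules=True))   # e.g. build/lib.linux-x86_64-cpython-311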
+        if self.build_lib is None: +            if self.distribution.has_ext_modules(): +                self.build_lib = self.build_platlib +            else: +                self.build_lib = self.build_purelib + +        # 'build_temp' -- temporary directory for compiler turds, +        # "build/temp.<plat>" +        if self.build_temp is None: +            self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier) +        if self.build_scripts is None: +            self.build_scripts = os.path.join( +                self.build_base, 'scripts-%d.%d' % sys.version_info[:2] +            ) + +        if self.executable is None and sys.executable: +            self.executable = os.path.normpath(sys.executable) + +        if isinstance(self.parallel, str): +            try: +                self.parallel = int(self.parallel) +            except ValueError: +                raise DistutilsOptionError("parallel should be an integer") + +    def run(self): +        # Run all relevant sub-commands.  This will be some subset of: +        #  - build_py      - pure Python modules +        #  - build_clib    - standalone C libraries +        #  - build_ext     - Python extensions +        #  - build_scripts - (Python) scripts +        for cmd_name in self.get_sub_commands(): +            self.run_command(cmd_name) + +    # -- Predicates for the sub-command list --------------------------- + +    def has_pure_modules(self): +        return self.distribution.has_pure_modules() + +    def has_c_libraries(self): +        return self.distribution.has_c_libraries() + +    def has_ext_modules(self): +        return self.distribution.has_ext_modules() + +    def has_scripts(self): +        return self.distribution.has_scripts() + +    sub_commands = [ +        ('build_py', has_pure_modules), +        ('build_clib', has_c_libraries), +        ('build_ext', has_ext_modules), +        ('build_scripts', has_scripts), +    ] diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_clib.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_clib.py new file mode 100644 index 0000000..50bb9bb --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_clib.py @@ -0,0 +1,208 @@ +"""distutils.command.build_clib + +Implements the Distutils 'build_clib' command, to build a C/C++ library +that is included in the module distribution and needed by an extension +module.""" + + +# XXX this module has *lots* of code ripped-off quite transparently from +# build_ext.py -- not surprisingly really, as the work required to build +# a static library from a collection of C source files is not really all +# that different from what's required to build a shared object file from +# a collection of C source files.  Nevertheless, I haven't done the +# necessary refactoring to account for the overlap in code between the +# two modules, mainly because a number of subtle details changed in the +# cut 'n paste.  Sigh. 
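The run() method and sub_commands table above only dispatch to the sub-commands whose predicate returns true for the current distribution. A rough, self-contained illustration of that selection pattern (the stand-in distribution class and its flags are invented for the example):

    # Hypothetical stand-in for a Distribution; only the flags that the
    # predicates above consult are modelled.
    class FakeDist:
        def __init__(self, pure=False, clibs=False, exts=False, scripts=False):
            self.pure, self.clibs, self.exts, self.scripts = pure, clibs, exts, scripts

    sub_commands = [
        ('build_py',      lambda d: d.pure),
        ('build_clib',    lambda d: d.clibs),
        ('build_ext',     lambda d: d.exts),
        ('build_scripts', lambda d: d.scripts),
    ]

    dist = FakeDist(pure=True, exts=True)
    # Same idea as Command.get_sub_commands(): keep the sub-commands whose
    # predicate passes, in order.
    print([name for name, pred in sub_commands if pred(dist)])
    # -> ['build_py', 'build_ext']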
+ +import os +from distutils.core import Command +from distutils.errors import DistutilsSetupError +from distutils.sysconfig import customize_compiler +from distutils import log + + +def show_compilers(): +    from distutils.ccompiler import show_compilers + +    show_compilers() + + +class build_clib(Command): + +    description = "build C/C++ libraries used by Python extensions" + +    user_options = [ +        ('build-clib=', 'b', "directory to build C/C++ libraries to"), +        ('build-temp=', 't', "directory to put temporary build by-products"), +        ('debug', 'g', "compile with debugging information"), +        ('force', 'f', "forcibly build everything (ignore file timestamps)"), +        ('compiler=', 'c', "specify the compiler type"), +    ] + +    boolean_options = ['debug', 'force'] + +    help_options = [ +        ('help-compiler', None, "list available compilers", show_compilers), +    ] + +    def initialize_options(self): +        self.build_clib = None +        self.build_temp = None + +        # List of libraries to build +        self.libraries = None + +        # Compilation options for all libraries +        self.include_dirs = None +        self.define = None +        self.undef = None +        self.debug = None +        self.force = 0 +        self.compiler = None + +    def finalize_options(self): +        # This might be confusing: both build-clib and build-temp default +        # to build-temp as defined by the "build" command.  This is because +        # I think that C libraries are really just temporary build +        # by-products, at least from the point of view of building Python +        # extensions -- but I want to keep my options open. +        self.set_undefined_options( +            'build', +            ('build_temp', 'build_clib'), +            ('build_temp', 'build_temp'), +            ('compiler', 'compiler'), +            ('debug', 'debug'), +            ('force', 'force'), +        ) + +        self.libraries = self.distribution.libraries +        if self.libraries: +            self.check_library_list(self.libraries) + +        if self.include_dirs is None: +            self.include_dirs = self.distribution.include_dirs or [] +        if isinstance(self.include_dirs, str): +            self.include_dirs = self.include_dirs.split(os.pathsep) + +        # XXX same as for build_ext -- what about 'self.define' and +        # 'self.undef' ? + +    def run(self): +        if not self.libraries: +            return + +        # Yech -- this is cut 'n pasted from build_ext.py! +        from distutils.ccompiler import new_compiler + +        self.compiler = new_compiler( +            compiler=self.compiler, dry_run=self.dry_run, force=self.force +        ) +        customize_compiler(self.compiler) + +        if self.include_dirs is not None: +            self.compiler.set_include_dirs(self.include_dirs) +        if self.define is not None: +            # 'define' option is a list of (name,value) tuples +            for (name, value) in self.define: +                self.compiler.define_macro(name, value) +        if self.undef is not None: +            for macro in self.undef: +                self.compiler.undefine_macro(macro) + +        self.build_libraries(self.libraries) + +    def check_library_list(self, libraries): +        """Ensure that the list of libraries is valid. + +        `library` is presumably provided as a command option 'libraries'. 
+        This method checks that it is a list of 2-tuples, where the tuples +        are (library_name, build_info_dict). + +        Raise DistutilsSetupError if the structure is invalid anywhere; +        just returns otherwise. +        """ +        if not isinstance(libraries, list): +            raise DistutilsSetupError("'libraries' option must be a list of tuples") + +        for lib in libraries: +            if not isinstance(lib, tuple) and len(lib) != 2: +                raise DistutilsSetupError("each element of 'libraries' must a 2-tuple") + +            name, build_info = lib + +            if not isinstance(name, str): +                raise DistutilsSetupError( +                    "first element of each tuple in 'libraries' " +                    "must be a string (the library name)" +                ) + +            if '/' in name or (os.sep != '/' and os.sep in name): +                raise DistutilsSetupError( +                    "bad library name '%s': " +                    "may not contain directory separators" % lib[0] +                ) + +            if not isinstance(build_info, dict): +                raise DistutilsSetupError( +                    "second element of each tuple in 'libraries' " +                    "must be a dictionary (build info)" +                ) + +    def get_library_names(self): +        # Assume the library list is valid -- 'check_library_list()' is +        # called from 'finalize_options()', so it should be! +        if not self.libraries: +            return None + +        lib_names = [] +        for (lib_name, build_info) in self.libraries: +            lib_names.append(lib_name) +        return lib_names + +    def get_source_files(self): +        self.check_library_list(self.libraries) +        filenames = [] +        for (lib_name, build_info) in self.libraries: +            sources = build_info.get('sources') +            if sources is None or not isinstance(sources, (list, tuple)): +                raise DistutilsSetupError( +                    "in 'libraries' option (library '%s'), " +                    "'sources' must be present and must be " +                    "a list of source filenames" % lib_name +                ) + +            filenames.extend(sources) +        return filenames + +    def build_libraries(self, libraries): +        for (lib_name, build_info) in libraries: +            sources = build_info.get('sources') +            if sources is None or not isinstance(sources, (list, tuple)): +                raise DistutilsSetupError( +                    "in 'libraries' option (library '%s'), " +                    "'sources' must be present and must be " +                    "a list of source filenames" % lib_name +                ) +            sources = list(sources) + +            log.info("building '%s' library", lib_name) + +            # First, compile the source code to object files in the library +            # directory.  (This should probably change to putting object +            # files in a temporary build directory.) +            macros = build_info.get('macros') +            include_dirs = build_info.get('include_dirs') +            objects = self.compiler.compile( +                sources, +                output_dir=self.build_temp, +                macros=macros, +                include_dirs=include_dirs, +                debug=self.debug, +            ) + +            # Now "link" the object files together into a static library. 
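For context on check_library_list() above: the 'libraries' option it validates is a list of (library_name, build_info) 2-tuples, where build_info carries at least a 'sources' list. A hedged sketch of such a value, with invented names and paths, and the intent of the checks reduced to plain asserts:

    # Invented example of the structure build_clib consumes.
    libraries = [
        (
            'sample',                              # becomes libsample.a on Unix
            {
                'sources': ['src/sample.c'],       # hypothetical paths
                'macros': [('NDEBUG', '1')],
                'include_dirs': ['include'],
            },
        ),
    ]

    # What check_library_list() is after, expressed as asserts.
    assert isinstance(libraries, list)
    for name, build_info in libraries:
        assert isinstance(name, str) and '/' not in name
        assert isinstance(build_info, dict) and 'sources' in build_info
    print('library list looks well-formed')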
+            # (On Unix at least, this isn't really linking -- it just +            # builds an archive.  Whatever.) +            self.compiler.create_static_lib( +                objects, lib_name, output_dir=self.build_clib, debug=self.debug +            ) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_ext.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_ext.py new file mode 100644 index 0000000..3c6cee7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_ext.py @@ -0,0 +1,787 @@ +"""distutils.command.build_ext + +Implements the Distutils 'build_ext' command, for building extension +modules (currently limited to C extensions, should accommodate C++ +extensions ASAP).""" + +import contextlib +import os +import re +import sys +from distutils.core import Command +from distutils.errors import ( +    DistutilsOptionError, +    DistutilsSetupError, +    CCompilerError, +    DistutilsError, +    CompileError, +    DistutilsPlatformError, +) +from distutils.sysconfig import customize_compiler, get_python_version +from distutils.sysconfig import get_config_h_filename +from distutils.dep_util import newer_group +from distutils.extension import Extension +from distutils.util import get_platform +from distutils import log +from . import py37compat + +from site import USER_BASE + +# An extension name is just a dot-separated list of Python NAMEs (ie. +# the same as a fully-qualified module name). +extension_name_re = re.compile(r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$') + + +def show_compilers(): +    from distutils.ccompiler import show_compilers + +    show_compilers() + + +class build_ext(Command): + +    description = "build C/C++ extensions (compile/link to build directory)" + +    # XXX thoughts on how to deal with complex command-line options like +    # these, i.e. how to make it so fancy_getopt can suck them off the +    # command line and make it look like setup.py defined the appropriate +    # lists of tuples of what-have-you. +    #   - each command needs a callback to process its command-line options +    #   - Command.__init__() needs access to its share of the whole +    #     command line (must ultimately come from +    #     Distribution.parse_command_line()) +    #   - it then calls the current command class' option-parsing +    #     callback to deal with weird options like -D, which have to +    #     parse the option text and churn out some custom data +    #     structure +    #   - that data structure (in this case, a list of 2-tuples) +    #     will then be present in the command object by the time +    #     we get to finalize_options() (i.e. 
the constructor +    #     takes care of both command-line and client options +    #     in between initialize_options() and finalize_options()) + +    sep_by = " (separated by '%s')" % os.pathsep +    user_options = [ +        ('build-lib=', 'b', "directory for compiled extension modules"), +        ('build-temp=', 't', "directory for temporary files (build by-products)"), +        ( +            'plat-name=', +            'p', +            "platform name to cross-compile for, if supported " +            "(default: %s)" % get_platform(), +        ), +        ( +            'inplace', +            'i', +            "ignore build-lib and put compiled extensions into the source " +            + "directory alongside your pure Python modules", +        ), +        ( +            'include-dirs=', +            'I', +            "list of directories to search for header files" + sep_by, +        ), +        ('define=', 'D', "C preprocessor macros to define"), +        ('undef=', 'U', "C preprocessor macros to undefine"), +        ('libraries=', 'l', "external C libraries to link with"), +        ( +            'library-dirs=', +            'L', +            "directories to search for external C libraries" + sep_by, +        ), +        ('rpath=', 'R', "directories to search for shared C libraries at runtime"), +        ('link-objects=', 'O', "extra explicit link objects to include in the link"), +        ('debug', 'g', "compile/link with debugging information"), +        ('force', 'f', "forcibly build everything (ignore file timestamps)"), +        ('compiler=', 'c', "specify the compiler type"), +        ('parallel=', 'j', "number of parallel build jobs"), +        ('swig-cpp', None, "make SWIG create C++ files (default is C)"), +        ('swig-opts=', None, "list of SWIG command line options"), +        ('swig=', None, "path to the SWIG executable"), +        ('user', None, "add user include, library and rpath"), +    ] + +    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp', 'user'] + +    help_options = [ +        ('help-compiler', None, "list available compilers", show_compilers), +    ] + +    def initialize_options(self): +        self.extensions = None +        self.build_lib = None +        self.plat_name = None +        self.build_temp = None +        self.inplace = 0 +        self.package = None + +        self.include_dirs = None +        self.define = None +        self.undef = None +        self.libraries = None +        self.library_dirs = None +        self.rpath = None +        self.link_objects = None +        self.debug = None +        self.force = None +        self.compiler = None +        self.swig = None +        self.swig_cpp = None +        self.swig_opts = None +        self.user = None +        self.parallel = None + +    def finalize_options(self):  # noqa: C901 +        from distutils import sysconfig + +        self.set_undefined_options( +            'build', +            ('build_lib', 'build_lib'), +            ('build_temp', 'build_temp'), +            ('compiler', 'compiler'), +            ('debug', 'debug'), +            ('force', 'force'), +            ('parallel', 'parallel'), +            ('plat_name', 'plat_name'), +        ) + +        if self.package is None: +            self.package = self.distribution.ext_package + +        self.extensions = self.distribution.ext_modules + +        # Make sure Python's include directories (for Python.h, pyconfig.h, +        # etc.) are in the include search path. 
+        py_include = sysconfig.get_python_inc() +        plat_py_include = sysconfig.get_python_inc(plat_specific=1) +        if self.include_dirs is None: +            self.include_dirs = self.distribution.include_dirs or [] +        if isinstance(self.include_dirs, str): +            self.include_dirs = self.include_dirs.split(os.pathsep) + +        # If in a virtualenv, add its include directory +        # Issue 16116 +        if sys.exec_prefix != sys.base_exec_prefix: +            self.include_dirs.append(os.path.join(sys.exec_prefix, 'include')) + +        # Put the Python "system" include dir at the end, so that +        # any local include dirs take precedence. +        self.include_dirs.extend(py_include.split(os.path.pathsep)) +        if plat_py_include != py_include: +            self.include_dirs.extend(plat_py_include.split(os.path.pathsep)) + +        self.ensure_string_list('libraries') +        self.ensure_string_list('link_objects') + +        # Life is easier if we're not forever checking for None, so +        # simplify these options to empty lists if unset +        if self.libraries is None: +            self.libraries = [] +        if self.library_dirs is None: +            self.library_dirs = [] +        elif isinstance(self.library_dirs, str): +            self.library_dirs = self.library_dirs.split(os.pathsep) + +        if self.rpath is None: +            self.rpath = [] +        elif isinstance(self.rpath, str): +            self.rpath = self.rpath.split(os.pathsep) + +        # for extensions under windows use different directories +        # for Release and Debug builds. +        # also Python's library directory must be appended to library_dirs +        if os.name == 'nt': +            # the 'libs' directory is for binary installs - we assume that +            # must be the *native* platform.  But we don't really support +            # cross-compiling via a binary install anyway, so we let it go. 
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) +            if sys.base_exec_prefix != sys.prefix:  # Issue 16116 +                self.library_dirs.append(os.path.join(sys.base_exec_prefix, 'libs')) +            if self.debug: +                self.build_temp = os.path.join(self.build_temp, "Debug") +            else: +                self.build_temp = os.path.join(self.build_temp, "Release") + +            # Append the source distribution include and library directories, +            # this allows distutils on windows to work in the source tree +            self.include_dirs.append(os.path.dirname(get_config_h_filename())) +            self.library_dirs.append(sys.base_exec_prefix) + +            # Use the .lib files for the correct architecture +            if self.plat_name == 'win32': +                suffix = 'win32' +            else: +                # win-amd64 +                suffix = self.plat_name[4:] +            new_lib = os.path.join(sys.exec_prefix, 'PCbuild') +            if suffix: +                new_lib = os.path.join(new_lib, suffix) +            self.library_dirs.append(new_lib) + +        # For extensions under Cygwin, Python's library directory must be +        # appended to library_dirs +        if sys.platform[:6] == 'cygwin': +            if not sysconfig.python_build: +                # building third party extensions +                self.library_dirs.append( +                    os.path.join( +                        sys.prefix, "lib", "python" + get_python_version(), "config" +                    ) +                ) +            else: +                # building python standard extensions +                self.library_dirs.append('.') + +        # For building extensions with a shared Python library, +        # Python's library directory must be appended to library_dirs +        # See Issues: #1600860, #4366 +        if sysconfig.get_config_var('Py_ENABLE_SHARED'): +            if not sysconfig.python_build: +                # building third party extensions +                self.library_dirs.append(sysconfig.get_config_var('LIBDIR')) +            else: +                # building python standard extensions +                self.library_dirs.append('.') + +        # The argument parsing will result in self.define being a string, but +        # it has to be a list of 2-tuples.  All the preprocessor symbols +        # specified by the 'define' option will be set to '1'.  Multiple +        # symbols can be separated with commas. + +        if self.define: +            defines = self.define.split(',') +            self.define = [(symbol, '1') for symbol in defines] + +        # The option for macros to undefine is also a string from the +        # option parsing, but has to be a list.  Multiple symbols can also +        # be separated with commas here. 
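To make the comments above concrete: the --define value arrives from option parsing as a comma-separated string and is turned into (name, '1') pairs, while --undef is split into a plain list of names by the code that follows. A small standalone illustration with arbitrary example strings:

    # '--define=FOO,BAR' arrives as the string 'FOO,BAR'.
    define = 'FOO,BAR'
    define_macros = [(symbol, '1') for symbol in define.split(',')]
    print(define_macros)   # [('FOO', '1'), ('BAR', '1')]

    # '--undef=BAZ,QUX' just becomes a list of names to undefine.
    undef = 'BAZ,QUX'
    print(undef.split(','))   # ['BAZ', 'QUX']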
+        if self.undef: +            self.undef = self.undef.split(',') + +        if self.swig_opts is None: +            self.swig_opts = [] +        else: +            self.swig_opts = self.swig_opts.split(' ') + +        # Finally add the user include and library directories if requested +        if self.user: +            user_include = os.path.join(USER_BASE, "include") +            user_lib = os.path.join(USER_BASE, "lib") +            if os.path.isdir(user_include): +                self.include_dirs.append(user_include) +            if os.path.isdir(user_lib): +                self.library_dirs.append(user_lib) +                self.rpath.append(user_lib) + +        if isinstance(self.parallel, str): +            try: +                self.parallel = int(self.parallel) +            except ValueError: +                raise DistutilsOptionError("parallel should be an integer") + +    def run(self):  # noqa: C901 +        from distutils.ccompiler import new_compiler + +        # 'self.extensions', as supplied by setup.py, is a list of +        # Extension instances.  See the documentation for Extension (in +        # distutils.extension) for details. +        # +        # For backwards compatibility with Distutils 0.8.2 and earlier, we +        # also allow the 'extensions' list to be a list of tuples: +        #    (ext_name, build_info) +        # where build_info is a dictionary containing everything that +        # Extension instances do except the name, with a few things being +        # differently named.  We convert these 2-tuples to Extension +        # instances as needed. + +        if not self.extensions: +            return + +        # If we were asked to build any C/C++ libraries, make sure that the +        # directory where we put them is in the library search path for +        # linking extensions. +        if self.distribution.has_c_libraries(): +            build_clib = self.get_finalized_command('build_clib') +            self.libraries.extend(build_clib.get_library_names() or []) +            self.library_dirs.append(build_clib.build_clib) + +        # Setup the CCompiler object that we'll use to do all the +        # compiling and linking +        self.compiler = new_compiler( +            compiler=self.compiler, +            verbose=self.verbose, +            dry_run=self.dry_run, +            force=self.force, +        ) +        customize_compiler(self.compiler) +        # If we are cross-compiling, init the compiler now (if we are not +        # cross-compiling, init would not hurt, but people may rely on +        # late initialization of compiler even if they shouldn't...) +        if os.name == 'nt' and self.plat_name != get_platform(): +            self.compiler.initialize(self.plat_name) + +        # And make sure that any compile/link-related options (which might +        # come from the command-line or from the setup script) are set in +        # that CCompiler object -- that way, they automatically apply to +        # all compiling and linking done here. 
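As background for run() above: a CCompiler is created with new_compiler(), customized from sysconfig, and then fed the collected options, which the code below does field by field. A minimal illustration of that pattern (no compilation is attempted; the include path and macro are invented):

    from distutils.ccompiler import new_compiler
    from distutils.sysconfig import customize_compiler

    compiler = new_compiler()       # default compiler class for this platform
    customize_compiler(compiler)    # apply CC/CFLAGS etc. from sysconfig
    compiler.set_include_dirs(['include'])       # hypothetical include path
    compiler.define_macro('EXAMPLE_MACRO', '1')  # same calls run() makes below
    print(type(compiler).__name__)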
+        if self.include_dirs is not None: +            self.compiler.set_include_dirs(self.include_dirs) +        if self.define is not None: +            # 'define' option is a list of (name,value) tuples +            for (name, value) in self.define: +                self.compiler.define_macro(name, value) +        if self.undef is not None: +            for macro in self.undef: +                self.compiler.undefine_macro(macro) +        if self.libraries is not None: +            self.compiler.set_libraries(self.libraries) +        if self.library_dirs is not None: +            self.compiler.set_library_dirs(self.library_dirs) +        if self.rpath is not None: +            self.compiler.set_runtime_library_dirs(self.rpath) +        if self.link_objects is not None: +            self.compiler.set_link_objects(self.link_objects) + +        # Now actually compile and link everything. +        self.build_extensions() + +    def check_extensions_list(self, extensions):  # noqa: C901 +        """Ensure that the list of extensions (presumably provided as a +        command option 'extensions') is valid, i.e. it is a list of +        Extension objects.  We also support the old-style list of 2-tuples, +        where the tuples are (ext_name, build_info), which are converted to +        Extension instances here. + +        Raise DistutilsSetupError if the structure is invalid anywhere; +        just returns otherwise. +        """ +        if not isinstance(extensions, list): +            raise DistutilsSetupError( +                "'ext_modules' option must be a list of Extension instances" +            ) + +        for i, ext in enumerate(extensions): +            if isinstance(ext, Extension): +                continue  # OK! (assume type-checking done +                # by Extension constructor) + +            if not isinstance(ext, tuple) or len(ext) != 2: +                raise DistutilsSetupError( +                    "each element of 'ext_modules' option must be an " +                    "Extension instance or 2-tuple" +                ) + +            ext_name, build_info = ext + +            log.warn( +                "old-style (ext_name, build_info) tuple found in " +                "ext_modules for extension '%s' " +                "-- please convert to Extension instance", +                ext_name, +            ) + +            if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)): +                raise DistutilsSetupError( +                    "first element of each tuple in 'ext_modules' " +                    "must be the extension name (a string)" +                ) + +            if not isinstance(build_info, dict): +                raise DistutilsSetupError( +                    "second element of each tuple in 'ext_modules' " +                    "must be a dictionary (build info)" +                ) + +            # OK, the (ext_name, build_info) dict is type-safe: convert it +            # to an Extension instance. +            ext = Extension(ext_name, build_info['sources']) + +            # Easy stuff: one-to-one mapping from dict elements to +            # instance attributes. 
+            for key in ( +                'include_dirs', +                'library_dirs', +                'libraries', +                'extra_objects', +                'extra_compile_args', +                'extra_link_args', +            ): +                val = build_info.get(key) +                if val is not None: +                    setattr(ext, key, val) + +            # Medium-easy stuff: same syntax/semantics, different names. +            ext.runtime_library_dirs = build_info.get('rpath') +            if 'def_file' in build_info: +                log.warn("'def_file' element of build info dict " "no longer supported") + +            # Non-trivial stuff: 'macros' split into 'define_macros' +            # and 'undef_macros'. +            macros = build_info.get('macros') +            if macros: +                ext.define_macros = [] +                ext.undef_macros = [] +                for macro in macros: +                    if not (isinstance(macro, tuple) and len(macro) in (1, 2)): +                        raise DistutilsSetupError( +                            "'macros' element of build info dict " +                            "must be 1- or 2-tuple" +                        ) +                    if len(macro) == 1: +                        ext.undef_macros.append(macro[0]) +                    elif len(macro) == 2: +                        ext.define_macros.append(macro) + +            extensions[i] = ext + +    def get_source_files(self): +        self.check_extensions_list(self.extensions) +        filenames = [] + +        # Wouldn't it be neat if we knew the names of header files too... +        for ext in self.extensions: +            filenames.extend(ext.sources) +        return filenames + +    def get_outputs(self): +        # Sanity check the 'extensions' list -- can't assume this is being +        # done in the same run as a 'build_extensions()' call (in fact, we +        # can probably assume that it *isn't*!). +        self.check_extensions_list(self.extensions) + +        # And build the list of output (built) filenames.  Note that this +        # ignores the 'inplace' flag, and assumes everything goes in the +        # "build" tree. 
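check_extensions_list() above still accepts the legacy (ext_name, build_info) 2-tuples and rewrites them into Extension instances in place. A sketch of the two equivalent spellings, with invented module and file names:

    from distutils.extension import Extension

    # Legacy form, tolerated with a warning:
    old_style = ('pkg._fast', {'sources': ['pkg/_fast.c'],      # hypothetical path
                               'include_dirs': ['include'],
                               'macros': [('WITH_FOO', '1'), ('WITH_BAR',)]})

    # Roughly what the conversion above produces:
    name, build_info = old_style
    ext = Extension(
        name,
        build_info['sources'],
        include_dirs=build_info['include_dirs'],
        define_macros=[m for m in build_info['macros'] if len(m) == 2],
        undef_macros=[m[0] for m in build_info['macros'] if len(m) == 1],
    )
    print(ext.name, ext.sources, ext.define_macros, ext.undef_macros)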
+        outputs = [] +        for ext in self.extensions: +            outputs.append(self.get_ext_fullpath(ext.name)) +        return outputs + +    def build_extensions(self): +        # First, sanity-check the 'extensions' list +        self.check_extensions_list(self.extensions) +        if self.parallel: +            self._build_extensions_parallel() +        else: +            self._build_extensions_serial() + +    def _build_extensions_parallel(self): +        workers = self.parallel +        if self.parallel is True: +            workers = os.cpu_count()  # may return None +        try: +            from concurrent.futures import ThreadPoolExecutor +        except ImportError: +            workers = None + +        if workers is None: +            self._build_extensions_serial() +            return + +        with ThreadPoolExecutor(max_workers=workers) as executor: +            futures = [ +                executor.submit(self.build_extension, ext) for ext in self.extensions +            ] +            for ext, fut in zip(self.extensions, futures): +                with self._filter_build_errors(ext): +                    fut.result() + +    def _build_extensions_serial(self): +        for ext in self.extensions: +            with self._filter_build_errors(ext): +                self.build_extension(ext) + +    @contextlib.contextmanager +    def _filter_build_errors(self, ext): +        try: +            yield +        except (CCompilerError, DistutilsError, CompileError) as e: +            if not ext.optional: +                raise +            self.warn('building extension "{}" failed: {}'.format(ext.name, e)) + +    def build_extension(self, ext): +        sources = ext.sources +        if sources is None or not isinstance(sources, (list, tuple)): +            raise DistutilsSetupError( +                "in 'ext_modules' option (extension '%s'), " +                "'sources' must be present and must be " +                "a list of source filenames" % ext.name +            ) +        # sort to make the resulting .so file build reproducible +        sources = sorted(sources) + +        ext_path = self.get_ext_fullpath(ext.name) +        depends = sources + ext.depends +        if not (self.force or newer_group(depends, ext_path, 'newer')): +            log.debug("skipping '%s' extension (up-to-date)", ext.name) +            return +        else: +            log.info("building '%s' extension", ext.name) + +        # First, scan the sources for SWIG definition files (.i), run +        # SWIG on 'em to create .c files, and modify the sources list +        # accordingly. +        sources = self.swig_sources(sources, ext) + +        # Next, compile the source code to object files. + +        # XXX not honouring 'define_macros' or 'undef_macros' -- the +        # CCompiler API needs to change to accommodate this, and I +        # want to do one thing at a time! + +        # Two possible sources for extra compiler arguments: +        #   - 'extra_compile_args' in Extension object +        #   - CFLAGS environment variable (not particularly +        #     elegant, but people seem to expect it and I +        #     guess it's useful) +        # The environment variable should take precedence, and +        # any sensible compiler will give precedence to later +        # command line args.  
Hence we combine them in order: +        extra_args = ext.extra_compile_args or [] + +        macros = ext.define_macros[:] +        for undef in ext.undef_macros: +            macros.append((undef,)) + +        objects = self.compiler.compile( +            sources, +            output_dir=self.build_temp, +            macros=macros, +            include_dirs=ext.include_dirs, +            debug=self.debug, +            extra_postargs=extra_args, +            depends=ext.depends, +        ) + +        # XXX outdated variable, kept here in case third-part code +        # needs it. +        self._built_objects = objects[:] + +        # Now link the object files together into a "shared object" -- +        # of course, first we have to figure out all the other things +        # that go into the mix. +        if ext.extra_objects: +            objects.extend(ext.extra_objects) +        extra_args = ext.extra_link_args or [] + +        # Detect target language, if not provided +        language = ext.language or self.compiler.detect_language(sources) + +        self.compiler.link_shared_object( +            objects, +            ext_path, +            libraries=self.get_libraries(ext), +            library_dirs=ext.library_dirs, +            runtime_library_dirs=ext.runtime_library_dirs, +            extra_postargs=extra_args, +            export_symbols=self.get_export_symbols(ext), +            debug=self.debug, +            build_temp=self.build_temp, +            target_lang=language, +        ) + +    def swig_sources(self, sources, extension): +        """Walk the list of source files in 'sources', looking for SWIG +        interface (.i) files.  Run SWIG on all that are found, and +        return a modified 'sources' list with SWIG source files replaced +        by the generated C (or C++) files. +        """ +        new_sources = [] +        swig_sources = [] +        swig_targets = {} + +        # XXX this drops generated C/C++ files into the source tree, which +        # is fine for developers who want to distribute the generated +        # source -- but there should be an option to put SWIG output in +        # the temp dir. 
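The swig_sources() method whose docstring appears above rewrites the source list so that each SWIG interface file (.i) is replaced by the wrapper file SWIG will generate; the loop that follows performs the renaming. A toy version of that mapping step, with made-up file names:

    import os

    sources = ['pkg/core.c', 'pkg/iface.i', 'pkg/util.c']   # hypothetical inputs
    target_ext = '.c'              # '.cpp' when C++ output is requested

    new_sources, swig_targets = [], {}
    for source in sources:
        base, ext = os.path.splitext(source)
        if ext == '.i':                           # SWIG interface file
            wrapper = base + '_wrap' + target_ext
            new_sources.append(wrapper)
            swig_targets[source] = wrapper        # later: swig -python -o wrapper source
        else:
            new_sources.append(source)

    print(new_sources)    # ['pkg/core.c', 'pkg/iface_wrap.c', 'pkg/util.c']
    print(swig_targets)   # {'pkg/iface.i': 'pkg/iface_wrap.c'}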
+ +        if self.swig_cpp: +            log.warn("--swig-cpp is deprecated - use --swig-opts=-c++") + +        if ( +            self.swig_cpp +            or ('-c++' in self.swig_opts) +            or ('-c++' in extension.swig_opts) +        ): +            target_ext = '.cpp' +        else: +            target_ext = '.c' + +        for source in sources: +            (base, ext) = os.path.splitext(source) +            if ext == ".i":  # SWIG interface file +                new_sources.append(base + '_wrap' + target_ext) +                swig_sources.append(source) +                swig_targets[source] = new_sources[-1] +            else: +                new_sources.append(source) + +        if not swig_sources: +            return new_sources + +        swig = self.swig or self.find_swig() +        swig_cmd = [swig, "-python"] +        swig_cmd.extend(self.swig_opts) +        if self.swig_cpp: +            swig_cmd.append("-c++") + +        # Do not override commandline arguments +        if not self.swig_opts: +            for o in extension.swig_opts: +                swig_cmd.append(o) + +        for source in swig_sources: +            target = swig_targets[source] +            log.info("swigging %s to %s", source, target) +            self.spawn(swig_cmd + ["-o", target, source]) + +        return new_sources + +    def find_swig(self): +        """Return the name of the SWIG executable.  On Unix, this is +        just "swig" -- it should be in the PATH.  Tries a bit harder on +        Windows. +        """ +        if os.name == "posix": +            return "swig" +        elif os.name == "nt": +            # Look for SWIG in its standard installation directory on +            # Windows (or so I presume!).  If we find it there, great; +            # if not, act like Unix and assume it's in the PATH. +            for vers in ("1.3", "1.2", "1.1"): +                fn = os.path.join("c:\\swig%s" % vers, "swig.exe") +                if os.path.isfile(fn): +                    return fn +            else: +                return "swig.exe" +        else: +            raise DistutilsPlatformError( +                "I don't know how to find (much less run) SWIG " +                "on platform '%s'" % os.name +            ) + +    # -- Name generators ----------------------------------------------- +    # (extension names, filenames, whatever) +    def get_ext_fullpath(self, ext_name): +        """Returns the path of the filename for a given extension. + +        The file is located in `build_lib` or directly in the package +        (inplace option). +        """ +        fullname = self.get_ext_fullname(ext_name) +        modpath = fullname.split('.') +        filename = self.get_ext_filename(modpath[-1]) + +        if not self.inplace: +            # no further work needed +            # returning : +            #   build_dir/package/path/filename +            filename = os.path.join(*modpath[:-1] + [filename]) +            return os.path.join(self.build_lib, filename) + +        # the inplace option requires to find the package directory +        # using the build_py command for that +        package = '.'.join(modpath[0:-1]) +        build_py = self.get_finalized_command('build_py') +        package_dir = os.path.abspath(build_py.get_package_dir(package)) + +        # returning +        #   package_dir/filename +        return os.path.join(package_dir, filename) + +    def get_ext_fullname(self, ext_name): +        """Returns the fullname of a given extension name. 
+ +        Adds the `package.` prefix""" +        if self.package is None: +            return ext_name +        else: +            return self.package + '.' + ext_name + +    def get_ext_filename(self, ext_name): +        r"""Convert the name of an extension (eg. "foo.bar") into the name +        of the file from which it will be loaded (eg. "foo/bar.so", or +        "foo\bar.pyd"). +        """ +        from distutils.sysconfig import get_config_var + +        ext_path = ext_name.split('.') +        ext_suffix = get_config_var('EXT_SUFFIX') +        return os.path.join(*ext_path) + ext_suffix + +    def get_export_symbols(self, ext): +        """Return the list of symbols that a shared extension has to +        export.  This either uses 'ext.export_symbols' or, if it's not +        provided, "PyInit_" + module_name.  Only relevant on Windows, where +        the .pyd file (DLL) must export the module "PyInit_" function. +        """ +        name = ext.name.split('.')[-1] +        try: +            # Unicode module name support as defined in PEP-489 +            # https://www.python.org/dev/peps/pep-0489/#export-hook-name +            name.encode('ascii') +        except UnicodeEncodeError: +            suffix = 'U_' + name.encode('punycode').replace(b'-', b'_').decode('ascii') +        else: +            suffix = "_" + name + +        initfunc_name = "PyInit" + suffix +        if initfunc_name not in ext.export_symbols: +            ext.export_symbols.append(initfunc_name) +        return ext.export_symbols + +    def get_libraries(self, ext):  # noqa: C901 +        """Return the list of libraries to link against when building a +        shared extension.  On most platforms, this is just 'ext.libraries'; +        on Windows, we add the Python library (eg. python20.dll). +        """ +        # The python library is always needed on Windows.  For MSVC, this +        # is redundant, since the library is mentioned in a pragma in +        # pyconfig.h that MSVC groks.  The other Windows compilers all seem +        # to need it mentioned explicitly, though, so that's what we do. +        # Append '_d' to the python import library on debug builds. +        if sys.platform == "win32": +            from distutils._msvccompiler import MSVCCompiler + +            if not isinstance(self.compiler, MSVCCompiler): +                template = "python%d%d" +                if self.debug: +                    template = template + '_d' +                pythonlib = template % ( +                    sys.hexversion >> 24, +                    (sys.hexversion >> 16) & 0xFF, +                ) +                # don't extend ext.libraries, it may be shared with other +                # extensions, it is a reference to the original list +                return ext.libraries + [pythonlib] +        else: +            # On Android only the main executable and LD_PRELOADs are considered +            # to be RTLD_GLOBAL, all the dependencies of the main executable +            # remain RTLD_LOCAL and so the shared libraries must be linked with +            # libpython when python is built with a shared python library (issue +            # bpo-21536). +            # On Cygwin (and if required, other POSIX-like platforms based on +            # Windows like MinGW) it is simply necessary that all symbols in +            # shared libraries are resolved at link time. 
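Two of the name helpers above are easy to show in isolation: get_ext_filename() appends the interpreter's EXT_SUFFIX to the dotted-name path, and get_export_symbols() derives the PyInit_ symbol a Windows .pyd must export (the punycode branch for non-ASCII names is omitted here). A small sketch using an invented module name:

    import os
    import sysconfig

    def ext_filename(ext_name):
        # dotted name -> relative path plus EXT_SUFFIX,
        # e.g. '.cpython-311-x86_64-linux-gnu.so'
        return os.path.join(*ext_name.split('.')) + sysconfig.get_config_var('EXT_SUFFIX')

    def export_symbol(ext_name):
        # 'PyInit_' plus the last component of the module name
        return 'PyInit_' + ext_name.split('.')[-1]

    print(ext_filename('pkg.fast'))    # e.g. pkg/fast.cpython-311-x86_64-linux-gnu.so
    print(export_symbol('pkg.fast'))   # PyInit_fast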
+            from distutils.sysconfig import get_config_var + +            link_libpython = False +            if get_config_var('Py_ENABLE_SHARED'): +                # A native build on an Android device or on Cygwin +                if hasattr(sys, 'getandroidapilevel'): +                    link_libpython = True +                elif sys.platform == 'cygwin': +                    link_libpython = True +                elif '_PYTHON_HOST_PLATFORM' in os.environ: +                    # We are cross-compiling for one of the relevant platforms +                    if get_config_var('ANDROID_API_LEVEL') != 0: +                        link_libpython = True +                    elif get_config_var('MACHDEP') == 'cygwin': +                        link_libpython = True + +            if link_libpython: +                ldversion = get_config_var('LDVERSION') +                return ext.libraries + ['python' + ldversion] + +        return ext.libraries + py37compat.pythonlib() diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_py.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_py.py new file mode 100644 index 0000000..47c6158 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_py.py @@ -0,0 +1,407 @@ +"""distutils.command.build_py + +Implements the Distutils 'build_py' command.""" + +import os +import importlib.util +import sys +import glob + +from distutils.core import Command +from distutils.errors import DistutilsOptionError, DistutilsFileError +from distutils.util import convert_path +from distutils import log + + +class build_py(Command): + +    description = "\"build\" pure Python modules (copy to build directory)" + +    user_options = [ +        ('build-lib=', 'd', "directory to \"build\" (copy) to"), +        ('compile', 'c', "compile .py to .pyc"), +        ('no-compile', None, "don't compile .py files [default]"), +        ( +            'optimize=', +            'O', +            "also compile with optimization: -O1 for \"python -O\", " +            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", +        ), +        ('force', 'f', "forcibly build everything (ignore file timestamps)"), +    ] + +    boolean_options = ['compile', 'force'] +    negative_opt = {'no-compile': 'compile'} + +    def initialize_options(self): +        self.build_lib = None +        self.py_modules = None +        self.package = None +        self.package_data = None +        self.package_dir = None +        self.compile = 0 +        self.optimize = 0 +        self.force = None + +    def finalize_options(self): +        self.set_undefined_options( +            'build', ('build_lib', 'build_lib'), ('force', 'force') +        ) + +        # Get the distribution options that are aliases for build_py +        # options -- list of packages and list of modules. +        self.packages = self.distribution.packages +        self.py_modules = self.distribution.py_modules +        self.package_data = self.distribution.package_data +        self.package_dir = {} +        if self.distribution.package_dir: +            for name, path in self.distribution.package_dir.items(): +                self.package_dir[name] = convert_path(path) +        self.data_files = self.get_data_files() + +        # Ick, copied straight from install_lib.py (fancy_getopt needs a +        # type system!  Hell, *everything* needs a type system!!!) 
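Following the comment above, build_py.finalize_options() coerces the --optimize value, which arrives as a string from option parsing, into an integer in the 0-2 range; the code that follows does exactly that. A tiny standalone equivalent:

    from distutils.errors import DistutilsOptionError

    def coerce_optimize(value):
        # Accept only 0, 1 or 2, as the check below does.
        if not isinstance(value, int):
            try:
                value = int(value)
                assert 0 <= value <= 2
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")
        return value

    print(coerce_optimize('2'))   # 2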
+        if not isinstance(self.optimize, int): +            try: +                self.optimize = int(self.optimize) +                assert 0 <= self.optimize <= 2 +            except (ValueError, AssertionError): +                raise DistutilsOptionError("optimize must be 0, 1, or 2") + +    def run(self): +        # XXX copy_file by default preserves atime and mtime.  IMHO this is +        # the right thing to do, but perhaps it should be an option -- in +        # particular, a site administrator might want installed files to +        # reflect the time of installation rather than the last +        # modification time before the installed release. + +        # XXX copy_file by default preserves mode, which appears to be the +        # wrong thing to do: if a file is read-only in the working +        # directory, we want it to be installed read/write so that the next +        # installation of the same module distribution can overwrite it +        # without problems.  (This might be a Unix-specific issue.)  Thus +        # we turn off 'preserve_mode' when copying to the build directory, +        # since the build directory is supposed to be exactly what the +        # installation will look like (ie. we preserve mode when +        # installing). + +        # Two options control which modules will be installed: 'packages' +        # and 'py_modules'.  The former lets us work with whole packages, not +        # specifying individual modules at all; the latter is for +        # specifying modules one-at-a-time. + +        if self.py_modules: +            self.build_modules() +        if self.packages: +            self.build_packages() +            self.build_package_data() + +        self.byte_compile(self.get_outputs(include_bytecode=0)) + +    def get_data_files(self): +        """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" +        data = [] +        if not self.packages: +            return data +        for package in self.packages: +            # Locate package source directory +            src_dir = self.get_package_dir(package) + +            # Compute package build directory +            build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + +            # Length of path to strip from found files +            plen = 0 +            if src_dir: +                plen = len(src_dir) + 1 + +            # Strip directory from globbed filenames +            filenames = [file[plen:] for file in self.find_data_files(package, src_dir)] +            data.append((package, src_dir, build_dir, filenames)) +        return data + +    def find_data_files(self, package, src_dir): +        """Return filenames for package's data files in 'src_dir'""" +        globs = self.package_data.get('', []) + self.package_data.get(package, []) +        files = [] +        for pattern in globs: +            # Each pattern has to be converted to a platform-specific path +            filelist = glob.glob( +                os.path.join(glob.escape(src_dir), convert_path(pattern)) +            ) +            # Files that match more than one pattern are only added once +            files.extend( +                [fn for fn in filelist if fn not in files and os.path.isfile(fn)] +            ) +        return files + +    def build_package_data(self): +        """Copy data files into build directory""" +        for package, src_dir, build_dir, filenames in self.data_files: +            for filename in filenames: +                target = os.path.join(build_dir, filename) +      
          self.mkpath(os.path.dirname(target)) +                self.copy_file( +                    os.path.join(src_dir, filename), target, preserve_mode=False +                ) + +    def get_package_dir(self, package): +        """Return the directory, relative to the top of the source +        distribution, where package 'package' should be found +        (at least according to the 'package_dir' option, if any).""" +        path = package.split('.') + +        if not self.package_dir: +            if path: +                return os.path.join(*path) +            else: +                return '' +        else: +            tail = [] +            while path: +                try: +                    pdir = self.package_dir['.'.join(path)] +                except KeyError: +                    tail.insert(0, path[-1]) +                    del path[-1] +                else: +                    tail.insert(0, pdir) +                    return os.path.join(*tail) +            else: +                # Oops, got all the way through 'path' without finding a +                # match in package_dir.  If package_dir defines a directory +                # for the root (nameless) package, then fallback on it; +                # otherwise, we might as well have not consulted +                # package_dir at all, as we just use the directory implied +                # by 'tail' (which should be the same as the original value +                # of 'path' at this point). +                pdir = self.package_dir.get('') +                if pdir is not None: +                    tail.insert(0, pdir) + +                if tail: +                    return os.path.join(*tail) +                else: +                    return '' + +    def check_package(self, package, package_dir): +        # Empty dir name means current directory, which we can probably +        # assume exists.  Also, os.path.exists and isdir don't know about +        # my "empty string means current dir" convention, so we have to +        # circumvent them. +        if package_dir != "": +            if not os.path.exists(package_dir): +                raise DistutilsFileError( +                    "package directory '%s' does not exist" % package_dir +                ) +            if not os.path.isdir(package_dir): +                raise DistutilsFileError( +                    "supposed package directory '%s' exists, " +                    "but is not a directory" % package_dir +                ) + +        # Directories without __init__.py are namespace packages (PEP 420). +        if package: +            init_py = os.path.join(package_dir, "__init__.py") +            if os.path.isfile(init_py): +                return init_py + +        # Either not in a package at all (__init__.py not expected), or +        # __init__.py doesn't exist -- so don't return the filename. 
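get_package_dir() above resolves a package's source directory by walking the dotted name from its most specific entry in package_dir toward the root mapping. A rough standalone rendering of that lookup, with an invented mapping:

    import os

    def resolve_package_dir(package, package_dir):
        # Walk from the most specific dotted prefix toward the root, the
        # way get_package_dir() does, falling back to the '' entry.
        path = package.split('.') if package else []
        if not package_dir:
            return os.path.join(*path) if path else ''
        tail = []
        while path:
            try:
                pdir = package_dir['.'.join(path)]
            except KeyError:
                tail.insert(0, path[-1])
                del path[-1]
            else:
                tail.insert(0, pdir)
                return os.path.join(*tail)
        root = package_dir.get('')
        if root is not None:
            tail.insert(0, root)
        return os.path.join(*tail) if tail else ''

    mapping = {'': 'src', 'pkg.ext': 'native/ext'}    # hypothetical package_dir
    print(resolve_package_dir('pkg.sub', mapping))    # src/pkg/sub
    print(resolve_package_dir('pkg.ext', mapping))    # native/ext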
+        return None + +    def check_module(self, module, module_file): +        if not os.path.isfile(module_file): +            log.warn("file %s (for module %s) not found", module_file, module) +            return False +        else: +            return True + +    def find_package_modules(self, package, package_dir): +        self.check_package(package, package_dir) +        module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) +        modules = [] +        setup_script = os.path.abspath(self.distribution.script_name) + +        for f in module_files: +            abs_f = os.path.abspath(f) +            if abs_f != setup_script: +                module = os.path.splitext(os.path.basename(f))[0] +                modules.append((package, module, f)) +            else: +                self.debug_print("excluding %s" % setup_script) +        return modules + +    def find_modules(self): +        """Finds individually-specified Python modules, ie. those listed by +        module name in 'self.py_modules'.  Returns a list of tuples (package, +        module_base, filename): 'package' is a tuple of the path through +        package-space to the module; 'module_base' is the bare (no +        packages, no dots) module name, and 'filename' is the path to the +        ".py" file (relative to the distribution root) that implements the +        module. +        """ +        # Map package names to tuples of useful info about the package: +        #    (package_dir, checked) +        # package_dir - the directory where we'll find source files for +        #   this package +        # checked - true if we have checked that the package directory +        #   is valid (exists, contains __init__.py, ... ?) +        packages = {} + +        # List of (package, module, filename) tuples to return +        modules = [] + +        # We treat modules-in-packages almost the same as toplevel modules, +        # just the "package" for a toplevel is empty (either an empty +        # string or empty list, depending on context).  Differences: +        #   - don't check for __init__.py in directory for empty package +        for module in self.py_modules: +            path = module.split('.') +            package = '.'.join(path[0:-1]) +            module_base = path[-1] + +            try: +                (package_dir, checked) = packages[package] +            except KeyError: +                package_dir = self.get_package_dir(package) +                checked = 0 + +            if not checked: +                init_py = self.check_package(package, package_dir) +                packages[package] = (package_dir, 1) +                if init_py: +                    modules.append((package, "__init__", init_py)) + +            # XXX perhaps we should also check for just .pyc files +            # (so greedy closed-source bastards can distribute Python +            # modules too) +            module_file = os.path.join(package_dir, module_base + ".py") +            if not self.check_module(module, module_file): +                continue + +            modules.append((package, module_base, module_file)) + +        return modules + +    def find_all_modules(self): +        """Compute the list of all modules that will be built, whether +        they are specified one-module-at-a-time ('self.py_modules') or +        by whole packages ('self.packages').  
Return a list of tuples +        (package, module, module_file), just like 'find_modules()' and +        'find_package_modules()' do.""" +        modules = [] +        if self.py_modules: +            modules.extend(self.find_modules()) +        if self.packages: +            for package in self.packages: +                package_dir = self.get_package_dir(package) +                m = self.find_package_modules(package, package_dir) +                modules.extend(m) +        return modules + +    def get_source_files(self): +        return [module[-1] for module in self.find_all_modules()] + +    def get_module_outfile(self, build_dir, package, module): +        outfile_path = [build_dir] + list(package) + [module + ".py"] +        return os.path.join(*outfile_path) + +    def get_outputs(self, include_bytecode=1): +        modules = self.find_all_modules() +        outputs = [] +        for (package, module, module_file) in modules: +            package = package.split('.') +            filename = self.get_module_outfile(self.build_lib, package, module) +            outputs.append(filename) +            if include_bytecode: +                if self.compile: +                    outputs.append( +                        importlib.util.cache_from_source(filename, optimization='') +                    ) +                if self.optimize > 0: +                    outputs.append( +                        importlib.util.cache_from_source( +                            filename, optimization=self.optimize +                        ) +                    ) + +        outputs += [ +            os.path.join(build_dir, filename) +            for package, src_dir, build_dir, filenames in self.data_files +            for filename in filenames +        ] + +        return outputs + +    def build_module(self, module, module_file, package): +        if isinstance(package, str): +            package = package.split('.') +        elif not isinstance(package, (list, tuple)): +            raise TypeError( +                "'package' must be a string (dot-separated), list, or tuple" +            ) + +        # Now put the module source file into the "build" area -- this is +        # easy, we just copy it somewhere under self.build_lib (the build +        # directory for Python source). +        outfile = self.get_module_outfile(self.build_lib, package, module) +        dir = os.path.dirname(outfile) +        self.mkpath(dir) +        return self.copy_file(module_file, outfile, preserve_mode=0) + +    def build_modules(self): +        modules = self.find_modules() +        for (package, module, module_file) in modules: +            # Now "build" the module -- ie. copy the source file to +            # self.build_lib (the build directory for Python source). +            # (Actually, it gets copied to the directory for this package +            # under self.build_lib.) +            self.build_module(module, module_file, package) + +    def build_packages(self): +        for package in self.packages: +            # Get list of (package, module, module_file) tuples based on +            # scanning the package directory.  'package' is only included +            # in the tuple so that 'find_modules()' and +            # 'find_package_tuples()' have a consistent interface; it's +            # ignored here (apart from a sanity check).  Also, 'module' is +            # the *unqualified* module name (ie. 
no dots, no package -- we +            # already know its package!), and 'module_file' is the path to +            # the .py file, relative to the current directory +            # (ie. including 'package_dir'). +            package_dir = self.get_package_dir(package) +            modules = self.find_package_modules(package, package_dir) + +            # Now loop over the modules we found, "building" each one (just +            # copy it to self.build_lib). +            for (package_, module, module_file) in modules: +                assert package == package_ +                self.build_module(module, module_file, package) + +    def byte_compile(self, files): +        if sys.dont_write_bytecode: +            self.warn('byte-compiling is disabled, skipping.') +            return + +        from distutils.util import byte_compile + +        prefix = self.build_lib +        if prefix[-1] != os.sep: +            prefix = prefix + os.sep + +        # XXX this code is essentially the same as the 'byte_compile() +        # method of the "install_lib" command, except for the determination +        # of the 'prefix' string.  Hmmm. +        if self.compile: +            byte_compile( +                files, optimize=0, force=self.force, prefix=prefix, dry_run=self.dry_run +            ) +        if self.optimize > 0: +            byte_compile( +                files, +                optimize=self.optimize, +                force=self.force, +                prefix=prefix, +                dry_run=self.dry_run, +            ) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_scripts.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_scripts.py new file mode 100644 index 0000000..2cc5d1e --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/build_scripts.py @@ -0,0 +1,173 @@ +"""distutils.command.build_scripts + +Implements the Distutils 'build_scripts' command.""" + +import os +import re +from stat import ST_MODE +from distutils import sysconfig +from distutils.core import Command +from distutils.dep_util import newer +from distutils.util import convert_path +from distutils import log +import tokenize + +shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$') +""" +Pattern matching a Python interpreter indicated in first line of a script. +""" + +# for Setuptools compatibility +first_line_re = shebang_pattern + + +class build_scripts(Command): + +    description = "\"build\" scripts (copy and fixup #! line)" + +    user_options = [ +        ('build-dir=', 'd', "directory to \"build\" (copy) to"), +        ('force', 'f', "forcibly build everything (ignore file timestamps"), +        ('executable=', 'e', "specify final destination interpreter path"), +    ] + +    boolean_options = ['force'] + +    def initialize_options(self): +        self.build_dir = None +        self.scripts = None +        self.force = None +        self.executable = None + +    def finalize_options(self): +        self.set_undefined_options( +            'build', +            ('build_scripts', 'build_dir'), +            ('force', 'force'), +            ('executable', 'executable'), +        ) +        self.scripts = self.distribution.scripts + +    def get_source_files(self): +        return self.scripts + +    def run(self): +        if not self.scripts: +            return +        self.copy_scripts() + +    def copy_scripts(self): +        """ +        Copy each script listed in ``self.scripts``. 
+ +        If a script is marked as a Python script (first line matches +        'shebang_pattern', i.e. starts with ``#!`` and contains +        "python"), then adjust in the copy the first line to refer to +        the current Python interpreter. +        """ +        self.mkpath(self.build_dir) +        outfiles = [] +        updated_files = [] +        for script in self.scripts: +            self._copy_script(script, outfiles, updated_files) + +        self._change_modes(outfiles) + +        return outfiles, updated_files + +    def _copy_script(self, script, outfiles, updated_files):  # noqa: C901 +        shebang_match = None +        script = convert_path(script) +        outfile = os.path.join(self.build_dir, os.path.basename(script)) +        outfiles.append(outfile) + +        if not self.force and not newer(script, outfile): +            log.debug("not copying %s (up-to-date)", script) +            return + +        # Always open the file, but ignore failures in dry-run mode +        # in order to attempt to copy directly. +        try: +            f = tokenize.open(script) +        except OSError: +            if not self.dry_run: +                raise +            f = None +        else: +            first_line = f.readline() +            if not first_line: +                self.warn("%s is an empty file (skipping)" % script) +                return + +            shebang_match = shebang_pattern.match(first_line) + +        updated_files.append(outfile) +        if shebang_match: +            log.info("copying and adjusting %s -> %s", script, self.build_dir) +            if not self.dry_run: +                if not sysconfig.python_build: +                    executable = self.executable +                else: +                    executable = os.path.join( +                        sysconfig.get_config_var("BINDIR"), +                        "python%s%s" +                        % ( +                            sysconfig.get_config_var("VERSION"), +                            sysconfig.get_config_var("EXE"), +                        ), +                    ) +                post_interp = shebang_match.group(1) or '' +                shebang = "#!" + executable + post_interp + "\n" +                self._validate_shebang(shebang, f.encoding) +                with open(outfile, "w", encoding=f.encoding) as outf: +                    outf.write(shebang) +                    outf.writelines(f.readlines()) +            if f: +                f.close() +        else: +            if f: +                f.close() +            self.copy_file(script, outfile) + +    def _change_modes(self, outfiles): +        if os.name != 'posix': +            return + +        for file in outfiles: +            self._change_mode(file) + +    def _change_mode(self, file): +        if self.dry_run: +            log.info("changing mode of %s", file) +            return + +        oldmode = os.stat(file)[ST_MODE] & 0o7777 +        newmode = (oldmode | 0o555) & 0o7777 +        if newmode != oldmode: +            log.info("changing mode of %s from %o to %o", file, oldmode, newmode) +            os.chmod(file, newmode) + +    @staticmethod +    def _validate_shebang(shebang, encoding): +        # Python parser starts to read a script using UTF-8 until +        # it gets a #coding:xxx cookie. The shebang has to be the +        # first line of a file, the #coding:xxx cookie cannot be +        # written before. So the shebang has to be encodable to +        # UTF-8. 
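Reduced to its essentials, the adjustment _copy_script() makes is: match the first line against shebang_pattern, keep any interpreter flags, and re-anchor the line on the target interpreter. A self-contained illustration (not part of the vendored file; the sample script line is invented, and sys.executable merely stands in for the command's configured 'executable' option):

import re
import sys

shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')

first_line = "#!/usr/bin/env python3 -u\n"
match = shebang_pattern.match(first_line)
if match:
    post_interp = match.group(1) or ''   # trailing flags, here ' -u'
    shebang = "#!" + sys.executable + post_interp + "\n"
    shebang.encode('utf-8')              # the encodability check _validate_shebang performs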
+        try: +            shebang.encode('utf-8') +        except UnicodeEncodeError: +            raise ValueError( +                "The shebang ({!r}) is not encodable " "to utf-8".format(shebang) +            ) + +        # If the script is encoded to a custom encoding (use a +        # #coding:xxx cookie), the shebang has to be encodable to +        # the script encoding too. +        try: +            shebang.encode(encoding) +        except UnicodeEncodeError: +            raise ValueError( +                "The shebang ({!r}) is not encodable " +                "to the script encoding ({})".format(shebang, encoding) +            ) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/check.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/check.py new file mode 100644 index 0000000..539481c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/check.py @@ -0,0 +1,151 @@ +"""distutils.command.check + +Implements the Distutils 'check' command. +""" +import contextlib + +from distutils.core import Command +from distutils.errors import DistutilsSetupError + +with contextlib.suppress(ImportError): +    import docutils.utils +    import docutils.parsers.rst +    import docutils.frontend +    import docutils.nodes + +    class SilentReporter(docutils.utils.Reporter): +        def __init__( +            self, +            source, +            report_level, +            halt_level, +            stream=None, +            debug=0, +            encoding='ascii', +            error_handler='replace', +        ): +            self.messages = [] +            super().__init__( +                source, report_level, halt_level, stream, debug, encoding, error_handler +            ) + +        def system_message(self, level, message, *children, **kwargs): +            self.messages.append((level, message, children, kwargs)) +            return docutils.nodes.system_message( +                message, level=level, type=self.levels[level], *children, **kwargs +            ) + + +class check(Command): +    """This command checks the meta-data of the package.""" + +    description = "perform some checks on the package" +    user_options = [ +        ('metadata', 'm', 'Verify meta-data'), +        ( +            'restructuredtext', +            'r', +            ( +                'Checks if long string meta-data syntax ' +                'are reStructuredText-compliant' +            ), +        ), +        ('strict', 's', 'Will exit with an error if a check fails'), +    ] + +    boolean_options = ['metadata', 'restructuredtext', 'strict'] + +    def initialize_options(self): +        """Sets default values for options.""" +        self.restructuredtext = 0 +        self.metadata = 1 +        self.strict = 0 +        self._warnings = 0 + +    def finalize_options(self): +        pass + +    def warn(self, msg): +        """Counts the number of warnings that occurs.""" +        self._warnings += 1 +        return Command.warn(self, msg) + +    def run(self): +        """Runs the command.""" +        # perform the various tests +        if self.metadata: +            self.check_metadata() +        if self.restructuredtext: +            if 'docutils' in globals(): +                try: +                    self.check_restructuredtext() +                except TypeError as exc: +                    raise DistutilsSetupError(str(exc)) +            elif self.strict: +                raise DistutilsSetupError('The docutils package is 
needed.') + +        # let's raise an error in strict mode, if we have at least +        # one warning +        if self.strict and self._warnings > 0: +            raise DistutilsSetupError('Please correct your package.') + +    def check_metadata(self): +        """Ensures that all required elements of meta-data are supplied. + +        Required fields: +            name, version + +        Warns if any are missing. +        """ +        metadata = self.distribution.metadata + +        missing = [] +        for attr in 'name', 'version': +            if not getattr(metadata, attr, None): +                missing.append(attr) + +        if missing: +            self.warn("missing required meta-data: %s" % ', '.join(missing)) + +    def check_restructuredtext(self): +        """Checks if the long string fields are reST-compliant.""" +        data = self.distribution.get_long_description() +        for warning in self._check_rst_data(data): +            line = warning[-1].get('line') +            if line is None: +                warning = warning[1] +            else: +                warning = '{} (line {})'.format(warning[1], line) +            self.warn(warning) + +    def _check_rst_data(self, data): +        """Returns warnings when the provided data doesn't compile.""" +        # the include and csv_table directives need this to be a path +        source_path = self.distribution.script_name or 'setup.py' +        parser = docutils.parsers.rst.Parser() +        settings = docutils.frontend.OptionParser( +            components=(docutils.parsers.rst.Parser,) +        ).get_default_values() +        settings.tab_width = 4 +        settings.pep_references = None +        settings.rfc_references = None +        reporter = SilentReporter( +            source_path, +            settings.report_level, +            settings.halt_level, +            stream=settings.warning_stream, +            debug=settings.debug, +            encoding=settings.error_encoding, +            error_handler=settings.error_encoding_error_handler, +        ) + +        document = docutils.nodes.document(settings, reporter, source=source_path) +        document.note_source(source_path, -1) +        try: +            parser.parse(data, document) +        except AttributeError as e: +            reporter.messages.append( +                (-1, 'Could not finish the parsing: %s.' 
% e, '', {}) +            ) + +        return reporter.messages diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/clean.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/clean.py new file mode 100644 index 0000000..b731b60 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/clean.py @@ -0,0 +1,76 @@ +"""distutils.command.clean + +Implements the Distutils 'clean' command.""" + +# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18 + +import os +from distutils.core import Command +from distutils.dir_util import remove_tree +from distutils import log + + +class clean(Command): + +    description = "clean up temporary files from 'build' command" +    user_options = [ +        ('build-base=', 'b', "base build directory (default: 'build.build-base')"), +        ( +            'build-lib=', +            None, +            "build directory for all modules (default: 'build.build-lib')", +        ), +        ('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"), +        ( +            'build-scripts=', +            None, +            "build directory for scripts (default: 'build.build-scripts')", +        ), +        ('bdist-base=', None, "temporary directory for built distributions"), +        ('all', 'a', "remove all build output, not just temporary by-products"), +    ] + +    boolean_options = ['all'] + +    def initialize_options(self): +        self.build_base = None +        self.build_lib = None +        self.build_temp = None +        self.build_scripts = None +        self.bdist_base = None +        self.all = None + +    def finalize_options(self): +        self.set_undefined_options( +            'build', +            ('build_base', 'build_base'), +            ('build_lib', 'build_lib'), +            ('build_scripts', 'build_scripts'), +            ('build_temp', 'build_temp'), +        ) +        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base')) + +    def run(self): +        # remove the build/temp.<plat> directory (unless it's already +        # gone) +        if os.path.exists(self.build_temp): +            remove_tree(self.build_temp, dry_run=self.dry_run) +        else: +            log.debug("'%s' does not exist -- can't clean it", self.build_temp) + +        if self.all: +            # remove build directories +            for directory in (self.build_lib, self.bdist_base, self.build_scripts): +                if os.path.exists(directory): +                    remove_tree(directory, dry_run=self.dry_run) +                else: +                    log.warn("'%s' does not exist -- can't clean it", directory) + +        # just for the heck of it, try to remove the base build directory: +        # we might have emptied it right now, but if not we don't care +        if not self.dry_run: +            try: +                os.rmdir(self.build_base) +                log.info("removing '%s'", self.build_base) +            except OSError: +                pass diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/config.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/config.py new file mode 100644 index 0000000..4492c89 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/config.py @@ -0,0 +1,377 @@ +"""distutils.command.config + +Implements the Distutils 'config' command, a (mostly) empty command class +that exists mainly to be sub-classed by specific module distributions 
and +applications.  The idea is that while every "config" command is different, +at least they're all named the same, and users always see "config" in the +list of standard commands.  Also, this is a good place to put common +configure-like tasks: "try to compile this C code", or "figure out where +this header file lives". +""" + +import os +import re + +from distutils.core import Command +from distutils.errors import DistutilsExecError +from distutils.sysconfig import customize_compiler +from distutils import log + +LANG_EXT = {"c": ".c", "c++": ".cxx"} + + +class config(Command): + +    description = "prepare to build" + +    user_options = [ +        ('compiler=', None, "specify the compiler type"), +        ('cc=', None, "specify the compiler executable"), +        ('include-dirs=', 'I', "list of directories to search for header files"), +        ('define=', 'D', "C preprocessor macros to define"), +        ('undef=', 'U', "C preprocessor macros to undefine"), +        ('libraries=', 'l', "external C libraries to link with"), +        ('library-dirs=', 'L', "directories to search for external C libraries"), +        ('noisy', None, "show every action (compile, link, run, ...) taken"), +        ( +            'dump-source', +            None, +            "dump generated source files before attempting to compile them", +        ), +    ] + +    # The three standard command methods: since the "config" command +    # does nothing by default, these are empty. + +    def initialize_options(self): +        self.compiler = None +        self.cc = None +        self.include_dirs = None +        self.libraries = None +        self.library_dirs = None + +        # maximal output for now +        self.noisy = 1 +        self.dump_source = 1 + +        # list of temporary files generated along-the-way that we have +        # to clean at some point +        self.temp_files = [] + +    def finalize_options(self): +        if self.include_dirs is None: +            self.include_dirs = self.distribution.include_dirs or [] +        elif isinstance(self.include_dirs, str): +            self.include_dirs = self.include_dirs.split(os.pathsep) + +        if self.libraries is None: +            self.libraries = [] +        elif isinstance(self.libraries, str): +            self.libraries = [self.libraries] + +        if self.library_dirs is None: +            self.library_dirs = [] +        elif isinstance(self.library_dirs, str): +            self.library_dirs = self.library_dirs.split(os.pathsep) + +    def run(self): +        pass + +    # Utility methods for actual "config" commands.  The interfaces are +    # loosely based on Autoconf macros of similar names.  Sub-classes +    # may use these freely. + +    def _check_compiler(self): +        """Check that 'self.compiler' really is a CCompiler object; +        if not, make it one. +        """ +        # We do this late, and only on-demand, because this is an expensive +        # import. 
+        from distutils.ccompiler import CCompiler, new_compiler + +        if not isinstance(self.compiler, CCompiler): +            self.compiler = new_compiler( +                compiler=self.compiler, dry_run=self.dry_run, force=1 +            ) +            customize_compiler(self.compiler) +            if self.include_dirs: +                self.compiler.set_include_dirs(self.include_dirs) +            if self.libraries: +                self.compiler.set_libraries(self.libraries) +            if self.library_dirs: +                self.compiler.set_library_dirs(self.library_dirs) + +    def _gen_temp_sourcefile(self, body, headers, lang): +        filename = "_configtest" + LANG_EXT[lang] +        with open(filename, "w") as file: +            if headers: +                for header in headers: +                    file.write("#include <%s>\n" % header) +                file.write("\n") +            file.write(body) +            if body[-1] != "\n": +                file.write("\n") +        return filename + +    def _preprocess(self, body, headers, include_dirs, lang): +        src = self._gen_temp_sourcefile(body, headers, lang) +        out = "_configtest.i" +        self.temp_files.extend([src, out]) +        self.compiler.preprocess(src, out, include_dirs=include_dirs) +        return (src, out) + +    def _compile(self, body, headers, include_dirs, lang): +        src = self._gen_temp_sourcefile(body, headers, lang) +        if self.dump_source: +            dump_file(src, "compiling '%s':" % src) +        (obj,) = self.compiler.object_filenames([src]) +        self.temp_files.extend([src, obj]) +        self.compiler.compile([src], include_dirs=include_dirs) +        return (src, obj) + +    def _link(self, body, headers, include_dirs, libraries, library_dirs, lang): +        (src, obj) = self._compile(body, headers, include_dirs, lang) +        prog = os.path.splitext(os.path.basename(src))[0] +        self.compiler.link_executable( +            [obj], +            prog, +            libraries=libraries, +            library_dirs=library_dirs, +            target_lang=lang, +        ) + +        if self.compiler.exe_extension is not None: +            prog = prog + self.compiler.exe_extension +        self.temp_files.append(prog) + +        return (src, obj, prog) + +    def _clean(self, *filenames): +        if not filenames: +            filenames = self.temp_files +            self.temp_files = [] +        log.info("removing: %s", ' '.join(filenames)) +        for filename in filenames: +            try: +                os.remove(filename) +            except OSError: +                pass + +    # XXX these ignore the dry-run flag: what to do, what to do? even if +    # you want a dry-run build, you still need some sort of configuration +    # info.  My inclination is to make it up to the real config command to +    # consult 'dry_run', and assume a default (minimal) configuration if +    # true.  The problem with trying to do it here is that you'd have to +    # return either true or false from all the 'try' methods, neither of +    # which is correct. + +    # XXX need access to the header search path and maybe default macros. + +    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"): +        """Construct a source file from 'body' (a string containing lines +        of C/C++ code) and 'headers' (a list of header files to include) +        and run it through the preprocessor.  
Return true if the +        preprocessor succeeded, false if there were any errors. +        ('body' probably isn't of much use, but what the heck.) +        """ +        from distutils.ccompiler import CompileError + +        self._check_compiler() +        ok = True +        try: +            self._preprocess(body, headers, include_dirs, lang) +        except CompileError: +            ok = False + +        self._clean() +        return ok + +    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"): +        """Construct a source file (just like 'try_cpp()'), run it through +        the preprocessor, and return true if any line of the output matches +        'pattern'.  'pattern' should either be a compiled regex object or a +        string containing a regex.  If both 'body' and 'headers' are None, +        preprocesses an empty file -- which can be useful to determine the +        symbols the preprocessor and compiler set by default. +        """ +        self._check_compiler() +        src, out = self._preprocess(body, headers, include_dirs, lang) + +        if isinstance(pattern, str): +            pattern = re.compile(pattern) + +        with open(out) as file: +            match = False +            while True: +                line = file.readline() +                if line == '': +                    break +                if pattern.search(line): +                    match = True +                    break + +        self._clean() +        return match + +    def try_compile(self, body, headers=None, include_dirs=None, lang="c"): +        """Try to compile a source file built from 'body' and 'headers'. +        Return true on success, false otherwise. +        """ +        from distutils.ccompiler import CompileError + +        self._check_compiler() +        try: +            self._compile(body, headers, include_dirs, lang) +            ok = True +        except CompileError: +            ok = False + +        log.info(ok and "success!" or "failure.") +        self._clean() +        return ok + +    def try_link( +        self, +        body, +        headers=None, +        include_dirs=None, +        libraries=None, +        library_dirs=None, +        lang="c", +    ): +        """Try to compile and link a source file, built from 'body' and +        'headers', to executable form.  Return true on success, false +        otherwise. +        """ +        from distutils.ccompiler import CompileError, LinkError + +        self._check_compiler() +        try: +            self._link(body, headers, include_dirs, libraries, library_dirs, lang) +            ok = True +        except (CompileError, LinkError): +            ok = False + +        log.info(ok and "success!" or "failure.") +        self._clean() +        return ok + +    def try_run( +        self, +        body, +        headers=None, +        include_dirs=None, +        libraries=None, +        library_dirs=None, +        lang="c", +    ): +        """Try to compile, link to an executable, and run a program +        built from 'body' and 'headers'.  Return true on success, false +        otherwise. 
+        """ +        from distutils.ccompiler import CompileError, LinkError + +        self._check_compiler() +        try: +            src, obj, exe = self._link( +                body, headers, include_dirs, libraries, library_dirs, lang +            ) +            self.spawn([exe]) +            ok = True +        except (CompileError, LinkError, DistutilsExecError): +            ok = False + +        log.info(ok and "success!" or "failure.") +        self._clean() +        return ok + +    # -- High-level methods -------------------------------------------- +    # (these are the ones that are actually likely to be useful +    # when implementing a real-world config command!) + +    def check_func( +        self, +        func, +        headers=None, +        include_dirs=None, +        libraries=None, +        library_dirs=None, +        decl=0, +        call=0, +    ): +        """Determine if function 'func' is available by constructing a +        source file that refers to 'func', and compiles and links it. +        If everything succeeds, returns true; otherwise returns false. + +        The constructed source file starts out by including the header +        files listed in 'headers'.  If 'decl' is true, it then declares +        'func' (as "int func()"); you probably shouldn't supply 'headers' +        and set 'decl' true in the same call, or you might get errors about +        a conflicting declarations for 'func'.  Finally, the constructed +        'main()' function either references 'func' or (if 'call' is true) +        calls it.  'libraries' and 'library_dirs' are used when +        linking. +        """ +        self._check_compiler() +        body = [] +        if decl: +            body.append("int %s ();" % func) +        body.append("int main () {") +        if call: +            body.append("  %s();" % func) +        else: +            body.append("  %s;" % func) +        body.append("}") +        body = "\n".join(body) + "\n" + +        return self.try_link(body, headers, include_dirs, libraries, library_dirs) + +    def check_lib( +        self, +        library, +        library_dirs=None, +        headers=None, +        include_dirs=None, +        other_libraries=[], +    ): +        """Determine if 'library' is available to be linked against, +        without actually checking that any particular symbols are provided +        by it.  'headers' will be used in constructing the source file to +        be compiled, but the only effect of this is to check if all the +        header files listed are available.  Any libraries listed in +        'other_libraries' will be included in the link, in case 'library' +        has symbols that depend on other libraries. +        """ +        self._check_compiler() +        return self.try_link( +            "int main (void) { }", +            headers, +            include_dirs, +            [library] + other_libraries, +            library_dirs, +        ) + +    def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"): +        """Determine if the system header file named by 'header_file' +        exists and can be found by the preprocessor; return true if so, +        false otherwise. +        """ +        return self.try_cpp( +            body="/* No body */", headers=[header], include_dirs=include_dirs +        ) + + +def dump_file(filename, head=None): +    """Dumps a file content into log.info. + +    If head is not None, will be dumped before the file content. 
+    """ +    if head is None: +        log.info('%s', filename) +    else: +        log.info(head) +    file = open(filename) +    try: +        log.info(file.read()) +    finally: +        file.close() diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install.py new file mode 100644 index 0000000..a38cddc --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install.py @@ -0,0 +1,814 @@ +"""distutils.command.install + +Implements the Distutils 'install' command.""" + +import sys +import os +import contextlib +import sysconfig +import itertools + +from distutils import log +from distutils.core import Command +from distutils.debug import DEBUG +from distutils.sysconfig import get_config_vars +from distutils.file_util import write_file +from distutils.util import convert_path, subst_vars, change_root +from distutils.util import get_platform +from distutils.errors import DistutilsOptionError, DistutilsPlatformError +from . import _framework_compat as fw +from .. import _collections + +from site import USER_BASE +from site import USER_SITE + +HAS_USER_SITE = True + +WINDOWS_SCHEME = { +    'purelib': '{base}/Lib/site-packages', +    'platlib': '{base}/Lib/site-packages', +    'headers': '{base}/Include/{dist_name}', +    'scripts': '{base}/Scripts', +    'data': '{base}', +} + +INSTALL_SCHEMES = { +    'posix_prefix': { +        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages', +        'platlib': '{platbase}/{platlibdir}/{implementation_lower}' +        '{py_version_short}/site-packages', +        'headers': '{base}/include/{implementation_lower}' +        '{py_version_short}{abiflags}/{dist_name}', +        'scripts': '{base}/bin', +        'data': '{base}', +    }, +    'posix_home': { +        'purelib': '{base}/lib/{implementation_lower}', +        'platlib': '{base}/{platlibdir}/{implementation_lower}', +        'headers': '{base}/include/{implementation_lower}/{dist_name}', +        'scripts': '{base}/bin', +        'data': '{base}', +    }, +    'nt': WINDOWS_SCHEME, +    'pypy': { +        'purelib': '{base}/site-packages', +        'platlib': '{base}/site-packages', +        'headers': '{base}/include/{dist_name}', +        'scripts': '{base}/bin', +        'data': '{base}', +    }, +    'pypy_nt': { +        'purelib': '{base}/site-packages', +        'platlib': '{base}/site-packages', +        'headers': '{base}/include/{dist_name}', +        'scripts': '{base}/Scripts', +        'data': '{base}', +    }, +} + +# user site schemes +if HAS_USER_SITE: +    INSTALL_SCHEMES['nt_user'] = { +        'purelib': '{usersite}', +        'platlib': '{usersite}', +        'headers': '{userbase}/{implementation}{py_version_nodot_plat}' +        '/Include/{dist_name}', +        'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts', +        'data': '{userbase}', +    } + +    INSTALL_SCHEMES['posix_user'] = { +        'purelib': '{usersite}', +        'platlib': '{usersite}', +        'headers': '{userbase}/include/{implementation_lower}' +        '{py_version_short}{abiflags}/{dist_name}', +        'scripts': '{userbase}/bin', +        'data': '{userbase}', +    } + + +INSTALL_SCHEMES.update(fw.schemes) + + +# The keys to an installation scheme; if any new types of files are to be +# installed, be sure to add an entry to every installation scheme above, +# and to SCHEME_KEYS here. 
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data') + + +def _load_sysconfig_schemes(): +    with contextlib.suppress(AttributeError): +        return { +            scheme: sysconfig.get_paths(scheme, expand=False) +            for scheme in sysconfig.get_scheme_names() +        } + + +def _load_schemes(): +    """ +    Extend default schemes with schemes from sysconfig. +    """ + +    sysconfig_schemes = _load_sysconfig_schemes() or {} + +    return { +        scheme: { +            **INSTALL_SCHEMES.get(scheme, {}), +            **sysconfig_schemes.get(scheme, {}), +        } +        for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes)) +    } + + +def _get_implementation(): +    if hasattr(sys, 'pypy_version_info'): +        return 'PyPy' +    else: +        return 'Python' + + +def _select_scheme(ob, name): +    scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name))) +    vars(ob).update(_remove_set(ob, _scheme_attrs(scheme))) + + +def _remove_set(ob, attrs): +    """ +    Include only attrs that are None in ob. +    """ +    return {key: value for key, value in attrs.items() if getattr(ob, key) is None} + + +def _resolve_scheme(name): +    os_name, sep, key = name.partition('_') +    try: +        resolved = sysconfig.get_preferred_scheme(key) +    except Exception: +        resolved = fw.scheme(_pypy_hack(name)) +    return resolved + + +def _load_scheme(name): +    return _load_schemes()[name] + + +def _inject_headers(name, scheme): +    """ +    Given a scheme name and the resolved scheme, +    if the scheme does not include headers, resolve +    the fallback scheme for the name and use headers +    from it. pypa/distutils#88 +    """ +    # Bypass the preferred scheme, which may not +    # have defined headers. 
+    fallback = _load_scheme(_pypy_hack(name)) +    scheme.setdefault('headers', fallback['headers']) +    return scheme + + +def _scheme_attrs(scheme): +    """Resolve install directories by applying the install schemes.""" +    return {f'install_{key}': scheme[key] for key in SCHEME_KEYS} + + +def _pypy_hack(name): +    PY37 = sys.version_info < (3, 8) +    old_pypy = hasattr(sys, 'pypy_version_info') and PY37 +    prefix = not name.endswith(('_user', '_home')) +    pypy_name = 'pypy' + '_nt' * (os.name == 'nt') +    return pypy_name if old_pypy and prefix else name + + +class install(Command): + +    description = "install everything from build directory" + +    user_options = [ +        # Select installation scheme and set base director(y|ies) +        ('prefix=', None, "installation prefix"), +        ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"), +        ('home=', None, "(Unix only) home directory to install under"), +        # Or, just set the base director(y|ies) +        ( +            'install-base=', +            None, +            "base installation directory (instead of --prefix or --home)", +        ), +        ( +            'install-platbase=', +            None, +            "base installation directory for platform-specific files " +            + "(instead of --exec-prefix or --home)", +        ), +        ('root=', None, "install everything relative to this alternate root directory"), +        # Or, explicitly set the installation scheme +        ( +            'install-purelib=', +            None, +            "installation directory for pure Python module distributions", +        ), +        ( +            'install-platlib=', +            None, +            "installation directory for non-pure module distributions", +        ), +        ( +            'install-lib=', +            None, +            "installation directory for all module distributions " +            + "(overrides --install-purelib and --install-platlib)", +        ), +        ('install-headers=', None, "installation directory for C/C++ headers"), +        ('install-scripts=', None, "installation directory for Python scripts"), +        ('install-data=', None, "installation directory for data files"), +        # Byte-compilation options -- see install_lib.py for details, as +        # these are duplicated from there (but only install_lib does +        # anything with them). +        ('compile', 'c', "compile .py to .pyc [default]"), +        ('no-compile', None, "don't compile .py files"), +        ( +            'optimize=', +            'O', +            "also compile with optimization: -O1 for \"python -O\", " +            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", +        ), +        # Miscellaneous control options +        ('force', 'f', "force installation (overwrite any existing files)"), +        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"), +        # Where to install documentation (eventually!) 
+        # ('doc-format=', None, "format of documentation to generate"), +        # ('install-man=', None, "directory for Unix man pages"), +        # ('install-html=', None, "directory for HTML documentation"), +        # ('install-info=', None, "directory for GNU info files"), +        ('record=', None, "filename in which to record list of installed files"), +    ] + +    boolean_options = ['compile', 'force', 'skip-build'] + +    if HAS_USER_SITE: +        user_options.append( +            ('user', None, "install in user site-package '%s'" % USER_SITE) +        ) +        boolean_options.append('user') + +    negative_opt = {'no-compile': 'compile'} + +    def initialize_options(self): +        """Initializes options.""" +        # High-level options: these select both an installation base +        # and scheme. +        self.prefix = None +        self.exec_prefix = None +        self.home = None +        self.user = 0 + +        # These select only the installation base; it's up to the user to +        # specify the installation scheme (currently, that means supplying +        # the --install-{platlib,purelib,scripts,data} options). +        self.install_base = None +        self.install_platbase = None +        self.root = None + +        # These options are the actual installation directories; if not +        # supplied by the user, they are filled in using the installation +        # scheme implied by prefix/exec-prefix/home and the contents of +        # that installation scheme. +        self.install_purelib = None  # for pure module distributions +        self.install_platlib = None  # non-pure (dists w/ extensions) +        self.install_headers = None  # for C/C++ headers +        self.install_lib = None  # set to either purelib or platlib +        self.install_scripts = None +        self.install_data = None +        self.install_userbase = USER_BASE +        self.install_usersite = USER_SITE + +        self.compile = None +        self.optimize = None + +        # Deprecated +        # These two are for putting non-packagized distributions into their +        # own directory and creating a .pth file if it makes sense. +        # 'extra_path' comes from the setup file; 'install_path_file' can +        # be turned off if it makes no sense to install a .pth file.  (But +        # better to install it uselessly than to guess wrong and not +        # install it when it's necessary and would be used!)  Currently, +        # 'install_path_file' is always true unless some outsider meddles +        # with it. +        self.extra_path = None +        self.install_path_file = 1 + +        # 'force' forces installation, even if target files are not +        # out-of-date.  'skip_build' skips running the "build" command, +        # handy if you know it's not necessary.  'warn_dir' (which is *not* +        # a user option, it's just there so the bdist_* commands can turn +        # it off) determines whether we warn about installing to a +        # directory not in sys.path. +        self.force = 0 +        self.skip_build = 0 +        self.warn_dir = 1 + +        # These are only here as a conduit from the 'build' command to the +        # 'install_*' commands that do the real work.  ('build_base' isn't +        # actually used anywhere, but it might be useful in future.)  They +        # are not user options, because if the user told the install +        # command where the build directory is, that wouldn't affect the +        # build command. 
+        self.build_base = None +        self.build_lib = None + +        # Not defined yet because we don't know anything about +        # documentation yet. +        # self.install_man = None +        # self.install_html = None +        # self.install_info = None + +        self.record = None + +    # -- Option finalizing methods ------------------------------------- +    # (This is rather more involved than for most commands, +    # because this is where the policy for installing third- +    # party Python modules on various platforms given a wide +    # array of user input is decided.  Yes, it's quite complex!) + +    def finalize_options(self):  # noqa: C901 +        """Finalizes options.""" +        # This method (and its helpers, like 'finalize_unix()', +        # 'finalize_other()', and 'select_scheme()') is where the default +        # installation directories for modules, extension modules, and +        # anything else we care to install from a Python module +        # distribution.  Thus, this code makes a pretty important policy +        # statement about how third-party stuff is added to a Python +        # installation!  Note that the actual work of installation is done +        # by the relatively simple 'install_*' commands; they just take +        # their orders from the installation directory options determined +        # here. + +        # Check for errors/inconsistencies in the options; first, stuff +        # that's wrong on any platform. + +        if (self.prefix or self.exec_prefix or self.home) and ( +            self.install_base or self.install_platbase +        ): +            raise DistutilsOptionError( +                "must supply either prefix/exec-prefix/home or " +                + "install-base/install-platbase -- not both" +            ) + +        if self.home and (self.prefix or self.exec_prefix): +            raise DistutilsOptionError( +                "must supply either home or prefix/exec-prefix -- not both" +            ) + +        if self.user and ( +            self.prefix +            or self.exec_prefix +            or self.home +            or self.install_base +            or self.install_platbase +        ): +            raise DistutilsOptionError( +                "can't combine user with prefix, " +                "exec_prefix/home, or install_(plat)base" +            ) + +        # Next, stuff that's wrong (or dubious) only on certain platforms. +        if os.name != "posix": +            if self.exec_prefix: +                self.warn("exec-prefix option ignored on this platform") +                self.exec_prefix = None + +        # Now the interesting logic -- so interesting that we farm it out +        # to other methods.  The goal of these methods is to set the final +        # values for the install_{lib,scripts,data,...}  options, using as +        # input a heady brew of prefix, exec_prefix, home, install_base, +        # install_platbase, user-supplied versions of +        # install_{purelib,platlib,lib,scripts,data,...}, and the +        # install schemes.  Phew! + +        self.dump_dirs("pre-finalize_{unix,other}") + +        if os.name == 'posix': +            self.finalize_unix() +        else: +            self.finalize_other() + +        self.dump_dirs("post-finalize_{unix,other}()") + +        # Expand configuration variables, tilde, etc. 
in self.install_base +        # and self.install_platbase -- that way, we can use $base or +        # $platbase in the other installation directories and not worry +        # about needing recursive variable expansion (shudder). + +        py_version = sys.version.split()[0] +        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix') +        try: +            abiflags = sys.abiflags +        except AttributeError: +            # sys.abiflags may not be defined on all platforms. +            abiflags = '' +        local_vars = { +            'dist_name': self.distribution.get_name(), +            'dist_version': self.distribution.get_version(), +            'dist_fullname': self.distribution.get_fullname(), +            'py_version': py_version, +            'py_version_short': '%d.%d' % sys.version_info[:2], +            'py_version_nodot': '%d%d' % sys.version_info[:2], +            'sys_prefix': prefix, +            'prefix': prefix, +            'sys_exec_prefix': exec_prefix, +            'exec_prefix': exec_prefix, +            'abiflags': abiflags, +            'platlibdir': getattr(sys, 'platlibdir', 'lib'), +            'implementation_lower': _get_implementation().lower(), +            'implementation': _get_implementation(), +        } + +        # vars for compatibility on older Pythons +        compat_vars = dict( +            # Python 3.9 and earlier +            py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''), +        ) + +        if HAS_USER_SITE: +            local_vars['userbase'] = self.install_userbase +            local_vars['usersite'] = self.install_usersite + +        self.config_vars = _collections.DictStack( +            [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars] +        ) + +        self.expand_basedirs() + +        self.dump_dirs("post-expand_basedirs()") + +        # Now define config vars for the base directories so we can expand +        # everything else. +        local_vars['base'] = self.install_base +        local_vars['platbase'] = self.install_platbase + +        if DEBUG: +            from pprint import pprint + +            print("config vars:") +            pprint(dict(self.config_vars)) + +        # Expand "~" and configuration variables in the installation +        # directories. +        self.expand_dirs() + +        self.dump_dirs("post-expand_dirs()") + +        # Create directories in the home dir: +        if self.user: +            self.create_home_path() + +        # Pick the actual directory to install all modules to: either +        # install_purelib or install_platlib, depending on whether this +        # module distribution is pure or not.  Of course, if the user +        # already specified install_lib, use their selection. +        if self.install_lib is None: +            if self.distribution.has_ext_modules():  # has extensions: non-pure +                self.install_lib = self.install_platlib +            else: +                self.install_lib = self.install_purelib + +        # Convert directories from Unix /-separated syntax to the local +        # convention. 
+        self.convert_paths( +            'lib', +            'purelib', +            'platlib', +            'scripts', +            'data', +            'headers', +            'userbase', +            'usersite', +        ) + +        # Deprecated +        # Well, we're not actually fully completely finalized yet: we still +        # have to deal with 'extra_path', which is the hack for allowing +        # non-packagized module distributions (hello, Numerical Python!) to +        # get their own directories. +        self.handle_extra_path() +        self.install_libbase = self.install_lib  # needed for .pth file +        self.install_lib = os.path.join(self.install_lib, self.extra_dirs) + +        # If a new root directory was supplied, make all the installation +        # dirs relative to it. +        if self.root is not None: +            self.change_roots( +                'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers' +            ) + +        self.dump_dirs("after prepending root") + +        # Find out the build directories, ie. where to install from. +        self.set_undefined_options( +            'build', ('build_base', 'build_base'), ('build_lib', 'build_lib') +        ) + +        # Punt on doc directories for now -- after all, we're punting on +        # documentation completely! + +    def dump_dirs(self, msg): +        """Dumps the list of user options.""" +        if not DEBUG: +            return +        from distutils.fancy_getopt import longopt_xlate + +        log.debug(msg + ":") +        for opt in self.user_options: +            opt_name = opt[0] +            if opt_name[-1] == "=": +                opt_name = opt_name[0:-1] +            if opt_name in self.negative_opt: +                opt_name = self.negative_opt[opt_name] +                opt_name = opt_name.translate(longopt_xlate) +                val = not getattr(self, opt_name) +            else: +                opt_name = opt_name.translate(longopt_xlate) +                val = getattr(self, opt_name) +            log.debug("  %s: %s", opt_name, val) + +    def finalize_unix(self): +        """Finalizes options for posix platforms.""" +        if self.install_base is not None or self.install_platbase is not None: +            incomplete_scheme = ( +                ( +                    self.install_lib is None +                    and self.install_purelib is None +                    and self.install_platlib is None +                ) +                or self.install_headers is None +                or self.install_scripts is None +                or self.install_data is None +            ) +            if incomplete_scheme: +                raise DistutilsOptionError( +                    "install-base or install-platbase supplied, but " +                    "installation scheme is incomplete" +                ) +            return + +        if self.user: +            if self.install_userbase is None: +                raise DistutilsPlatformError("User base directory is not specified") +            self.install_base = self.install_platbase = self.install_userbase +            self.select_scheme("posix_user") +        elif self.home is not None: +            self.install_base = self.install_platbase = self.home +            self.select_scheme("posix_home") +        else: +            if self.prefix is None: +                if self.exec_prefix is not None: +                    raise DistutilsOptionError( +                        "must not supply exec-prefix without prefix" +    
                ) + +                # Allow Fedora to add components to the prefix +                _prefix_addition = getattr(sysconfig, '_prefix_addition', "") + +                self.prefix = os.path.normpath(sys.prefix) + _prefix_addition +                self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition + +            else: +                if self.exec_prefix is None: +                    self.exec_prefix = self.prefix + +            self.install_base = self.prefix +            self.install_platbase = self.exec_prefix +            self.select_scheme("posix_prefix") + +    def finalize_other(self): +        """Finalizes options for non-posix platforms""" +        if self.user: +            if self.install_userbase is None: +                raise DistutilsPlatformError("User base directory is not specified") +            self.install_base = self.install_platbase = self.install_userbase +            self.select_scheme(os.name + "_user") +        elif self.home is not None: +            self.install_base = self.install_platbase = self.home +            self.select_scheme("posix_home") +        else: +            if self.prefix is None: +                self.prefix = os.path.normpath(sys.prefix) + +            self.install_base = self.install_platbase = self.prefix +            try: +                self.select_scheme(os.name) +            except KeyError: +                raise DistutilsPlatformError( +                    "I don't know how to install stuff on '%s'" % os.name +                ) + +    def select_scheme(self, name): +        _select_scheme(self, name) + +    def _expand_attrs(self, attrs): +        for attr in attrs: +            val = getattr(self, attr) +            if val is not None: +                if os.name == 'posix' or os.name == 'nt': +                    val = os.path.expanduser(val) +                val = subst_vars(val, self.config_vars) +                setattr(self, attr, val) + +    def expand_basedirs(self): +        """Calls `os.path.expanduser` on install_base, install_platbase and +        root.""" +        self._expand_attrs(['install_base', 'install_platbase', 'root']) + +    def expand_dirs(self): +        """Calls `os.path.expanduser` on install dirs.""" +        self._expand_attrs( +            [ +                'install_purelib', +                'install_platlib', +                'install_lib', +                'install_headers', +                'install_scripts', +                'install_data', +            ] +        ) + +    def convert_paths(self, *names): +        """Call `convert_path` over `names`.""" +        for name in names: +            attr = "install_" + name +            setattr(self, attr, convert_path(getattr(self, attr))) + +    def handle_extra_path(self): +        """Set `path_file` and `extra_dirs` using `extra_path`.""" +        if self.extra_path is None: +            self.extra_path = self.distribution.extra_path + +        if self.extra_path is not None: +            log.warn( +                "Distribution option extra_path is deprecated. " +                "See issue27919 for details." 
+            ) +            if isinstance(self.extra_path, str): +                self.extra_path = self.extra_path.split(',') + +            if len(self.extra_path) == 1: +                path_file = extra_dirs = self.extra_path[0] +            elif len(self.extra_path) == 2: +                path_file, extra_dirs = self.extra_path +            else: +                raise DistutilsOptionError( +                    "'extra_path' option must be a list, tuple, or " +                    "comma-separated string with 1 or 2 elements" +                ) + +            # convert to local form in case Unix notation used (as it +            # should be in setup scripts) +            extra_dirs = convert_path(extra_dirs) +        else: +            path_file = None +            extra_dirs = '' + +        # XXX should we warn if path_file and not extra_dirs? (in which +        # case the path file would be harmless but pointless) +        self.path_file = path_file +        self.extra_dirs = extra_dirs + +    def change_roots(self, *names): +        """Change the install directories pointed by name using root.""" +        for name in names: +            attr = "install_" + name +            setattr(self, attr, change_root(self.root, getattr(self, attr))) + +    def create_home_path(self): +        """Create directories under ~.""" +        if not self.user: +            return +        home = convert_path(os.path.expanduser("~")) +        for name, path in self.config_vars.items(): +            if str(path).startswith(home) and not os.path.isdir(path): +                self.debug_print("os.makedirs('%s', 0o700)" % path) +                os.makedirs(path, 0o700) + +    # -- Command execution methods ------------------------------------- + +    def run(self): +        """Runs the command.""" +        # Obviously have to build before we can install +        if not self.skip_build: +            self.run_command('build') +            # If we built for any other platform, we can't install. +            build_plat = self.distribution.get_command_obj('build').plat_name +            # check warn_dir - it is a clue that the 'install' is happening +            # internally, and not to sys.path, so we don't check the platform +            # matches what we are running. +            if self.warn_dir and build_plat != get_platform(): +                raise DistutilsPlatformError("Can't install when " "cross-compiling") + +        # Run all sub-commands (at least those that need to be run) +        for cmd_name in self.get_sub_commands(): +            self.run_command(cmd_name) + +        if self.path_file: +            self.create_path_file() + +        # write list of installed files, if requested. 
+        if self.record: +            outputs = self.get_outputs() +            if self.root:  # strip any package prefix +                root_len = len(self.root) +                for counter in range(len(outputs)): +                    outputs[counter] = outputs[counter][root_len:] +            self.execute( +                write_file, +                (self.record, outputs), +                "writing list of installed files to '%s'" % self.record, +            ) + +        sys_path = map(os.path.normpath, sys.path) +        sys_path = map(os.path.normcase, sys_path) +        install_lib = os.path.normcase(os.path.normpath(self.install_lib)) +        if ( +            self.warn_dir +            and not (self.path_file and self.install_path_file) +            and install_lib not in sys_path +        ): +            log.debug( +                ( +                    "modules installed to '%s', which is not in " +                    "Python's module search path (sys.path) -- " +                    "you'll have to change the search path yourself" +                ), +                self.install_lib, +            ) + +    def create_path_file(self): +        """Creates the .pth file""" +        filename = os.path.join(self.install_libbase, self.path_file + ".pth") +        if self.install_path_file: +            self.execute( +                write_file, (filename, [self.extra_dirs]), "creating %s" % filename +            ) +        else: +            self.warn("path file '%s' not created" % filename) + +    # -- Reporting methods --------------------------------------------- + +    def get_outputs(self): +        """Assembles the outputs of all the sub-commands.""" +        outputs = [] +        for cmd_name in self.get_sub_commands(): +            cmd = self.get_finalized_command(cmd_name) +            # Add the contents of cmd.get_outputs(), ensuring +            # that outputs doesn't contain duplicate entries +            for filename in cmd.get_outputs(): +                if filename not in outputs: +                    outputs.append(filename) + +        if self.path_file and self.install_path_file: +            outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth")) + +        return outputs + +    def get_inputs(self): +        """Returns the inputs of all the sub-commands""" +        # XXX gee, this looks familiar ;-( +        inputs = [] +        for cmd_name in self.get_sub_commands(): +            cmd = self.get_finalized_command(cmd_name) +            inputs.extend(cmd.get_inputs()) + +        return inputs + +    # -- Predicates for sub-command list ------------------------------- + +    def has_lib(self): +        """Returns true if the current distribution has any Python +        modules to install.""" +        return ( +            self.distribution.has_pure_modules() or self.distribution.has_ext_modules() +        ) + +    def has_headers(self): +        """Returns true if the current distribution has any headers to +        install.""" +        return self.distribution.has_headers() + +    def has_scripts(self): +        """Returns true if the current distribution has any scripts to. +        install.""" +        return self.distribution.has_scripts() + +    def has_data(self): +        """Returns true if the current distribution has any data to. +        install.""" +        return self.distribution.has_data_files() + +    # 'sub_commands': a list of commands this command might have to run to +    # get its work done.  See cmd.py for more info. 
+    sub_commands = [ +        ('install_lib', has_lib), +        ('install_headers', has_headers), +        ('install_scripts', has_scripts), +        ('install_data', has_data), +        ('install_egg_info', lambda self: True), +    ] diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_data.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_data.py new file mode 100644 index 0000000..23d91ad --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_data.py @@ -0,0 +1,84 @@ +"""distutils.command.install_data + +Implements the Distutils 'install_data' command, for installing +platform-independent data files.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils.util import change_root, convert_path + + +class install_data(Command): + +    description = "install data files" + +    user_options = [ +        ( +            'install-dir=', +            'd', +            "base directory for installing data files " +            "(default: installation base dir)", +        ), +        ('root=', None, "install everything relative to this alternate root directory"), +        ('force', 'f', "force installation (overwrite existing files)"), +    ] + +    boolean_options = ['force'] + +    def initialize_options(self): +        self.install_dir = None +        self.outfiles = [] +        self.root = None +        self.force = 0 +        self.data_files = self.distribution.data_files +        self.warn_dir = 1 + +    def finalize_options(self): +        self.set_undefined_options( +            'install', +            ('install_data', 'install_dir'), +            ('root', 'root'), +            ('force', 'force'), +        ) + +    def run(self): +        self.mkpath(self.install_dir) +        for f in self.data_files: +            if isinstance(f, str): +                # it's a simple file, so copy it +                f = convert_path(f) +                if self.warn_dir: +                    self.warn( +                        "setup script did not provide a directory for " +                        "'%s' -- installing right in '%s'" % (f, self.install_dir) +                    ) +                (out, _) = self.copy_file(f, self.install_dir) +                self.outfiles.append(out) +            else: +                # it's a tuple with path to install to and a list of files +                dir = convert_path(f[0]) +                if not os.path.isabs(dir): +                    dir = os.path.join(self.install_dir, dir) +                elif self.root: +                    dir = change_root(self.root, dir) +                self.mkpath(dir) + +                if f[1] == []: +                    # If there are no files listed, the user must be +                    # trying to create an empty directory, so add the +                    # directory to the list of output files. +                    self.outfiles.append(dir) +                else: +                    # Copy files, adding them to the list of output files. 
+                    for data in f[1]: +                        data = convert_path(data) +                        (out, _) = self.copy_file(data, dir) +                        self.outfiles.append(out) + +    def get_inputs(self): +        return self.data_files or [] + +    def get_outputs(self): +        return self.outfiles diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_egg_info.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_egg_info.py new file mode 100644 index 0000000..d5e68a6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_egg_info.py @@ -0,0 +1,91 @@ +""" +distutils.command.install_egg_info + +Implements the Distutils 'install_egg_info' command, for installing +a package's PKG-INFO metadata. +""" + +import os +import sys +import re + +from distutils.cmd import Command +from distutils import log, dir_util + + +class install_egg_info(Command): +    """Install an .egg-info file for the package""" + +    description = "Install package's PKG-INFO metadata as an .egg-info file" +    user_options = [ +        ('install-dir=', 'd', "directory to install to"), +    ] + +    def initialize_options(self): +        self.install_dir = None + +    @property +    def basename(self): +        """ +        Allow basename to be overridden by child class. +        Ref pypa/distutils#2. +        """ +        return "%s-%s-py%d.%d.egg-info" % ( +            to_filename(safe_name(self.distribution.get_name())), +            to_filename(safe_version(self.distribution.get_version())), +            *sys.version_info[:2], +        ) + +    def finalize_options(self): +        self.set_undefined_options('install_lib', ('install_dir', 'install_dir')) +        self.target = os.path.join(self.install_dir, self.basename) +        self.outputs = [self.target] + +    def run(self): +        target = self.target +        if os.path.isdir(target) and not os.path.islink(target): +            dir_util.remove_tree(target, dry_run=self.dry_run) +        elif os.path.exists(target): +            self.execute(os.unlink, (self.target,), "Removing " + target) +        elif not os.path.isdir(self.install_dir): +            self.execute( +                os.makedirs, (self.install_dir,), "Creating " + self.install_dir +            ) +        log.info("Writing %s", target) +        if not self.dry_run: +            with open(target, 'w', encoding='UTF-8') as f: +                self.distribution.metadata.write_pkg_file(f) + +    def get_outputs(self): +        return self.outputs + + +# The following routines are taken from setuptools' pkg_resources module and +# can be replaced by importing them from pkg_resources once it is included +# in the stdlib. + + +def safe_name(name): +    """Convert an arbitrary string to a standard distribution name + +    Any runs of non-alphanumeric/. characters are replaced with a single '-'. +    """ +    return re.sub('[^A-Za-z0-9.]+', '-', name) + + +def safe_version(version): +    """Convert an arbitrary string to a standard version string + +    Spaces become dots, and all other non-alphanumeric characters become +    dashes, with runs of multiple dashes condensed to a single dash. +    """ +    version = version.replace(' ', '.') +    return re.sub('[^A-Za-z0-9.]+', '-', version) + + +def to_filename(name): +    """Convert a project or version name to its filename-escaped form + +    Any '-' characters are currently replaced with '_'. 
+    """ +    return name.replace('-', '_') diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_headers.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_headers.py new file mode 100644 index 0000000..87046ab --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_headers.py @@ -0,0 +1,45 @@ +"""distutils.command.install_headers + +Implements the Distutils 'install_headers' command, to install C/C++ header +files to the Python include directory.""" + +from distutils.core import Command + + +# XXX force is never used +class install_headers(Command): + +    description = "install C/C++ header files" + +    user_options = [ +        ('install-dir=', 'd', "directory to install header files to"), +        ('force', 'f', "force installation (overwrite existing files)"), +    ] + +    boolean_options = ['force'] + +    def initialize_options(self): +        self.install_dir = None +        self.force = 0 +        self.outfiles = [] + +    def finalize_options(self): +        self.set_undefined_options( +            'install', ('install_headers', 'install_dir'), ('force', 'force') +        ) + +    def run(self): +        headers = self.distribution.headers +        if not headers: +            return + +        self.mkpath(self.install_dir) +        for header in headers: +            (out, _) = self.copy_file(header, self.install_dir) +            self.outfiles.append(out) + +    def get_inputs(self): +        return self.distribution.headers or [] + +    def get_outputs(self): +        return self.outfiles diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_lib.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_lib.py new file mode 100644 index 0000000..ad3089c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_lib.py @@ -0,0 +1,238 @@ +"""distutils.command.install_lib + +Implements the Distutils 'install_lib' command +(install all Python modules).""" + +import os +import importlib.util +import sys + +from distutils.core import Command +from distutils.errors import DistutilsOptionError + + +# Extension for Python source files. +PYTHON_SOURCE_EXTENSION = ".py" + + +class install_lib(Command): + +    description = "install all Python modules (extensions and pure Python)" + +    # The byte-compilation options are a tad confusing.  Here are the +    # possible scenarios: +    #   1) no compilation at all (--no-compile --no-optimize) +    #   2) compile .pyc only (--compile --no-optimize; default) +    #   3) compile .pyc and "opt-1" .pyc (--compile --optimize) +    #   4) compile "opt-1" .pyc only (--no-compile --optimize) +    #   5) compile .pyc and "opt-2" .pyc (--compile --optimize-more) +    #   6) compile "opt-2" .pyc only (--no-compile --optimize-more) +    # +    # The UI for this is two options, 'compile' and 'optimize'. +    # 'compile' is strictly boolean, and only decides whether to +    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and +    # decides both whether to generate .pyc files and what level of +    # optimization to use. 
+ +    user_options = [ +        ('install-dir=', 'd', "directory to install to"), +        ('build-dir=', 'b', "build directory (where to install from)"), +        ('force', 'f', "force installation (overwrite existing files)"), +        ('compile', 'c', "compile .py to .pyc [default]"), +        ('no-compile', None, "don't compile .py files"), +        ( +            'optimize=', +            'O', +            "also compile with optimization: -O1 for \"python -O\", " +            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]", +        ), +        ('skip-build', None, "skip the build steps"), +    ] + +    boolean_options = ['force', 'compile', 'skip-build'] +    negative_opt = {'no-compile': 'compile'} + +    def initialize_options(self): +        # let the 'install' command dictate our installation directory +        self.install_dir = None +        self.build_dir = None +        self.force = 0 +        self.compile = None +        self.optimize = None +        self.skip_build = None + +    def finalize_options(self): +        # Get all the information we need to install pure Python modules +        # from the umbrella 'install' command -- build (source) directory, +        # install (target) directory, and whether to compile .py files. +        self.set_undefined_options( +            'install', +            ('build_lib', 'build_dir'), +            ('install_lib', 'install_dir'), +            ('force', 'force'), +            ('compile', 'compile'), +            ('optimize', 'optimize'), +            ('skip_build', 'skip_build'), +        ) + +        if self.compile is None: +            self.compile = True +        if self.optimize is None: +            self.optimize = False + +        if not isinstance(self.optimize, int): +            try: +                self.optimize = int(self.optimize) +                if self.optimize not in (0, 1, 2): +                    raise AssertionError +            except (ValueError, AssertionError): +                raise DistutilsOptionError("optimize must be 0, 1, or 2") + +    def run(self): +        # Make sure we have built everything we need first +        self.build() + +        # Install everything: simply dump the entire contents of the build +        # directory to the installation directory (that's the beauty of +        # having a build directory!) 
+        outfiles = self.install() + +        # (Optionally) compile .py to .pyc +        if outfiles is not None and self.distribution.has_pure_modules(): +            self.byte_compile(outfiles) + +    # -- Top-level worker functions ------------------------------------ +    # (called from 'run()') + +    def build(self): +        if not self.skip_build: +            if self.distribution.has_pure_modules(): +                self.run_command('build_py') +            if self.distribution.has_ext_modules(): +                self.run_command('build_ext') + +    def install(self): +        if os.path.isdir(self.build_dir): +            outfiles = self.copy_tree(self.build_dir, self.install_dir) +        else: +            self.warn( +                "'%s' does not exist -- no Python modules to install" % self.build_dir +            ) +            return +        return outfiles + +    def byte_compile(self, files): +        if sys.dont_write_bytecode: +            self.warn('byte-compiling is disabled, skipping.') +            return + +        from distutils.util import byte_compile + +        # Get the "--root" directory supplied to the "install" command, +        # and use it as a prefix to strip off the purported filename +        # encoded in bytecode files.  This is far from complete, but it +        # should at least generate usable bytecode in RPM distributions. +        install_root = self.get_finalized_command('install').root + +        if self.compile: +            byte_compile( +                files, +                optimize=0, +                force=self.force, +                prefix=install_root, +                dry_run=self.dry_run, +            ) +        if self.optimize > 0: +            byte_compile( +                files, +                optimize=self.optimize, +                force=self.force, +                prefix=install_root, +                verbose=self.verbose, +                dry_run=self.dry_run, +            ) + +    # -- Utility methods ----------------------------------------------- + +    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir): +        if not has_any: +            return [] + +        build_cmd = self.get_finalized_command(build_cmd) +        build_files = build_cmd.get_outputs() +        build_dir = getattr(build_cmd, cmd_option) + +        prefix_len = len(build_dir) + len(os.sep) +        outputs = [] +        for file in build_files: +            outputs.append(os.path.join(output_dir, file[prefix_len:])) + +        return outputs + +    def _bytecode_filenames(self, py_filenames): +        bytecode_files = [] +        for py_file in py_filenames: +            # Since build_py handles package data installation, the +            # list of outputs can contain more than just .py files. +            # Make sure we only report bytecode for the .py files. 
+            ext = os.path.splitext(os.path.normcase(py_file))[1] +            if ext != PYTHON_SOURCE_EXTENSION: +                continue +            if self.compile: +                bytecode_files.append( +                    importlib.util.cache_from_source(py_file, optimization='') +                ) +            if self.optimize > 0: +                bytecode_files.append( +                    importlib.util.cache_from_source( +                        py_file, optimization=self.optimize +                    ) +                ) + +        return bytecode_files + +    # -- External interface -------------------------------------------- +    # (called by outsiders) + +    def get_outputs(self): +        """Return the list of files that would be installed if this command +        were actually run.  Not affected by the "dry-run" flag or whether +        modules have actually been built yet. +        """ +        pure_outputs = self._mutate_outputs( +            self.distribution.has_pure_modules(), +            'build_py', +            'build_lib', +            self.install_dir, +        ) +        if self.compile: +            bytecode_outputs = self._bytecode_filenames(pure_outputs) +        else: +            bytecode_outputs = [] + +        ext_outputs = self._mutate_outputs( +            self.distribution.has_ext_modules(), +            'build_ext', +            'build_lib', +            self.install_dir, +        ) + +        return pure_outputs + bytecode_outputs + ext_outputs + +    def get_inputs(self): +        """Get the list of files that are input to this command, ie. the +        files that get installed as they are named in the build tree. +        The files in this list correspond one-to-one to the output +        filenames returned by 'get_outputs()'. 
+        """ +        inputs = [] + +        if self.distribution.has_pure_modules(): +            build_py = self.get_finalized_command('build_py') +            inputs.extend(build_py.get_outputs()) + +        if self.distribution.has_ext_modules(): +            build_ext = self.get_finalized_command('build_ext') +            inputs.extend(build_ext.get_outputs()) + +        return inputs diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_scripts.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_scripts.py new file mode 100644 index 0000000..f09bd64 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/install_scripts.py @@ -0,0 +1,61 @@ +"""distutils.command.install_scripts + +Implements the Distutils 'install_scripts' command, for installing +Python scripts.""" + +# contributed by Bastian Kleineidam + +import os +from distutils.core import Command +from distutils import log +from stat import ST_MODE + + +class install_scripts(Command): + +    description = "install scripts (Python or otherwise)" + +    user_options = [ +        ('install-dir=', 'd', "directory to install scripts to"), +        ('build-dir=', 'b', "build directory (where to install from)"), +        ('force', 'f', "force installation (overwrite existing files)"), +        ('skip-build', None, "skip the build steps"), +    ] + +    boolean_options = ['force', 'skip-build'] + +    def initialize_options(self): +        self.install_dir = None +        self.force = 0 +        self.build_dir = None +        self.skip_build = None + +    def finalize_options(self): +        self.set_undefined_options('build', ('build_scripts', 'build_dir')) +        self.set_undefined_options( +            'install', +            ('install_scripts', 'install_dir'), +            ('force', 'force'), +            ('skip_build', 'skip_build'), +        ) + +    def run(self): +        if not self.skip_build: +            self.run_command('build_scripts') +        self.outfiles = self.copy_tree(self.build_dir, self.install_dir) +        if os.name == 'posix': +            # Set the executable bits (owner, group, and world) on +            # all the scripts we just installed. +            for file in self.get_outputs(): +                if self.dry_run: +                    log.info("changing mode of %s", file) +                else: +                    mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777 +                    log.info("changing mode of %s to %o", file, mode) +                    os.chmod(file, mode) + +    def get_inputs(self): +        return self.distribution.scripts or [] + +    def get_outputs(self): +        return self.outfiles or [] diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/py37compat.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/py37compat.py new file mode 100644 index 0000000..aa0c0a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/py37compat.py @@ -0,0 +1,31 @@ +import sys + + +def _pythonlib_compat(): +    """ +    On Python 3.7 and earlier, distutils would include the Python +    library. See pypa/distutils#9. 
+    """ +    from distutils import sysconfig + +    if not sysconfig.get_config_var('Py_ENABLED_SHARED'): +        return + +    yield 'python{}.{}{}'.format( +        sys.hexversion >> 24, +        (sys.hexversion >> 16) & 0xFF, +        sysconfig.get_config_var('ABIFLAGS'), +    ) + + +def compose(f1, f2): +    return lambda *args, **kwargs: f1(f2(*args, **kwargs)) + + +pythonlib = ( +    compose(list, _pythonlib_compat) +    if sys.version_info < (3, 8) +    and sys.platform != 'darwin' +    and sys.platform[:3] != 'aix' +    else list +) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/register.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/register.py new file mode 100644 index 0000000..c140265 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/register.py @@ -0,0 +1,319 @@ +"""distutils.command.register + +Implements the Distutils 'register' command (register with the repository). +""" + +# created 2002/10/21, Richard Jones + +import getpass +import io +import urllib.parse +import urllib.request +from warnings import warn + +from distutils.core import PyPIRCCommand +from distutils import log + + +class register(PyPIRCCommand): + +    description = "register the distribution with the Python package index" +    user_options = PyPIRCCommand.user_options + [ +        ('list-classifiers', None, 'list the valid Trove classifiers'), +        ( +            'strict', +            None, +            'Will stop the registering if the meta-data are not fully compliant', +        ), +    ] +    boolean_options = PyPIRCCommand.boolean_options + [ +        'verify', +        'list-classifiers', +        'strict', +    ] + +    sub_commands = [('check', lambda self: True)] + +    def initialize_options(self): +        PyPIRCCommand.initialize_options(self) +        self.list_classifiers = 0 +        self.strict = 0 + +    def finalize_options(self): +        PyPIRCCommand.finalize_options(self) +        # setting options for the `check` subcommand +        check_options = { +            'strict': ('register', self.strict), +            'restructuredtext': ('register', 1), +        } +        self.distribution.command_options['check'] = check_options + +    def run(self): +        self.finalize_options() +        self._set_config() + +        # Run sub commands +        for cmd_name in self.get_sub_commands(): +            self.run_command(cmd_name) + +        if self.dry_run: +            self.verify_metadata() +        elif self.list_classifiers: +            self.classifiers() +        else: +            self.send_metadata() + +    def check_metadata(self): +        """Deprecated API.""" +        warn( +            "distutils.command.register.check_metadata is deprecated; " +            "use the check command instead", +            DeprecationWarning, +        ) +        check = self.distribution.get_command_obj('check') +        check.ensure_finalized() +        check.strict = self.strict +        check.restructuredtext = 1 +        check.run() + +    def _set_config(self): +        '''Reads the configuration file and set attributes.''' +        config = self._read_pypirc() +        if config != {}: +            self.username = config['username'] +            self.password = config['password'] +            self.repository = config['repository'] +            self.realm = config['realm'] +            self.has_config = True +        else: +            if self.repository not in ('pypi', self.DEFAULT_REPOSITORY): +    
            raise ValueError('%s not found in .pypirc' % self.repository) +            if self.repository == 'pypi': +                self.repository = self.DEFAULT_REPOSITORY +            self.has_config = False + +    def classifiers(self): +        '''Fetch the list of classifiers from the server.''' +        url = self.repository + '?:action=list_classifiers' +        response = urllib.request.urlopen(url) +        log.info(self._read_pypi_response(response)) + +    def verify_metadata(self): +        '''Send the metadata to the package index server to be checked.''' +        # send the info to the server and report the result +        (code, result) = self.post_to_server(self.build_post_data('verify')) +        log.info('Server response (%s): %s', code, result) + +    def send_metadata(self):  # noqa: C901 +        '''Send the metadata to the package index server. + +        Well, do the following: +        1. figure who the user is, and then +        2. send the data as a Basic auth'ed POST. + +        First we try to read the username/password from $HOME/.pypirc, +        which is a ConfigParser-formatted file with a section +        [distutils] containing username and password entries (both +        in clear text). Eg: + +            [distutils] +            index-servers = +                pypi + +            [pypi] +            username: fred +            password: sekrit + +        Otherwise, to figure who the user is, we offer the user three +        choices: + +         1. use existing login, +         2. register as a new user, or +         3. set the password to a random string and email the user. + +        ''' +        # see if we can short-cut and get the username/password from the +        # config +        if self.has_config: +            choice = '1' +            username = self.username +            password = self.password +        else: +            choice = 'x' +            username = password = '' + +        # get the user's login info +        choices = '1 2 3 4'.split() +        while choice not in choices: +            self.announce( +                '''\ +We need to know who you are, so please choose either: + 1. use your existing login, + 2. register as a new user, + 3. have the server generate a new password for you (and email it to you), or + 4. 
quit +Your selection [default 1]: ''', +                log.INFO, +            ) +            choice = input() +            if not choice: +                choice = '1' +            elif choice not in choices: +                print('Please choose one of the four options!') + +        if choice == '1': +            # get the username and password +            while not username: +                username = input('Username: ') +            while not password: +                password = getpass.getpass('Password: ') + +            # set up the authentication +            auth = urllib.request.HTTPPasswordMgr() +            host = urllib.parse.urlparse(self.repository)[1] +            auth.add_password(self.realm, host, username, password) +            # send the info to the server and report the result +            code, result = self.post_to_server(self.build_post_data('submit'), auth) +            self.announce('Server response ({}): {}'.format(code, result), log.INFO) + +            # possibly save the login +            if code == 200: +                if self.has_config: +                    # sharing the password in the distribution instance +                    # so the upload command can reuse it +                    self.distribution.password = password +                else: +                    self.announce( +                        ( +                            'I can store your PyPI login so future ' +                            'submissions will be faster.' +                        ), +                        log.INFO, +                    ) +                    self.announce( +                        '(the login will be stored in %s)' % self._get_rc_file(), +                        log.INFO, +                    ) +                    choice = 'X' +                    while choice.lower() not in 'yn': +                        choice = input('Save your login (y/N)?') +                        if not choice: +                            choice = 'n' +                    if choice.lower() == 'y': +                        self._store_pypirc(username, password) + +        elif choice == '2': +            data = {':action': 'user'} +            data['name'] = data['password'] = data['email'] = '' +            data['confirm'] = None +            while not data['name']: +                data['name'] = input('Username: ') +            while data['password'] != data['confirm']: +                while not data['password']: +                    data['password'] = getpass.getpass('Password: ') +                while not data['confirm']: +                    data['confirm'] = getpass.getpass(' Confirm: ') +                if data['password'] != data['confirm']: +                    data['password'] = '' +                    data['confirm'] = None +                    print("Password and confirm don't match!") +            while not data['email']: +                data['email'] = input('   EMail: ') +            code, result = self.post_to_server(data) +            if code != 200: +                log.info('Server response (%s): %s', code, result) +            else: +                log.info('You will receive an email shortly.') +                log.info('Follow the instructions in it to ' 'complete registration.') +        elif choice == '3': +            data = {':action': 'password_reset'} +            data['email'] = '' +            while not data['email']: +                data['email'] = input('Your email address: ') +            code, result = self.post_to_server(data) +            
log.info('Server response (%s): %s', code, result) + +    def build_post_data(self, action): +        # figure the data to send - the metadata plus some additional +        # information used by the package server +        meta = self.distribution.metadata +        data = { +            ':action': action, +            'metadata_version': '1.0', +            'name': meta.get_name(), +            'version': meta.get_version(), +            'summary': meta.get_description(), +            'home_page': meta.get_url(), +            'author': meta.get_contact(), +            'author_email': meta.get_contact_email(), +            'license': meta.get_licence(), +            'description': meta.get_long_description(), +            'keywords': meta.get_keywords(), +            'platform': meta.get_platforms(), +            'classifiers': meta.get_classifiers(), +            'download_url': meta.get_download_url(), +            # PEP 314 +            'provides': meta.get_provides(), +            'requires': meta.get_requires(), +            'obsoletes': meta.get_obsoletes(), +        } +        if data['provides'] or data['requires'] or data['obsoletes']: +            data['metadata_version'] = '1.1' +        return data + +    def post_to_server(self, data, auth=None):  # noqa: C901 +        '''Post a query to the server, and return a string response.''' +        if 'name' in data: +            self.announce( +                'Registering {} to {}'.format(data['name'], self.repository), log.INFO +            ) +        # Build up the MIME payload for the urllib2 POST data +        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' +        sep_boundary = '\n--' + boundary +        end_boundary = sep_boundary + '--' +        body = io.StringIO() +        for key, value in data.items(): +            # handle multiple entries for the same name +            if type(value) not in (type([]), type(())): +                value = [value] +            for value in value: +                value = str(value) +                body.write(sep_boundary) +                body.write('\nContent-Disposition: form-data; name="%s"' % key) +                body.write("\n\n") +                body.write(value) +                if value and value[-1] == '\r': +                    body.write('\n')  # write an extra newline (lurve Macs) +        body.write(end_boundary) +        body.write("\n") +        body = body.getvalue().encode("utf-8") + +        # build the Request +        headers = { +            'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8' +            % boundary, +            'Content-length': str(len(body)), +        } +        req = urllib.request.Request(self.repository, body, headers) + +        # handle HTTP and include the Basic Auth handler +        opener = urllib.request.build_opener( +            urllib.request.HTTPBasicAuthHandler(password_mgr=auth) +        ) +        data = '' +        try: +            result = opener.open(req) +        except urllib.error.HTTPError as e: +            if self.show_response: +                data = e.fp.read() +            result = e.code, e.msg +        except urllib.error.URLError as e: +            result = 500, str(e) +        else: +            if self.show_response: +                data = self._read_pypi_response(result) +            result = 200, 'OK' +        if self.show_response: +            msg = '\n'.join(('-' * 75, data, '-' * 75)) +            self.announce(msg, log.INFO) +        return result diff --git 
a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/sdist.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/sdist.py new file mode 100644 index 0000000..d6e9489 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/sdist.py @@ -0,0 +1,531 @@ +"""distutils.command.sdist + +Implements the Distutils 'sdist' command (create a source distribution).""" + +import os +import sys +from glob import glob +from warnings import warn + +from distutils.core import Command +from distutils import dir_util +from distutils import file_util +from distutils import archive_util +from distutils.text_file import TextFile +from distutils.filelist import FileList +from distutils import log +from distutils.util import convert_path +from distutils.errors import DistutilsOptionError, DistutilsTemplateError + + +def show_formats(): +    """Print all possible values for the 'formats' option (used by +    the "--help-formats" command-line option). +    """ +    from distutils.fancy_getopt import FancyGetopt +    from distutils.archive_util import ARCHIVE_FORMATS + +    formats = [] +    for format in ARCHIVE_FORMATS.keys(): +        formats.append(("formats=" + format, None, ARCHIVE_FORMATS[format][2])) +    formats.sort() +    FancyGetopt(formats).print_help("List of available source distribution formats:") + + +class sdist(Command): + +    description = "create a source distribution (tarball, zip file, etc.)" + +    def checking_metadata(self): +        """Callable used for the check sub-command. + +        Placed here so user_options can view it""" +        return self.metadata_check + +    user_options = [ +        ('template=', 't', "name of manifest template file [default: MANIFEST.in]"), +        ('manifest=', 'm', "name of manifest file [default: MANIFEST]"), +        ( +            'use-defaults', +            None, +            "include the default file set in the manifest " +            "[default; disable with --no-defaults]", +        ), +        ('no-defaults', None, "don't include the default file set"), +        ( +            'prune', +            None, +            "specifically exclude files/directories that should not be " +            "distributed (build tree, RCS/CVS dirs, etc.) " +            "[default; disable with --no-prune]", +        ), +        ('no-prune', None, "don't automatically exclude anything"), +        ( +            'manifest-only', +            'o', +            "just regenerate the manifest and then stop " "(implies --force-manifest)", +        ), +        ( +            'force-manifest', +            'f', +            "forcibly regenerate the manifest and carry on as usual. " +            "Deprecated: now the manifest is always regenerated.", +        ), +        ('formats=', None, "formats for source distribution (comma-separated list)"), +        ( +            'keep-temp', +            'k', +            "keep the distribution tree around after creating " + "archive file(s)", +        ), +        ( +            'dist-dir=', +            'd', +            "directory to put the source distribution archive(s) in " "[default: dist]", +        ), +        ( +            'metadata-check', +            None, +            "Ensure that all required elements of meta-data " +            "are supplied. Warn if any missing. 
[default]", +        ), +        ( +            'owner=', +            'u', +            "Owner name used when creating a tar file [default: current user]", +        ), +        ( +            'group=', +            'g', +            "Group name used when creating a tar file [default: current group]", +        ), +    ] + +    boolean_options = [ +        'use-defaults', +        'prune', +        'manifest-only', +        'force-manifest', +        'keep-temp', +        'metadata-check', +    ] + +    help_options = [ +        ('help-formats', None, "list available distribution formats", show_formats), +    ] + +    negative_opt = {'no-defaults': 'use-defaults', 'no-prune': 'prune'} + +    sub_commands = [('check', checking_metadata)] + +    READMES = ('README', 'README.txt', 'README.rst') + +    def initialize_options(self): +        # 'template' and 'manifest' are, respectively, the names of +        # the manifest template and manifest file. +        self.template = None +        self.manifest = None + +        # 'use_defaults': if true, we will include the default file set +        # in the manifest +        self.use_defaults = 1 +        self.prune = 1 + +        self.manifest_only = 0 +        self.force_manifest = 0 + +        self.formats = ['gztar'] +        self.keep_temp = 0 +        self.dist_dir = None + +        self.archive_files = None +        self.metadata_check = 1 +        self.owner = None +        self.group = None + +    def finalize_options(self): +        if self.manifest is None: +            self.manifest = "MANIFEST" +        if self.template is None: +            self.template = "MANIFEST.in" + +        self.ensure_string_list('formats') + +        bad_format = archive_util.check_archive_formats(self.formats) +        if bad_format: +            raise DistutilsOptionError("unknown archive format '%s'" % bad_format) + +        if self.dist_dir is None: +            self.dist_dir = "dist" + +    def run(self): +        # 'filelist' contains the list of files that will make up the +        # manifest +        self.filelist = FileList() + +        # Run sub commands +        for cmd_name in self.get_sub_commands(): +            self.run_command(cmd_name) + +        # Do whatever it takes to get the list of files to process +        # (process the manifest template, read an existing manifest, +        # whatever).  File list is accumulated in 'self.filelist'. +        self.get_file_list() + +        # If user just wanted us to regenerate the manifest, stop now. +        if self.manifest_only: +            return + +        # Otherwise, go ahead and create the source distribution tarball, +        # or zipfile, or whatever. +        self.make_distribution() + +    def check_metadata(self): +        """Deprecated API.""" +        warn( +            "distutils.command.sdist.check_metadata is deprecated, \ +              use the check command instead", +            PendingDeprecationWarning, +        ) +        check = self.distribution.get_command_obj('check') +        check.ensure_finalized() +        check.run() + +    def get_file_list(self): +        """Figure out the list of files to include in the source +        distribution, and put it in 'self.filelist'.  This might involve +        reading the manifest template (and writing the manifest), or just +        reading the manifest, or just using the default file set -- it all +        depends on the user's options. 
+        """ +        # new behavior when using a template: +        # the file list is recalculated every time because +        # even if MANIFEST.in or setup.py are not changed +        # the user might have added some files in the tree that +        # need to be included. +        # +        #  This makes --force the default and only behavior with templates. +        template_exists = os.path.isfile(self.template) +        if not template_exists and self._manifest_is_not_generated(): +            self.read_manifest() +            self.filelist.sort() +            self.filelist.remove_duplicates() +            return + +        if not template_exists: +            self.warn( +                ("manifest template '%s' does not exist " + "(using default file list)") +                % self.template +            ) +        self.filelist.findall() + +        if self.use_defaults: +            self.add_defaults() + +        if template_exists: +            self.read_template() + +        if self.prune: +            self.prune_file_list() + +        self.filelist.sort() +        self.filelist.remove_duplicates() +        self.write_manifest() + +    def add_defaults(self): +        """Add all the default files to self.filelist: +          - README or README.txt +          - setup.py +          - test/test*.py +          - all pure Python modules mentioned in setup script +          - all files pointed by package_data (build_py) +          - all files defined in data_files. +          - all files defined as scripts. +          - all C sources listed as part of extensions or C libraries +            in the setup script (doesn't catch C headers!) +        Warns if (README or README.txt) or setup.py are missing; everything +        else is optional. +        """ +        self._add_defaults_standards() +        self._add_defaults_optional() +        self._add_defaults_python() +        self._add_defaults_data_files() +        self._add_defaults_ext() +        self._add_defaults_c_libs() +        self._add_defaults_scripts() + +    @staticmethod +    def _cs_path_exists(fspath): +        """ +        Case-sensitive path existence check + +        >>> sdist._cs_path_exists(__file__) +        True +        >>> sdist._cs_path_exists(__file__.upper()) +        False +        """ +        if not os.path.exists(fspath): +            return False +        # make absolute so we always have a directory +        abspath = os.path.abspath(fspath) +        directory, filename = os.path.split(abspath) +        return filename in os.listdir(directory) + +    def _add_defaults_standards(self): +        standards = [self.READMES, self.distribution.script_name] +        for fn in standards: +            if isinstance(fn, tuple): +                alts = fn +                got_it = False +                for fn in alts: +                    if self._cs_path_exists(fn): +                        got_it = True +                        self.filelist.append(fn) +                        break + +                if not got_it: +                    self.warn( +                        "standard file not found: should have one of " + ', '.join(alts) +                    ) +            else: +                if self._cs_path_exists(fn): +                    self.filelist.append(fn) +                else: +                    self.warn("standard file '%s' not found" % fn) + +    def _add_defaults_optional(self): +        optional = ['test/test*.py', 'setup.cfg'] +        for pattern in optional: +            files = 
filter(os.path.isfile, glob(pattern)) +            self.filelist.extend(files) + +    def _add_defaults_python(self): +        # build_py is used to get: +        #  - python modules +        #  - files defined in package_data +        build_py = self.get_finalized_command('build_py') + +        # getting python files +        if self.distribution.has_pure_modules(): +            self.filelist.extend(build_py.get_source_files()) + +        # getting package_data files +        # (computed in build_py.data_files by build_py.finalize_options) +        for pkg, src_dir, build_dir, filenames in build_py.data_files: +            for filename in filenames: +                self.filelist.append(os.path.join(src_dir, filename)) + +    def _add_defaults_data_files(self): +        # getting distribution.data_files +        if self.distribution.has_data_files(): +            for item in self.distribution.data_files: +                if isinstance(item, str): +                    # plain file +                    item = convert_path(item) +                    if os.path.isfile(item): +                        self.filelist.append(item) +                else: +                    # a (dirname, filenames) tuple +                    dirname, filenames = item +                    for f in filenames: +                        f = convert_path(f) +                        if os.path.isfile(f): +                            self.filelist.append(f) + +    def _add_defaults_ext(self): +        if self.distribution.has_ext_modules(): +            build_ext = self.get_finalized_command('build_ext') +            self.filelist.extend(build_ext.get_source_files()) + +    def _add_defaults_c_libs(self): +        if self.distribution.has_c_libraries(): +            build_clib = self.get_finalized_command('build_clib') +            self.filelist.extend(build_clib.get_source_files()) + +    def _add_defaults_scripts(self): +        if self.distribution.has_scripts(): +            build_scripts = self.get_finalized_command('build_scripts') +            self.filelist.extend(build_scripts.get_source_files()) + +    def read_template(self): +        """Read and parse manifest template file named by self.template. + +        (usually "MANIFEST.in") The parsing and processing is done by +        'self.filelist', which updates itself accordingly. 
+        """ +        log.info("reading manifest template '%s'", self.template) +        template = TextFile( +            self.template, +            strip_comments=1, +            skip_blanks=1, +            join_lines=1, +            lstrip_ws=1, +            rstrip_ws=1, +            collapse_join=1, +        ) + +        try: +            while True: +                line = template.readline() +                if line is None:  # end of file +                    break + +                try: +                    self.filelist.process_template_line(line) +                # the call above can raise a DistutilsTemplateError for +                # malformed lines, or a ValueError from the lower-level +                # convert_path function +                except (DistutilsTemplateError, ValueError) as msg: +                    self.warn( +                        "%s, line %d: %s" +                        % (template.filename, template.current_line, msg) +                    ) +        finally: +            template.close() + +    def prune_file_list(self): +        """Prune off branches that might slip into the file list as created +        by 'read_template()', but really don't belong there: +          * the build tree (typically "build") +          * the release tree itself (only an issue if we ran "sdist" +            previously with --keep-temp, or it aborted) +          * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories +        """ +        build = self.get_finalized_command('build') +        base_dir = self.distribution.get_fullname() + +        self.filelist.exclude_pattern(None, prefix=build.build_base) +        self.filelist.exclude_pattern(None, prefix=base_dir) + +        if sys.platform == 'win32': +            seps = r'/|\\' +        else: +            seps = '/' + +        vcs_dirs = ['RCS', 'CVS', r'\.svn', r'\.hg', r'\.git', r'\.bzr', '_darcs'] +        vcs_ptrn = r'(^|{})({})({}).*'.format(seps, '|'.join(vcs_dirs), seps) +        self.filelist.exclude_pattern(vcs_ptrn, is_regex=1) + +    def write_manifest(self): +        """Write the file list in 'self.filelist' (presumably as filled in +        by 'add_defaults()' and 'read_template()') to the manifest file +        named by 'self.manifest'. +        """ +        if self._manifest_is_not_generated(): +            log.info( +                "not writing to manually maintained " +                "manifest file '%s'" % self.manifest +            ) +            return + +        content = self.filelist.files[:] +        content.insert(0, '# file GENERATED by distutils, do NOT edit') +        self.execute( +            file_util.write_file, +            (self.manifest, content), +            "writing manifest file '%s'" % self.manifest, +        ) + +    def _manifest_is_not_generated(self): +        # check for special comment used in 3.1.3 and higher +        if not os.path.isfile(self.manifest): +            return False + +        fp = open(self.manifest) +        try: +            first_line = fp.readline() +        finally: +            fp.close() +        return first_line != '# file GENERATED by distutils, do NOT edit\n' + +    def read_manifest(self): +        """Read the manifest file (named by 'self.manifest') and use it to +        fill in 'self.filelist', the list of files to include in the source +        distribution. 
+        """ +        log.info("reading manifest file '%s'", self.manifest) +        with open(self.manifest) as manifest: +            for line in manifest: +                # ignore comments and blank lines +                line = line.strip() +                if line.startswith('#') or not line: +                    continue +                self.filelist.append(line) + +    def make_release_tree(self, base_dir, files): +        """Create the directory tree that will become the source +        distribution archive.  All directories implied by the filenames in +        'files' are created under 'base_dir', and then we hard link or copy +        (if hard linking is unavailable) those files into place. +        Essentially, this duplicates the developer's source tree, but in a +        directory named after the distribution, containing only the files +        to be distributed. +        """ +        # Create all the directories under 'base_dir' necessary to +        # put 'files' there; the 'mkpath()' is just so we don't die +        # if the manifest happens to be empty. +        self.mkpath(base_dir) +        dir_util.create_tree(base_dir, files, dry_run=self.dry_run) + +        # And walk over the list of files, either making a hard link (if +        # os.link exists) to each one that doesn't already exist in its +        # corresponding location under 'base_dir', or copying each file +        # that's out-of-date in 'base_dir'.  (Usually, all files will be +        # out-of-date, because by default we blow away 'base_dir' when +        # we're done making the distribution archives.) + +        if hasattr(os, 'link'):  # can make hard links on this system +            link = 'hard' +            msg = "making hard links in %s..." % base_dir +        else:  # nope, have to copy +            link = None +            msg = "copying files to %s..." % base_dir + +        if not files: +            log.warn("no files to distribute -- empty manifest?") +        else: +            log.info(msg) +        for file in files: +            if not os.path.isfile(file): +                log.warn("'%s' not a regular file -- skipping", file) +            else: +                dest = os.path.join(base_dir, file) +                self.copy_file(file, dest, link=link) + +        self.distribution.metadata.write_pkg_info(base_dir) + +    def make_distribution(self): +        """Create the source distribution(s).  First, we create the release +        tree with 'make_release_tree()'; then, we create all required +        archive files (according to 'self.formats') from the release tree. +        Finally, we clean up by blowing away the release tree (unless +        'self.keep_temp' is true).  The list of archive files created is +        stored so it can be retrieved later by 'get_archive_files()'. +        """ +        # Don't warn about missing meta-data here -- should be (and is!) +        # done elsewhere. 
+        base_dir = self.distribution.get_fullname() +        base_name = os.path.join(self.dist_dir, base_dir) + +        self.make_release_tree(base_dir, self.filelist.files) +        archive_files = []  # remember names of files we create +        # tar archive must be created last to avoid overwrite and remove +        if 'tar' in self.formats: +            self.formats.append(self.formats.pop(self.formats.index('tar'))) + +        for fmt in self.formats: +            file = self.make_archive( +                base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group +            ) +            archive_files.append(file) +            self.distribution.dist_files.append(('sdist', '', file)) + +        self.archive_files = archive_files + +        if not self.keep_temp: +            dir_util.remove_tree(base_dir, dry_run=self.dry_run) + +    def get_archive_files(self): +        """Return the list of archive files created when the command +        was run, or None if the command hasn't run yet. +        """ +        return self.archive_files diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/command/upload.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/upload.py new file mode 100644 index 0000000..6af5394 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/command/upload.py @@ -0,0 +1,205 @@ +""" +distutils.command.upload + +Implements the Distutils 'upload' subcommand (upload package to a package +index). +""" + +import os +import io +import hashlib +from base64 import standard_b64encode +from urllib.request import urlopen, Request, HTTPError +from urllib.parse import urlparse +from distutils.errors import DistutilsError, DistutilsOptionError +from distutils.core import PyPIRCCommand +from distutils.spawn import spawn +from distutils import log + + +# PyPI Warehouse supports MD5, SHA256, and Blake2 (blake2-256) +# https://bugs.python.org/issue40698 +_FILE_CONTENT_DIGESTS = { +    "md5_digest": getattr(hashlib, "md5", None), +    "sha256_digest": getattr(hashlib, "sha256", None), +    "blake2_256_digest": getattr(hashlib, "blake2b", None), +} + + +class upload(PyPIRCCommand): + +    description = "upload binary package to PyPI" + +    user_options = PyPIRCCommand.user_options + [ +        ('sign', 's', 'sign files to upload using gpg'), +        ('identity=', 'i', 'GPG identity used to sign files'), +    ] + +    boolean_options = PyPIRCCommand.boolean_options + ['sign'] + +    def initialize_options(self): +        PyPIRCCommand.initialize_options(self) +        self.username = '' +        self.password = '' +        self.show_response = 0 +        self.sign = False +        self.identity = None + +    def finalize_options(self): +        PyPIRCCommand.finalize_options(self) +        if self.identity and not self.sign: +            raise DistutilsOptionError("Must use --sign for --identity to have meaning") +        config = self._read_pypirc() +        if config != {}: +            self.username = config['username'] +            self.password = config['password'] +            self.repository = config['repository'] +            self.realm = config['realm'] + +        # getting the password from the distribution +        # if previously set by the register command +        if not self.password and self.distribution.password: +            self.password = self.distribution.password + +    def run(self): +        if not self.distribution.dist_files: +            msg = ( +                "Must create and upload 
files in one command " +                "(e.g. setup.py sdist upload)" +            ) +            raise DistutilsOptionError(msg) +        for command, pyversion, filename in self.distribution.dist_files: +            self.upload_file(command, pyversion, filename) + +    def upload_file(self, command, pyversion, filename):  # noqa: C901 +        # Makes sure the repository URL is compliant +        schema, netloc, url, params, query, fragments = urlparse(self.repository) +        if params or query or fragments: +            raise AssertionError("Incompatible url %s" % self.repository) + +        if schema not in ('http', 'https'): +            raise AssertionError("unsupported schema " + schema) + +        # Sign if requested +        if self.sign: +            gpg_args = ["gpg", "--detach-sign", "-a", filename] +            if self.identity: +                gpg_args[2:2] = ["--local-user", self.identity] +            spawn(gpg_args, dry_run=self.dry_run) + +        # Fill in the data - send all the meta-data in case we need to +        # register a new release +        f = open(filename, 'rb') +        try: +            content = f.read() +        finally: +            f.close() + +        meta = self.distribution.metadata +        data = { +            # action +            ':action': 'file_upload', +            'protocol_version': '1', +            # identify release +            'name': meta.get_name(), +            'version': meta.get_version(), +            # file content +            'content': (os.path.basename(filename), content), +            'filetype': command, +            'pyversion': pyversion, +            # additional meta-data +            'metadata_version': '1.0', +            'summary': meta.get_description(), +            'home_page': meta.get_url(), +            'author': meta.get_contact(), +            'author_email': meta.get_contact_email(), +            'license': meta.get_licence(), +            'description': meta.get_long_description(), +            'keywords': meta.get_keywords(), +            'platform': meta.get_platforms(), +            'classifiers': meta.get_classifiers(), +            'download_url': meta.get_download_url(), +            # PEP 314 +            'provides': meta.get_provides(), +            'requires': meta.get_requires(), +            'obsoletes': meta.get_obsoletes(), +        } + +        data['comment'] = '' + +        # file content digests +        for digest_name, digest_cons in _FILE_CONTENT_DIGESTS.items(): +            if digest_cons is None: +                continue +            try: +                data[digest_name] = digest_cons(content).hexdigest() +            except ValueError: +                # hash digest not available or blocked by security policy +                pass + +        if self.sign: +            with open(filename + ".asc", "rb") as f: +                data['gpg_signature'] = (os.path.basename(filename) + ".asc", f.read()) + +        # set up the authentication +        user_pass = (self.username + ":" + self.password).encode('ascii') +        # The exact encoding of the authentication string is debated. +        # Anyway PyPI only accepts ascii for both username or password. 
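As a concrete illustration with hypothetical credentials: for username 'user' and password 'secret', user_pass is b'user:secret', and the value computed on the next line is

    'Basic ' + standard_b64encode(b'user:secret').decode('ascii')
    # == 'Basic dXNlcjpzZWNyZXQ='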
+        auth = "Basic " + standard_b64encode(user_pass).decode('ascii') + +        # Build up the MIME payload for the POST data +        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' +        sep_boundary = b'\r\n--' + boundary.encode('ascii') +        end_boundary = sep_boundary + b'--\r\n' +        body = io.BytesIO() +        for key, value in data.items(): +            title = '\r\nContent-Disposition: form-data; name="%s"' % key +            # handle multiple entries for the same name +            if not isinstance(value, list): +                value = [value] +            for value in value: +                if type(value) is tuple: +                    title += '; filename="%s"' % value[0] +                    value = value[1] +                else: +                    value = str(value).encode('utf-8') +                body.write(sep_boundary) +                body.write(title.encode('utf-8')) +                body.write(b"\r\n\r\n") +                body.write(value) +        body.write(end_boundary) +        body = body.getvalue() + +        msg = "Submitting {} to {}".format(filename, self.repository) +        self.announce(msg, log.INFO) + +        # build the Request +        headers = { +            'Content-type': 'multipart/form-data; boundary=%s' % boundary, +            'Content-length': str(len(body)), +            'Authorization': auth, +        } + +        request = Request(self.repository, data=body, headers=headers) +        # send the data +        try: +            result = urlopen(request) +            status = result.getcode() +            reason = result.msg +        except HTTPError as e: +            status = e.code +            reason = e.msg +        except OSError as e: +            self.announce(str(e), log.ERROR) +            raise + +        if status == 200: +            self.announce('Server response ({}): {}'.format(status, reason), log.INFO) +            if self.show_response: +                text = self._read_pypi_response(result) +                msg = '\n'.join(('-' * 75, text, '-' * 75)) +                self.announce(msg, log.INFO) +        else: +            msg = 'Upload failed ({}): {}'.format(status, reason) +            self.announce(msg, log.ERROR) +            raise DistutilsError(msg) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/config.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/config.py new file mode 100644 index 0000000..6e0c3a7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/config.py @@ -0,0 +1,139 @@ +"""distutils.pypirc + +Provides the PyPIRCCommand class, the base class for the command classes +that uses .pypirc in the distutils.command package. 
+""" +import os +from configparser import RawConfigParser + +from distutils.cmd import Command + +DEFAULT_PYPIRC = """\ +[distutils] +index-servers = +    pypi + +[pypi] +username:%s +password:%s +""" + + +class PyPIRCCommand(Command): +    """Base command that knows how to handle the .pypirc file""" + +    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/' +    DEFAULT_REALM = 'pypi' +    repository = None +    realm = None + +    user_options = [ +        ('repository=', 'r', "url of repository [default: %s]" % DEFAULT_REPOSITORY), +        ('show-response', None, 'display full response text from server'), +    ] + +    boolean_options = ['show-response'] + +    def _get_rc_file(self): +        """Returns rc file path.""" +        return os.path.join(os.path.expanduser('~'), '.pypirc') + +    def _store_pypirc(self, username, password): +        """Creates a default .pypirc file.""" +        rc = self._get_rc_file() +        with os.fdopen(os.open(rc, os.O_CREAT | os.O_WRONLY, 0o600), 'w') as f: +            f.write(DEFAULT_PYPIRC % (username, password)) + +    def _read_pypirc(self):  # noqa: C901 +        """Reads the .pypirc file.""" +        rc = self._get_rc_file() +        if os.path.exists(rc): +            self.announce('Using PyPI login from %s' % rc) +            repository = self.repository or self.DEFAULT_REPOSITORY + +            config = RawConfigParser() +            config.read(rc) +            sections = config.sections() +            if 'distutils' in sections: +                # let's get the list of servers +                index_servers = config.get('distutils', 'index-servers') +                _servers = [ +                    server.strip() +                    for server in index_servers.split('\n') +                    if server.strip() != '' +                ] +                if _servers == []: +                    # nothing set, let's try to get the default pypi +                    if 'pypi' in sections: +                        _servers = ['pypi'] +                    else: +                        # the file is not properly defined, returning +                        # an empty dict +                        return {} +                for server in _servers: +                    current = {'server': server} +                    current['username'] = config.get(server, 'username') + +                    # optional params +                    for key, default in ( +                        ('repository', self.DEFAULT_REPOSITORY), +                        ('realm', self.DEFAULT_REALM), +                        ('password', None), +                    ): +                        if config.has_option(server, key): +                            current[key] = config.get(server, key) +                        else: +                            current[key] = default + +                    # work around people having "repository" for the "pypi" +                    # section of their config set to the HTTP (rather than +                    # HTTPS) URL +                    if server == 'pypi' and repository in ( +                        self.DEFAULT_REPOSITORY, +                        'pypi', +                    ): +                        current['repository'] = self.DEFAULT_REPOSITORY +                        return current + +                    if ( +                        current['server'] == repository +                        or current['repository'] == repository +                    ): +                        return current +            elif 
'server-login' in sections: +                # old format +                server = 'server-login' +                if config.has_option(server, 'repository'): +                    repository = config.get(server, 'repository') +                else: +                    repository = self.DEFAULT_REPOSITORY +                return { +                    'username': config.get(server, 'username'), +                    'password': config.get(server, 'password'), +                    'repository': repository, +                    'server': server, +                    'realm': self.DEFAULT_REALM, +                } + +        return {} + +    def _read_pypi_response(self, response): +        """Read and decode a PyPI HTTP response.""" +        import cgi + +        content_type = response.getheader('content-type', 'text/plain') +        encoding = cgi.parse_header(content_type)[1].get('charset', 'ascii') +        return response.read().decode(encoding) + +    def initialize_options(self): +        """Initialize options.""" +        self.repository = None +        self.realm = None +        self.show_response = 0 + +    def finalize_options(self): +        """Finalizes options.""" +        if self.repository is None: +            self.repository = self.DEFAULT_REPOSITORY +        if self.realm is None: +            self.realm = self.DEFAULT_REALM diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/core.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/core.py new file mode 100644 index 0000000..de13978 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/core.py @@ -0,0 +1,291 @@ +"""distutils.core + +The only module that needs to be imported to use the Distutils; provides +the 'setup' function (which is to be called from the setup script).  Also +indirectly provides the Distribution and Command classes, although they are +really defined in distutils.dist and distutils.cmd. +""" + +import os +import sys +import tokenize + +from distutils.debug import DEBUG +from distutils.errors import ( +    DistutilsSetupError, +    DistutilsError, +    CCompilerError, +    DistutilsArgError, +) + +# Mainly import these so setup scripts can "from distutils.core import" them. +from distutils.dist import Distribution +from distutils.cmd import Command +from distutils.config import PyPIRCCommand +from distutils.extension import Extension + + +__all__ = ['Distribution', 'Command', 'PyPIRCCommand', 'Extension', 'setup'] + +# This is a barebones help message generated displayed when the user +# runs the setup script with no arguments at all.  More useful help +# is generated with various --help options: global help, list commands, +# and per-command help. +USAGE = """\ +usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...] +   or: %(script)s --help [cmd1 cmd2 ...] +   or: %(script)s --help-commands +   or: %(script)s cmd --help +""" + + +def gen_usage(script_name): +    script = os.path.basename(script_name) +    return USAGE % locals() + + +# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'. 
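The module-level globals that follow exist so that run_setup(), defined later in this file, can capture the Distribution object that setup() builds. A minimal usage sketch, assuming a setup.py is present in the current directory:

    from distutils.core import run_setup

    dist = run_setup('setup.py', script_args=['--quiet'], stop_after='config')
    print(dist.get_name(), dist.get_version())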
+_setup_stop_after = None +_setup_distribution = None + +# Legal keyword arguments for the setup() function +setup_keywords = ( +    'distclass', +    'script_name', +    'script_args', +    'options', +    'name', +    'version', +    'author', +    'author_email', +    'maintainer', +    'maintainer_email', +    'url', +    'license', +    'description', +    'long_description', +    'keywords', +    'platforms', +    'classifiers', +    'download_url', +    'requires', +    'provides', +    'obsoletes', +) + +# Legal keyword arguments for the Extension constructor +extension_keywords = ( +    'name', +    'sources', +    'include_dirs', +    'define_macros', +    'undef_macros', +    'library_dirs', +    'libraries', +    'runtime_library_dirs', +    'extra_objects', +    'extra_compile_args', +    'extra_link_args', +    'swig_opts', +    'export_symbols', +    'depends', +    'language', +) + + +def setup(**attrs):  # noqa: C901 +    """The gateway to the Distutils: do everything your setup script needs +    to do, in a highly flexible and user-driven way.  Briefly: create a +    Distribution instance; find and parse config files; parse the command +    line; run each Distutils command found there, customized by the options +    supplied to 'setup()' (as keyword arguments), in config files, and on +    the command line. + +    The Distribution instance might be an instance of a class supplied via +    the 'distclass' keyword argument to 'setup'; if no such class is +    supplied, then the Distribution class (in dist.py) is instantiated. +    All other arguments to 'setup' (except for 'cmdclass') are used to set +    attributes of the Distribution instance. + +    The 'cmdclass' argument, if supplied, is a dictionary mapping command +    names to command classes.  Each command encountered on the command line +    will be turned into a command class, which is in turn instantiated; any +    class found in 'cmdclass' is used in place of the default, which is +    (for command 'foo_bar') class 'foo_bar' in module +    'distutils.command.foo_bar'.  The command class must provide a +    'user_options' attribute which is a list of option specifiers for +    'distutils.fancy_getopt'.  Any command-line options between the current +    and the next command are used to set attributes of the current command +    object. + +    When the entire command-line has been successfully parsed, calls the +    'run()' method on each command object in turn.  This method will be +    driven entirely by the Distribution object (which each command object +    has a reference to, thanks to its constructor), and the +    command-specific options that became attributes of each command +    object. +    """ + +    global _setup_stop_after, _setup_distribution + +    # Determine the distribution class -- either caller-supplied or +    # our Distribution (see below). +    klass = attrs.get('distclass') +    if klass: +        del attrs['distclass'] +    else: +        klass = Distribution + +    if 'script_name' not in attrs: +        attrs['script_name'] = os.path.basename(sys.argv[0]) +    if 'script_args' not in attrs: +        attrs['script_args'] = sys.argv[1:] + +    # Create the Distribution instance, using the remaining arguments +    # (ie. 
everything except distclass) to initialize it +    try: +        _setup_distribution = dist = klass(attrs) +    except DistutilsSetupError as msg: +        if 'name' not in attrs: +            raise SystemExit("error in setup command: %s" % msg) +        else: +            raise SystemExit("error in {} setup command: {}".format(attrs['name'], msg)) + +    if _setup_stop_after == "init": +        return dist + +    # Find and parse the config file(s): they will override options from +    # the setup script, but be overridden by the command line. +    dist.parse_config_files() + +    if DEBUG: +        print("options (after parsing config files):") +        dist.dump_option_dicts() + +    if _setup_stop_after == "config": +        return dist + +    # Parse the command line and override config files; any +    # command-line errors are the end user's fault, so turn them into +    # SystemExit to suppress tracebacks. +    try: +        ok = dist.parse_command_line() +    except DistutilsArgError as msg: +        raise SystemExit(gen_usage(dist.script_name) + "\nerror: %s" % msg) + +    if DEBUG: +        print("options (after parsing command line):") +        dist.dump_option_dicts() + +    if _setup_stop_after == "commandline": +        return dist + +    # And finally, run all the commands found on the command line. +    if ok: +        return run_commands(dist) + +    return dist + + +# setup () + + +def run_commands(dist): +    """Given a Distribution object run all the commands, +    raising ``SystemExit`` errors in the case of failure. + +    This function assumes that either ``sys.argv`` or ``dist.script_args`` +    is already set accordingly. +    """ +    try: +        dist.run_commands() +    except KeyboardInterrupt: +        raise SystemExit("interrupted") +    except OSError as exc: +        if DEBUG: +            sys.stderr.write("error: {}\n".format(exc)) +            raise +        else: +            raise SystemExit("error: {}".format(exc)) + +    except (DistutilsError, CCompilerError) as msg: +        if DEBUG: +            raise +        else: +            raise SystemExit("error: " + str(msg)) + +    return dist + + +def run_setup(script_name, script_args=None, stop_after="run"): +    """Run a setup script in a somewhat controlled environment, and +    return the Distribution instance that drives things.  This is useful +    if you need to find out the distribution meta-data (passed as +    keyword args from 'script' to 'setup()', or the contents of the +    config files or command-line. + +    'script_name' is a file that will be read and run with 'exec()'; +    'sys.argv[0]' will be replaced with 'script' for the duration of the +    call.  'script_args' is a list of strings; if supplied, +    'sys.argv[1:]' will be replaced by 'script_args' for the duration of +    the call. 
+ +    'stop_after' tells 'setup()' when to stop processing; possible +    values: +      init +        stop after the Distribution instance has been created and +        populated with the keyword arguments to 'setup()' +      config +        stop after config files have been parsed (and their data +        stored in the Distribution instance) +      commandline +        stop after the command-line ('sys.argv[1:]' or 'script_args') +        have been parsed (and the data stored in the Distribution) +      run [default] +        stop after all commands have been run (the same as if 'setup()' +        had been called in the usual way + +    Returns the Distribution instance, which provides all information +    used to drive the Distutils. +    """ +    if stop_after not in ('init', 'config', 'commandline', 'run'): +        raise ValueError("invalid value for 'stop_after': {!r}".format(stop_after)) + +    global _setup_stop_after, _setup_distribution +    _setup_stop_after = stop_after + +    save_argv = sys.argv.copy() +    g = {'__file__': script_name, '__name__': '__main__'} +    try: +        try: +            sys.argv[0] = script_name +            if script_args is not None: +                sys.argv[1:] = script_args +            # tokenize.open supports automatic encoding detection +            with tokenize.open(script_name) as f: +                code = f.read().replace(r'\r\n', r'\n') +                exec(code, g) +        finally: +            sys.argv = save_argv +            _setup_stop_after = None +    except SystemExit: +        # Hmm, should we do something if exiting with a non-zero code +        # (ie. error)? +        pass + +    if _setup_distribution is None: +        raise RuntimeError( +            ( +                "'distutils.core.setup()' was never called -- " +                "perhaps '%s' is not a Distutils setup script?" +            ) +            % script_name +        ) + +    # I wonder if the setup script's namespace -- g and l -- would be of +    # any interest to callers? +    # print "_setup_distribution:", _setup_distribution +    return _setup_distribution + + +# run_setup () diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/cygwinccompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/cygwinccompiler.py new file mode 100644 index 0000000..2c4da5b --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/cygwinccompiler.py @@ -0,0 +1,364 @@ +"""distutils.cygwinccompiler + +Provides the CygwinCCompiler class, a subclass of UnixCCompiler that +handles the Cygwin port of the GNU C compiler to Windows.  It also contains +the Mingw32CCompiler class which handles the mingw32 port of GCC (same as +cygwin in no-cygwin mode). +""" + +import os +import sys +import copy +import shlex +import warnings +from subprocess import check_output + +from distutils.unixccompiler import UnixCCompiler +from distutils.file_util import write_file +from distutils.errors import ( +    DistutilsExecError, +    DistutilsPlatformError, +    CCompilerError, +    CompileError, +) +from distutils.version import LooseVersion, suppress_known_deprecation + + +def get_msvcr(): +    """Include the appropriate MSVC runtime library if Python was built +    with MSVC 7.0 or later. 
+    """ +    msc_pos = sys.version.find('MSC v.') +    if msc_pos != -1: +        msc_ver = sys.version[msc_pos + 6 : msc_pos + 10] +        if msc_ver == '1300': +            # MSVC 7.0 +            return ['msvcr70'] +        elif msc_ver == '1310': +            # MSVC 7.1 +            return ['msvcr71'] +        elif msc_ver == '1400': +            # VS2005 / MSVC 8.0 +            return ['msvcr80'] +        elif msc_ver == '1500': +            # VS2008 / MSVC 9.0 +            return ['msvcr90'] +        elif msc_ver == '1600': +            # VS2010 / MSVC 10.0 +            return ['msvcr100'] +        elif msc_ver == '1700': +            # VS2012 / MSVC 11.0 +            return ['msvcr110'] +        elif msc_ver == '1800': +            # VS2013 / MSVC 12.0 +            return ['msvcr120'] +        elif 1900 <= int(msc_ver) < 2000: +            # VS2015 / MSVC 14.0 +            return ['ucrt', 'vcruntime140'] +        else: +            raise ValueError("Unknown MS Compiler version %s " % msc_ver) + + +_runtime_library_dirs_msg = ( +    "Unable to set runtime library search path on Windows, " +    "usually indicated by `runtime_library_dirs` parameter to Extension" +) + + +class CygwinCCompiler(UnixCCompiler): +    """Handles the Cygwin port of the GNU C compiler to Windows.""" + +    compiler_type = 'cygwin' +    obj_extension = ".o" +    static_lib_extension = ".a" +    shared_lib_extension = ".dll.a" +    dylib_lib_extension = ".dll" +    static_lib_format = "lib%s%s" +    shared_lib_format = "lib%s%s" +    dylib_lib_format = "cyg%s%s" +    exe_extension = ".exe" + +    def __init__(self, verbose=0, dry_run=0, force=0): + +        super().__init__(verbose, dry_run, force) + +        status, details = check_config_h() +        self.debug_print( +            "Python's GCC status: {} (details: {})".format(status, details) +        ) +        if status is not CONFIG_H_OK: +            self.warn( +                "Python's pyconfig.h doesn't seem to support your compiler. " +                "Reason: %s. " +                "Compiling may fail because of undefined preprocessor macros." % details +            ) + +        self.cc = os.environ.get('CC', 'gcc') +        self.cxx = os.environ.get('CXX', 'g++') + +        self.linker_dll = self.cc +        shared_option = "-shared" + +        self.set_executables( +            compiler='%s -mcygwin -O -Wall' % self.cc, +            compiler_so='%s -mcygwin -mdll -O -Wall' % self.cc, +            compiler_cxx='%s -mcygwin -O -Wall' % self.cxx, +            linker_exe='%s -mcygwin' % self.cc, +            linker_so=('{} -mcygwin {}'.format(self.linker_dll, shared_option)), +        ) + +        # Include the appropriate MSVC runtime library if Python was built +        # with MSVC 7.0 or later. +        self.dll_libraries = get_msvcr() + +    @property +    def gcc_version(self): +        # Older numpy dependend on this existing to check for ancient +        # gcc versions. This doesn't make much sense with clang etc so +        # just hardcode to something recent. +        # https://github.com/numpy/numpy/pull/20333 +        warnings.warn( +            "gcc_version attribute of CygwinCCompiler is deprecated. 
" +            "Instead of returning actual gcc version a fixed value 11.2.0 is returned.", +            DeprecationWarning, +            stacklevel=2, +        ) +        with suppress_known_deprecation(): +            return LooseVersion("11.2.0") + +    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): +        """Compiles the source by spawning GCC and windres if needed.""" +        if ext == '.rc' or ext == '.res': +            # gcc needs '.res' and '.rc' compiled to object files !!! +            try: +                self.spawn(["windres", "-i", src, "-o", obj]) +            except DistutilsExecError as msg: +                raise CompileError(msg) +        else:  # for other files use the C-compiler +            try: +                self.spawn( +                    self.compiler_so + cc_args + [src, '-o', obj] + extra_postargs +                ) +            except DistutilsExecError as msg: +                raise CompileError(msg) + +    def link( +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): +        """Link the objects.""" +        # use separate copies, so we can modify the lists +        extra_preargs = copy.copy(extra_preargs or []) +        libraries = copy.copy(libraries or []) +        objects = copy.copy(objects or []) + +        if runtime_library_dirs: +            self.warn(_runtime_library_dirs_msg) + +        # Additional libraries +        libraries.extend(self.dll_libraries) + +        # handle export symbols by creating a def-file +        # with executables this only works with gcc/ld as linker +        if (export_symbols is not None) and ( +            target_desc != self.EXECUTABLE or self.linker_dll == "gcc" +        ): +            # (The linker doesn't do anything if output is up-to-date. +            # So it would probably better to check if we really need this, +            # but for this we had to insert some unchanged parts of +            # UnixCCompiler, and this is not what we want.) 
+ +            # we want to put some files in the same directory as the +            # object files are, build_temp doesn't help much +            # where are the object files +            temp_dir = os.path.dirname(objects[0]) +            # name of dll to give the helper files the same base name +            (dll_name, dll_extension) = os.path.splitext( +                os.path.basename(output_filename) +            ) + +            # generate the filenames for these files +            def_file = os.path.join(temp_dir, dll_name + ".def") + +            # Generate .def file +            contents = ["LIBRARY %s" % os.path.basename(output_filename), "EXPORTS"] +            for sym in export_symbols: +                contents.append(sym) +            self.execute(write_file, (def_file, contents), "writing %s" % def_file) + +            # next add options for def-file + +            # for gcc/ld the def-file is specified as any object files +            objects.append(def_file) + +        # end: if ((export_symbols is not None) and +        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")): + +        # who wants symbols and a many times larger output file +        # should explicitly switch the debug mode on +        # otherwise we let ld strip the output file +        # (On my machine: 10KiB < stripped_file < ??100KiB +        #   unstripped_file = stripped_file + XXX KiB +        #  ( XXX=254 for a typical python extension)) +        if not debug: +            extra_preargs.append("-s") + +        UnixCCompiler.link( +            self, +            target_desc, +            objects, +            output_filename, +            output_dir, +            libraries, +            library_dirs, +            runtime_library_dirs, +            None,  # export_symbols, we do this in our def-file +            debug, +            extra_preargs, +            extra_postargs, +            build_temp, +            target_lang, +        ) + +    def runtime_library_dir_option(self, dir): +        # cygwin doesn't support rpath. While in theory we could error +        # out like MSVC does, code might expect it to work like on Unix, so +        # just warn and hope for the best. +        self.warn(_runtime_library_dirs_msg) +        return [] + +    # -- Miscellaneous methods ----------------------------------------- + +    def _make_out_path(self, output_dir, strip_dir, src_name): +        # use normcase to make sure '.rc' is really '.rc' and not '.RC' +        norm_src_name = os.path.normcase(src_name) +        return super()._make_out_path(output_dir, strip_dir, norm_src_name) + +    @property +    def out_extensions(self): +        """ +        Add support for rc and res files. 
+        """ +        return { +            **super().out_extensions, +            **{ext: ext + self.obj_extension for ext in ('.res', '.rc')}, +        } + + +# the same as cygwin plus some additional parameters +class Mingw32CCompiler(CygwinCCompiler): +    """Handles the Mingw32 port of the GNU C compiler to Windows.""" + +    compiler_type = 'mingw32' + +    def __init__(self, verbose=0, dry_run=0, force=0): + +        super().__init__(verbose, dry_run, force) + +        shared_option = "-shared" + +        if is_cygwincc(self.cc): +            raise CCompilerError('Cygwin gcc cannot be used with --compiler=mingw32') + +        self.set_executables( +            compiler='%s -O -Wall' % self.cc, +            compiler_so='%s -mdll -O -Wall' % self.cc, +            compiler_cxx='%s -O -Wall' % self.cxx, +            linker_exe='%s' % self.cc, +            linker_so='{} {}'.format(self.linker_dll, shared_option), +        ) + +        # Maybe we should also append -mthreads, but then the finished +        # dlls need another dll (mingwm10.dll see Mingw32 docs) +        # (-mthreads: Support thread-safe exception handling on `Mingw32') + +        # no additional libraries needed +        self.dll_libraries = [] + +        # Include the appropriate MSVC runtime library if Python was built +        # with MSVC 7.0 or later. +        self.dll_libraries = get_msvcr() + +    def runtime_library_dir_option(self, dir): +        raise DistutilsPlatformError(_runtime_library_dirs_msg) + + +# Because these compilers aren't configured in Python's pyconfig.h file by +# default, we should at least warn the user if he is using an unmodified +# version. + +CONFIG_H_OK = "ok" +CONFIG_H_NOTOK = "not ok" +CONFIG_H_UNCERTAIN = "uncertain" + + +def check_config_h(): +    """Check if the current Python installation appears amenable to building +    extensions with GCC. + +    Returns a tuple (status, details), where 'status' is one of the following +    constants: + +    - CONFIG_H_OK: all is well, go ahead and compile +    - CONFIG_H_NOTOK: doesn't look good +    - CONFIG_H_UNCERTAIN: not sure -- unable to read pyconfig.h + +    'details' is a human-readable string explaining the situation. + +    Note there are two ways to conclude "OK": either 'sys.version' contains +    the string "GCC" (implying that this Python was built with GCC), or the +    installed "pyconfig.h" contains the string "__GNUC__". +    """ + +    # XXX since this function also checks sys.version, it's not strictly a +    # "pyconfig.h" check -- should probably be renamed... 
+ +    from distutils import sysconfig + +    # if sys.version contains GCC then python was compiled with GCC, and the +    # pyconfig.h file should be OK +    if "GCC" in sys.version: +        return CONFIG_H_OK, "sys.version mentions 'GCC'" + +    # Clang would also work +    if "Clang" in sys.version: +        return CONFIG_H_OK, "sys.version mentions 'Clang'" + +    # let's see if __GNUC__ is mentioned in python.h +    fn = sysconfig.get_config_h_filename() +    try: +        config_h = open(fn) +        try: +            if "__GNUC__" in config_h.read(): +                return CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn +            else: +                return CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn +        finally: +            config_h.close() +    except OSError as exc: +        return (CONFIG_H_UNCERTAIN, "couldn't read '{}': {}".format(fn, exc.strerror)) + + +def is_cygwincc(cc): +    '''Try to determine if the compiler that would be used is from cygwin.''' +    out_string = check_output(shlex.split(cc) + ['-dumpmachine']) +    return out_string.strip().endswith(b'cygwin') + + +get_versions = None +""" +A stand-in for the previous get_versions() function to prevent failures +when monkeypatched. See pypa/setuptools#2969. +""" diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/debug.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/debug.py new file mode 100644 index 0000000..daf1660 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/debug.py @@ -0,0 +1,5 @@ +import os + +# If DISTUTILS_DEBUG is anything other than the empty string, we run in +# debug mode. +DEBUG = os.environ.get('DISTUTILS_DEBUG') diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/dep_util.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/dep_util.py new file mode 100644 index 0000000..db1fa01 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/dep_util.py @@ -0,0 +1,96 @@ +"""distutils.dep_util + +Utility functions for simple, timestamp-based dependency of files +and groups of files; also, function based entirely on such +timestamp dependency analysis.""" + +import os +from distutils.errors import DistutilsFileError + + +def newer(source, target): +    """Return true if 'source' exists and is more recently modified than +    'target', or if 'source' exists and 'target' doesn't.  Return false if +    both exist and 'target' is the same age or younger than 'source'. +    Raise DistutilsFileError if 'source' does not exist. +    """ +    if not os.path.exists(source): +        raise DistutilsFileError("file '%s' does not exist" % os.path.abspath(source)) +    if not os.path.exists(target): +        return 1 + +    from stat import ST_MTIME + +    mtime1 = os.stat(source)[ST_MTIME] +    mtime2 = os.stat(target)[ST_MTIME] + +    return mtime1 > mtime2 + + +# newer () + + +def newer_pairwise(sources, targets): +    """Walk two filename lists in parallel, testing if each source is newer +    than its corresponding target.  Return a pair of lists (sources, +    targets) where source is newer than target, according to the semantics +    of 'newer()'. 
+    """ +    if len(sources) != len(targets): +        raise ValueError("'sources' and 'targets' must be same length") + +    # build a pair of lists (sources, targets) where  source is newer +    n_sources = [] +    n_targets = [] +    for i in range(len(sources)): +        if newer(sources[i], targets[i]): +            n_sources.append(sources[i]) +            n_targets.append(targets[i]) + +    return (n_sources, n_targets) + + +# newer_pairwise () + + +def newer_group(sources, target, missing='error'): +    """Return true if 'target' is out-of-date with respect to any file +    listed in 'sources'.  In other words, if 'target' exists and is newer +    than every file in 'sources', return false; otherwise return true. +    'missing' controls what we do when a source file is missing; the +    default ("error") is to blow up with an OSError from inside 'stat()'; +    if it is "ignore", we silently drop any missing source files; if it is +    "newer", any missing source files make us assume that 'target' is +    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to +    carry out commands that wouldn't work because inputs are missing, but +    that doesn't matter because you're not actually going to run the +    commands). +    """ +    # If the target doesn't even exist, then it's definitely out-of-date. +    if not os.path.exists(target): +        return 1 + +    # Otherwise we have to find out the hard way: if *any* source file +    # is more recent than 'target', then 'target' is out-of-date and +    # we can immediately return true.  If we fall through to the end +    # of the loop, then 'target' is up-to-date and we return false. +    from stat import ST_MTIME + +    target_mtime = os.stat(target)[ST_MTIME] +    for source in sources: +        if not os.path.exists(source): +            if missing == 'error':  # blow up when we stat() the file +                pass +            elif missing == 'ignore':  # missing source dropped from +                continue  # target's dependency list +            elif missing == 'newer':  # missing source means target is +                return 1  # out-of-date + +        source_mtime = os.stat(source)[ST_MTIME] +        if source_mtime > target_mtime: +            return 1 +    else: +        return 0 + + +# newer_group () diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/dir_util.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/dir_util.py new file mode 100644 index 0000000..6f0bb8a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/dir_util.py @@ -0,0 +1,243 @@ +"""distutils.dir_util + +Utility functions for manipulating directories and directory trees.""" + +import os +import errno +from distutils.errors import DistutilsInternalError, DistutilsFileError +from distutils import log + +# cache for by mkpath() -- in addition to cheapening redundant calls, +# eliminates redundant "creating /foo/bar/baz" messages in dry-run mode +_path_created = {} + + +def mkpath(name, mode=0o777, verbose=1, dry_run=0):  # noqa: C901 +    """Create a directory and any missing ancestor directories. + +    If the directory already exists (or if 'name' is the empty string, which +    means the current directory, which of course exists), then do nothing. +    Raise DistutilsFileError if unable to create some directory along the way +    (eg. some sub-path exists, but is a file rather than a directory). +    If 'verbose' is true, print a one-line summary of each mkdir to stdout. 
+    Return the list of directories actually created. + +    os.makedirs is not used because: + +    a) It's new to Python 1.5.2, and +    b) it blows up if the directory already exists (in which case it should +       silently succeed). +    """ + +    global _path_created + +    # Detect a common bug -- name is None +    if not isinstance(name, str): +        raise DistutilsInternalError( +            "mkpath: 'name' must be a string (got {!r})".format(name) +        ) + +    # XXX what's the better way to handle verbosity? print as we create +    # each directory in the path (the current behaviour), or only announce +    # the creation of the whole path? (quite easy to do the latter since +    # we're not using a recursive algorithm) + +    name = os.path.normpath(name) +    created_dirs = [] +    if os.path.isdir(name) or name == '': +        return created_dirs +    if _path_created.get(os.path.abspath(name)): +        return created_dirs + +    (head, tail) = os.path.split(name) +    tails = [tail]  # stack of lone dirs to create + +    while head and tail and not os.path.isdir(head): +        (head, tail) = os.path.split(head) +        tails.insert(0, tail)  # push next higher dir onto stack + +    # now 'head' contains the deepest directory that already exists +    # (that is, the child of 'head' in 'name' is the highest directory +    # that does *not* exist) +    for d in tails: +        # print "head = %s, d = %s: " % (head, d), +        head = os.path.join(head, d) +        abs_head = os.path.abspath(head) + +        if _path_created.get(abs_head): +            continue + +        if verbose >= 1: +            log.info("creating %s", head) + +        if not dry_run: +            try: +                os.mkdir(head, mode) +            except OSError as exc: +                if not (exc.errno == errno.EEXIST and os.path.isdir(head)): +                    raise DistutilsFileError( +                        "could not create '{}': {}".format(head, exc.args[-1]) +                    ) +            created_dirs.append(head) + +        _path_created[abs_head] = 1 +    return created_dirs + + +def create_tree(base_dir, files, mode=0o777, verbose=1, dry_run=0): +    """Create all the empty directories under 'base_dir' needed to put 'files' +    there. + +    'base_dir' is just the name of a directory which doesn't necessarily +    exist yet; 'files' is a list of filenames to be interpreted relative to +    'base_dir'.  'base_dir' + the directory portion of every file in 'files' +    will be created if it doesn't already exist.  'mode', 'verbose' and +    'dry_run' flags are as for 'mkpath()'. +    """ +    # First get the list of directories to create +    need_dir = set() +    for file in files: +        need_dir.add(os.path.join(base_dir, os.path.dirname(file))) + +    # Now create them +    for dir in sorted(need_dir): +        mkpath(dir, mode, verbose=verbose, dry_run=dry_run) + + +def copy_tree(  # noqa: C901 +    src, +    dst, +    preserve_mode=1, +    preserve_times=1, +    preserve_symlinks=0, +    update=0, +    verbose=1, +    dry_run=0, +): +    """Copy an entire directory tree 'src' to a new location 'dst'. + +    Both 'src' and 'dst' must be directory names.  If 'src' is not a +    directory, raise DistutilsFileError.  If 'dst' does not exist, it is +    created with 'mkpath()'.  The end result of the copy is that every +    file in 'src' is copied to 'dst', and directories under 'src' are +    recursively copied to 'dst'.  
Return the list of files that were +    copied or might have been copied, using their output name.  The +    return value is unaffected by 'update' or 'dry_run': it is simply +    the list of all files under 'src', with the names changed to be +    under 'dst'. + +    'preserve_mode' and 'preserve_times' are the same as for +    'copy_file'; note that they only apply to regular files, not to +    directories.  If 'preserve_symlinks' is true, symlinks will be +    copied as symlinks (on platforms that support them!); otherwise +    (the default), the destination of the symlink will be copied. +    'update' and 'verbose' are the same as for 'copy_file'. +    """ +    from distutils.file_util import copy_file + +    if not dry_run and not os.path.isdir(src): +        raise DistutilsFileError("cannot copy tree '%s': not a directory" % src) +    try: +        names = os.listdir(src) +    except OSError as e: +        if dry_run: +            names = [] +        else: +            raise DistutilsFileError( +                "error listing files in '{}': {}".format(src, e.strerror) +            ) + +    if not dry_run: +        mkpath(dst, verbose=verbose) + +    outputs = [] + +    for n in names: +        src_name = os.path.join(src, n) +        dst_name = os.path.join(dst, n) + +        if n.startswith('.nfs'): +            # skip NFS rename files +            continue + +        if preserve_symlinks and os.path.islink(src_name): +            link_dest = os.readlink(src_name) +            if verbose >= 1: +                log.info("linking %s -> %s", dst_name, link_dest) +            if not dry_run: +                os.symlink(link_dest, dst_name) +            outputs.append(dst_name) + +        elif os.path.isdir(src_name): +            outputs.extend( +                copy_tree( +                    src_name, +                    dst_name, +                    preserve_mode, +                    preserve_times, +                    preserve_symlinks, +                    update, +                    verbose=verbose, +                    dry_run=dry_run, +                ) +            ) +        else: +            copy_file( +                src_name, +                dst_name, +                preserve_mode, +                preserve_times, +                update, +                verbose=verbose, +                dry_run=dry_run, +            ) +            outputs.append(dst_name) + +    return outputs + + +def _build_cmdtuple(path, cmdtuples): +    """Helper for remove_tree().""" +    for f in os.listdir(path): +        real_f = os.path.join(path, f) +        if os.path.isdir(real_f) and not os.path.islink(real_f): +            _build_cmdtuple(real_f, cmdtuples) +        else: +            cmdtuples.append((os.remove, real_f)) +    cmdtuples.append((os.rmdir, path)) + + +def remove_tree(directory, verbose=1, dry_run=0): +    """Recursively remove an entire directory tree. + +    Any errors are ignored (apart from being reported to stdout if 'verbose' +    is true). 
+    """ +    global _path_created + +    if verbose >= 1: +        log.info("removing '%s' (and everything under it)", directory) +    if dry_run: +        return +    cmdtuples = [] +    _build_cmdtuple(directory, cmdtuples) +    for cmd in cmdtuples: +        try: +            cmd[0](cmd[1]) +            # remove dir from cache if it's already there +            abspath = os.path.abspath(cmd[1]) +            if abspath in _path_created: +                del _path_created[abspath] +        except OSError as exc: +            log.warn("error removing %s: %s", directory, exc) + + +def ensure_relative(path): +    """Take the full path 'path', and make it a relative path. + +    This is useful to make 'path' the second argument to os.path.join(). +    """ +    drive, path = os.path.splitdrive(path) +    if path[0:1] == os.sep: +        path = drive + path[1:] +    return path diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/dist.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/dist.py new file mode 100644 index 0000000..917cd94 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/dist.py @@ -0,0 +1,1286 @@ +"""distutils.dist + +Provides the Distribution class, which represents the module distribution +being built/installed/distributed. +""" + +import sys +import os +import re +import pathlib +import contextlib +from email import message_from_file + +try: +    import warnings +except ImportError: +    warnings = None + +from distutils.errors import ( +    DistutilsOptionError, +    DistutilsModuleError, +    DistutilsArgError, +    DistutilsClassError, +) +from distutils.fancy_getopt import FancyGetopt, translate_longopt +from distutils.util import check_environ, strtobool, rfc822_escape +from distutils import log +from distutils.debug import DEBUG + +# Regex to define acceptable Distutils command names.  This is not *quite* +# the same as a Python NAME -- I don't allow leading underscores.  The fact +# that they're very similar is no coincidence; the default naming scheme is +# to look for a Python module named after the command. +command_re = re.compile(r'^[a-zA-Z]([a-zA-Z0-9_]*)$') + + +def _ensure_list(value, fieldname): +    if isinstance(value, str): +        # a string containing comma separated values is okay.  It will +        # be converted to a list by Distribution.finalize_options(). +        pass +    elif not isinstance(value, list): +        # passing a tuple or an iterator perhaps, warn and convert +        typename = type(value).__name__ +        msg = "Warning: '{fieldname}' should be a list, got type '{typename}'" +        msg = msg.format(**locals()) +        log.log(log.WARN, msg) +        value = list(value) +    return value + + +class Distribution: +    """The core of the Distutils.  Most of the work hiding behind 'setup' +    is really done within a Distribution instance, which farms the work out +    to the Distutils commands specified on the command line. + +    Setup scripts will almost never instantiate Distribution directly, +    unless the 'setup()' function is totally inadequate to their needs. +    However, it is conceivable that a setup script might wish to subclass +    Distribution for some specialized purpose, and then pass the subclass +    to 'setup()' as the 'distclass' keyword argument.  If so, it is +    necessary to respect the expectations that 'setup' has of Distribution. +    See the code for 'setup()', in core.py, for details. 
+    """ + +    # 'global_options' describes the command-line options that may be +    # supplied to the setup script prior to any actual commands. +    # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of +    # these global options.  This list should be kept to a bare minimum, +    # since every global option is also valid as a command option -- and we +    # don't want to pollute the commands with too many options that they +    # have minimal control over. +    # The fourth entry for verbose means that it can be repeated. +    global_options = [ +        ('verbose', 'v', "run verbosely (default)", 1), +        ('quiet', 'q', "run quietly (turns verbosity off)"), +        ('dry-run', 'n', "don't actually do anything"), +        ('help', 'h', "show detailed help message"), +        ('no-user-cfg', None, 'ignore pydistutils.cfg in your home directory'), +    ] + +    # 'common_usage' is a short (2-3 line) string describing the common +    # usage of the setup script. +    common_usage = """\ +Common commands: (see '--help-commands' for more) + +  setup.py build      will build the package underneath 'build/' +  setup.py install    will install the package +""" + +    # options that are not propagated to the commands +    display_options = [ +        ('help-commands', None, "list all available commands"), +        ('name', None, "print package name"), +        ('version', 'V', "print package version"), +        ('fullname', None, "print <package name>-<version>"), +        ('author', None, "print the author's name"), +        ('author-email', None, "print the author's email address"), +        ('maintainer', None, "print the maintainer's name"), +        ('maintainer-email', None, "print the maintainer's email address"), +        ('contact', None, "print the maintainer's name if known, else the author's"), +        ( +            'contact-email', +            None, +            "print the maintainer's email address if known, else the author's", +        ), +        ('url', None, "print the URL for this package"), +        ('license', None, "print the license of the package"), +        ('licence', None, "alias for --license"), +        ('description', None, "print the package description"), +        ('long-description', None, "print the long package description"), +        ('platforms', None, "print the list of platforms"), +        ('classifiers', None, "print the list of classifiers"), +        ('keywords', None, "print the list of keywords"), +        ('provides', None, "print the list of packages/modules provided"), +        ('requires', None, "print the list of packages/modules required"), +        ('obsoletes', None, "print the list of packages/modules made obsolete"), +    ] +    display_option_names = [translate_longopt(x[0]) for x in display_options] + +    # negative options are options that exclude other options +    negative_opt = {'quiet': 'verbose'} + +    # -- Creation/initialization methods ------------------------------- + +    def __init__(self, attrs=None):  # noqa: C901 +        """Construct a new Distribution instance: initialize all the +        attributes of a Distribution, and then use 'attrs' (a dictionary +        mapping attribute names to values) to assign some of those +        attributes their "real" values.  (Any attributes not mentioned in +        'attrs' will be assigned to some null value: 0, None, an empty list +        or dictionary, etc.)  
Most importantly, initialize the +        'command_obj' attribute to the empty dictionary; this will be +        filled in with real command objects by 'parse_command_line()'. +        """ + +        # Default values for our command-line options +        self.verbose = 1 +        self.dry_run = 0 +        self.help = 0 +        for attr in self.display_option_names: +            setattr(self, attr, 0) + +        # Store the distribution meta-data (name, version, author, and so +        # forth) in a separate object -- we're getting to have enough +        # information here (and enough command-line options) that it's +        # worth it.  Also delegate 'get_XXX()' methods to the 'metadata' +        # object in a sneaky and underhanded (but efficient!) way. +        self.metadata = DistributionMetadata() +        for basename in self.metadata._METHOD_BASENAMES: +            method_name = "get_" + basename +            setattr(self, method_name, getattr(self.metadata, method_name)) + +        # 'cmdclass' maps command names to class objects, so we +        # can 1) quickly figure out which class to instantiate when +        # we need to create a new command object, and 2) have a way +        # for the setup script to override command classes +        self.cmdclass = {} + +        # 'command_packages' is a list of packages in which commands +        # are searched for.  The factory for command 'foo' is expected +        # to be named 'foo' in the module 'foo' in one of the packages +        # named here.  This list is searched from the left; an error +        # is raised if no named package provides the command being +        # searched for.  (Always access using get_command_packages().) +        self.command_packages = None + +        # 'script_name' and 'script_args' are usually set to sys.argv[0] +        # and sys.argv[1:], but they can be overridden when the caller is +        # not necessarily a setup script run from the command-line. +        self.script_name = None +        self.script_args = None + +        # 'command_options' is where we store command options between +        # parsing them (from config files, the command-line, etc.) and when +        # they are actually needed -- ie. when the command in question is +        # instantiated.  It is a dictionary of dictionaries of 2-tuples: +        #   command_options = { command_name : { option : (source, value) } } +        self.command_options = {} + +        # 'dist_files' is the list of (command, pyversion, file) that +        # have been created by any dist commands run so far. This is +        # filled regardless of whether the run is dry or not. pyversion +        # gives sysconfig.get_python_version() if the dist file is +        # specific to a Python version, 'any' if it is good for all +        # Python versions on the target platform, and '' for a source +        # file. pyversion should not be used to specify minimum or +        # maximum required Python versions; use the metainfo for that +        # instead. +        self.dist_files = [] + +        # These options are really the business of various commands, rather +        # than of the Distribution itself.  We provide aliases for them in +        # Distribution as a convenience to the developer. 
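The attributes initialized just below are where setup() keyword arguments ultimately land; for a hypothetical call such as

    from distutils.core import setup

    setup(name='example', version='0.1',
          py_modules=['example'],
          scripts=['scripts/example-tool'])

the resulting Distribution has py_modules == ['example'] and scripts == ['scripts/example-tool'].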
+        self.packages = None +        self.package_data = {} +        self.package_dir = None +        self.py_modules = None +        self.libraries = None +        self.headers = None +        self.ext_modules = None +        self.ext_package = None +        self.include_dirs = None +        self.extra_path = None +        self.scripts = None +        self.data_files = None +        self.password = '' + +        # And now initialize bookkeeping stuff that can't be supplied by +        # the caller at all.  'command_obj' maps command names to +        # Command instances -- that's how we enforce that every command +        # class is a singleton. +        self.command_obj = {} + +        # 'have_run' maps command names to boolean values; it keeps track +        # of whether we have actually run a particular command, to make it +        # cheap to "run" a command whenever we think we might need to -- if +        # it's already been done, no need for expensive filesystem +        # operations, we just check the 'have_run' dictionary and carry on. +        # It's only safe to query 'have_run' for a command class that has +        # been instantiated -- a false value will be inserted when the +        # command object is created, and replaced with a true value when +        # the command is successfully run.  Thus it's probably best to use +        # '.get()' rather than a straight lookup. +        self.have_run = {} + +        # Now we'll use the attrs dictionary (ultimately, keyword args from +        # the setup script) to possibly override any or all of these +        # distribution options. + +        if attrs: +            # Pull out the set of command options and work on them +            # specifically.  Note that this order guarantees that aliased +            # command options will override any supplied redundantly +            # through the general options dictionary. +            options = attrs.get('options') +            if options is not None: +                del attrs['options'] +                for (command, cmd_options) in options.items(): +                    opt_dict = self.get_option_dict(command) +                    for (opt, val) in cmd_options.items(): +                        opt_dict[opt] = ("setup script", val) + +            if 'licence' in attrs: +                attrs['license'] = attrs['licence'] +                del attrs['licence'] +                msg = "'licence' distribution option is deprecated; use 'license'" +                if warnings is not None: +                    warnings.warn(msg) +                else: +                    sys.stderr.write(msg + "\n") + +            # Now work on the rest of the attributes.  Any attribute that's +            # not already defined is invalid! +            for (key, val) in attrs.items(): +                if hasattr(self.metadata, "set_" + key): +                    getattr(self.metadata, "set_" + key)(val) +                elif hasattr(self.metadata, key): +                    setattr(self.metadata, key, val) +                elif hasattr(self, key): +                    setattr(self, key, val) +                else: +                    msg = "Unknown distribution option: %s" % repr(key) +                    warnings.warn(msg) + +        # no-user-cfg is handled before other command line args +        # because other args override the config files, and this +        # one is needed before we can load the config files. +        # If attrs['script_args'] wasn't passed, assume false. 
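The 'options' key handled above feeds the per-command option store; a minimal sketch (the command name and value are made up):

    from distutils.dist import Distribution

    dist = Distribution({
        'name': 'demo',
        'options': {'build': {'build_base': 'alt-build'}},
    })
    # Each value is recorded together with its source, as described above.
    print(dist.get_option_dict('build'))
    # {'build_base': ('setup script', 'alt-build')}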
+        # +        # This also make sure we just look at the global options +        self.want_user_cfg = True + +        if self.script_args is not None: +            for arg in self.script_args: +                if not arg.startswith('-'): +                    break +                if arg == '--no-user-cfg': +                    self.want_user_cfg = False +                    break + +        self.finalize_options() + +    def get_option_dict(self, command): +        """Get the option dictionary for a given command.  If that +        command's option dictionary hasn't been created yet, then create it +        and return the new dictionary; otherwise, return the existing +        option dictionary. +        """ +        dict = self.command_options.get(command) +        if dict is None: +            dict = self.command_options[command] = {} +        return dict + +    def dump_option_dicts(self, header=None, commands=None, indent=""): +        from pprint import pformat + +        if commands is None:  # dump all command option dicts +            commands = sorted(self.command_options.keys()) + +        if header is not None: +            self.announce(indent + header) +            indent = indent + "  " + +        if not commands: +            self.announce(indent + "no commands known yet") +            return + +        for cmd_name in commands: +            opt_dict = self.command_options.get(cmd_name) +            if opt_dict is None: +                self.announce(indent + "no option dict for '%s' command" % cmd_name) +            else: +                self.announce(indent + "option dict for '%s' command:" % cmd_name) +                out = pformat(opt_dict) +                for line in out.split('\n'): +                    self.announce(indent + "  " + line) + +    # -- Config file finding/parsing methods --------------------------- + +    def find_config_files(self): +        """Find as many configuration files as should be processed for this +        platform, and return a list of filenames in the order in which they +        should be parsed.  The filenames returned are guaranteed to exist +        (modulo nasty race conditions). + +        There are multiple possible config files: +        - distutils.cfg in the Distutils installation directory (i.e. +          where the top-level Distutils __inst__.py file lives) +        - a file in the user's home directory named .pydistutils.cfg +          on Unix and pydistutils.cfg on Windows/Mac; may be disabled +          with the ``--no-user-cfg`` option +        - setup.cfg in the current directory +        - a file named by an environment variable +        """ +        check_environ() +        files = [str(path) for path in self._gen_paths() if os.path.isfile(path)] + +        if DEBUG: +            self.announce("using config files: %s" % ', '.join(files)) + +        return files + +    def _gen_paths(self): +        # The system-wide Distutils config file +        sys_dir = pathlib.Path(sys.modules['distutils'].__file__).parent +        yield sys_dir / "distutils.cfg" + +        # The per-user config file +        prefix = '.' 
* (os.name == 'posix') +        filename = prefix + 'pydistutils.cfg' +        if self.want_user_cfg: +            yield pathlib.Path('~').expanduser() / filename + +        # All platforms support local setup.cfg +        yield pathlib.Path('setup.cfg') + +        # Additional config indicated in the environment +        with contextlib.suppress(TypeError): +            yield pathlib.Path(os.getenv("DIST_EXTRA_CONFIG")) + +    def parse_config_files(self, filenames=None):  # noqa: C901 +        from configparser import ConfigParser + +        # Ignore install directory options if we have a venv +        if sys.prefix != sys.base_prefix: +            ignore_options = [ +                'install-base', +                'install-platbase', +                'install-lib', +                'install-platlib', +                'install-purelib', +                'install-headers', +                'install-scripts', +                'install-data', +                'prefix', +                'exec-prefix', +                'home', +                'user', +                'root', +            ] +        else: +            ignore_options = [] + +        ignore_options = frozenset(ignore_options) + +        if filenames is None: +            filenames = self.find_config_files() + +        if DEBUG: +            self.announce("Distribution.parse_config_files():") + +        parser = ConfigParser() +        for filename in filenames: +            if DEBUG: +                self.announce("  reading %s" % filename) +            parser.read(filename) +            for section in parser.sections(): +                options = parser.options(section) +                opt_dict = self.get_option_dict(section) + +                for opt in options: +                    if opt != '__name__' and opt not in ignore_options: +                        val = parser.get(section, opt) +                        opt = opt.replace('-', '_') +                        opt_dict[opt] = (filename, val) + +            # Make the ConfigParser forget everything (so we retain +            # the original filenames that options come from) +            parser.__init__() + +        # If there was a "global" section in the config file, use it +        # to set Distribution options. + +        if 'global' in self.command_options: +            for (opt, (src, val)) in self.command_options['global'].items(): +                alias = self.negative_opt.get(opt) +                try: +                    if alias: +                        setattr(self, alias, not strtobool(val)) +                    elif opt in ('verbose', 'dry_run'):  # ugh! +                        setattr(self, opt, strtobool(val)) +                    else: +                        setattr(self, opt, val) +                except ValueError as msg: +                    raise DistutilsOptionError(msg) + +    # -- Command-line parsing methods ---------------------------------- + +    def parse_command_line(self): +        """Parse the setup script's command line, taken from the +        'script_args' instance attribute (which defaults to 'sys.argv[1:]' +        -- see 'setup()' in core.py).  This list is first processed for +        "global options" -- options that set attributes of the Distribution +        instance.  Then, it is alternately scanned for Distutils commands +        and options for that command.  Each new command terminates the +        options for the previous command.  
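A short sketch of the parsing behaviour described here, with invented arguments; each command picks up only the options that follow it, up to the next command name:

    from distutils.dist import Distribution

    dist = Distribution({
        'name': 'demo',
        'py_modules': ['demo'],
        'script_name': 'setup.py',
        'script_args': ['--quiet', 'build', '--build-base=alt',
                        'sdist', '--formats=gztar'],
    })
    dist.parse_command_line()
    print(dist.commands)                  # ['build', 'sdist']
    print(dist.get_option_dict('sdist'))  # {'formats': ('command line', 'gztar')}
    print(dist.verbose)                   # 0, from the global --quiet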
The allowed options for a +        command are determined by the 'user_options' attribute of the +        command class -- thus, we have to be able to load command classes +        in order to parse the command line.  Any error in that 'options' +        attribute raises DistutilsGetoptError; any error on the +        command-line raises DistutilsArgError.  If no Distutils commands +        were found on the command line, raises DistutilsArgError.  Return +        true if command-line was successfully parsed and we should carry +        on with executing commands; false if no errors but we shouldn't +        execute commands (currently, this only happens if user asks for +        help). +        """ +        # +        # We now have enough information to show the Macintosh dialog +        # that allows the user to interactively specify the "command line". +        # +        toplevel_options = self._get_toplevel_options() + +        # We have to parse the command line a bit at a time -- global +        # options, then the first command, then its options, and so on -- +        # because each command will be handled by a different class, and +        # the options that are valid for a particular class aren't known +        # until we have loaded the command class, which doesn't happen +        # until we know what the command is. + +        self.commands = [] +        parser = FancyGetopt(toplevel_options + self.display_options) +        parser.set_negative_aliases(self.negative_opt) +        parser.set_aliases({'licence': 'license'}) +        args = parser.getopt(args=self.script_args, object=self) +        option_order = parser.get_option_order() +        log.set_verbosity(self.verbose) + +        # for display options we return immediately +        if self.handle_display_options(option_order): +            return +        while args: +            args = self._parse_command_opts(parser, args) +            if args is None:  # user asked for help (and got it) +                return + +        # Handle the cases of --help as a "global" option, ie. +        # "setup.py --help" and "setup.py --help command ...".  For the +        # former, we show global options (--verbose, --dry-run, etc.) +        # and display-only options (--name, --version, etc.); for the +        # latter, we omit the display-only options and show help for +        # each command listed on the command line. +        if self.help: +            self._show_help( +                parser, display_options=len(self.commands) == 0, commands=self.commands +            ) +            return + +        # Oops, no commands found -- an end-user error +        if not self.commands: +            raise DistutilsArgError("no commands supplied") + +        # All is well: return true +        return True + +    def _get_toplevel_options(self): +        """Return the non-display options recognized at the top level. + +        This includes options that are recognized *only* at the top +        level as well as options recognized for commands. +        """ +        return self.global_options + [ +            ( +                "command-packages=", +                None, +                "list of packages that provide distutils commands", +            ), +        ] + +    def _parse_command_opts(self, parser, args):  # noqa: C901 +        """Parse the command-line options for a single command. 
+        'parser' must be a FancyGetopt instance; 'args' must be the list +        of arguments, starting with the current command (whose options +        we are about to parse).  Returns a new version of 'args' with +        the next command at the front of the list; will be the empty +        list if there are no more commands on the command line.  Returns +        None if the user asked for help on this command. +        """ +        # late import because of mutual dependence between these modules +        from distutils.cmd import Command + +        # Pull the current command from the head of the command line +        command = args[0] +        if not command_re.match(command): +            raise SystemExit("invalid command name '%s'" % command) +        self.commands.append(command) + +        # Dig up the command class that implements this command, so we +        # 1) know that it's a valid command, and 2) know which options +        # it takes. +        try: +            cmd_class = self.get_command_class(command) +        except DistutilsModuleError as msg: +            raise DistutilsArgError(msg) + +        # Require that the command class be derived from Command -- want +        # to be sure that the basic "command" interface is implemented. +        if not issubclass(cmd_class, Command): +            raise DistutilsClassError( +                "command class %s must subclass Command" % cmd_class +            ) + +        # Also make sure that the command object provides a list of its +        # known options. +        if not ( +            hasattr(cmd_class, 'user_options') +            and isinstance(cmd_class.user_options, list) +        ): +            msg = ( +                "command class %s must provide " +                "'user_options' attribute (a list of tuples)" +            ) +            raise DistutilsClassError(msg % cmd_class) + +        # If the command class has a list of negative alias options, +        # merge it in with the global negative aliases. +        negative_opt = self.negative_opt +        if hasattr(cmd_class, 'negative_opt'): +            negative_opt = negative_opt.copy() +            negative_opt.update(cmd_class.negative_opt) + +        # Check for help_options in command class.  They have a different +        # format (tuple of four) so we need to preprocess them here. +        if hasattr(cmd_class, 'help_options') and isinstance( +            cmd_class.help_options, list +        ): +            help_options = fix_help_options(cmd_class.help_options) +        else: +            help_options = [] + +        # All commands support the global options too, just by adding +        # in 'global_options'. 
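The contract spelled out above (a Command subclass providing a 'user_options' list) is all a setup script needs to plug in its own command; a minimal sketch with a hypothetical 'hello' command:

    from distutils.cmd import Command
    from distutils.dist import Distribution

    class hello(Command):
        description = "print a configurable greeting"
        user_options = [('greeting=', 'g', "text to print")]

        def initialize_options(self):
            self.greeting = 'hello'

        def finalize_options(self):
            pass

        def run(self):
            print(self.greeting)

    dist = Distribution({
        'name': 'demo',
        'cmdclass': {'hello': hello},   # registers the command class
        'script_name': 'setup.py',
        'script_args': ['hello', '--greeting=hi'],
    })
    dist.parse_command_line()
    dist.run_commands()   # prints 'hi'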
+        parser.set_option_table( +            self.global_options + cmd_class.user_options + help_options +        ) +        parser.set_negative_aliases(negative_opt) +        (args, opts) = parser.getopt(args[1:]) +        if hasattr(opts, 'help') and opts.help: +            self._show_help(parser, display_options=0, commands=[cmd_class]) +            return + +        if hasattr(cmd_class, 'help_options') and isinstance( +            cmd_class.help_options, list +        ): +            help_option_found = 0 +            for (help_option, short, desc, func) in cmd_class.help_options: +                if hasattr(opts, parser.get_attr_name(help_option)): +                    help_option_found = 1 +                    if callable(func): +                        func() +                    else: +                        raise DistutilsClassError( +                            "invalid help function %r for help option '%s': " +                            "must be a callable object (function, etc.)" +                            % (func, help_option) +                        ) + +            if help_option_found: +                return + +        # Put the options from the command-line into their official +        # holding pen, the 'command_options' dictionary. +        opt_dict = self.get_option_dict(command) +        for (name, value) in vars(opts).items(): +            opt_dict[name] = ("command line", value) + +        return args + +    def finalize_options(self): +        """Set final values for all the options on the Distribution +        instance, analogous to the .finalize_options() method of Command +        objects. +        """ +        for attr in ('keywords', 'platforms'): +            value = getattr(self.metadata, attr) +            if value is None: +                continue +            if isinstance(value, str): +                value = [elm.strip() for elm in value.split(',')] +                setattr(self.metadata, attr, value) + +    def _show_help(self, parser, global_options=1, display_options=1, commands=[]): +        """Show help for the setup script command-line in the form of +        several lists of command-line options.  'parser' should be a +        FancyGetopt instance; do not expect it to be returned in the +        same state, as its option table will be reset to make it +        generate the correct help text. + +        If 'global_options' is true, lists the global options: +        --verbose, --dry-run, etc.  If 'display_options' is true, lists +        the "display-only" options: --name, --version, etc.  Finally, +        lists per-command help for every command name or command class +        in 'commands'. 
+        """ +        # late import because of mutual dependence between these modules +        from distutils.core import gen_usage +        from distutils.cmd import Command + +        if global_options: +            if display_options: +                options = self._get_toplevel_options() +            else: +                options = self.global_options +            parser.set_option_table(options) +            parser.print_help(self.common_usage + "\nGlobal options:") +            print('') + +        if display_options: +            parser.set_option_table(self.display_options) +            parser.print_help( +                "Information display options (just display " +                + "information, ignore any commands)" +            ) +            print('') + +        for command in self.commands: +            if isinstance(command, type) and issubclass(command, Command): +                klass = command +            else: +                klass = self.get_command_class(command) +            if hasattr(klass, 'help_options') and isinstance(klass.help_options, list): +                parser.set_option_table( +                    klass.user_options + fix_help_options(klass.help_options) +                ) +            else: +                parser.set_option_table(klass.user_options) +            parser.print_help("Options for '%s' command:" % klass.__name__) +            print('') + +        print(gen_usage(self.script_name)) + +    def handle_display_options(self, option_order): +        """If there were any non-global "display-only" options +        (--help-commands or the metadata display options) on the command +        line, display the requested info and return true; else return +        false. +        """ +        from distutils.core import gen_usage + +        # User just wants a list of commands -- we'll print it out and stop +        # processing now (ie. if they ran "setup --help-commands foo bar", +        # we ignore "foo bar"). +        if self.help_commands: +            self.print_commands() +            print('') +            print(gen_usage(self.script_name)) +            return 1 + +        # If user supplied any of the "display metadata" options, then +        # display that metadata in the order in which the user supplied the +        # metadata options. +        any_display_options = 0 +        is_display_option = {} +        for option in self.display_options: +            is_display_option[option[0]] = 1 + +        for (opt, val) in option_order: +            if val and is_display_option.get(opt): +                opt = translate_longopt(opt) +                value = getattr(self.metadata, "get_" + opt)() +                if opt in ['keywords', 'platforms']: +                    print(','.join(value)) +                elif opt in ('classifiers', 'provides', 'requires', 'obsoletes'): +                    print('\n'.join(value)) +                else: +                    print(value) +                any_display_options = 1 + +        return any_display_options + +    def print_command_list(self, commands, header, max_length): +        """Print a subset of the list of all commands -- used by +        'print_commands()'. 
+        """ +        print(header + ":") + +        for cmd in commands: +            klass = self.cmdclass.get(cmd) +            if not klass: +                klass = self.get_command_class(cmd) +            try: +                description = klass.description +            except AttributeError: +                description = "(no description available)" + +            print("  %-*s  %s" % (max_length, cmd, description)) + +    def print_commands(self): +        """Print out a help message listing all available commands with a +        description of each.  The list is divided into "standard commands" +        (listed in distutils.command.__all__) and "extra commands" +        (mentioned in self.cmdclass, but not a standard command).  The +        descriptions come from the command class attribute +        'description'. +        """ +        import distutils.command + +        std_commands = distutils.command.__all__ +        is_std = {} +        for cmd in std_commands: +            is_std[cmd] = 1 + +        extra_commands = [] +        for cmd in self.cmdclass.keys(): +            if not is_std.get(cmd): +                extra_commands.append(cmd) + +        max_length = 0 +        for cmd in std_commands + extra_commands: +            if len(cmd) > max_length: +                max_length = len(cmd) + +        self.print_command_list(std_commands, "Standard commands", max_length) +        if extra_commands: +            print() +            self.print_command_list(extra_commands, "Extra commands", max_length) + +    def get_command_list(self): +        """Get a list of (command, description) tuples. +        The list is divided into "standard commands" (listed in +        distutils.command.__all__) and "extra commands" (mentioned in +        self.cmdclass, but not a standard command).  The descriptions come +        from the command class attribute 'description'. +        """ +        # Currently this is only used on Mac OS, for the Mac-only GUI +        # Distutils interface (by Jack Jansen) +        import distutils.command + +        std_commands = distutils.command.__all__ +        is_std = {} +        for cmd in std_commands: +            is_std[cmd] = 1 + +        extra_commands = [] +        for cmd in self.cmdclass.keys(): +            if not is_std.get(cmd): +                extra_commands.append(cmd) + +        rv = [] +        for cmd in std_commands + extra_commands: +            klass = self.cmdclass.get(cmd) +            if not klass: +                klass = self.get_command_class(cmd) +            try: +                description = klass.description +            except AttributeError: +                description = "(no description available)" +            rv.append((cmd, description)) +        return rv + +    # -- Command class/object methods ---------------------------------- + +    def get_command_packages(self): +        """Return a list of packages from which commands are loaded.""" +        pkgs = self.command_packages +        if not isinstance(pkgs, list): +            if pkgs is None: +                pkgs = '' +            pkgs = [pkg.strip() for pkg in pkgs.split(',') if pkg != ''] +            if "distutils.command" not in pkgs: +                pkgs.insert(0, "distutils.command") +            self.command_packages = pkgs +        return pkgs + +    def get_command_class(self, command): +        """Return the class that implements the Distutils command named by +        'command'.  
First we check the 'cmdclass' dictionary; if the +        command is mentioned there, we fetch the class object from the +        dictionary and return it.  Otherwise we load the command module +        ("distutils.command." + command) and fetch the command class from +        the module.  The loaded class is also stored in 'cmdclass' +        to speed future calls to 'get_command_class()'. + +        Raises DistutilsModuleError if the expected module could not be +        found, or if that module does not define the expected class. +        """ +        klass = self.cmdclass.get(command) +        if klass: +            return klass + +        for pkgname in self.get_command_packages(): +            module_name = "{}.{}".format(pkgname, command) +            klass_name = command + +            try: +                __import__(module_name) +                module = sys.modules[module_name] +            except ImportError: +                continue + +            try: +                klass = getattr(module, klass_name) +            except AttributeError: +                raise DistutilsModuleError( +                    "invalid command '%s' (no class '%s' in module '%s')" +                    % (command, klass_name, module_name) +                ) + +            self.cmdclass[command] = klass +            return klass + +        raise DistutilsModuleError("invalid command '%s'" % command) + +    def get_command_obj(self, command, create=1): +        """Return the command object for 'command'.  Normally this object +        is cached on a previous call to 'get_command_obj()'; if no command +        object for 'command' is in the cache, then we either create and +        return it (if 'create' is true) or return None. +        """ +        cmd_obj = self.command_obj.get(command) +        if not cmd_obj and create: +            if DEBUG: +                self.announce( +                    "Distribution.get_command_obj(): " +                    "creating '%s' command object" % command +                ) + +            klass = self.get_command_class(command) +            cmd_obj = self.command_obj[command] = klass(self) +            self.have_run[command] = 0 + +            # Set any options that were supplied in config files +            # or on the command line.  (NB. support for error +            # reporting is lame here: any errors aren't reported +            # until 'finalize_options()' is called, which means +            # we won't report the source of the error.) +            options = self.command_options.get(command) +            if options: +                self._set_command_options(cmd_obj, options) + +        return cmd_obj + +    def _set_command_options(self, command_obj, option_dict=None):  # noqa: C901 +        """Set the options for 'command_obj' from 'option_dict'.  Basically +        this means copying elements of a dictionary ('option_dict') to +        attributes of an instance ('command'). + +        'command_obj' must be a Command instance.  If 'option_dict' is not +        supplied, uses the standard option dictionary for this command +        (from 'self.command_options'). 
+        """ +        command_name = command_obj.get_command_name() +        if option_dict is None: +            option_dict = self.get_option_dict(command_name) + +        if DEBUG: +            self.announce("  setting options for '%s' command:" % command_name) +        for (option, (source, value)) in option_dict.items(): +            if DEBUG: +                self.announce("    {} = {} (from {})".format(option, value, source)) +            try: +                bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] +            except AttributeError: +                bool_opts = [] +            try: +                neg_opt = command_obj.negative_opt +            except AttributeError: +                neg_opt = {} + +            try: +                is_string = isinstance(value, str) +                if option in neg_opt and is_string: +                    setattr(command_obj, neg_opt[option], not strtobool(value)) +                elif option in bool_opts and is_string: +                    setattr(command_obj, option, strtobool(value)) +                elif hasattr(command_obj, option): +                    setattr(command_obj, option, value) +                else: +                    raise DistutilsOptionError( +                        "error in %s: command '%s' has no such option '%s'" +                        % (source, command_name, option) +                    ) +            except ValueError as msg: +                raise DistutilsOptionError(msg) + +    def reinitialize_command(self, command, reinit_subcommands=0): +        """Reinitializes a command to the state it was in when first +        returned by 'get_command_obj()': ie., initialized but not yet +        finalized.  This provides the opportunity to sneak option +        values in programmatically, overriding or supplementing +        user-supplied values from the config files and command line. +        You'll have to re-finalize the command object (by calling +        'finalize_options()' or 'ensure_finalized()') before using it for +        real. + +        'command' should be a command name (string) or command object.  If +        'reinit_subcommands' is true, also reinitializes the command's +        sub-commands, as declared by the 'sub_commands' class attribute (if +        it has one).  See the "install" command for an example.  Only +        reinitializes the sub-commands that actually matter, ie. those +        whose test predicates return true. + +        Returns the reinitialized command object. +        """ +        from distutils.cmd import Command + +        if not isinstance(command, Command): +            command_name = command +            command = self.get_command_obj(command_name) +        else: +            command_name = command.get_command_name() + +        if not command.finalized: +            return command +        command.initialize_options() +        command.finalized = 0 +        self.have_run[command_name] = 0 +        self._set_command_options(command) + +        if reinit_subcommands: +            for sub in command.get_sub_commands(): +                self.reinitialize_command(sub, reinit_subcommands) + +        return command + +    # -- Methods that operate on the Distribution ---------------------- + +    def announce(self, msg, level=log.INFO): +        log.log(level, msg) + +    def run_commands(self): +        """Run each command that was seen on the setup script command line. 
+        Uses the list of commands found and cache of command objects +        created by 'get_command_obj()'. +        """ +        for cmd in self.commands: +            self.run_command(cmd) + +    # -- Methods that operate on its Commands -------------------------- + +    def run_command(self, command): +        """Do whatever it takes to run a command (including nothing at all, +        if the command has already been run).  Specifically: if we have +        already created and run the command named by 'command', return +        silently without doing anything.  If the command named by 'command' +        doesn't even have a command object yet, create one.  Then invoke +        'run()' on that command object (or an existing one). +        """ +        # Already been here, done that? then return silently. +        if self.have_run.get(command): +            return + +        log.info("running %s", command) +        cmd_obj = self.get_command_obj(command) +        cmd_obj.ensure_finalized() +        cmd_obj.run() +        self.have_run[command] = 1 + +    # -- Distribution query methods ------------------------------------ + +    def has_pure_modules(self): +        return len(self.packages or self.py_modules or []) > 0 + +    def has_ext_modules(self): +        return self.ext_modules and len(self.ext_modules) > 0 + +    def has_c_libraries(self): +        return self.libraries and len(self.libraries) > 0 + +    def has_modules(self): +        return self.has_pure_modules() or self.has_ext_modules() + +    def has_headers(self): +        return self.headers and len(self.headers) > 0 + +    def has_scripts(self): +        return self.scripts and len(self.scripts) > 0 + +    def has_data_files(self): +        return self.data_files and len(self.data_files) > 0 + +    def is_pure(self): +        return ( +            self.has_pure_modules() +            and not self.has_ext_modules() +            and not self.has_c_libraries() +        ) + +    # -- Metadata query methods ---------------------------------------- + +    # If you're looking for 'get_name()', 'get_version()', and so forth, +    # they are defined in a sneaky way: the constructor binds self.get_XXX +    # to self.metadata.get_XXX.  The actual code is in the +    # DistributionMetadata class, below. + + +class DistributionMetadata: +    """Dummy class to hold the distribution meta-data: name, version, +    author, and so forth. 
+    """ + +    _METHOD_BASENAMES = ( +        "name", +        "version", +        "author", +        "author_email", +        "maintainer", +        "maintainer_email", +        "url", +        "license", +        "description", +        "long_description", +        "keywords", +        "platforms", +        "fullname", +        "contact", +        "contact_email", +        "classifiers", +        "download_url", +        # PEP 314 +        "provides", +        "requires", +        "obsoletes", +    ) + +    def __init__(self, path=None): +        if path is not None: +            self.read_pkg_file(open(path)) +        else: +            self.name = None +            self.version = None +            self.author = None +            self.author_email = None +            self.maintainer = None +            self.maintainer_email = None +            self.url = None +            self.license = None +            self.description = None +            self.long_description = None +            self.keywords = None +            self.platforms = None +            self.classifiers = None +            self.download_url = None +            # PEP 314 +            self.provides = None +            self.requires = None +            self.obsoletes = None + +    def read_pkg_file(self, file): +        """Reads the metadata values from a file object.""" +        msg = message_from_file(file) + +        def _read_field(name): +            value = msg[name] +            if value and value != "UNKNOWN": +                return value + +        def _read_list(name): +            values = msg.get_all(name, None) +            if values == []: +                return None +            return values + +        metadata_version = msg['metadata-version'] +        self.name = _read_field('name') +        self.version = _read_field('version') +        self.description = _read_field('summary') +        # we are filling author only. 
+        self.author = _read_field('author') +        self.maintainer = None +        self.author_email = _read_field('author-email') +        self.maintainer_email = None +        self.url = _read_field('home-page') +        self.license = _read_field('license') + +        if 'download-url' in msg: +            self.download_url = _read_field('download-url') +        else: +            self.download_url = None + +        self.long_description = _read_field('description') +        self.description = _read_field('summary') + +        if 'keywords' in msg: +            self.keywords = _read_field('keywords').split(',') + +        self.platforms = _read_list('platform') +        self.classifiers = _read_list('classifier') + +        # PEP 314 - these fields only exist in 1.1 +        if metadata_version == '1.1': +            self.requires = _read_list('requires') +            self.provides = _read_list('provides') +            self.obsoletes = _read_list('obsoletes') +        else: +            self.requires = None +            self.provides = None +            self.obsoletes = None + +    def write_pkg_info(self, base_dir): +        """Write the PKG-INFO file into the release tree.""" +        with open( +            os.path.join(base_dir, 'PKG-INFO'), 'w', encoding='UTF-8' +        ) as pkg_info: +            self.write_pkg_file(pkg_info) + +    def write_pkg_file(self, file): +        """Write the PKG-INFO format data to a file object.""" +        version = '1.0' +        if ( +            self.provides +            or self.requires +            or self.obsoletes +            or self.classifiers +            or self.download_url +        ): +            version = '1.1' + +        # required fields +        file.write('Metadata-Version: %s\n' % version) +        file.write('Name: %s\n' % self.get_name()) +        file.write('Version: %s\n' % self.get_version()) + +        def maybe_write(header, val): +            if val: +                file.write(f"{header}: {val}\n") + +        # optional fields +        maybe_write("Summary", self.get_description()) +        maybe_write("Home-page", self.get_url()) +        maybe_write("Author", self.get_contact()) +        maybe_write("Author-email", self.get_contact_email()) +        maybe_write("License", self.get_license()) +        maybe_write("Download-URL", self.download_url) +        maybe_write("Description", rfc822_escape(self.get_long_description() or "")) +        maybe_write("Keywords", ",".join(self.get_keywords())) + +        self._write_list(file, 'Platform', self.get_platforms()) +        self._write_list(file, 'Classifier', self.get_classifiers()) + +        # PEP 314 +        self._write_list(file, 'Requires', self.get_requires()) +        self._write_list(file, 'Provides', self.get_provides()) +        self._write_list(file, 'Obsoletes', self.get_obsoletes()) + +    def _write_list(self, file, name, values): +        values = values or [] +        for value in values: +            file.write('{}: {}\n'.format(name, value)) + +    # -- Metadata query methods ---------------------------------------- + +    def get_name(self): +        return self.name or "UNKNOWN" + +    def get_version(self): +        return self.version or "0.0.0" + +    def get_fullname(self): +        return "{}-{}".format(self.get_name(), self.get_version()) + +    def get_author(self): +        return self.author + +    def get_author_email(self): +        return self.author_email + +    def get_maintainer(self): +        return self.maintainer + +    def 
get_maintainer_email(self): +        return self.maintainer_email + +    def get_contact(self): +        return self.maintainer or self.author + +    def get_contact_email(self): +        return self.maintainer_email or self.author_email + +    def get_url(self): +        return self.url + +    def get_license(self): +        return self.license + +    get_licence = get_license + +    def get_description(self): +        return self.description + +    def get_long_description(self): +        return self.long_description + +    def get_keywords(self): +        return self.keywords or [] + +    def set_keywords(self, value): +        self.keywords = _ensure_list(value, 'keywords') + +    def get_platforms(self): +        return self.platforms + +    def set_platforms(self, value): +        self.platforms = _ensure_list(value, 'platforms') + +    def get_classifiers(self): +        return self.classifiers or [] + +    def set_classifiers(self, value): +        self.classifiers = _ensure_list(value, 'classifiers') + +    def get_download_url(self): +        return self.download_url + +    # PEP 314 +    def get_requires(self): +        return self.requires or [] + +    def set_requires(self, value): +        import distutils.versionpredicate + +        for v in value: +            distutils.versionpredicate.VersionPredicate(v) +        self.requires = list(value) + +    def get_provides(self): +        return self.provides or [] + +    def set_provides(self, value): +        value = [v.strip() for v in value] +        for v in value: +            import distutils.versionpredicate + +            distutils.versionpredicate.split_provision(v) +        self.provides = value + +    def get_obsoletes(self): +        return self.obsoletes or [] + +    def set_obsoletes(self, value): +        import distutils.versionpredicate + +        for v in value: +            distutils.versionpredicate.VersionPredicate(v) +        self.obsoletes = list(value) + + +def fix_help_options(options): +    """Convert a 4-tuple 'help_options' list as found in various command +    classes to the 3-tuple form required by FancyGetopt. +    """ +    new_options = [] +    for help_tuple in options: +        new_options.append(help_tuple[0:3]) +    return new_options diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/errors.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/errors.py new file mode 100644 index 0000000..626254c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/errors.py @@ -0,0 +1,127 @@ +"""distutils.errors + +Provides exceptions used by the Distutils modules.  Note that Distutils +modules may raise standard exceptions; in particular, SystemExit is +usually raised for errors that are obviously the end-user's fault +(eg. bad command-line arguments). + +This module is safe to use in "from ... import *" mode; it only exports +symbols whose names start with "Distutils" and end with "Error".""" + + +class DistutilsError(Exception): +    """The root of all Distutils evil.""" + +    pass + + +class DistutilsModuleError(DistutilsError): +    """Unable to load an expected module, or to find an expected class +    within some module (in particular, command modules and classes).""" + +    pass + + +class DistutilsClassError(DistutilsError): +    """Some command class (or possibly distribution class, if anyone +    feels a need to subclass Distribution) is found not to be holding +    up its end of the bargain, ie. 
implementing some part of the +    "command "interface.""" + +    pass + + +class DistutilsGetoptError(DistutilsError): +    """The option table provided to 'fancy_getopt()' is bogus.""" + +    pass + + +class DistutilsArgError(DistutilsError): +    """Raised by fancy_getopt in response to getopt.error -- ie. an +    error in the command line usage.""" + +    pass + + +class DistutilsFileError(DistutilsError): +    """Any problems in the filesystem: expected file not found, etc. +    Typically this is for problems that we detect before OSError +    could be raised.""" + +    pass + + +class DistutilsOptionError(DistutilsError): +    """Syntactic/semantic errors in command options, such as use of +    mutually conflicting options, or inconsistent options, +    badly-spelled values, etc.  No distinction is made between option +    values originating in the setup script, the command line, config +    files, or what-have-you -- but if we *know* something originated in +    the setup script, we'll raise DistutilsSetupError instead.""" + +    pass + + +class DistutilsSetupError(DistutilsError): +    """For errors that can be definitely blamed on the setup script, +    such as invalid keyword arguments to 'setup()'.""" + +    pass + + +class DistutilsPlatformError(DistutilsError): +    """We don't know how to do something on the current platform (but +    we do know how to do it on some platform) -- eg. trying to compile +    C files on a platform not supported by a CCompiler subclass.""" + +    pass + + +class DistutilsExecError(DistutilsError): +    """Any problems executing an external program (such as the C +    compiler, when compiling C files).""" + +    pass + + +class DistutilsInternalError(DistutilsError): +    """Internal inconsistencies or impossibilities (obviously, this +    should never be seen if the code is working!).""" + +    pass + + +class DistutilsTemplateError(DistutilsError): +    """Syntax error in a file list template.""" + + +class DistutilsByteCompileError(DistutilsError): +    """Byte compile error.""" + + +# Exception classes used by the CCompiler implementation classes +class CCompilerError(Exception): +    """Some compile/link operation failed.""" + + +class PreprocessError(CCompilerError): +    """Failure to preprocess one or more C/C++ files.""" + + +class CompileError(CCompilerError): +    """Failure to compile one or more C/C++ source files.""" + + +class LibError(CCompilerError): +    """Failure to create a static library from one or more C/C++ object +    files.""" + + +class LinkError(CCompilerError): +    """Failure to link one or more C/C++ object files into an executable +    or shared library file.""" + + +class UnknownFileError(CCompilerError): +    """Attempt to process an unknown file type.""" diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/extension.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/extension.py new file mode 100644 index 0000000..6b8575d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/extension.py @@ -0,0 +1,248 @@ +"""distutils.extension + +Provides the Extension class, used to describe C/C++ extension +modules in setup scripts.""" + +import os +import warnings + +# This class is really only used by the "build_ext" command, so it might +# make sense to put it in distutils.command.build_ext.  
However, that +# module is already big enough, and I want to make this class a bit more +# complex to simplify some common cases ("foo" module in "foo.c") and do +# better error-checking ("foo.c" actually exists). +# +# Also, putting this in build_ext.py means every setup script would have to +# import that large-ish module (indirectly, through distutils.core) in +# order to do anything. + + +class Extension: +    """Just a collection of attributes that describes an extension +    module and everything needed to build it (hopefully in a portable +    way, but there are hooks that let you be as unportable as you need). + +    Instance attributes: +      name : string +        the full name of the extension, including any packages -- ie. +        *not* a filename or pathname, but Python dotted name +      sources : [string] +        list of source filenames, relative to the distribution root +        (where the setup script lives), in Unix form (slash-separated) +        for portability.  Source files may be C, C++, SWIG (.i), +        platform-specific resource files, or whatever else is recognized +        by the "build_ext" command as source for a Python extension. +      include_dirs : [string] +        list of directories to search for C/C++ header files (in Unix +        form for portability) +      define_macros : [(name : string, value : string|None)] +        list of macros to define; each macro is defined using a 2-tuple, +        where 'value' is either the string to define it to or None to +        define it without a particular value (equivalent of "#define +        FOO" in source or -DFOO on Unix C compiler command line) +      undef_macros : [string] +        list of macros to undefine explicitly +      library_dirs : [string] +        list of directories to search for C/C++ libraries at link time +      libraries : [string] +        list of library names (not filenames or paths) to link against +      runtime_library_dirs : [string] +        list of directories to search for C/C++ libraries at run time +        (for shared extensions, this is when the extension is loaded) +      extra_objects : [string] +        list of extra files to link with (eg. object files not implied +        by 'sources', static library that must be explicitly specified, +        binary resource files, etc.) +      extra_compile_args : [string] +        any extra platform- and compiler-specific information to use +        when compiling the source files in 'sources'.  For platforms and +        compilers where "command line" makes sense, this is typically a +        list of command-line arguments, but for other platforms it could +        be anything. +      extra_link_args : [string] +        any extra platform- and compiler-specific information to use +        when linking object files together to create the extension (or +        to create a new static Python interpreter).  Similar +        interpretation as for 'extra_compile_args'. +      export_symbols : [string] +        list of symbols to be exported from a shared extension.  Not +        used on all platforms, and not generally necessary for Python +        extensions, which typically export exactly one symbol: "init" + +        extension_name. +      swig_opts : [string] +        any extra options to pass to SWIG if a source file has the .i +        extension. +      depends : [string] +        list of files that the extension depends on +      language : string +        extension language (i.e. "c", "c++", "objc"). 
Will be detected +        from the source extensions if not provided. +      optional : boolean +        specifies that a build failure in the extension should not abort the +        build process, but simply not install the failing extension. +    """ + +    # When adding arguments to this constructor, be sure to update +    # setup_keywords in core.py. +    def __init__( +        self, +        name, +        sources, +        include_dirs=None, +        define_macros=None, +        undef_macros=None, +        library_dirs=None, +        libraries=None, +        runtime_library_dirs=None, +        extra_objects=None, +        extra_compile_args=None, +        extra_link_args=None, +        export_symbols=None, +        swig_opts=None, +        depends=None, +        language=None, +        optional=None, +        **kw  # To catch unknown keywords +    ): +        if not isinstance(name, str): +            raise AssertionError("'name' must be a string") +        if not (isinstance(sources, list) and all(isinstance(v, str) for v in sources)): +            raise AssertionError("'sources' must be a list of strings") + +        self.name = name +        self.sources = sources +        self.include_dirs = include_dirs or [] +        self.define_macros = define_macros or [] +        self.undef_macros = undef_macros or [] +        self.library_dirs = library_dirs or [] +        self.libraries = libraries or [] +        self.runtime_library_dirs = runtime_library_dirs or [] +        self.extra_objects = extra_objects or [] +        self.extra_compile_args = extra_compile_args or [] +        self.extra_link_args = extra_link_args or [] +        self.export_symbols = export_symbols or [] +        self.swig_opts = swig_opts or [] +        self.depends = depends or [] +        self.language = language +        self.optional = optional + +        # If there are unknown keyword options, warn about them +        if len(kw) > 0: +            options = [repr(option) for option in kw] +            options = ', '.join(sorted(options)) +            msg = "Unknown Extension options: %s" % options +            warnings.warn(msg) + +    def __repr__(self): +        return '<{}.{}({!r}) at {:#x}>'.format( +            self.__class__.__module__, +            self.__class__.__qualname__, +            self.name, +            id(self), +        ) + + +def read_setup_file(filename):  # noqa: C901 +    """Reads a Setup file and returns Extension instances.""" +    from distutils.sysconfig import parse_makefile, expand_makefile_vars, _variable_rx + +    from distutils.text_file import TextFile +    from distutils.util import split_quoted + +    # First pass over the file to gather "VAR = VALUE" assignments. +    vars = parse_makefile(filename) + +    # Second pass to gobble up the real content: lines of the form +    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...] 
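For the line format sketched just above, a throwaway example (the file contents and names are invented):

    import os, tempfile
    from distutils.extension import read_setup_file

    with tempfile.TemporaryDirectory() as tmp:
        setup_path = os.path.join(tmp, 'Setup')
        with open(setup_path, 'w') as f:
            f.write("demo demomodule.c -Iinclude -DDEBUG -lm\n")
        (ext,) = read_setup_file(setup_path)

    print(ext.name)           # 'demo'
    print(ext.sources)        # ['demomodule.c']
    print(ext.include_dirs)   # ['include']
    print(ext.define_macros)  # [('DEBUG', None)]
    print(ext.libraries)      # ['m']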
+    file = TextFile( +        filename, +        strip_comments=1, +        skip_blanks=1, +        join_lines=1, +        lstrip_ws=1, +        rstrip_ws=1, +    ) +    try: +        extensions = [] + +        while True: +            line = file.readline() +            if line is None:  # eof +                break +            if _variable_rx.match(line):  # VAR=VALUE, handled in first pass +                continue + +            if line[0] == line[-1] == "*": +                file.warn("'%s' lines not handled yet" % line) +                continue + +            line = expand_makefile_vars(line, vars) +            words = split_quoted(line) + +            # NB. this parses a slightly different syntax than the old +            # makesetup script: here, there must be exactly one extension per +            # line, and it must be the first word of the line.  I have no idea +            # why the old syntax supported multiple extensions per line, as +            # they all wind up being the same. + +            module = words[0] +            ext = Extension(module, []) +            append_next_word = None + +            for word in words[1:]: +                if append_next_word is not None: +                    append_next_word.append(word) +                    append_next_word = None +                    continue + +                suffix = os.path.splitext(word)[1] +                switch = word[0:2] +                value = word[2:] + +                if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"): +                    # hmm, should we do something about C vs. C++ sources? +                    # or leave it up to the CCompiler implementation to +                    # worry about? +                    ext.sources.append(word) +                elif switch == "-I": +                    ext.include_dirs.append(value) +                elif switch == "-D": +                    equals = value.find("=") +                    if equals == -1:  # bare "-DFOO" -- no value +                        ext.define_macros.append((value, None)) +                    else:  # "-DFOO=blah" +                        ext.define_macros.append((value[0:equals], value[equals + 2 :])) +                elif switch == "-U": +                    ext.undef_macros.append(value) +                elif switch == "-C":  # only here 'cause makesetup has it! +                    ext.extra_compile_args.append(word) +                elif switch == "-l": +                    ext.libraries.append(value) +                elif switch == "-L": +                    ext.library_dirs.append(value) +                elif switch == "-R": +                    ext.runtime_library_dirs.append(value) +                elif word == "-rpath": +                    append_next_word = ext.runtime_library_dirs +                elif word == "-Xlinker": +                    append_next_word = ext.extra_link_args +                elif word == "-Xcompiler": +                    append_next_word = ext.extra_compile_args +                elif switch == "-u": +                    ext.extra_link_args.append(word) +                    if not value: +                        append_next_word = ext.extra_link_args +                elif suffix in (".a", ".so", ".sl", ".o", ".dylib"): +                    # NB. a really faithful emulation of makesetup would +                    # append a .o file to extra_objects only if it +                    # had a slash in it; otherwise, it would s/.o/.c/ +                    # and append it to sources.  
Hmmmm. +                    ext.extra_objects.append(word) +                else: +                    file.warn("unrecognized argument '%s'" % word) + +            extensions.append(ext) +    finally: +        file.close() + +    return extensions diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/fancy_getopt.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/fancy_getopt.py new file mode 100644 index 0000000..830f047 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/fancy_getopt.py @@ -0,0 +1,470 @@ +"""distutils.fancy_getopt + +Wrapper around the standard getopt module that provides the following +additional features: +  * short and long options are tied together +  * options have help strings, so fancy_getopt could potentially +    create a complete usage summary +  * options set attributes of a passed-in object +""" + +import sys +import string +import re +import getopt +from distutils.errors import DistutilsGetoptError, DistutilsArgError + +# Much like command_re in distutils.core, this is close to but not quite +# the same as a Python NAME -- except, in the spirit of most GNU +# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!) +# The similarities to NAME are again not a coincidence... +longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)' +longopt_re = re.compile(r'^%s$' % longopt_pat) + +# For recognizing "negative alias" options, eg. "quiet=!verbose" +neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat)) + +# This is used to translate long options to legitimate Python identifiers +# (for use as attributes of some object). +longopt_xlate = str.maketrans('-', '_') + + +class FancyGetopt: +    """Wrapper around the standard 'getopt()' module that provides some +    handy extra functionality: +      * short and long options are tied together +      * options have help strings, and help text can be assembled +        from them +      * options set attributes of a passed-in object +      * boolean options can have "negative aliases" -- eg. if +        --quiet is the "negative alias" of --verbose, then "--quiet" +        on the command line sets 'verbose' to false +    """ + +    def __init__(self, option_table=None): +        # The option table is (currently) a list of tuples.  The +        # tuples may have 3 or four values: +        #   (long_option, short_option, help_string [, repeatable]) +        # if an option takes an argument, its long_option should have '=' +        # appended; short_option should just be a single character, no ':' +        # in any case.  If a long_option doesn't have a corresponding +        # short_option, short_option should be None.  All option tuples +        # must have long options. +        self.option_table = option_table + +        # 'option_index' maps long option names to entries in the option +        # table (ie. those 3-tuples). +        self.option_index = {} +        if self.option_table: +            self._build_index() + +        # 'alias' records (duh) alias options; {'foo': 'bar'} means +        # --foo is an alias for --bar +        self.alias = {} + +        # 'negative_alias' keeps track of options that are the boolean +        # opposite of some other option +        self.negative_alias = {} + +        # These keep track of the information in the option table.  
We +        # don't actually populate these structures until we're ready to +        # parse the command-line, since the 'option_table' passed in here +        # isn't necessarily the final word. +        self.short_opts = [] +        self.long_opts = [] +        self.short2long = {} +        self.attr_name = {} +        self.takes_arg = {} + +        # And 'option_order' is filled up in 'getopt()'; it records the +        # original order of options (and their values) on the command-line, +        # but expands short options, converts aliases, etc. +        self.option_order = [] + +    def _build_index(self): +        self.option_index.clear() +        for option in self.option_table: +            self.option_index[option[0]] = option + +    def set_option_table(self, option_table): +        self.option_table = option_table +        self._build_index() + +    def add_option(self, long_option, short_option=None, help_string=None): +        if long_option in self.option_index: +            raise DistutilsGetoptError( +                "option conflict: already an option '%s'" % long_option +            ) +        else: +            option = (long_option, short_option, help_string) +            self.option_table.append(option) +            self.option_index[long_option] = option + +    def has_option(self, long_option): +        """Return true if the option table for this parser has an +        option with long name 'long_option'.""" +        return long_option in self.option_index + +    def get_attr_name(self, long_option): +        """Translate long option name 'long_option' to the form it +        has as an attribute of some object: ie., translate hyphens +        to underscores.""" +        return long_option.translate(longopt_xlate) + +    def _check_alias_dict(self, aliases, what): +        assert isinstance(aliases, dict) +        for (alias, opt) in aliases.items(): +            if alias not in self.option_index: +                raise DistutilsGetoptError( +                    ("invalid %s '%s': " "option '%s' not defined") +                    % (what, alias, alias) +                ) +            if opt not in self.option_index: +                raise DistutilsGetoptError( +                    ("invalid %s '%s': " "aliased option '%s' not defined") +                    % (what, alias, opt) +                ) + +    def set_aliases(self, alias): +        """Set the aliases for this option parser.""" +        self._check_alias_dict(alias, "alias") +        self.alias = alias + +    def set_negative_aliases(self, negative_alias): +        """Set the negative aliases for this option parser. +        'negative_alias' should be a dictionary mapping option names to +        option names, both the key and value must already be defined +        in the option table.""" +        self._check_alias_dict(negative_alias, "negative alias") +        self.negative_alias = negative_alias + +    def _grok_option_table(self):  # noqa: C901 +        """Populate the various data structures that keep tabs on the +        option table.  Called by 'getopt()' before it can do anything +        worthwhile. 
+        """ +        self.long_opts = [] +        self.short_opts = [] +        self.short2long.clear() +        self.repeat = {} + +        for option in self.option_table: +            if len(option) == 3: +                long, short, help = option +                repeat = 0 +            elif len(option) == 4: +                long, short, help, repeat = option +            else: +                # the option table is part of the code, so simply +                # assert that it is correct +                raise ValueError("invalid option tuple: {!r}".format(option)) + +            # Type- and value-check the option names +            if not isinstance(long, str) or len(long) < 2: +                raise DistutilsGetoptError( +                    ("invalid long option '%s': " "must be a string of length >= 2") +                    % long +                ) + +            if not ((short is None) or (isinstance(short, str) and len(short) == 1)): +                raise DistutilsGetoptError( +                    "invalid short option '%s': " +                    "must a single character or None" % short +                ) + +            self.repeat[long] = repeat +            self.long_opts.append(long) + +            if long[-1] == '=':  # option takes an argument? +                if short: +                    short = short + ':' +                long = long[0:-1] +                self.takes_arg[long] = 1 +            else: +                # Is option is a "negative alias" for some other option (eg. +                # "quiet" == "!verbose")? +                alias_to = self.negative_alias.get(long) +                if alias_to is not None: +                    if self.takes_arg[alias_to]: +                        raise DistutilsGetoptError( +                            "invalid negative alias '%s': " +                            "aliased option '%s' takes a value" % (long, alias_to) +                        ) + +                    self.long_opts[-1] = long  # XXX redundant?! +                self.takes_arg[long] = 0 + +            # If this is an alias option, make sure its "takes arg" flag is +            # the same as the option it's aliased to. +            alias_to = self.alias.get(long) +            if alias_to is not None: +                if self.takes_arg[long] != self.takes_arg[alias_to]: +                    raise DistutilsGetoptError( +                        "invalid alias '%s': inconsistent with " +                        "aliased option '%s' (one of them takes a value, " +                        "the other doesn't" % (long, alias_to) +                    ) + +            # Now enforce some bondage on the long option name, so we can +            # later translate it to an attribute name on some object.  Have +            # to do this a bit late to make sure we've removed any trailing +            # '='. +            if not longopt_re.match(long): +                raise DistutilsGetoptError( +                    "invalid long option name '%s' " +                    "(must be letters, numbers, hyphens only" % long +                ) + +            self.attr_name[long] = self.get_attr_name(long) +            if short: +                self.short_opts.append(short) +                self.short2long[short[0]] = long + +    def getopt(self, args=None, object=None):  # noqa: C901 +        """Parse command-line options in args. Store as attributes on object. + +        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  
If +        'object' is None or not supplied, creates a new OptionDummy +        object, stores option values there, and returns a tuple (args, +        object).  If 'object' is supplied, it is modified in place and +        'getopt()' just returns 'args'; in both cases, the returned +        'args' is a modified copy of the passed-in 'args' list, which +        is left untouched. +        """ +        if args is None: +            args = sys.argv[1:] +        if object is None: +            object = OptionDummy() +            created_object = True +        else: +            created_object = False + +        self._grok_option_table() + +        short_opts = ' '.join(self.short_opts) +        try: +            opts, args = getopt.getopt(args, short_opts, self.long_opts) +        except getopt.error as msg: +            raise DistutilsArgError(msg) + +        for opt, val in opts: +            if len(opt) == 2 and opt[0] == '-':  # it's a short option +                opt = self.short2long[opt[1]] +            else: +                assert len(opt) > 2 and opt[:2] == '--' +                opt = opt[2:] + +            alias = self.alias.get(opt) +            if alias: +                opt = alias + +            if not self.takes_arg[opt]:  # boolean option? +                assert val == '', "boolean option can't have value" +                alias = self.negative_alias.get(opt) +                if alias: +                    opt = alias +                    val = 0 +                else: +                    val = 1 + +            attr = self.attr_name[opt] +            # The only repeating option at the moment is 'verbose'. +            # It has a negative option -q quiet, which should set verbose = 0. +            if val and self.repeat.get(attr) is not None: +                val = getattr(object, attr, 0) + 1 +            setattr(object, attr, val) +            self.option_order.append((opt, val)) + +        # for opts +        if created_object: +            return args, object +        else: +            return args + +    def get_option_order(self): +        """Returns the list of (option, value) tuples processed by the +        previous run of 'getopt()'.  Raises RuntimeError if +        'getopt()' hasn't been called yet. +        """ +        if self.option_order is None: +            raise RuntimeError("'getopt()' hasn't been called yet") +        else: +            return self.option_order + +    def generate_help(self, header=None):  # noqa: C901 +        """Generate help text (a list of strings, one per suggested line of +        output) from the option table for this FancyGetopt object. +        """ +        # Blithely assume the option table is good: probably wouldn't call +        # 'generate_help()' unless you've already called 'getopt()'. 
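As a hedged sketch of how a FancyGetopt instance is typically driven (the option names and command-line arguments are invented, not taken from any distutils command):

    # options = [('verbose', 'v', "run verbosely", 1),
    #            ('quiet', 'q', "run quietly"),
    #            ('output=', 'o', "write results to FILE")]
    # parser = FancyGetopt(options)
    # parser.set_negative_aliases({'quiet': 'verbose'})
    # args, opts = parser.getopt(['-v', '--output=out.txt', 'target'])
    # afterwards: args == ['target'], opts.verbose == 1, opts.output == 'out.txt'
    # print('\n'.join(parser.generate_help("Options for the example:")))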
+ +        # First pass: determine maximum length of long option names +        max_opt = 0 +        for option in self.option_table: +            long = option[0] +            short = option[1] +            ell = len(long) +            if long[-1] == '=': +                ell = ell - 1 +            if short is not None: +                ell = ell + 5  # " (-x)" where short == 'x' +            if ell > max_opt: +                max_opt = ell + +        opt_width = max_opt + 2 + 2 + 2  # room for indent + dashes + gutter + +        # Typical help block looks like this: +        #   --foo       controls foonabulation +        # Help block for longest option looks like this: +        #   --flimflam  set the flim-flam level +        # and with wrapped text: +        #   --flimflam  set the flim-flam level (must be between +        #               0 and 100, except on Tuesdays) +        # Options with short names will have the short name shown (but +        # it doesn't contribute to max_opt): +        #   --foo (-f)  controls foonabulation +        # If adding the short option would make the left column too wide, +        # we push the explanation off to the next line +        #   --flimflam (-l) +        #               set the flim-flam level +        # Important parameters: +        #   - 2 spaces before option block start lines +        #   - 2 dashes for each long option name +        #   - min. 2 spaces between option and explanation (gutter) +        #   - 5 characters (incl. space) for short option name + +        # Now generate lines of help text.  (If 80 columns were good enough +        # for Jesus, then 78 columns are good enough for me!) +        line_width = 78 +        text_width = line_width - opt_width +        big_indent = ' ' * opt_width +        if header: +            lines = [header] +        else: +            lines = ['Option summary:'] + +        for option in self.option_table: +            long, short, help = option[:3] +            text = wrap_text(help, text_width) +            if long[-1] == '=': +                long = long[0:-1] + +            # Case 1: no short option at all (makes life easy) +            if short is None: +                if text: +                    lines.append("  --%-*s  %s" % (max_opt, long, text[0])) +                else: +                    lines.append("  --%-*s  " % (max_opt, long)) + +            # Case 2: we have a short option, so we have to include it +            # just after the long option +            else: +                opt_names = "{} (-{})".format(long, short) +                if text: +                    lines.append("  --%-*s  %s" % (max_opt, opt_names, text[0])) +                else: +                    lines.append("  --%-*s" % opt_names) + +            for ell in text[1:]: +                lines.append(big_indent + ell) +        return lines + +    def print_help(self, header=None, file=None): +        if file is None: +            file = sys.stdout +        for line in self.generate_help(header): +            file.write(line + "\n") + + +def fancy_getopt(options, negative_opt, object, args): +    parser = FancyGetopt(options) +    parser.set_negative_aliases(negative_opt) +    return parser.getopt(args, object) + + +WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace} + + +def wrap_text(text, width): +    """wrap_text(text : string, width : int) -> [string] + +    Split 'text' into multiple lines of no more than 'width' characters +    each, and return the list of strings that results. 
+    """ +    if text is None: +        return [] +    if len(text) <= width: +        return [text] + +    text = text.expandtabs() +    text = text.translate(WS_TRANS) +    chunks = re.split(r'( +|-+)', text) +    chunks = [ch for ch in chunks if ch]  # ' - ' results in empty strings +    lines = [] + +    while chunks: +        cur_line = []  # list of chunks (to-be-joined) +        cur_len = 0  # length of current line + +        while chunks: +            ell = len(chunks[0]) +            if cur_len + ell <= width:  # can squeeze (at least) this chunk in +                cur_line.append(chunks[0]) +                del chunks[0] +                cur_len = cur_len + ell +            else:  # this line is full +                # drop last chunk if all space +                if cur_line and cur_line[-1][0] == ' ': +                    del cur_line[-1] +                break + +        if chunks:  # any chunks left to process? +            # if the current line is still empty, then we had a single +            # chunk that's too big too fit on a line -- so we break +            # down and break it up at the line width +            if cur_len == 0: +                cur_line.append(chunks[0][0:width]) +                chunks[0] = chunks[0][width:] + +            # all-whitespace chunks at the end of a line can be discarded +            # (and we know from the re.split above that if a chunk has +            # *any* whitespace, it is *all* whitespace) +            if chunks[0][0] == ' ': +                del chunks[0] + +        # and store this line in the list-of-all-lines -- as a single +        # string, of course! +        lines.append(''.join(cur_line)) + +    return lines + + +def translate_longopt(opt): +    """Convert a long option name to a valid Python identifier by +    changing "-" to "_". +    """ +    return opt.translate(longopt_xlate) + + +class OptionDummy: +    """Dummy class just used as a place to hold command-line option +    values as instance attributes.""" + +    def __init__(self, options=[]): +        """Create a new OptionDummy instance.  The attributes listed in +        'options' will be initialized to None.""" +        for opt in options: +            setattr(self, opt, None) + + +if __name__ == "__main__": +    text = """\ +Tra-la-la, supercalifragilisticexpialidocious. +How *do* you spell that odd word, anyways? +(Someone ask Mary -- she'll know [or she'll +say, "How should I know?"].)""" + +    for w in (10, 20, 30, 40): +        print("width: %d" % w) +        print("\n".join(wrap_text(text, w))) +        print() diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/file_util.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/file_util.py new file mode 100644 index 0000000..1f1e444 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/file_util.py @@ -0,0 +1,249 @@ +"""distutils.file_util + +Utility functions for operating on single files. +""" + +import os +from distutils.errors import DistutilsFileError +from distutils import log + +# for generating verbose output in 'copy_file()' +_copy_action = {None: 'copying', 'hard': 'hard linking', 'sym': 'symbolically linking'} + + +def _copy_file_contents(src, dst, buffer_size=16 * 1024):  # noqa: C901 +    """Copy the file 'src' to 'dst'; both must be filenames.  Any error +    opening either file, reading from 'src', or writing to 'dst', raises +    DistutilsFileError.  Data is read/written in chunks of 'buffer_size' +    bytes (default 16k).  
No attempt is made to handle anything apart from +    regular files. +    """ +    # Stolen from shutil module in the standard library, but with +    # custom error-handling added. +    fsrc = None +    fdst = None +    try: +        try: +            fsrc = open(src, 'rb') +        except OSError as e: +            raise DistutilsFileError("could not open '{}': {}".format(src, e.strerror)) + +        if os.path.exists(dst): +            try: +                os.unlink(dst) +            except OSError as e: +                raise DistutilsFileError( +                    "could not delete '{}': {}".format(dst, e.strerror) +                ) + +        try: +            fdst = open(dst, 'wb') +        except OSError as e: +            raise DistutilsFileError( +                "could not create '{}': {}".format(dst, e.strerror) +            ) + +        while True: +            try: +                buf = fsrc.read(buffer_size) +            except OSError as e: +                raise DistutilsFileError( +                    "could not read from '{}': {}".format(src, e.strerror) +                ) + +            if not buf: +                break + +            try: +                fdst.write(buf) +            except OSError as e: +                raise DistutilsFileError( +                    "could not write to '{}': {}".format(dst, e.strerror) +                ) +    finally: +        if fdst: +            fdst.close() +        if fsrc: +            fsrc.close() + + +def copy_file(  # noqa: C901 +    src, +    dst, +    preserve_mode=1, +    preserve_times=1, +    update=0, +    link=None, +    verbose=1, +    dry_run=0, +): +    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is +    copied there with the same name; otherwise, it must be a filename.  (If +    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode' +    is true (the default), the file's mode (type and permission bits, or +    whatever is analogous on the current platform) is copied.  If +    'preserve_times' is true (the default), the last-modified and +    last-access times are copied as well.  If 'update' is true, 'src' will +    only be copied if 'dst' does not exist, or if 'dst' does exist but is +    older than 'src'. + +    'link' allows you to make hard links (os.link) or symbolic links +    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is +    None (the default), files are copied.  Don't set 'link' on systems that +    don't support it: 'copy_file()' doesn't check if hard or symbolic +    linking is available. If hardlink fails, falls back to +    _copy_file_contents(). + +    Under Mac OS, uses the native file copy function in macostools; on +    other systems, uses '_copy_file_contents()' to copy file contents. + +    Return a tuple (dest_name, copied): 'dest_name' is the actual name of +    the output file, and 'copied' is true if the file was copied (or would +    have been copied, if 'dry_run' true). +    """ +    # XXX if the destination file already exists, we clobber it if +    # copying, but blow up if linking.  Hmmm.  And I don't know what +    # macostools.copyfile() does.  Should definitely be consistent, and +    # should probably blow up if destination exists and we would be +    # changing it (ie. it's not already a hard/soft link to src OR +    # (not update) and (src newer than dst). 
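Illustrative calls, with hypothetical file names, showing the (dest_name, copied) return value described in the docstring above:

    # copy_file('src/spam.txt', 'build/')                         -> ('build/spam.txt', 1)
    # copy_file('src/spam.txt', 'build/', update=1)               -> ('build/spam.txt', 0) if the copy is already up to date
    # copy_file('src/spam.txt', 'build/', link='hard', dry_run=1) -> ('build/spam.txt', 1), without writing anything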
+ +    from distutils.dep_util import newer +    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE + +    if not os.path.isfile(src): +        raise DistutilsFileError( +            "can't copy '%s': doesn't exist or not a regular file" % src +        ) + +    if os.path.isdir(dst): +        dir = dst +        dst = os.path.join(dst, os.path.basename(src)) +    else: +        dir = os.path.dirname(dst) + +    if update and not newer(src, dst): +        if verbose >= 1: +            log.debug("not copying %s (output up-to-date)", src) +        return (dst, 0) + +    try: +        action = _copy_action[link] +    except KeyError: +        raise ValueError("invalid value '%s' for 'link' argument" % link) + +    if verbose >= 1: +        if os.path.basename(dst) == os.path.basename(src): +            log.info("%s %s -> %s", action, src, dir) +        else: +            log.info("%s %s -> %s", action, src, dst) + +    if dry_run: +        return (dst, 1) + +    # If linking (hard or symbolic), use the appropriate system call +    # (Unix only, of course, but that's the caller's responsibility) +    elif link == 'hard': +        if not (os.path.exists(dst) and os.path.samefile(src, dst)): +            try: +                os.link(src, dst) +                return (dst, 1) +            except OSError: +                # If hard linking fails, fall back on copying file +                # (some special filesystems don't support hard linking +                #  even under Unix, see issue #8876). +                pass +    elif link == 'sym': +        if not (os.path.exists(dst) and os.path.samefile(src, dst)): +            os.symlink(src, dst) +            return (dst, 1) + +    # Otherwise (non-Mac, not linking), copy the file contents and +    # (optionally) copy the times and mode. +    _copy_file_contents(src, dst) +    if preserve_mode or preserve_times: +        st = os.stat(src) + +        # According to David Ascher <da@ski.org>, utime() should be done +        # before chmod() (at least under NT). +        if preserve_times: +            os.utime(dst, (st[ST_ATIME], st[ST_MTIME])) +        if preserve_mode: +            os.chmod(dst, S_IMODE(st[ST_MODE])) + +    return (dst, 1) + + +# XXX I suspect this is Unix-specific -- need porting help! +def move_file(src, dst, verbose=1, dry_run=0):  # noqa: C901 + +    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will +    be moved into it with the same name; otherwise, 'src' is just renamed +    to 'dst'.  Return the new full name of the file. + +    Handles cross-device moves on Unix using 'copy_file()'.  What about +    other systems??? 
+    """ +    from os.path import exists, isfile, isdir, basename, dirname +    import errno + +    if verbose >= 1: +        log.info("moving %s -> %s", src, dst) + +    if dry_run: +        return dst + +    if not isfile(src): +        raise DistutilsFileError("can't move '%s': not a regular file" % src) + +    if isdir(dst): +        dst = os.path.join(dst, basename(src)) +    elif exists(dst): +        raise DistutilsFileError( +            "can't move '{}': destination '{}' already exists".format(src, dst) +        ) + +    if not isdir(dirname(dst)): +        raise DistutilsFileError( +            "can't move '{}': destination '{}' not a valid path".format(src, dst) +        ) + +    copy_it = False +    try: +        os.rename(src, dst) +    except OSError as e: +        (num, msg) = e.args +        if num == errno.EXDEV: +            copy_it = True +        else: +            raise DistutilsFileError( +                "couldn't move '{}' to '{}': {}".format(src, dst, msg) +            ) + +    if copy_it: +        copy_file(src, dst, verbose=verbose) +        try: +            os.unlink(src) +        except OSError as e: +            (num, msg) = e.args +            try: +                os.unlink(dst) +            except OSError: +                pass +            raise DistutilsFileError( +                "couldn't move '%s' to '%s' by copy/delete: " +                "delete '%s' failed: %s" % (src, dst, src, msg) +            ) +    return dst + + +def write_file(filename, contents): +    """Create a file with the specified name and write 'contents' (a +    sequence of strings without line terminators) to it. +    """ +    f = open(filename, "w") +    try: +        for line in contents: +            f.write(line + "\n") +    finally: +        f.close() diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/filelist.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/filelist.py new file mode 100644 index 0000000..987931a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/filelist.py @@ -0,0 +1,371 @@ +"""distutils.filelist + +Provides the FileList class, used for poking about the filesystem +and building lists of files. +""" + +import os +import re +import fnmatch +import functools + +from distutils.util import convert_path +from distutils.errors import DistutilsTemplateError, DistutilsInternalError +from distutils import log + + +class FileList: +    """A list of files built by on exploring the filesystem and filtered by +    applying various patterns to what we find there. + +    Instance attributes: +      dir +        directory from which files will be taken -- only used if +        'allfiles' not supplied to constructor +      files +        list of filenames currently being built/filtered/manipulated +      allfiles +        complete list of files under consideration (ie. without any +        filtering applied) +    """ + +    def __init__(self, warn=None, debug_print=None): +        # ignore argument to FileList, but keep them for backwards +        # compatibility +        self.allfiles = None +        self.files = [] + +    def set_allfiles(self, allfiles): +        self.allfiles = allfiles + +    def findall(self, dir=os.curdir): +        self.allfiles = findall(dir) + +    def debug_print(self, msg): +        """Print 'msg' to stdout if the global DEBUG (taken from the +        DISTUTILS_DEBUG environment variable) flag is true. 
+        """ +        from distutils.debug import DEBUG + +        if DEBUG: +            print(msg) + +    # Collection methods + +    def append(self, item): +        self.files.append(item) + +    def extend(self, items): +        self.files.extend(items) + +    def sort(self): +        # Not a strict lexical sort! +        sortable_files = sorted(map(os.path.split, self.files)) +        self.files = [] +        for sort_tuple in sortable_files: +            self.files.append(os.path.join(*sort_tuple)) + +    # Other miscellaneous utility methods + +    def remove_duplicates(self): +        # Assumes list has been sorted! +        for i in range(len(self.files) - 1, 0, -1): +            if self.files[i] == self.files[i - 1]: +                del self.files[i] + +    # "File template" methods + +    def _parse_template_line(self, line): +        words = line.split() +        action = words[0] + +        patterns = dir = dir_pattern = None + +        if action in ('include', 'exclude', 'global-include', 'global-exclude'): +            if len(words) < 2: +                raise DistutilsTemplateError( +                    "'%s' expects <pattern1> <pattern2> ..." % action +                ) +            patterns = [convert_path(w) for w in words[1:]] +        elif action in ('recursive-include', 'recursive-exclude'): +            if len(words) < 3: +                raise DistutilsTemplateError( +                    "'%s' expects <dir> <pattern1> <pattern2> ..." % action +                ) +            dir = convert_path(words[1]) +            patterns = [convert_path(w) for w in words[2:]] +        elif action in ('graft', 'prune'): +            if len(words) != 2: +                raise DistutilsTemplateError( +                    "'%s' expects a single <dir_pattern>" % action +                ) +            dir_pattern = convert_path(words[1]) +        else: +            raise DistutilsTemplateError("unknown action '%s'" % action) + +        return (action, patterns, dir, dir_pattern) + +    def process_template_line(self, line):  # noqa: C901 +        # Parse the line: split it up, make sure the right number of words +        # is there, and return the relevant words.  'action' is always +        # defined: it's the first word of the line.  Which of the other +        # three are defined depends on the action; it'll be either +        # patterns, (dir and patterns), or (dir_pattern). +        (action, patterns, dir, dir_pattern) = self._parse_template_line(line) + +        # OK, now we know that the action is valid and we have the +        # right number of words on the line for that action -- so we +        # can proceed with minimal error-checking. 
+        if action == 'include': +            self.debug_print("include " + ' '.join(patterns)) +            for pattern in patterns: +                if not self.include_pattern(pattern, anchor=1): +                    log.warn("warning: no files found matching '%s'", pattern) + +        elif action == 'exclude': +            self.debug_print("exclude " + ' '.join(patterns)) +            for pattern in patterns: +                if not self.exclude_pattern(pattern, anchor=1): +                    log.warn( +                        ( +                            "warning: no previously-included files " +                            "found matching '%s'" +                        ), +                        pattern, +                    ) + +        elif action == 'global-include': +            self.debug_print("global-include " + ' '.join(patterns)) +            for pattern in patterns: +                if not self.include_pattern(pattern, anchor=0): +                    log.warn( +                        ( +                            "warning: no files found matching '%s' " +                            "anywhere in distribution" +                        ), +                        pattern, +                    ) + +        elif action == 'global-exclude': +            self.debug_print("global-exclude " + ' '.join(patterns)) +            for pattern in patterns: +                if not self.exclude_pattern(pattern, anchor=0): +                    log.warn( +                        ( +                            "warning: no previously-included files matching " +                            "'%s' found anywhere in distribution" +                        ), +                        pattern, +                    ) + +        elif action == 'recursive-include': +            self.debug_print("recursive-include {} {}".format(dir, ' '.join(patterns))) +            for pattern in patterns: +                if not self.include_pattern(pattern, prefix=dir): +                    msg = ( +                        "warning: no files found matching '%s' " "under directory '%s'" +                    ) +                    log.warn(msg, pattern, dir) + +        elif action == 'recursive-exclude': +            self.debug_print("recursive-exclude {} {}".format(dir, ' '.join(patterns))) +            for pattern in patterns: +                if not self.exclude_pattern(pattern, prefix=dir): +                    log.warn( +                        ( +                            "warning: no previously-included files matching " +                            "'%s' found under directory '%s'" +                        ), +                        pattern, +                        dir, +                    ) + +        elif action == 'graft': +            self.debug_print("graft " + dir_pattern) +            if not self.include_pattern(None, prefix=dir_pattern): +                log.warn("warning: no directories found matching '%s'", dir_pattern) + +        elif action == 'prune': +            self.debug_print("prune " + dir_pattern) +            if not self.exclude_pattern(None, prefix=dir_pattern): +                log.warn( +                    ("no previously-included directories found " "matching '%s'"), +                    dir_pattern, +                ) +        else: +            raise DistutilsInternalError( +                "this cannot happen: invalid action '%s'" % action +            ) + +    # Filtering/selection methods + +    def include_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): +        
"""Select strings (presumably filenames) from 'self.files' that +        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns +        are not quite the same as implemented by the 'fnmatch' module: '*' +        and '?'  match non-special characters, where "special" is platform- +        dependent: slash on Unix; colon, slash, and backslash on +        DOS/Windows; and colon on Mac OS. + +        If 'anchor' is true (the default), then the pattern match is more +        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If +        'anchor' is false, both of these will match. + +        If 'prefix' is supplied, then only filenames starting with 'prefix' +        (itself a pattern) and ending with 'pattern', with anything in between +        them, will match.  'anchor' is ignored in this case. + +        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and +        'pattern' is assumed to be either a string containing a regex or a +        regex object -- no translation is done, the regex is just compiled +        and used as-is. + +        Selected strings will be added to self.files. + +        Return True if files are found, False otherwise. +        """ +        # XXX docstring lying about what the special chars are? +        files_found = False +        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) +        self.debug_print("include_pattern: applying regex r'%s'" % pattern_re.pattern) + +        # delayed loading of allfiles list +        if self.allfiles is None: +            self.findall() + +        for name in self.allfiles: +            if pattern_re.search(name): +                self.debug_print(" adding " + name) +                self.files.append(name) +                files_found = True +        return files_found + +    def exclude_pattern(self, pattern, anchor=1, prefix=None, is_regex=0): +        """Remove strings (presumably filenames) from 'files' that match +        'pattern'.  Other parameters are the same as for +        'include_pattern()', above. +        The list 'self.files' is modified in place. +        Return True if files are found, False otherwise. +        """ +        files_found = False +        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex) +        self.debug_print("exclude_pattern: applying regex r'%s'" % pattern_re.pattern) +        for i in range(len(self.files) - 1, -1, -1): +            if pattern_re.search(self.files[i]): +                self.debug_print(" removing " + self.files[i]) +                del self.files[i] +                files_found = True +        return files_found + + +# Utility functions + + +def _find_all_simple(path): +    """ +    Find all files under 'path' +    """ +    all_unique = _UniqueDirs.filter(os.walk(path, followlinks=True)) +    results = ( +        os.path.join(base, file) for base, dirs, files in all_unique for file in files +    ) +    return filter(os.path.isfile, results) + + +class _UniqueDirs(set): +    """ +    Exclude previously-seen dirs from walk results, +    avoiding infinite recursion. +    Ref https://bugs.python.org/issue44497. +    """ + +    def __call__(self, walk_item): +        """ +        Given an item from an os.walk result, determine +        if the item represents a unique dir for this instance +        and if not, prevent further traversal. 
+        """ +        base, dirs, files = walk_item +        stat = os.stat(base) +        candidate = stat.st_dev, stat.st_ino +        found = candidate in self +        if found: +            del dirs[:] +        self.add(candidate) +        return not found + +    @classmethod +    def filter(cls, items): +        return filter(cls(), items) + + +def findall(dir=os.curdir): +    """ +    Find all files under 'dir' and return the list of full filenames. +    Unless dir is '.', return full filenames with dir prepended. +    """ +    files = _find_all_simple(dir) +    if dir == os.curdir: +        make_rel = functools.partial(os.path.relpath, start=dir) +        files = map(make_rel, files) +    return list(files) + + +def glob_to_re(pattern): +    """Translate a shell-like glob pattern to a regular expression; return +    a string containing the regex.  Differs from 'fnmatch.translate()' in +    that '*' does not match "special characters" (which are +    platform-specific). +    """ +    pattern_re = fnmatch.translate(pattern) + +    # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which +    # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, +    # and by extension they shouldn't match such "special characters" under +    # any OS.  So change all non-escaped dots in the RE to match any +    # character except the special characters (currently: just os.sep). +    sep = os.sep +    if os.sep == '\\': +        # we're using a regex to manipulate a regex, so we need +        # to escape the backslash twice +        sep = r'\\\\' +    escaped = r'\1[^%s]' % sep +    pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re) +    return pattern_re + + +def translate_pattern(pattern, anchor=1, prefix=None, is_regex=0): +    """Translate a shell-like wildcard pattern to a compiled regular +    expression.  Return the compiled regex.  If 'is_regex' true, +    then 'pattern' is directly compiled to a regex (if it's a string) +    or just returned as-is (assumes it's a regex object). 
+    """ +    if is_regex: +        if isinstance(pattern, str): +            return re.compile(pattern) +        else: +            return pattern + +    # ditch start and end characters +    start, _, end = glob_to_re('_').partition('_') + +    if pattern: +        pattern_re = glob_to_re(pattern) +        assert pattern_re.startswith(start) and pattern_re.endswith(end) +    else: +        pattern_re = '' + +    if prefix is not None: +        prefix_re = glob_to_re(prefix) +        assert prefix_re.startswith(start) and prefix_re.endswith(end) +        prefix_re = prefix_re[len(start) : len(prefix_re) - len(end)] +        sep = os.sep +        if os.sep == '\\': +            sep = r'\\' +        pattern_re = pattern_re[len(start) : len(pattern_re) - len(end)] +        pattern_re = r'{}\A{}{}.*{}{}'.format(start, prefix_re, sep, pattern_re, end) +    else:  # no prefix -- respect anchor flag +        if anchor: +            pattern_re = r'{}\A{}'.format(start, pattern_re[len(start) :]) + +    return re.compile(pattern_re) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/log.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/log.py new file mode 100644 index 0000000..be25f6c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/log.py @@ -0,0 +1,80 @@ +"""A simple log mechanism styled after PEP 282.""" + +# The class here is styled after PEP 282 so that it could later be +# replaced with a standard Python logging implementation. + +import sys + +DEBUG = 1 +INFO = 2 +WARN = 3 +ERROR = 4 +FATAL = 5 + + +class Log: +    def __init__(self, threshold=WARN): +        self.threshold = threshold + +    def _log(self, level, msg, args): +        if level not in (DEBUG, INFO, WARN, ERROR, FATAL): +            raise ValueError('%s wrong log level' % str(level)) + +        if level >= self.threshold: +            if args: +                msg = msg % args +            if level in (WARN, ERROR, FATAL): +                stream = sys.stderr +            else: +                stream = sys.stdout +            try: +                stream.write('%s\n' % msg) +            except UnicodeEncodeError: +                # emulate backslashreplace error handler +                encoding = stream.encoding +                msg = msg.encode(encoding, "backslashreplace").decode(encoding) +                stream.write('%s\n' % msg) +            stream.flush() + +    def log(self, level, msg, *args): +        self._log(level, msg, args) + +    def debug(self, msg, *args): +        self._log(DEBUG, msg, args) + +    def info(self, msg, *args): +        self._log(INFO, msg, args) + +    def warn(self, msg, *args): +        self._log(WARN, msg, args) + +    def error(self, msg, *args): +        self._log(ERROR, msg, args) + +    def fatal(self, msg, *args): +        self._log(FATAL, msg, args) + + +_global_log = Log() +log = _global_log.log +debug = _global_log.debug +info = _global_log.info +warn = _global_log.warn +error = _global_log.error +fatal = _global_log.fatal + + +def set_threshold(level): +    # return the old threshold for use from tests +    old = _global_log.threshold +    _global_log.threshold = level +    return old + + +def set_verbosity(v): +    if v <= 0: +        set_threshold(WARN) +    elif v == 1: +        set_threshold(INFO) +    elif v >= 2: +        set_threshold(DEBUG) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/msvc9compiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/msvc9compiler.py new file mode 
100644 index 0000000..2202183 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/msvc9compiler.py @@ -0,0 +1,832 @@ +"""distutils.msvc9compiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio 2008. + +The module is compatible with VS 2005 and VS 2008. You can find legacy support +for older versions of VS in distutils.msvccompiler. +""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +#   finding DevStudio (through the registry) +# ported to VS2005 and VS 2008 by Christian Heimes + +import os +import subprocess +import sys +import re +import warnings + +from distutils.errors import ( +    DistutilsExecError, +    DistutilsPlatformError, +    CompileError, +    LibError, +    LinkError, +) +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log +from distutils.util import get_platform + +import winreg + +warnings.warn( +    "msvc9compiler is deprecated and slated to be removed " +    "in the future. Please discontinue use or file an issue " +    "with pypa/distutils describing your use case.", +    DeprecationWarning, +) + +RegOpenKeyEx = winreg.OpenKeyEx +RegEnumKey = winreg.EnumKey +RegEnumValue = winreg.EnumValue +RegError = winreg.error + +HKEYS = ( +    winreg.HKEY_USERS, +    winreg.HKEY_CURRENT_USER, +    winreg.HKEY_LOCAL_MACHINE, +    winreg.HKEY_CLASSES_ROOT, +) + +NATIVE_WIN64 = sys.platform == 'win32' and sys.maxsize > 2**32 +if NATIVE_WIN64: +    # Visual C++ is a 32-bit application, so we need to look in +    # the corresponding registry branch, if we're running a +    # 64-bit Python on Win64 +    VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f" +    WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows" +    NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework" +else: +    VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f" +    WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows" +    NET_BASE = r"Software\Microsoft\.NETFramework" + +# A map keyed by get_platform() return values to values accepted by +# 'vcvarsall.bat'.  Note a cross-compile may combine these (eg, 'x86_amd64' is +# the param to cross-compile on x86 targeting amd64.) +PLAT_TO_VCVARS = { +    'win32': 'x86', +    'win-amd64': 'amd64', +} + + +class Reg: +    """Helper class to read values from the registry""" + +    def get_value(cls, path, key): +        for base in HKEYS: +            d = cls.read_values(base, path) +            if d and key in d: +                return d[key] +        raise KeyError(key) + +    get_value = classmethod(get_value) + +    def read_keys(cls, base, key): +        """Return list of registry keys.""" +        try: +            handle = RegOpenKeyEx(base, key) +        except RegError: +            return None +        L = [] +        i = 0 +        while True: +            try: +                k = RegEnumKey(handle, i) +            except RegError: +                break +            L.append(k) +            i += 1 +        return L + +    read_keys = classmethod(read_keys) + +    def read_values(cls, base, key): +        """Return dict of registry keys and values. + +        All names are converted to lowercase. 
+        """ +        try: +            handle = RegOpenKeyEx(base, key) +        except RegError: +            return None +        d = {} +        i = 0 +        while True: +            try: +                name, value, type = RegEnumValue(handle, i) +            except RegError: +                break +            name = name.lower() +            d[cls.convert_mbcs(name)] = cls.convert_mbcs(value) +            i += 1 +        return d + +    read_values = classmethod(read_values) + +    def convert_mbcs(s): +        dec = getattr(s, "decode", None) +        if dec is not None: +            try: +                s = dec("mbcs") +            except UnicodeError: +                pass +        return s + +    convert_mbcs = staticmethod(convert_mbcs) + + +class MacroExpander: +    def __init__(self, version): +        self.macros = {} +        self.vsbase = VS_BASE % version +        self.load_macros(version) + +    def set_macro(self, macro, path, key): +        self.macros["$(%s)" % macro] = Reg.get_value(path, key) + +    def load_macros(self, version): +        self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir") +        self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir") +        self.set_macro("FrameworkDir", NET_BASE, "installroot") +        try: +            if version >= 8.0: +                self.set_macro("FrameworkSDKDir", NET_BASE, "sdkinstallrootv2.0") +            else: +                raise KeyError("sdkinstallrootv2.0") +        except KeyError: +            raise DistutilsPlatformError( +                """Python was built with Visual Studio 2008; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2008 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" +            ) + +        if version >= 9.0: +            self.set_macro("FrameworkVersion", self.vsbase, "clr version") +            self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder") +        else: +            p = r"Software\Microsoft\NET Framework Setup\Product" +            for base in HKEYS: +                try: +                    h = RegOpenKeyEx(base, p) +                except RegError: +                    continue +                key = RegEnumKey(h, 0) +                d = Reg.get_value(base, r"{}\{}".format(p, key)) +                self.macros["$(FrameworkVersion)"] = d["version"] + +    def sub(self, s): +        for k, v in self.macros.items(): +            s = s.replace(k, v) +        return s + + +def get_build_version(): +    """Return the version of MSVC that was used to build Python. + +    For Python 2.3 and up, the version number is included in +    sys.version.  For earlier versions, assume the compiler is MSVC 6. +    """ +    prefix = "MSC v." 
+    i = sys.version.find(prefix) +    if i == -1: +        return 6 +    i = i + len(prefix) +    s, rest = sys.version[i:].split(" ", 1) +    majorVersion = int(s[:-2]) - 6 +    if majorVersion >= 13: +        # v13 was skipped and should be v14 +        majorVersion += 1 +    minorVersion = int(s[2:3]) / 10.0 +    # I don't think paths are affected by minor version in version 6 +    if majorVersion == 6: +        minorVersion = 0 +    if majorVersion >= 6: +        return majorVersion + minorVersion +    # else we don't know what version of the compiler this is +    return None + + +def normalize_and_reduce_paths(paths): +    """Return a list of normalized paths with duplicates removed. + +    The current order of paths is maintained. +    """ +    # Paths are normalized so things like:  /a and /a/ aren't both preserved. +    reduced_paths = [] +    for p in paths: +        np = os.path.normpath(p) +        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. +        if np not in reduced_paths: +            reduced_paths.append(np) +    return reduced_paths + + +def removeDuplicates(variable): +    """Remove duplicate values of an environment variable.""" +    oldList = variable.split(os.pathsep) +    newList = [] +    for i in oldList: +        if i not in newList: +            newList.append(i) +    newVariable = os.pathsep.join(newList) +    return newVariable + + +def find_vcvarsall(version): +    """Find the vcvarsall.bat file + +    At first it tries to find the productdir of VS 2008 in the registry. If +    that fails it falls back to the VS90COMNTOOLS env var. +    """ +    vsbase = VS_BASE % version +    try: +        productdir = Reg.get_value(r"%s\Setup\VC" % vsbase, "productdir") +    except KeyError: +        log.debug("Unable to find productdir in registry") +        productdir = None + +    if not productdir or not os.path.isdir(productdir): +        toolskey = "VS%0.f0COMNTOOLS" % version +        toolsdir = os.environ.get(toolskey, None) + +        if toolsdir and os.path.isdir(toolsdir): +            productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC") +            productdir = os.path.abspath(productdir) +            if not os.path.isdir(productdir): +                log.debug("%s is not a valid directory" % productdir) +                return None +        else: +            log.debug("Env var %s is not set or invalid" % toolskey) +    if not productdir: +        log.debug("No productdir found") +        return None +    vcvarsall = os.path.join(productdir, "vcvarsall.bat") +    if os.path.isfile(vcvarsall): +        return vcvarsall +    log.debug("Unable to find vcvarsall.bat") +    return None + + +def query_vcvarsall(version, arch="x86"): +    """Launch vcvarsall.bat and read the settings from its environment""" +    vcvarsall = find_vcvarsall(version) +    interesting = {"include", "lib", "libpath", "path"} +    result = {} + +    if vcvarsall is None: +        raise DistutilsPlatformError("Unable to find vcvarsall.bat") +    log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version) +    popen = subprocess.Popen( +        '"{}" {} & set'.format(vcvarsall, arch), +        stdout=subprocess.PIPE, +        stderr=subprocess.PIPE, +    ) +    try: +        stdout, stderr = popen.communicate() +        if popen.wait() != 0: +            raise DistutilsPlatformError(stderr.decode("mbcs")) + +        stdout = stdout.decode("mbcs") +        for line in stdout.split("\n"): +            line = Reg.convert_mbcs(line) +            if 
'=' not in line: +                continue +            line = line.strip() +            key, value = line.split('=', 1) +            key = key.lower() +            if key in interesting: +                if value.endswith(os.pathsep): +                    value = value[:-1] +                result[key] = removeDuplicates(value) + +    finally: +        popen.stdout.close() +        popen.stderr.close() + +    if len(result) != len(interesting): +        raise ValueError(str(list(result.keys()))) + +    return result + + +# More globals +VERSION = get_build_version() +# MACROS = MacroExpander(VERSION) + + +class MSVCCompiler(CCompiler): +    """Concrete class that implements an interface to Microsoft Visual C++, +    as defined by the CCompiler abstract class.""" + +    compiler_type = 'msvc' + +    # Just set this so CCompiler's constructor doesn't barf.  We currently +    # don't use the 'set_executables()' bureaucracy provided by CCompiler, +    # as it really isn't necessary for this sort of single-compiler class. +    # Would be nice to have a consistent interface with UnixCCompiler, +    # though, so it's worth thinking about. +    executables = {} + +    # Private class data (need to distinguish C from C++ source for compiler) +    _c_extensions = ['.c'] +    _cpp_extensions = ['.cc', '.cpp', '.cxx'] +    _rc_extensions = ['.rc'] +    _mc_extensions = ['.mc'] + +    # Needed for the filename generation methods provided by the +    # base class, CCompiler. +    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions +    res_extension = '.res' +    obj_extension = '.obj' +    static_lib_extension = '.lib' +    shared_lib_extension = '.dll' +    static_lib_format = shared_lib_format = '%s%s' +    exe_extension = '.exe' + +    def __init__(self, verbose=0, dry_run=0, force=0): +        super().__init__(verbose, dry_run, force) +        self.__version = VERSION +        self.__root = r"Software\Microsoft\VisualStudio" +        # self.__macros = MACROS +        self.__paths = [] +        # target platform (.plat_name is consistent with 'bdist') +        self.plat_name = None +        self.__arch = None  # deprecated name +        self.initialized = False + +    def initialize(self, plat_name=None):  # noqa: C901 +        # multi-init means we would need to check platform same each time... +        assert not self.initialized, "don't init multiple times" +        if self.__version < 8.0: +            raise DistutilsPlatformError( +                "VC %0.1f is not supported by this module" % self.__version +            ) +        if plat_name is None: +            plat_name = get_platform() +        # sanity check for platforms to prevent obscure errors later. +        ok_plats = 'win32', 'win-amd64' +        if plat_name not in ok_plats: +            raise DistutilsPlatformError( +                "--plat-name must be one of {}".format(ok_plats) +            ) + +        if ( +            "DISTUTILS_USE_SDK" in os.environ +            and "MSSdk" in os.environ +            and self.find_exe("cl.exe") +        ): +            # Assume that the SDK set up everything alright; don't try to be +            # smarter +            self.cc = "cl.exe" +            self.linker = "link.exe" +            self.lib = "lib.exe" +            self.rc = "rc.exe" +            self.mc = "mc.exe" +        else: +            # On x86, 'vcvars32.bat amd64' creates an env that doesn't work; +            # to cross compile, you use 'x86_amd64'. 
+            # On AMD64, 'vcvars32.bat amd64' is a native build env; to cross +            # compile use 'x86' (ie, it runs the x86 compiler directly) +            if plat_name == get_platform() or plat_name == 'win32': +                # native build or cross-compile to win32 +                plat_spec = PLAT_TO_VCVARS[plat_name] +            else: +                # cross compile from win32 -> some 64bit +                plat_spec = ( +                    PLAT_TO_VCVARS[get_platform()] + '_' + PLAT_TO_VCVARS[plat_name] +                ) + +            vc_env = query_vcvarsall(VERSION, plat_spec) + +            self.__paths = vc_env['path'].split(os.pathsep) +            os.environ['lib'] = vc_env['lib'] +            os.environ['include'] = vc_env['include'] + +            if len(self.__paths) == 0: +                raise DistutilsPlatformError( +                    "Python was built with %s, " +                    "and extensions need to be built with the same " +                    "version of the compiler, but it isn't installed." % self.__product +                ) + +            self.cc = self.find_exe("cl.exe") +            self.linker = self.find_exe("link.exe") +            self.lib = self.find_exe("lib.exe") +            self.rc = self.find_exe("rc.exe")  # resource compiler +            self.mc = self.find_exe("mc.exe")  # message compiler +            # self.set_path_env_var('lib') +            # self.set_path_env_var('include') + +        # extend the MSVC path with the current path +        try: +            for p in os.environ['path'].split(';'): +                self.__paths.append(p) +        except KeyError: +            pass +        self.__paths = normalize_and_reduce_paths(self.__paths) +        os.environ['path'] = ";".join(self.__paths) + +        self.preprocess_options = None +        if self.__arch == "x86": +            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/DNDEBUG'] +            self.compile_options_debug = [ +                '/nologo', +                '/Od', +                '/MDd', +                '/W3', +                '/Z7', +                '/D_DEBUG', +            ] +        else: +            # Win64 +            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] +            self.compile_options_debug = [ +                '/nologo', +                '/Od', +                '/MDd', +                '/W3', +                '/GS-', +                '/Z7', +                '/D_DEBUG', +            ] + +        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] +        if self.__version >= 7: +            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] +        self.ldflags_static = ['/nologo'] + +        self.initialized = True + +    # -- Worker methods ------------------------------------------------ + +    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): +        # Copied from ccompiler.py, extended to return .res as 'object'-file +        # for .rc input file +        if output_dir is None: +            output_dir = '' +        obj_names = [] +        for src_name in source_filenames: +            (base, ext) = os.path.splitext(src_name) +            base = os.path.splitdrive(base)[1]  # Chop off the drive +            base = base[os.path.isabs(base) :]  # If abs, chop off leading / +            if ext not in self.src_extensions: +                # Better to raise an exception instead of silently continuing +                # and later 
complain about sources and targets having +                # different lengths +                raise CompileError("Don't know how to compile %s" % src_name) +            if strip_dir: +                base = os.path.basename(base) +            if ext in self._rc_extensions: +                obj_names.append(os.path.join(output_dir, base + self.res_extension)) +            elif ext in self._mc_extensions: +                obj_names.append(os.path.join(output_dir, base + self.res_extension)) +            else: +                obj_names.append(os.path.join(output_dir, base + self.obj_extension)) +        return obj_names + +    def compile(  # noqa: C901 +        self, +        sources, +        output_dir=None, +        macros=None, +        include_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        depends=None, +    ): + +        if not self.initialized: +            self.initialize() +        compile_info = self._setup_compile( +            output_dir, macros, include_dirs, sources, depends, extra_postargs +        ) +        macros, objects, extra_postargs, pp_opts, build = compile_info + +        compile_opts = extra_preargs or [] +        compile_opts.append('/c') +        if debug: +            compile_opts.extend(self.compile_options_debug) +        else: +            compile_opts.extend(self.compile_options) + +        for obj in objects: +            try: +                src, ext = build[obj] +            except KeyError: +                continue +            if debug: +                # pass the full pathname to MSVC in debug mode, +                # this allows the debugger to find the source file +                # without asking the user to browse for it +                src = os.path.abspath(src) + +            if ext in self._c_extensions: +                input_opt = "/Tc" + src +            elif ext in self._cpp_extensions: +                input_opt = "/Tp" + src +            elif ext in self._rc_extensions: +                # compile .RC to .RES file +                input_opt = src +                output_opt = "/fo" + obj +                try: +                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            elif ext in self._mc_extensions: +                # Compile .MC to .RC file to .RES file. +                #   * '-h dir' specifies the directory for the +                #     generated include file +                #   * '-r dir' specifies the target directory of the +                #     generated RC file and the binary message resource +                #     it includes +                # +                # For now (since there are no options to change this), +                # we use the source-directory for the include file and +                # the build directory for the RC file and message +                # resources. This works at least for win32all. 
+                h_dir = os.path.dirname(src) +                rc_dir = os.path.dirname(obj) +                try: +                    # first compile .MC to .RC and .H file +                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) +                    base, _ = os.path.splitext(os.path.basename(src)) +                    rc_file = os.path.join(rc_dir, base + '.rc') +                    # then compile .RC to .RES file +                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) + +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            else: +                # how to handle this file? +                raise CompileError( +                    "Don't know how to compile {} to {}".format(src, obj) +                ) + +            output_opt = "/Fo" + obj +            try: +                self.spawn( +                    [self.cc] +                    + compile_opts +                    + pp_opts +                    + [input_opt, output_opt] +                    + extra_postargs +                ) +            except DistutilsExecError as msg: +                raise CompileError(msg) + +        return objects + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): + +        if not self.initialized: +            self.initialize() +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        output_filename = self.library_filename(output_libname, output_dir=output_dir) + +        if self._need_link(objects, output_filename): +            lib_args = objects + ['/OUT:' + output_filename] +            if debug: +                pass  # XXX what goes here? +            try: +                self.spawn([self.lib] + lib_args) +            except DistutilsExecError as msg: +                raise LibError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def link(  # noqa: C901 +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): + +        if not self.initialized: +            self.initialize() +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) +        (libraries, library_dirs, runtime_library_dirs) = fixed_args + +        if runtime_library_dirs: +            self.warn( +                "I don't know what to do with 'runtime_library_dirs': " +                + str(runtime_library_dirs) +            ) + +        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) +        if output_dir is not None: +            output_filename = os.path.join(output_dir, output_filename) + +        if self._need_link(objects, output_filename): +            if target_desc == CCompiler.EXECUTABLE: +                if debug: +                    ldflags = self.ldflags_shared_debug[1:] +                else: +                    ldflags = self.ldflags_shared[1:] +            else: +                if debug: +                    ldflags = self.ldflags_shared_debug +                else: +                    ldflags = self.ldflags_shared 
+ +            export_opts = [] +            for sym in export_symbols or []: +                export_opts.append("/EXPORT:" + sym) + +            ld_args = ( +                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] +            ) + +            # The MSVC linker generates .lib and .exp files, which cannot be +            # suppressed by any linker switches. The .lib files may even be +            # needed! Make sure they are generated in the temporary build +            # directory. Since they have different names for debug and release +            # builds, they can go into the same directory. +            build_temp = os.path.dirname(objects[0]) +            if export_symbols is not None: +                (dll_name, dll_ext) = os.path.splitext( +                    os.path.basename(output_filename) +                ) +                implib_file = os.path.join(build_temp, self.library_filename(dll_name)) +                ld_args.append('/IMPLIB:' + implib_file) + +            self.manifest_setup_ldargs(output_filename, build_temp, ld_args) + +            if extra_preargs: +                ld_args[:0] = extra_preargs +            if extra_postargs: +                ld_args.extend(extra_postargs) + +            self.mkpath(os.path.dirname(output_filename)) +            try: +                self.spawn([self.linker] + ld_args) +            except DistutilsExecError as msg: +                raise LinkError(msg) + +            # embed the manifest +            # XXX - this is somewhat fragile - if mt.exe fails, distutils +            # will still consider the DLL up-to-date, but it will not have a +            # manifest.  Maybe we should link to a temp file?  OTOH, that +            # implies a build environment error that shouldn't go undetected. +            mfinfo = self.manifest_get_embed_info(target_desc, ld_args) +            if mfinfo is not None: +                mffilename, mfid = mfinfo +                out_arg = '-outputresource:{};{}'.format(output_filename, mfid) +                try: +                    self.spawn(['mt.exe', '-nologo', '-manifest', mffilename, out_arg]) +                except DistutilsExecError as msg: +                    raise LinkError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def manifest_setup_ldargs(self, output_filename, build_temp, ld_args): +        # If we need a manifest at all, an embedded manifest is recommended. +        # See MSDN article titled +        # "How to: Embed a Manifest Inside a C/C++ Application" +        # (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx) +        # Ask the linker to generate the manifest in the temp dir, so +        # we can check it, and possibly embed it, later. +        temp_manifest = os.path.join( +            build_temp, os.path.basename(output_filename) + ".manifest" +        ) +        ld_args.append('/MANIFESTFILE:' + temp_manifest) + +    def manifest_get_embed_info(self, target_desc, ld_args): +        # If a manifest should be embedded, return a tuple of +        # (manifest_filename, resource_id).  Returns None if no manifest +        # should be embedded.  
See http://bugs.python.org/issue7833 for why +        # we want to avoid any manifest for extension modules if we can) +        for arg in ld_args: +            if arg.startswith("/MANIFESTFILE:"): +                temp_manifest = arg.split(":", 1)[1] +                break +        else: +            # no /MANIFESTFILE so nothing to do. +            return None +        if target_desc == CCompiler.EXECUTABLE: +            # by default, executables always get the manifest with the +            # CRT referenced. +            mfid = 1 +        else: +            # Extension modules try and avoid any manifest if possible. +            mfid = 2 +            temp_manifest = self._remove_visual_c_ref(temp_manifest) +        if temp_manifest is None: +            return None +        return temp_manifest, mfid + +    def _remove_visual_c_ref(self, manifest_file): +        try: +            # Remove references to the Visual C runtime, so they will +            # fall through to the Visual C dependency of Python.exe. +            # This way, when installed for a restricted user (e.g. +            # runtimes are not in WinSxS folder, but in Python's own +            # folder), the runtimes do not need to be in every folder +            # with .pyd's. +            # Returns either the filename of the modified manifest or +            # None if no manifest should be embedded. +            manifest_f = open(manifest_file) +            try: +                manifest_buf = manifest_f.read() +            finally: +                manifest_f.close() +            pattern = re.compile( +                r"""<assemblyIdentity.*?name=("|')Microsoft\.""" +                r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""", +                re.DOTALL, +            ) +            manifest_buf = re.sub(pattern, "", manifest_buf) +            pattern = r"<dependentAssembly>\s*</dependentAssembly>" +            manifest_buf = re.sub(pattern, "", manifest_buf) +            # Now see if any other assemblies are referenced - if not, we +            # don't want a manifest embedded. +            pattern = re.compile( +                r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')""" +                r""".*?(?:/>|</assemblyIdentity>)""", +                re.DOTALL, +            ) +            if re.search(pattern, manifest_buf) is None: +                return None + +            manifest_f = open(manifest_file, 'w') +            try: +                manifest_f.write(manifest_buf) +                return manifest_file +            finally: +                manifest_f.close() +        except OSError: +            pass + +    # -- Miscellaneous methods ----------------------------------------- +    # These are all used by the 'gen_lib_options() function, in +    # ccompiler.py. + +    def library_dir_option(self, dir): +        return "/LIBPATH:" + dir + +    def runtime_library_dir_option(self, dir): +        raise DistutilsPlatformError( +            "don't know how to set runtime library search path for MSVC++" +        ) + +    def library_option(self, lib): +        return self.library_filename(lib) + +    def find_library_file(self, dirs, lib, debug=0): +        # Prefer a debugging library if found (and requested), but deal +        # with it if we don't have one. 
+        if debug: +            try_names = [lib + "_d", lib] +        else: +            try_names = [lib] +        for dir in dirs: +            for name in try_names: +                libfile = os.path.join(dir, self.library_filename(name)) +                if os.path.exists(libfile): +                    return libfile +        else: +            # Oops, didn't find it in *any* of 'dirs' +            return None + +    # Helper methods for using the MSVC registry settings + +    def find_exe(self, exe): +        """Return path to an MSVC executable program. + +        Tries to find the program in several places: first, one of the +        MSVC program search paths from the registry; next, the directories +        in the PATH environment variable.  If any of those work, return an +        absolute path that is known to exist.  If none of them work, just +        return the original program name, 'exe'. +        """ +        for p in self.__paths: +            fn = os.path.join(os.path.abspath(p), exe) +            if os.path.isfile(fn): +                return fn + +        # didn't find it; try existing path +        for p in os.environ['Path'].split(';'): +            fn = os.path.join(os.path.abspath(p), exe) +            if os.path.isfile(fn): +                return fn + +        return exe diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/msvccompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/msvccompiler.py new file mode 100644 index 0000000..1069e99 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/msvccompiler.py @@ -0,0 +1,695 @@ +"""distutils.msvccompiler + +Contains MSVCCompiler, an implementation of the abstract CCompiler class +for the Microsoft Visual Studio. +""" + +# Written by Perry Stoll +# hacked by Robin Becker and Thomas Heller to do a better job of +#   finding DevStudio (through the registry) + +import sys +import os +import warnings +from distutils.errors import ( +    DistutilsExecError, +    DistutilsPlatformError, +    CompileError, +    LibError, +    LinkError, +) +from distutils.ccompiler import CCompiler, gen_lib_options +from distutils import log + +_can_read_reg = False +try: +    import winreg + +    _can_read_reg = True +    hkey_mod = winreg + +    RegOpenKeyEx = winreg.OpenKeyEx +    RegEnumKey = winreg.EnumKey +    RegEnumValue = winreg.EnumValue +    RegError = winreg.error + +except ImportError: +    try: +        import win32api +        import win32con + +        _can_read_reg = True +        hkey_mod = win32con + +        RegOpenKeyEx = win32api.RegOpenKeyEx +        RegEnumKey = win32api.RegEnumKey +        RegEnumValue = win32api.RegEnumValue +        RegError = win32api.error +    except ImportError: +        log.info( +            "Warning: Can't read registry to find the " +            "necessary compiler setting\n" +            "Make sure that Python modules winreg, " +            "win32api or win32con are installed." +        ) +        pass + +if _can_read_reg: +    HKEYS = ( +        hkey_mod.HKEY_USERS, +        hkey_mod.HKEY_CURRENT_USER, +        hkey_mod.HKEY_LOCAL_MACHINE, +        hkey_mod.HKEY_CLASSES_ROOT, +    ) + + +warnings.warn( +    "msvccompiler is deprecated and slated to be removed " +    "in the future. 
Please discontinue use or file an issue " +    "with pypa/distutils describing your use case.", +    DeprecationWarning, +) + + +def read_keys(base, key): +    """Return list of registry keys.""" +    try: +        handle = RegOpenKeyEx(base, key) +    except RegError: +        return None +    L = [] +    i = 0 +    while True: +        try: +            k = RegEnumKey(handle, i) +        except RegError: +            break +        L.append(k) +        i += 1 +    return L + + +def read_values(base, key): +    """Return dict of registry keys and values. + +    All names are converted to lowercase. +    """ +    try: +        handle = RegOpenKeyEx(base, key) +    except RegError: +        return None +    d = {} +    i = 0 +    while True: +        try: +            name, value, type = RegEnumValue(handle, i) +        except RegError: +            break +        name = name.lower() +        d[convert_mbcs(name)] = convert_mbcs(value) +        i += 1 +    return d + + +def convert_mbcs(s): +    dec = getattr(s, "decode", None) +    if dec is not None: +        try: +            s = dec("mbcs") +        except UnicodeError: +            pass +    return s + + +class MacroExpander: +    def __init__(self, version): +        self.macros = {} +        self.load_macros(version) + +    def set_macro(self, macro, path, key): +        for base in HKEYS: +            d = read_values(base, path) +            if d: +                self.macros["$(%s)" % macro] = d[key] +                break + +    def load_macros(self, version): +        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version +        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir") +        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir") +        net = r"Software\Microsoft\.NETFramework" +        self.set_macro("FrameworkDir", net, "installroot") +        try: +            if version > 7.0: +                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1") +            else: +                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot") +        except KeyError: +            raise DistutilsPlatformError( +                """Python was built with Visual Studio 2003; +extensions must be built with a compiler than can generate compatible binaries. +Visual Studio 2003 was not found on this system. If you have Cygwin installed, +you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""" +            ) + +        p = r"Software\Microsoft\NET Framework Setup\Product" +        for base in HKEYS: +            try: +                h = RegOpenKeyEx(base, p) +            except RegError: +                continue +            key = RegEnumKey(h, 0) +            d = read_values(base, r"{}\{}".format(p, key)) +            self.macros["$(FrameworkVersion)"] = d["version"] + +    def sub(self, s): +        for k, v in self.macros.items(): +            s = s.replace(k, v) +        return s + + +def get_build_version(): +    """Return the version of MSVC that was used to build Python. + +    For Python 2.3 and up, the version number is included in +    sys.version.  For earlier versions, assume the compiler is MSVC 6. +    """ +    prefix = "MSC v." 
+    i = sys.version.find(prefix) +    if i == -1: +        return 6 +    i = i + len(prefix) +    s, rest = sys.version[i:].split(" ", 1) +    majorVersion = int(s[:-2]) - 6 +    if majorVersion >= 13: +        # v13 was skipped and should be v14 +        majorVersion += 1 +    minorVersion = int(s[2:3]) / 10.0 +    # I don't think paths are affected by minor version in version 6 +    if majorVersion == 6: +        minorVersion = 0 +    if majorVersion >= 6: +        return majorVersion + minorVersion +    # else we don't know what version of the compiler this is +    return None + + +def get_build_architecture(): +    """Return the processor architecture. + +    Possible results are "Intel" or "AMD64". +    """ + +    prefix = " bit (" +    i = sys.version.find(prefix) +    if i == -1: +        return "Intel" +    j = sys.version.find(")", i) +    return sys.version[i + len(prefix) : j] + + +def normalize_and_reduce_paths(paths): +    """Return a list of normalized paths with duplicates removed. + +    The current order of paths is maintained. +    """ +    # Paths are normalized so things like:  /a and /a/ aren't both preserved. +    reduced_paths = [] +    for p in paths: +        np = os.path.normpath(p) +        # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. +        if np not in reduced_paths: +            reduced_paths.append(np) +    return reduced_paths + + +class MSVCCompiler(CCompiler): +    """Concrete class that implements an interface to Microsoft Visual C++, +    as defined by the CCompiler abstract class.""" + +    compiler_type = 'msvc' + +    # Just set this so CCompiler's constructor doesn't barf.  We currently +    # don't use the 'set_executables()' bureaucracy provided by CCompiler, +    # as it really isn't necessary for this sort of single-compiler class. +    # Would be nice to have a consistent interface with UnixCCompiler, +    # though, so it's worth thinking about. +    executables = {} + +    # Private class data (need to distinguish C from C++ source for compiler) +    _c_extensions = ['.c'] +    _cpp_extensions = ['.cc', '.cpp', '.cxx'] +    _rc_extensions = ['.rc'] +    _mc_extensions = ['.mc'] + +    # Needed for the filename generation methods provided by the +    # base class, CCompiler. +    src_extensions = _c_extensions + _cpp_extensions + _rc_extensions + _mc_extensions +    res_extension = '.res' +    obj_extension = '.obj' +    static_lib_extension = '.lib' +    shared_lib_extension = '.dll' +    static_lib_format = shared_lib_format = '%s%s' +    exe_extension = '.exe' + +    def __init__(self, verbose=0, dry_run=0, force=0): +        super().__init__(verbose, dry_run, force) +        self.__version = get_build_version() +        self.__arch = get_build_architecture() +        if self.__arch == "Intel": +            # x86 +            if self.__version >= 7: +                self.__root = r"Software\Microsoft\VisualStudio" +                self.__macros = MacroExpander(self.__version) +            else: +                self.__root = r"Software\Microsoft\Devstudio" +            self.__product = "Visual Studio version %s" % self.__version +        else: +            # Win64. 
Assume this was built with the platform SDK +            self.__product = "Microsoft SDK compiler %s" % (self.__version + 6) + +        self.initialized = False + +    def initialize(self): +        self.__paths = [] +        if ( +            "DISTUTILS_USE_SDK" in os.environ +            and "MSSdk" in os.environ +            and self.find_exe("cl.exe") +        ): +            # Assume that the SDK set up everything alright; don't try to be +            # smarter +            self.cc = "cl.exe" +            self.linker = "link.exe" +            self.lib = "lib.exe" +            self.rc = "rc.exe" +            self.mc = "mc.exe" +        else: +            self.__paths = self.get_msvc_paths("path") + +            if len(self.__paths) == 0: +                raise DistutilsPlatformError( +                    "Python was built with %s, " +                    "and extensions need to be built with the same " +                    "version of the compiler, but it isn't installed." % self.__product +                ) + +            self.cc = self.find_exe("cl.exe") +            self.linker = self.find_exe("link.exe") +            self.lib = self.find_exe("lib.exe") +            self.rc = self.find_exe("rc.exe")  # resource compiler +            self.mc = self.find_exe("mc.exe")  # message compiler +            self.set_path_env_var('lib') +            self.set_path_env_var('include') + +        # extend the MSVC path with the current path +        try: +            for p in os.environ['path'].split(';'): +                self.__paths.append(p) +        except KeyError: +            pass +        self.__paths = normalize_and_reduce_paths(self.__paths) +        os.environ['path'] = ";".join(self.__paths) + +        self.preprocess_options = None +        if self.__arch == "Intel": +            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GX', '/DNDEBUG'] +            self.compile_options_debug = [ +                '/nologo', +                '/Od', +                '/MDd', +                '/W3', +                '/GX', +                '/Z7', +                '/D_DEBUG', +            ] +        else: +            # Win64 +            self.compile_options = ['/nologo', '/O2', '/MD', '/W3', '/GS-', '/DNDEBUG'] +            self.compile_options_debug = [ +                '/nologo', +                '/Od', +                '/MDd', +                '/W3', +                '/GS-', +                '/Z7', +                '/D_DEBUG', +            ] + +        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO'] +        if self.__version >= 7: +            self.ldflags_shared_debug = ['/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'] +        else: +            self.ldflags_shared_debug = [ +                '/DLL', +                '/nologo', +                '/INCREMENTAL:no', +                '/pdb:None', +                '/DEBUG', +            ] +        self.ldflags_static = ['/nologo'] + +        self.initialized = True + +    # -- Worker methods ------------------------------------------------ + +    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''): +        # Copied from ccompiler.py, extended to return .res as 'object'-file +        # for .rc input file +        if output_dir is None: +            output_dir = '' +        obj_names = [] +        for src_name in source_filenames: +            (base, ext) = os.path.splitext(src_name) +            base = os.path.splitdrive(base)[1]  # Chop off the drive +            base = base[os.path.isabs(base) :]  # If 
abs, chop off leading / +            if ext not in self.src_extensions: +                # Better to raise an exception instead of silently continuing +                # and later complain about sources and targets having +                # different lengths +                raise CompileError("Don't know how to compile %s" % src_name) +            if strip_dir: +                base = os.path.basename(base) +            if ext in self._rc_extensions: +                obj_names.append(os.path.join(output_dir, base + self.res_extension)) +            elif ext in self._mc_extensions: +                obj_names.append(os.path.join(output_dir, base + self.res_extension)) +            else: +                obj_names.append(os.path.join(output_dir, base + self.obj_extension)) +        return obj_names + +    def compile(  # noqa: C901 +        self, +        sources, +        output_dir=None, +        macros=None, +        include_dirs=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        depends=None, +    ): + +        if not self.initialized: +            self.initialize() +        compile_info = self._setup_compile( +            output_dir, macros, include_dirs, sources, depends, extra_postargs +        ) +        macros, objects, extra_postargs, pp_opts, build = compile_info + +        compile_opts = extra_preargs or [] +        compile_opts.append('/c') +        if debug: +            compile_opts.extend(self.compile_options_debug) +        else: +            compile_opts.extend(self.compile_options) + +        for obj in objects: +            try: +                src, ext = build[obj] +            except KeyError: +                continue +            if debug: +                # pass the full pathname to MSVC in debug mode, +                # this allows the debugger to find the source file +                # without asking the user to browse for it +                src = os.path.abspath(src) + +            if ext in self._c_extensions: +                input_opt = "/Tc" + src +            elif ext in self._cpp_extensions: +                input_opt = "/Tp" + src +            elif ext in self._rc_extensions: +                # compile .RC to .RES file +                input_opt = src +                output_opt = "/fo" + obj +                try: +                    self.spawn([self.rc] + pp_opts + [output_opt] + [input_opt]) +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            elif ext in self._mc_extensions: +                # Compile .MC to .RC file to .RES file. +                #   * '-h dir' specifies the directory for the +                #     generated include file +                #   * '-r dir' specifies the target directory of the +                #     generated RC file and the binary message resource +                #     it includes +                # +                # For now (since there are no options to change this), +                # we use the source-directory for the include file and +                # the build directory for the RC file and message +                # resources. This works at least for win32all. 
+                h_dir = os.path.dirname(src) +                rc_dir = os.path.dirname(obj) +                try: +                    # first compile .MC to .RC and .H file +                    self.spawn([self.mc] + ['-h', h_dir, '-r', rc_dir] + [src]) +                    base, _ = os.path.splitext(os.path.basename(src)) +                    rc_file = os.path.join(rc_dir, base + '.rc') +                    # then compile .RC to .RES file +                    self.spawn([self.rc] + ["/fo" + obj] + [rc_file]) + +                except DistutilsExecError as msg: +                    raise CompileError(msg) +                continue +            else: +                # how to handle this file? +                raise CompileError( +                    "Don't know how to compile {} to {}".format(src, obj) +                ) + +            output_opt = "/Fo" + obj +            try: +                self.spawn( +                    [self.cc] +                    + compile_opts +                    + pp_opts +                    + [input_opt, output_opt] +                    + extra_postargs +                ) +            except DistutilsExecError as msg: +                raise CompileError(msg) + +        return objects + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): + +        if not self.initialized: +            self.initialize() +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        output_filename = self.library_filename(output_libname, output_dir=output_dir) + +        if self._need_link(objects, output_filename): +            lib_args = objects + ['/OUT:' + output_filename] +            if debug: +                pass  # XXX what goes here? +            try: +                self.spawn([self.lib] + lib_args) +            except DistutilsExecError as msg: +                raise LibError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def link(  # noqa: C901 +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): + +        if not self.initialized: +            self.initialize() +        (objects, output_dir) = self._fix_object_args(objects, output_dir) +        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) +        (libraries, library_dirs, runtime_library_dirs) = fixed_args + +        if runtime_library_dirs: +            self.warn( +                "I don't know what to do with 'runtime_library_dirs': " +                + str(runtime_library_dirs) +            ) + +        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) +        if output_dir is not None: +            output_filename = os.path.join(output_dir, output_filename) + +        if self._need_link(objects, output_filename): +            if target_desc == CCompiler.EXECUTABLE: +                if debug: +                    ldflags = self.ldflags_shared_debug[1:] +                else: +                    ldflags = self.ldflags_shared[1:] +            else: +                if debug: +                    ldflags = self.ldflags_shared_debug +                else: +                    ldflags = self.ldflags_shared 
+ +            export_opts = [] +            for sym in export_symbols or []: +                export_opts.append("/EXPORT:" + sym) + +            ld_args = ( +                ldflags + lib_opts + export_opts + objects + ['/OUT:' + output_filename] +            ) + +            # The MSVC linker generates .lib and .exp files, which cannot be +            # suppressed by any linker switches. The .lib files may even be +            # needed! Make sure they are generated in the temporary build +            # directory. Since they have different names for debug and release +            # builds, they can go into the same directory. +            if export_symbols is not None: +                (dll_name, dll_ext) = os.path.splitext( +                    os.path.basename(output_filename) +                ) +                implib_file = os.path.join( +                    os.path.dirname(objects[0]), self.library_filename(dll_name) +                ) +                ld_args.append('/IMPLIB:' + implib_file) + +            if extra_preargs: +                ld_args[:0] = extra_preargs +            if extra_postargs: +                ld_args.extend(extra_postargs) + +            self.mkpath(os.path.dirname(output_filename)) +            try: +                self.spawn([self.linker] + ld_args) +            except DistutilsExecError as msg: +                raise LinkError(msg) + +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    # -- Miscellaneous methods ----------------------------------------- +    # These are all used by the 'gen_lib_options() function, in +    # ccompiler.py. + +    def library_dir_option(self, dir): +        return "/LIBPATH:" + dir + +    def runtime_library_dir_option(self, dir): +        raise DistutilsPlatformError( +            "don't know how to set runtime library search path for MSVC++" +        ) + +    def library_option(self, lib): +        return self.library_filename(lib) + +    def find_library_file(self, dirs, lib, debug=0): +        # Prefer a debugging library if found (and requested), but deal +        # with it if we don't have one. +        if debug: +            try_names = [lib + "_d", lib] +        else: +            try_names = [lib] +        for dir in dirs: +            for name in try_names: +                libfile = os.path.join(dir, self.library_filename(name)) +                if os.path.exists(libfile): +                    return libfile +        else: +            # Oops, didn't find it in *any* of 'dirs' +            return None + +    # Helper methods for using the MSVC registry settings + +    def find_exe(self, exe): +        """Return path to an MSVC executable program. + +        Tries to find the program in several places: first, one of the +        MSVC program search paths from the registry; next, the directories +        in the PATH environment variable.  If any of those work, return an +        absolute path that is known to exist.  If none of them work, just +        return the original program name, 'exe'. 
+        """ +        for p in self.__paths: +            fn = os.path.join(os.path.abspath(p), exe) +            if os.path.isfile(fn): +                return fn + +        # didn't find it; try existing path +        for p in os.environ['Path'].split(';'): +            fn = os.path.join(os.path.abspath(p), exe) +            if os.path.isfile(fn): +                return fn + +        return exe + +    def get_msvc_paths(self, path, platform='x86'): +        """Get a list of devstudio directories (include, lib or path). + +        Return a list of strings.  The list will be empty if unable to +        access the registry or appropriate registry keys not found. +        """ +        if not _can_read_reg: +            return [] + +        path = path + " dirs" +        if self.__version >= 7: +            key = r"{}\{:0.1f}\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories".format( +                self.__root, +                self.__version, +            ) +        else: +            key = ( +                r"%s\6.0\Build System\Components\Platforms" +                r"\Win32 (%s)\Directories" % (self.__root, platform) +            ) + +        for base in HKEYS: +            d = read_values(base, key) +            if d: +                if self.__version >= 7: +                    return self.__macros.sub(d[path]).split(";") +                else: +                    return d[path].split(";") +        # MSVC 6 seems to create the registry entries we need only when +        # the GUI is run. +        if self.__version == 6: +            for base in HKEYS: +                if read_values(base, r"%s\6.0" % self.__root) is not None: +                    self.warn( +                        "It seems you have Visual Studio 6 installed, " +                        "but the expected registry settings are not present.\n" +                        "You must at least run the Visual Studio GUI once " +                        "so that these entries are created." +                    ) +                    break +        return [] + +    def set_path_env_var(self, name): +        """Set environment variable 'name' to an MSVC path type value. + +        This is equivalent to a SET command prior to execution of spawned +        commands. 
+        """ + +        if name == "lib": +            p = self.get_msvc_paths("library") +        else: +            p = self.get_msvc_paths(name) +        if p: +            os.environ[name] = ';'.join(p) + + +if get_build_version() >= 8.0: +    log.debug("Importing new compiler from distutils.msvc9compiler") +    OldMSVCCompiler = MSVCCompiler +    from distutils.msvc9compiler import MSVCCompiler + +    # get_build_architecture not really relevant now we support cross-compile +    from distutils.msvc9compiler import MacroExpander  # noqa: F811 diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/py38compat.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/py38compat.py new file mode 100644 index 0000000..59224e7 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/py38compat.py @@ -0,0 +1,8 @@ +def aix_platform(osname, version, release): +    try: +        import _aix_support + +        return _aix_support.aix_platform() +    except ImportError: +        pass +    return "{}-{}.{}".format(osname, version, release) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/py39compat.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/py39compat.py new file mode 100644 index 0000000..c43e5f1 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/py39compat.py @@ -0,0 +1,22 @@ +import sys +import platform + + +def add_ext_suffix_39(vars): +    """ +    Ensure vars contains 'EXT_SUFFIX'. pypa/distutils#130 +    """ +    import _imp + +    ext_suffix = _imp.extension_suffixes()[0] +    vars.update( +        EXT_SUFFIX=ext_suffix, +        # sysconfig sets SO to match EXT_SUFFIX, so maintain +        # that expectation. +        # https://github.com/python/cpython/blob/785cc6770588de087d09e89a69110af2542be208/Lib/sysconfig.py#L671-L673 +        SO=ext_suffix, +    ) + + +needs_ext_suffix = sys.version_info < (3, 10) and platform.system() == 'Windows' +add_ext_suffix = add_ext_suffix_39 if needs_ext_suffix else lambda vars: None diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/spawn.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/spawn.py new file mode 100644 index 0000000..b18ba9d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/spawn.py @@ -0,0 +1,109 @@ +"""distutils.spawn + +Provides the 'spawn()' function, a front-end to various platform- +specific functions for launching another program in a sub-process. +Also provides the 'find_executable()' to search the path for a given +executable name. +""" + +import sys +import os +import subprocess + +from distutils.errors import DistutilsExecError +from distutils.debug import DEBUG +from distutils import log + + +def spawn(cmd, search_path=1, verbose=0, dry_run=0, env=None):  # noqa: C901 +    """Run another program, specified as a command list 'cmd', in a new process. + +    'cmd' is just the argument list for the new process, ie. +    cmd[0] is the program to run and cmd[1:] are the rest of its arguments. +    There is no way to run a program with a name different from that of its +    executable. + +    If 'search_path' is true (the default), the system's executable +    search path will be used to find the program; otherwise, cmd[0] +    must be the exact path to the executable.  If 'dry_run' is true, +    the command will not actually be run. + +    Raise DistutilsExecError if running the program fails in any way; just +    return on success. 
+    """ +    # cmd is documented as a list, but just in case some code passes a tuple +    # in, protect our %-formatting code against horrible death +    cmd = list(cmd) + +    log.info(subprocess.list2cmdline(cmd)) +    if dry_run: +        return + +    if search_path: +        executable = find_executable(cmd[0]) +        if executable is not None: +            cmd[0] = executable + +    env = env if env is not None else dict(os.environ) + +    if sys.platform == 'darwin': +        from distutils.util import MACOSX_VERSION_VAR, get_macosx_target_ver + +        macosx_target_ver = get_macosx_target_ver() +        if macosx_target_ver: +            env[MACOSX_VERSION_VAR] = macosx_target_ver + +    try: +        proc = subprocess.Popen(cmd, env=env) +        proc.wait() +        exitcode = proc.returncode +    except OSError as exc: +        if not DEBUG: +            cmd = cmd[0] +        raise DistutilsExecError( +            "command {!r} failed: {}".format(cmd, exc.args[-1]) +        ) from exc + +    if exitcode: +        if not DEBUG: +            cmd = cmd[0] +        raise DistutilsExecError( +            "command {!r} failed with exit code {}".format(cmd, exitcode) +        ) + + +def find_executable(executable, path=None): +    """Tries to find 'executable' in the directories listed in 'path'. + +    A string listing directories separated by 'os.pathsep'; defaults to +    os.environ['PATH'].  Returns the complete filename or None if not found. +    """ +    _, ext = os.path.splitext(executable) +    if (sys.platform == 'win32') and (ext != '.exe'): +        executable = executable + '.exe' + +    if os.path.isfile(executable): +        return executable + +    if path is None: +        path = os.environ.get('PATH', None) +        if path is None: +            try: +                path = os.confstr("CS_PATH") +            except (AttributeError, ValueError): +                # os.confstr() or CS_PATH is not available +                path = os.defpath +        # bpo-35755: Don't use os.defpath if the PATH environment variable is +        # set to an empty string + +    # PATH='' doesn't match, whereas PATH=':' looks in the current directory +    if not path: +        return None + +    paths = path.split(os.pathsep) +    for p in paths: +        f = os.path.join(p, executable) +        if os.path.isfile(f): +            # the file exists, we have a shot at spawn working +            return f +    return None diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/sysconfig.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/sysconfig.py new file mode 100644 index 0000000..6a979f8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/sysconfig.py @@ -0,0 +1,558 @@ +"""Provide access to Python's configuration information.  The specific +configuration variables available depend heavily on the platform and +configuration.  The values may be retrieved using +get_config_var(name), and the list of variables is available via +get_config_vars().keys().  Additional convenience functions are also +available. + +Written by:   Fred L. Drake, Jr. +Email:        <fdrake@acm.org> +""" + +import os +import re +import sys +import sysconfig +import pathlib + +from .errors import DistutilsPlatformError +from . import py39compat +from ._functools import pass_none + +IS_PYPY = '__pypy__' in sys.builtin_module_names + +# These are needed in a couple of spots, so just compute them once. 
+PREFIX = os.path.normpath(sys.prefix) +EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +BASE_PREFIX = os.path.normpath(sys.base_prefix) +BASE_EXEC_PREFIX = os.path.normpath(sys.base_exec_prefix) + +# Path to the base directory of the project. On Windows the binary may +# live in project/PCbuild/win32 or project/PCbuild/amd64. +# set for cross builds +if "_PYTHON_PROJECT_BASE" in os.environ: +    project_base = os.path.abspath(os.environ["_PYTHON_PROJECT_BASE"]) +else: +    if sys.executable: +        project_base = os.path.dirname(os.path.abspath(sys.executable)) +    else: +        # sys.executable can be empty if argv[0] has been changed and Python is +        # unable to retrieve the real program name +        project_base = os.getcwd() + + +def _is_python_source_dir(d): +    """ +    Return True if the target directory appears to point to an +    un-installed Python. +    """ +    modules = pathlib.Path(d).joinpath('Modules') +    return any(modules.joinpath(fn).is_file() for fn in ('Setup', 'Setup.local')) + + +_sys_home = getattr(sys, '_home', None) + + +def _is_parent(dir_a, dir_b): +    """ +    Return True if a is a parent of b. +    """ +    return os.path.normcase(dir_a).startswith(os.path.normcase(dir_b)) + + +if os.name == 'nt': + +    @pass_none +    def _fix_pcbuild(d): +        # In a venv, sys._home will be inside BASE_PREFIX rather than PREFIX. +        prefixes = PREFIX, BASE_PREFIX +        matched = ( +            prefix +            for prefix in prefixes +            if _is_parent(d, os.path.join(prefix, "PCbuild")) +        ) +        return next(matched, d) + +    project_base = _fix_pcbuild(project_base) +    _sys_home = _fix_pcbuild(_sys_home) + + +def _python_build(): +    if _sys_home: +        return _is_python_source_dir(_sys_home) +    return _is_python_source_dir(project_base) + + +python_build = _python_build() + + +# Calculate the build qualifier flags if they are defined.  Adding the flags +# to the include and lib directories only makes sense for an installation, not +# an in-source build. +build_flags = '' +try: +    if not python_build: +        build_flags = sys.abiflags +except AttributeError: +    # It's not a configure-based build, so the sys module doesn't have +    # this attribute, which is fine. +    pass + + +def get_python_version(): +    """Return a string containing the major and minor Python version, +    leaving off the patchlevel.  Sample return values could be '1.5' +    or '2.2'. +    """ +    return '%d.%d' % sys.version_info[:2] + + +def get_python_inc(plat_specific=0, prefix=None): +    """Return the directory containing installed Python header files. + +    If 'plat_specific' is false (the default), this is the path to the +    non-platform-specific header files, i.e. Python.h and so on; +    otherwise, this is the path to platform-specific header files +    (namely pyconfig.h). + +    If 'prefix' is supplied, use it instead of sys.base_prefix or +    sys.base_exec_prefix -- i.e., ignore 'plat_specific'. 
+    """ +    default_prefix = BASE_EXEC_PREFIX if plat_specific else BASE_PREFIX +    resolved_prefix = prefix if prefix is not None else default_prefix +    try: +        getter = globals()[f'_get_python_inc_{os.name}'] +    except KeyError: +        raise DistutilsPlatformError( +            "I don't know where Python installs its C header files " +            "on platform '%s'" % os.name +        ) +    return getter(resolved_prefix, prefix, plat_specific) + + +def _get_python_inc_posix(prefix, spec_prefix, plat_specific): +    if IS_PYPY and sys.version_info < (3, 8): +        return os.path.join(prefix, 'include') +    return ( +        _get_python_inc_posix_python(plat_specific) +        or _get_python_inc_from_config(plat_specific, spec_prefix) +        or _get_python_inc_posix_prefix(prefix) +    ) + + +def _get_python_inc_posix_python(plat_specific): +    """ +    Assume the executable is in the build directory. The +    pyconfig.h file should be in the same directory. Since +    the build directory may not be the source directory, +    use "srcdir" from the makefile to find the "Include" +    directory. +    """ +    if not python_build: +        return +    if plat_specific: +        return _sys_home or project_base +    incdir = os.path.join(get_config_var('srcdir'), 'Include') +    return os.path.normpath(incdir) + + +def _get_python_inc_from_config(plat_specific, spec_prefix): +    """ +    If no prefix was explicitly specified, provide the include +    directory from the config vars. Useful when +    cross-compiling, since the config vars may come from +    the host +    platform Python installation, while the current Python +    executable is from the build platform installation. + +    >>> monkeypatch = getfixture('monkeypatch') +    >>> gpifc = _get_python_inc_from_config +    >>> monkeypatch.setitem(gpifc.__globals__, 'get_config_var', str.lower) +    >>> gpifc(False, '/usr/bin/') +    >>> gpifc(False, '') +    >>> gpifc(False, None) +    'includepy' +    >>> gpifc(True, None) +    'confincludepy' +    """ +    if spec_prefix is None: +        return get_config_var('CONF' * plat_specific + 'INCLUDEPY') + + +def _get_python_inc_posix_prefix(prefix): +    implementation = 'pypy' if IS_PYPY else 'python' +    python_dir = implementation + get_python_version() + build_flags +    return os.path.join(prefix, "include", python_dir) + + +def _get_python_inc_nt(prefix, spec_prefix, plat_specific): +    if python_build: +        # Include both the include and PC dir to ensure we can find +        # pyconfig.h +        return ( +            os.path.join(prefix, "include") +            + os.path.pathsep +            + os.path.join(prefix, "PC") +        ) +    return os.path.join(prefix, "include") + + +# allow this behavior to be monkey-patched. Ref pypa/distutils#2. +def _posix_lib(standard_lib, libpython, early_prefix, prefix): +    if standard_lib: +        return libpython +    else: +        return os.path.join(libpython, "site-packages") + + +def get_python_lib(plat_specific=0, standard_lib=0, prefix=None): +    """Return the directory containing the Python library (standard or +    site additions). + +    If 'plat_specific' is true, return the directory containing +    platform-specific modules, i.e. any module from a non-pure-Python +    module distribution; otherwise, return the platform-shared library +    directory.  
If 'standard_lib' is true, return the directory +    containing standard Python library modules; otherwise, return the +    directory for site-specific modules. + +    If 'prefix' is supplied, use it instead of sys.base_prefix or +    sys.base_exec_prefix -- i.e., ignore 'plat_specific'. +    """ + +    if IS_PYPY and sys.version_info < (3, 8): +        # PyPy-specific schema +        if prefix is None: +            prefix = PREFIX +        if standard_lib: +            return os.path.join(prefix, "lib-python", sys.version[0]) +        return os.path.join(prefix, 'site-packages') + +    early_prefix = prefix + +    if prefix is None: +        if standard_lib: +            prefix = plat_specific and BASE_EXEC_PREFIX or BASE_PREFIX +        else: +            prefix = plat_specific and EXEC_PREFIX or PREFIX + +    if os.name == "posix": +        if plat_specific or standard_lib: +            # Platform-specific modules (any module from a non-pure-Python +            # module distribution) or standard Python library modules. +            libdir = getattr(sys, "platlibdir", "lib") +        else: +            # Pure Python +            libdir = "lib" +        implementation = 'pypy' if IS_PYPY else 'python' +        libpython = os.path.join(prefix, libdir, implementation + get_python_version()) +        return _posix_lib(standard_lib, libpython, early_prefix, prefix) +    elif os.name == "nt": +        if standard_lib: +            return os.path.join(prefix, "Lib") +        else: +            return os.path.join(prefix, "Lib", "site-packages") +    else: +        raise DistutilsPlatformError( +            "I don't know where Python installs its library " +            "on platform '%s'" % os.name +        ) + + +def customize_compiler(compiler):  # noqa: C901 +    """Do any platform-specific customization of a CCompiler instance. + +    Mainly needed on Unix, so we can plug in the information that +    varies across Unices and is stored in Python's Makefile. +    """ +    if compiler.compiler_type == "unix": +        if sys.platform == "darwin": +            # Perform first-time customization of compiler-related +            # config vars on OS X now that we know we need a compiler. +            # This is primarily to support Pythons from binary +            # installers.  The kind and paths to build tools on +            # the user system may vary significantly from the system +            # that Python itself was built on.  Also the user OS +            # version and build tools may not support the same set +            # of CPU architectures for universal builds. +            global _config_vars +            # Use get_config_var() to ensure _config_vars is initialized. 
+            if not get_config_var('CUSTOMIZED_OSX_COMPILER'): +                import _osx_support + +                _osx_support.customize_compiler(_config_vars) +                _config_vars['CUSTOMIZED_OSX_COMPILER'] = 'True' + +        ( +            cc, +            cxx, +            cflags, +            ccshared, +            ldshared, +            shlib_suffix, +            ar, +            ar_flags, +        ) = get_config_vars( +            'CC', +            'CXX', +            'CFLAGS', +            'CCSHARED', +            'LDSHARED', +            'SHLIB_SUFFIX', +            'AR', +            'ARFLAGS', +        ) + +        if 'CC' in os.environ: +            newcc = os.environ['CC'] +            if 'LDSHARED' not in os.environ and ldshared.startswith(cc): +                # If CC is overridden, use that as the default +                #       command for LDSHARED as well +                ldshared = newcc + ldshared[len(cc) :] +            cc = newcc +        if 'CXX' in os.environ: +            cxx = os.environ['CXX'] +        if 'LDSHARED' in os.environ: +            ldshared = os.environ['LDSHARED'] +        if 'CPP' in os.environ: +            cpp = os.environ['CPP'] +        else: +            cpp = cc + " -E"  # not always +        if 'LDFLAGS' in os.environ: +            ldshared = ldshared + ' ' + os.environ['LDFLAGS'] +        if 'CFLAGS' in os.environ: +            cflags = cflags + ' ' + os.environ['CFLAGS'] +            ldshared = ldshared + ' ' + os.environ['CFLAGS'] +        if 'CPPFLAGS' in os.environ: +            cpp = cpp + ' ' + os.environ['CPPFLAGS'] +            cflags = cflags + ' ' + os.environ['CPPFLAGS'] +            ldshared = ldshared + ' ' + os.environ['CPPFLAGS'] +        if 'AR' in os.environ: +            ar = os.environ['AR'] +        if 'ARFLAGS' in os.environ: +            archiver = ar + ' ' + os.environ['ARFLAGS'] +        else: +            archiver = ar + ' ' + ar_flags + +        cc_cmd = cc + ' ' + cflags +        compiler.set_executables( +            preprocessor=cpp, +            compiler=cc_cmd, +            compiler_so=cc_cmd + ' ' + ccshared, +            compiler_cxx=cxx, +            linker_so=ldshared, +            linker_exe=cc, +            archiver=archiver, +        ) + +        if 'RANLIB' in os.environ and compiler.executables.get('ranlib', None): +            compiler.set_executables(ranlib=os.environ['RANLIB']) + +        compiler.shared_lib_extension = shlib_suffix + + +def get_config_h_filename(): +    """Return full pathname of installed pyconfig.h file.""" +    if python_build: +        if os.name == "nt": +            inc_dir = os.path.join(_sys_home or project_base, "PC") +        else: +            inc_dir = _sys_home or project_base +        return os.path.join(inc_dir, 'pyconfig.h') +    else: +        return sysconfig.get_config_h_filename() + + +def get_makefile_filename(): +    """Return full pathname of installed Makefile from the Python build.""" +    return sysconfig.get_makefile_filename() + + +def parse_config_h(fp, g=None): +    """Parse a config.h-style file. + +    A dictionary containing name/value pairs is returned.  If an +    optional dictionary is passed in as the second argument, it is +    used instead of a new dictionary. +    """ +    return sysconfig.parse_config_h(fp, vars=g) + + +# Regexes needed for parsing Makefile (and similar syntaxes, +# like old-style Setup files). 
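An illustrative aside, not part of the vendored file or of this diff: the three patterns defined just below are meant to pick apart Makefile text roughly as follows; the sample strings are made up.

import re

# Same patterns as the definitions that follow, repeated here only for the demo.
variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")

print(variable_rx.match("CC = gcc").group(1, 2))                  # ('CC', 'gcc')
print(findvar1_rx.search("LDSHARED = $(CC) -shared").group(1))    # 'CC'
print(findvar2_rx.search("PY_CFLAGS = ${CFLAGS} -O2").group(1))   # 'CFLAGS'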
+_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") +_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") +_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + +def parse_makefile(fn, g=None):  # noqa: C901 +    """Parse a Makefile-style file. + +    A dictionary containing name/value pairs is returned.  If an +    optional dictionary is passed in as the second argument, it is +    used instead of a new dictionary. +    """ +    from distutils.text_file import TextFile + +    fp = TextFile( +        fn, strip_comments=1, skip_blanks=1, join_lines=1, errors="surrogateescape" +    ) + +    if g is None: +        g = {} +    done = {} +    notdone = {} + +    while True: +        line = fp.readline() +        if line is None:  # eof +            break +        m = _variable_rx.match(line) +        if m: +            n, v = m.group(1, 2) +            v = v.strip() +            # `$$' is a literal `$' in make +            tmpv = v.replace('$$', '') + +            if "$" in tmpv: +                notdone[n] = v +            else: +                try: +                    v = int(v) +                except ValueError: +                    # insert literal `$' +                    done[n] = v.replace('$$', '$') +                else: +                    done[n] = v + +    # Variables with a 'PY_' prefix in the makefile. These need to +    # be made available without that prefix through sysconfig. +    # Special care is needed to ensure that variable expansion works, even +    # if the expansion uses the name without a prefix. +    renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + +    # do variable interpolation here +    while notdone: +        for name in list(notdone): +            value = notdone[name] +            m = _findvar1_rx.search(value) or _findvar2_rx.search(value) +            if m: +                n = m.group(1) +                found = True +                if n in done: +                    item = str(done[n]) +                elif n in notdone: +                    # get it on a subsequent round +                    found = False +                elif n in os.environ: +                    # do it like make: fall back to environment +                    item = os.environ[n] + +                elif n in renamed_variables: +                    if name.startswith('PY_') and name[3:] in renamed_variables: +                        item = "" + +                    elif 'PY_' + n in notdone: +                        found = False + +                    else: +                        item = str(done['PY_' + n]) +                else: +                    done[n] = item = "" +                if found: +                    after = value[m.end() :] +                    value = value[: m.start()] + item + after +                    if "$" in after: +                        notdone[name] = value +                    else: +                        try: +                            value = int(value) +                        except ValueError: +                            done[name] = value.strip() +                        else: +                            done[name] = value +                        del notdone[name] + +                        if name.startswith('PY_') and name[3:] in renamed_variables: + +                            name = name[3:] +                            if name not in done: +                                done[name] = value +            else: +                # bogus variable reference; just drop it since we can't deal +          
      del notdone[name] + +    fp.close() + +    # strip spurious spaces +    for k, v in done.items(): +        if isinstance(v, str): +            done[k] = v.strip() + +    # save the results in the global dictionary +    g.update(done) +    return g + + +def expand_makefile_vars(s, vars): +    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in +    'string' according to 'vars' (a dictionary mapping variable names to +    values).  Variables not present in 'vars' are silently expanded to the +    empty string.  The variable values in 'vars' should not contain further +    variable expansions; if 'vars' is the output of 'parse_makefile()', +    you're fine.  Returns a variable-expanded version of 's'. +    """ + +    # This algorithm does multiple expansion, so if vars['foo'] contains +    # "${bar}", it will expand ${foo} to ${bar}, and then expand +    # ${bar}... and so forth.  This is fine as long as 'vars' comes from +    # 'parse_makefile()', which takes care of such expansions eagerly, +    # according to make's variable expansion semantics. + +    while True: +        m = _findvar1_rx.search(s) or _findvar2_rx.search(s) +        if m: +            (beg, end) = m.span() +            s = s[0:beg] + vars.get(m.group(1)) + s[end:] +        else: +            break +    return s + + +_config_vars = None + + +def get_config_vars(*args): +    """With no arguments, return a dictionary of all configuration +    variables relevant for the current platform.  Generally this includes +    everything needed to build extensions and install both pure modules and +    extensions.  On Unix, this means every variable defined in Python's +    installed Makefile; on Windows it's a much smaller set. + +    With arguments, return a list of values that result from looking up +    each argument in the configuration variable dictionary. +    """ +    global _config_vars +    if _config_vars is None: +        _config_vars = sysconfig.get_config_vars().copy() +        py39compat.add_ext_suffix(_config_vars) + +    if args: +        vals = [] +        for name in args: +            vals.append(_config_vars.get(name)) +        return vals +    else: +        return _config_vars + + +def get_config_var(name): +    """Return the value of a single variable using the dictionary +    returned by 'get_config_vars()'.  Equivalent to +    get_config_vars().get(name) +    """ +    if name == 'SO': +        import warnings + +        warnings.warn('SO is deprecated, use EXT_SUFFIX', DeprecationWarning, 2) +    return get_config_vars().get(name) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/text_file.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/text_file.py new file mode 100644 index 0000000..7274d4b --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/text_file.py @@ -0,0 +1,287 @@ +"""text_file + +provides the TextFile class, which gives an interface to text files +that (optionally) takes care of stripping comments, ignoring blank +lines, and joining lines with backslashes.""" + +import sys + + +class TextFile: +    """Provides a file-like object that takes care of all the things you +    commonly want to do when processing a text file that has some +    line-by-line syntax: strip comments (as long as "#" is your +    comment character), skip blank lines, join adjacent lines by +    escaping the newline (ie. backslash at end of line), strip +    leading and/or trailing whitespace.  All of these are optional +    and independently controllable. 
+ +    Provides a 'warn()' method so you can generate warning messages that +    report physical line number, even if the logical line in question +    spans multiple physical lines.  Also provides 'unreadline()' for +    implementing line-at-a-time lookahead. + +    Constructor is called as: + +        TextFile (filename=None, file=None, **options) + +    It bombs (RuntimeError) if both 'filename' and 'file' are None; +    'filename' should be a string, and 'file' a file object (or +    something that provides 'readline()' and 'close()' methods).  It is +    recommended that you supply at least 'filename', so that TextFile +    can include it in warning messages.  If 'file' is not supplied, +    TextFile creates its own using 'io.open()'. + +    The options are all boolean, and affect the value returned by +    'readline()': +      strip_comments [default: true] +        strip from "#" to end-of-line, as well as any whitespace +        leading up to the "#" -- unless it is escaped by a backslash +      lstrip_ws [default: false] +        strip leading whitespace from each line before returning it +      rstrip_ws [default: true] +        strip trailing whitespace (including line terminator!) from +        each line before returning it +      skip_blanks [default: true] +        skip lines that are empty *after* stripping comments and +        whitespace.  (If both lstrip_ws and rstrip_ws are false, +        then some lines may consist of solely whitespace: these will +        *not* be skipped, even if 'skip_blanks' is true.) +      join_lines [default: false] +        if a backslash is the last non-newline character on a line +        after stripping comments and whitespace, join the following line +        to it to form one "logical line"; if N consecutive lines end +        with a backslash, then N+1 physical lines will be joined to +        form one logical line. +      collapse_join [default: false] +        strip leading whitespace from lines that are joined to their +        predecessor; only matters if (join_lines and not lstrip_ws) +      errors [default: 'strict'] +        error handler used to decode the file content + +    Note that since 'rstrip_ws' can strip the trailing newline, the +    semantics of 'readline()' must differ from those of the builtin file +    object's 'readline()' method!  In particular, 'readline()' returns +    None for end-of-file: an empty string might just be a blank line (or +    an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is +    not.""" + +    default_options = { +        'strip_comments': 1, +        'skip_blanks': 1, +        'lstrip_ws': 0, +        'rstrip_ws': 1, +        'join_lines': 0, +        'collapse_join': 0, +        'errors': 'strict', +    } + +    def __init__(self, filename=None, file=None, **options): +        """Construct a new TextFile object.  At least one of 'filename' +        (a string) and 'file' (a file-like object) must be supplied. 
+        The keyword argument options are described above and affect +        the values returned by 'readline()'.""" +        if filename is None and file is None: +            raise RuntimeError( +                "you must supply either or both of 'filename' and 'file'" +            ) + +        # set values for all options -- either from client option hash +        # or fall back to default_options +        for opt in self.default_options.keys(): +            if opt in options: +                setattr(self, opt, options[opt]) +            else: +                setattr(self, opt, self.default_options[opt]) + +        # sanity check client option hash +        for opt in options.keys(): +            if opt not in self.default_options: +                raise KeyError("invalid TextFile option '%s'" % opt) + +        if file is None: +            self.open(filename) +        else: +            self.filename = filename +            self.file = file +            self.current_line = 0  # assuming that file is at BOF! + +        # 'linebuf' is a stack of lines that will be emptied before we +        # actually read from the file; it's only populated by an +        # 'unreadline()' operation +        self.linebuf = [] + +    def open(self, filename): +        """Open a new file named 'filename'.  This overrides both the +        'filename' and 'file' arguments to the constructor.""" +        self.filename = filename +        self.file = open(self.filename, errors=self.errors) +        self.current_line = 0 + +    def close(self): +        """Close the current file and forget everything we know about it +        (filename, current line number).""" +        file = self.file +        self.file = None +        self.filename = None +        self.current_line = None +        file.close() + +    def gen_error(self, msg, line=None): +        outmsg = [] +        if line is None: +            line = self.current_line +        outmsg.append(self.filename + ", ") +        if isinstance(line, (list, tuple)): +            outmsg.append("lines %d-%d: " % tuple(line)) +        else: +            outmsg.append("line %d: " % line) +        outmsg.append(str(msg)) +        return "".join(outmsg) + +    def error(self, msg, line=None): +        raise ValueError("error: " + self.gen_error(msg, line)) + +    def warn(self, msg, line=None): +        """Print (to stderr) a warning message tied to the current logical +        line in the current file.  If the current logical line in the +        file spans multiple physical lines, the warning refers to the +        whole range, eg. "lines 3-5".  If 'line' is supplied, it overrides +        the current line number; it may be a list or tuple to indicate a +        range of physical lines, or an integer for a single physical +        line.""" +        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n") + +    def readline(self):  # noqa: C901 +        """Read and return a single logical line from the current file (or +        from an internal buffer if lines have previously been "unread" +        with 'unreadline()').  If the 'join_lines' option is true, this +        may involve reading multiple physical lines concatenated into a +        single string.  Updates the current line number, so calling +        'warn()' after 'readline()' emits a warning about the physical +        line(s) just read.  
Returns None on end-of-file, since the empty +        string can occur if 'rstrip_ws' is true but 'skip_blanks' is +        not.""" +        # If any "unread" lines waiting in 'linebuf', return the top +        # one.  (We don't actually buffer read-ahead data -- lines only +        # get put in 'linebuf' if the client explicitly does an +        # 'unreadline()'.) +        if self.linebuf: +            line = self.linebuf[-1] +            del self.linebuf[-1] +            return line + +        buildup_line = '' + +        while True: +            # read the line, make it None if EOF +            line = self.file.readline() +            if line == '': +                line = None + +            if self.strip_comments and line: + +                # Look for the first "#" in the line.  If none, never +                # mind.  If we find one and it's the first character, or +                # is not preceded by "\", then it starts a comment -- +                # strip the comment, strip whitespace before it, and +                # carry on.  Otherwise, it's just an escaped "#", so +                # unescape it (and any other escaped "#"'s that might be +                # lurking in there) and otherwise leave the line alone. + +                pos = line.find("#") +                if pos == -1:  # no "#" -- no comments +                    pass + +                # It's definitely a comment -- either "#" is the first +                # character, or it's elsewhere and unescaped. +                elif pos == 0 or line[pos - 1] != "\\": +                    # Have to preserve the trailing newline, because it's +                    # the job of a later step (rstrip_ws) to remove it -- +                    # and if rstrip_ws is false, we'd better preserve it! +                    # (NB. this means that if the final line is all comment +                    # and has no trailing newline, we will think that it's +                    # EOF; I think that's OK.) +                    eol = (line[-1] == '\n') and '\n' or '' +                    line = line[0:pos] + eol + +                    # If all that's left is whitespace, then skip line +                    # *now*, before we try to join it to 'buildup_line' -- +                    # that way constructs like +                    #   hello \\ +                    #   # comment that should be ignored +                    #   there +                    # result in "hello there". +                    if line.strip() == "": +                        continue +                else:  # it's an escaped "#" +                    line = line.replace("\\#", "#") + +            # did previous line end with a backslash? 
then accumulate +            if self.join_lines and buildup_line: +                # oops: end of file +                if line is None: +                    self.warn("continuation line immediately precedes " "end-of-file") +                    return buildup_line + +                if self.collapse_join: +                    line = line.lstrip() +                line = buildup_line + line + +                # careful: pay attention to line number when incrementing it +                if isinstance(self.current_line, list): +                    self.current_line[1] = self.current_line[1] + 1 +                else: +                    self.current_line = [self.current_line, self.current_line + 1] +            # just an ordinary line, read it as usual +            else: +                if line is None:  # eof +                    return None + +                # still have to be careful about incrementing the line number! +                if isinstance(self.current_line, list): +                    self.current_line = self.current_line[1] + 1 +                else: +                    self.current_line = self.current_line + 1 + +            # strip whitespace however the client wants (leading and +            # trailing, or one or the other, or neither) +            if self.lstrip_ws and self.rstrip_ws: +                line = line.strip() +            elif self.lstrip_ws: +                line = line.lstrip() +            elif self.rstrip_ws: +                line = line.rstrip() + +            # blank line (whether we rstrip'ed or not)? skip to next line +            # if appropriate +            if (line == '' or line == '\n') and self.skip_blanks: +                continue + +            if self.join_lines: +                if line[-1] == '\\': +                    buildup_line = line[:-1] +                    continue + +                if line[-2:] == '\\\n': +                    buildup_line = line[0:-2] + '\n' +                    continue + +            # well, I guess there's some actual content there: return it +            return line + +    def readlines(self): +        """Read and return the list of all logical lines remaining in the +        current file.""" +        lines = [] +        while True: +            line = self.readline() +            if line is None: +                return lines +            lines.append(line) + +    def unreadline(self, line): +        """Push 'line' (a string) onto an internal buffer that will be +        checked by future 'readline()' calls.  
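An illustrative sketch, not part of the vendored file: a minimal, hypothetical use of the readline()/readlines() behaviour described above (comment stripping, blank skipping, backslash joining). The sample text and the "<sample>" filename are made up.

from io import StringIO
from distutils.text_file import TextFile

# Made-up input: a comment line, a backslash-continued logical line, a blank line.
sample = StringIO("# ignored\nfoo = 1 \\\n      2\n\nbar = 3\n")
tf = TextFile(file=sample, filename="<sample>", join_lines=1, collapse_join=1)
print(tf.readlines())   # expected: ['foo = 1 2', 'bar = 3']
tf.close()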
Handy for implementing +        a parser with line-at-a-time lookahead.""" +        self.linebuf.append(line) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/unixccompiler.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/unixccompiler.py new file mode 100644 index 0000000..4ab771a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/unixccompiler.py @@ -0,0 +1,401 @@ +"""distutils.unixccompiler + +Contains the UnixCCompiler class, a subclass of CCompiler that handles +the "typical" Unix-style command-line C compiler: +  * macros defined with -Dname[=value] +  * macros undefined with -Uname +  * include search directories specified with -Idir +  * libraries specified with -lllib +  * library search directories specified with -Ldir +  * compile handled by 'cc' (or similar) executable with -c option: +    compiles .c to .o +  * link static library handled by 'ar' command (possibly with 'ranlib') +  * link shared library handled by 'cc -shared' +""" + +import os +import sys +import re +import shlex +import itertools + +from distutils import sysconfig +from distutils.dep_util import newer +from distutils.ccompiler import CCompiler, gen_preprocess_options, gen_lib_options +from distutils.errors import DistutilsExecError, CompileError, LibError, LinkError +from distutils import log +from ._macos_compat import compiler_fixup + +# XXX Things not currently handled: +#   * optimization/debug/warning flags; we just use whatever's in Python's +#     Makefile and live with it.  Is this adequate?  If not, we might +#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler, +#     SunCCompiler, and I suspect down that road lies madness. +#   * even if we don't know a warning flag from an optimization flag, +#     we need some way for outsiders to feed preprocessor/compiler/linker +#     flags in to us -- eg. a sysadmin might want to mandate certain flags +#     via a site config file, or a user might want to set something for +#     compiling this module distribution only via the setup.py command +#     line, whatever.  As long as these options come from something on the +#     current system, they can be as system-dependent as they like, and we +#     should just happily stuff them into the preprocessor/compiler/linker +#     options and carry on. + + +def _split_env(cmd): +    """ +    For macOS, split command into 'env' portion (if any) +    and the rest of the linker command. + +    >>> _split_env(['a', 'b', 'c']) +    ([], ['a', 'b', 'c']) +    >>> _split_env(['/usr/bin/env', 'A=3', 'gcc']) +    (['/usr/bin/env', 'A=3'], ['gcc']) +    """ +    pivot = 0 +    if os.path.basename(cmd[0]) == "env": +        pivot = 1 +        while '=' in cmd[pivot]: +            pivot += 1 +    return cmd[:pivot], cmd[pivot:] + + +def _split_aix(cmd): +    """ +    AIX platforms prefix the compiler with the ld_so_aix +    script, so split that from the linker command. + +    >>> _split_aix(['a', 'b', 'c']) +    ([], ['a', 'b', 'c']) +    >>> _split_aix(['/bin/foo/ld_so_aix', 'gcc']) +    (['/bin/foo/ld_so_aix'], ['gcc']) +    """ +    pivot = os.path.basename(cmd[0]) == 'ld_so_aix' +    return cmd[:pivot], cmd[pivot:] + + +def _linker_params(linker_cmd, compiler_cmd): +    """ +    The linker command usually begins with the compiler +    command (possibly multiple elements), followed by zero or more +    params for shared library building. + +    If the LDSHARED env variable overrides the linker command, +    however, the commands may not match. 
+ +    Return the best guess of the linker parameters by stripping +    the linker command. If the compiler command does not +    match the linker command, assume the linker command is +    just the first element. + +    >>> _linker_params('gcc foo bar'.split(), ['gcc']) +    ['foo', 'bar'] +    >>> _linker_params('gcc foo bar'.split(), ['other']) +    ['foo', 'bar'] +    >>> _linker_params('ccache gcc foo bar'.split(), 'ccache gcc'.split()) +    ['foo', 'bar'] +    >>> _linker_params(['gcc'], ['gcc']) +    [] +    """ +    c_len = len(compiler_cmd) +    pivot = c_len if linker_cmd[:c_len] == compiler_cmd else 1 +    return linker_cmd[pivot:] + + +class UnixCCompiler(CCompiler): + +    compiler_type = 'unix' + +    # These are used by CCompiler in two places: the constructor sets +    # instance attributes 'preprocessor', 'compiler', etc. from them, and +    # 'set_executable()' allows any of these to be set.  The defaults here +    # are pretty generic; they will probably have to be set by an outsider +    # (eg. using information discovered by the sysconfig about building +    # Python extensions). +    executables = { +        'preprocessor': None, +        'compiler': ["cc"], +        'compiler_so': ["cc"], +        'compiler_cxx': ["cc"], +        'linker_so': ["cc", "-shared"], +        'linker_exe': ["cc"], +        'archiver': ["ar", "-cr"], +        'ranlib': None, +    } + +    if sys.platform[:6] == "darwin": +        executables['ranlib'] = ["ranlib"] + +    # Needed for the filename generation methods provided by the base +    # class, CCompiler.  NB. whoever instantiates/uses a particular +    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a +    # reasonable common default here, but it's not necessarily used on all +    # Unices! 
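An illustrative sketch, not part of the vendored file: with the default extensions and formats defined just below, the filename helper inherited from CCompiler behaves roughly as follows; the expected values in the comments are the editor's assumption, not asserted by the source.

from distutils.unixccompiler import UnixCCompiler

cc = UnixCCompiler()
print(cc.library_filename("foo"))                     # 'libfoo.a'   (static is the default lib_type)
print(cc.library_filename("foo", lib_type="shared"))  # 'libfoo.so'
print(cc.library_filename("foo", lib_type="dylib"))   # 'libfoo.dylib'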
+ +    src_extensions = [".c", ".C", ".cc", ".cxx", ".cpp", ".m"] +    obj_extension = ".o" +    static_lib_extension = ".a" +    shared_lib_extension = ".so" +    dylib_lib_extension = ".dylib" +    xcode_stub_lib_extension = ".tbd" +    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s" +    xcode_stub_lib_format = dylib_lib_format +    if sys.platform == "cygwin": +        exe_extension = ".exe" + +    def preprocess( +        self, +        source, +        output_file=None, +        macros=None, +        include_dirs=None, +        extra_preargs=None, +        extra_postargs=None, +    ): +        fixed_args = self._fix_compile_args(None, macros, include_dirs) +        ignore, macros, include_dirs = fixed_args +        pp_opts = gen_preprocess_options(macros, include_dirs) +        pp_args = self.preprocessor + pp_opts +        if output_file: +            pp_args.extend(['-o', output_file]) +        if extra_preargs: +            pp_args[:0] = extra_preargs +        if extra_postargs: +            pp_args.extend(extra_postargs) +        pp_args.append(source) + +        # reasons to preprocess: +        # - force is indicated +        # - output is directed to stdout +        # - source file is newer than the target +        preprocess = self.force or output_file is None or newer(source, output_file) +        if not preprocess: +            return + +        if output_file: +            self.mkpath(os.path.dirname(output_file)) + +        try: +            self.spawn(pp_args) +        except DistutilsExecError as msg: +            raise CompileError(msg) + +    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts): +        compiler_so = compiler_fixup(self.compiler_so, cc_args + extra_postargs) +        try: +            self.spawn(compiler_so + cc_args + [src, '-o', obj] + extra_postargs) +        except DistutilsExecError as msg: +            raise CompileError(msg) + +    def create_static_lib( +        self, objects, output_libname, output_dir=None, debug=0, target_lang=None +    ): +        objects, output_dir = self._fix_object_args(objects, output_dir) + +        output_filename = self.library_filename(output_libname, output_dir=output_dir) + +        if self._need_link(objects, output_filename): +            self.mkpath(os.path.dirname(output_filename)) +            self.spawn(self.archiver + [output_filename] + objects + self.objects) + +            # Not many Unices required ranlib anymore -- SunOS 4.x is, I +            # think the only major Unix that does.  Maybe we need some +            # platform intelligence here to skip ranlib if it's not +            # needed -- or maybe Python's configure script took care of +            # it for us, hence the check for leading colon. 
+            if self.ranlib: +                try: +                    self.spawn(self.ranlib + [output_filename]) +                except DistutilsExecError as msg: +                    raise LibError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    def link( +        self, +        target_desc, +        objects, +        output_filename, +        output_dir=None, +        libraries=None, +        library_dirs=None, +        runtime_library_dirs=None, +        export_symbols=None, +        debug=0, +        extra_preargs=None, +        extra_postargs=None, +        build_temp=None, +        target_lang=None, +    ): +        objects, output_dir = self._fix_object_args(objects, output_dir) +        fixed_args = self._fix_lib_args(libraries, library_dirs, runtime_library_dirs) +        libraries, library_dirs, runtime_library_dirs = fixed_args + +        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs, libraries) +        if not isinstance(output_dir, (str, type(None))): +            raise TypeError("'output_dir' must be a string or None") +        if output_dir is not None: +            output_filename = os.path.join(output_dir, output_filename) + +        if self._need_link(objects, output_filename): +            ld_args = objects + self.objects + lib_opts + ['-o', output_filename] +            if debug: +                ld_args[:0] = ['-g'] +            if extra_preargs: +                ld_args[:0] = extra_preargs +            if extra_postargs: +                ld_args.extend(extra_postargs) +            self.mkpath(os.path.dirname(output_filename)) +            try: +                # Select a linker based on context: linker_exe when +                # building an executable or linker_so (with shared options) +                # when building a shared library. +                building_exe = target_desc == CCompiler.EXECUTABLE +                linker = (self.linker_exe if building_exe else self.linker_so)[:] + +                if target_lang == "c++" and self.compiler_cxx: +                    env, linker_ne = _split_env(linker) +                    aix, linker_na = _split_aix(linker_ne) +                    _, compiler_cxx_ne = _split_env(self.compiler_cxx) +                    _, linker_exe_ne = _split_env(self.linker_exe) + +                    params = _linker_params(linker_na, linker_exe_ne) +                    linker = env + aix + compiler_cxx_ne + params + +                linker = compiler_fixup(linker, ld_args) + +                self.spawn(linker + ld_args) +            except DistutilsExecError as msg: +                raise LinkError(msg) +        else: +            log.debug("skipping %s (up-to-date)", output_filename) + +    # -- Miscellaneous methods ----------------------------------------- +    # These are all used by the 'gen_lib_options() function, in +    # ccompiler.py. + +    def library_dir_option(self, dir): +        return "-L" + dir + +    def _is_gcc(self): +        cc_var = sysconfig.get_config_var("CC") +        compiler = os.path.basename(shlex.split(cc_var)[0]) +        return "gcc" in compiler or "g++" in compiler + +    def runtime_library_dir_option(self, dir): +        # XXX Hackish, at the very least.  
See Python bug #445902: +        # http://sourceforge.net/tracker/index.php +        #   ?func=detail&aid=445902&group_id=5470&atid=105470 +        # Linkers on different platforms need different options to +        # specify that directories need to be added to the list of +        # directories searched for dependencies when a dynamic library +        # is sought.  GCC on GNU systems (Linux, FreeBSD, ...) has to +        # be told to pass the -R option through to the linker, whereas +        # other compilers and gcc on other systems just know this. +        # Other compilers may need something slightly different.  At +        # this time, there's no way to determine this information from +        # the configuration data stored in the Python installation, so +        # we use this hack. +        if sys.platform[:6] == "darwin": +            from distutils.util import get_macosx_target_ver, split_version + +            macosx_target_ver = get_macosx_target_ver() +            if macosx_target_ver and split_version(macosx_target_ver) >= [10, 5]: +                return "-Wl,-rpath," + dir +            else:  # no support for -rpath on earlier macOS versions +                return "-L" + dir +        elif sys.platform[:7] == "freebsd": +            return "-Wl,-rpath=" + dir +        elif sys.platform[:5] == "hp-ux": +            return [ +                "-Wl,+s" if self._is_gcc() else "+s", +                "-L" + dir, +            ] + +        # For all compilers, `-Wl` is the presumed way to +        # pass a compiler option to the linker and `-R` is +        # the way to pass an RPATH. +        if sysconfig.get_config_var("GNULD") == "yes": +            # GNU ld needs an extra option to get a RUNPATH +            # instead of just an RPATH. +            return "-Wl,--enable-new-dtags,-R" + dir +        else: +            return "-Wl,-R" + dir + +    def library_option(self, lib): +        return "-l" + lib + +    @staticmethod +    def _library_root(dir): +        """ +        macOS users can specify an alternate SDK using'-isysroot'. +        Calculate the SDK root if it is specified. + +        Note that, as of Xcode 7, Apple SDKs may contain textual stub +        libraries with .tbd extensions rather than the normal .dylib +        shared libraries installed in /.  The Apple compiler tool +        chain handles this transparently but it can cause problems +        for programs that are being built with an SDK and searching +        for specific libraries.  
Callers of find_library_file need to +        keep in mind that the base filename of the returned SDK library +        file might have a different extension from that of the library +        file installed on the running system, for example: +          /Applications/Xcode.app/Contents/Developer/Platforms/ +              MacOSX.platform/Developer/SDKs/MacOSX10.11.sdk/ +              usr/lib/libedit.tbd +        vs +          /usr/lib/libedit.dylib +        """ +        cflags = sysconfig.get_config_var('CFLAGS') +        match = re.search(r'-isysroot\s*(\S+)', cflags) + +        apply_root = ( +            sys.platform == 'darwin' +            and match +            and ( +                dir.startswith('/System/') +                or (dir.startswith('/usr/') and not dir.startswith('/usr/local/')) +            ) +        ) + +        return os.path.join(match.group(1), dir[1:]) if apply_root else dir + +    def find_library_file(self, dirs, lib, debug=0): +        r""" +        Second-guess the linker with not much hard +        data to go on: GCC seems to prefer the shared library, so +        assume that *all* Unix C compilers do, +        ignoring even GCC's "-static" option. + +        >>> compiler = UnixCCompiler() +        >>> compiler._library_root = lambda dir: dir +        >>> monkeypatch = getfixture('monkeypatch') +        >>> monkeypatch.setattr(os.path, 'exists', lambda d: 'existing' in d) +        >>> dirs = ('/foo/bar/missing', '/foo/bar/existing') +        >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/') +        '/foo/bar/existing/libabc.dylib' +        >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/') +        '/foo/bar/existing/libabc.dylib' +        >>> monkeypatch.setattr(os.path, 'exists', +        ...     lambda d: 'existing' in d and '.a' in d) +        >>> compiler.find_library_file(dirs, 'abc').replace('\\', '/') +        '/foo/bar/existing/libabc.a' +        >>> compiler.find_library_file(reversed(dirs), 'abc').replace('\\', '/') +        '/foo/bar/existing/libabc.a' +        """ +        lib_names = ( +            self.library_filename(lib, lib_type=type) +            for type in 'dylib xcode_stub shared static'.split() +        ) + +        roots = map(self._library_root, dirs) + +        searched = ( +            os.path.join(root, lib_name) +            for root, lib_name in itertools.product(roots, lib_names) +        ) + +        found = filter(os.path.exists, searched) + +        # Return None if it could not be found in any dir. +        return next(found, None) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/util.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/util.py new file mode 100644 index 0000000..4763202 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/util.py @@ -0,0 +1,513 @@ +"""distutils.util + +Miscellaneous utility functions -- anything that doesn't fit into +one of the other *util.py modules. +""" + +import importlib.util +import os +import re +import string +import subprocess +import sys +import sysconfig +import functools + +from distutils.errors import DistutilsPlatformError, DistutilsByteCompileError +from distutils.dep_util import newer +from distutils.spawn import spawn +from distutils import log + + +def get_host_platform(): +    """ +    Return a string that identifies the current platform. Use this +    function to distinguish platform-specific build directories and +    platform-specific built distributions. 
+    """ + +    # This function initially exposed platforms as defined in Python 3.9 +    # even with older Python versions when distutils was split out. +    # Now it delegates to stdlib sysconfig, but maintains compatibility. + +    if sys.version_info < (3, 8): +        if os.name == 'nt': +            if '(arm)' in sys.version.lower(): +                return 'win-arm32' +            if '(arm64)' in sys.version.lower(): +                return 'win-arm64' + +    if sys.version_info < (3, 9): +        if os.name == "posix" and hasattr(os, 'uname'): +            osname, host, release, version, machine = os.uname() +            if osname[:3] == "aix": +                from .py38compat import aix_platform + +                return aix_platform(osname, version, release) + +    return sysconfig.get_platform() + + +def get_platform(): +    if os.name == 'nt': +        TARGET_TO_PLAT = { +            'x86': 'win32', +            'x64': 'win-amd64', +            'arm': 'win-arm32', +            'arm64': 'win-arm64', +        } +        target = os.environ.get('VSCMD_ARG_TGT_ARCH') +        return TARGET_TO_PLAT.get(target) or get_host_platform() +    return get_host_platform() + + +if sys.platform == 'darwin': +    _syscfg_macosx_ver = None  # cache the version pulled from sysconfig +MACOSX_VERSION_VAR = 'MACOSX_DEPLOYMENT_TARGET' + + +def _clear_cached_macosx_ver(): +    """For testing only. Do not call.""" +    global _syscfg_macosx_ver +    _syscfg_macosx_ver = None + + +def get_macosx_target_ver_from_syscfg(): +    """Get the version of macOS latched in the Python interpreter configuration. +    Returns the version as a string or None if can't obtain one. Cached.""" +    global _syscfg_macosx_ver +    if _syscfg_macosx_ver is None: +        from distutils import sysconfig + +        ver = sysconfig.get_config_var(MACOSX_VERSION_VAR) or '' +        if ver: +            _syscfg_macosx_ver = ver +    return _syscfg_macosx_ver + + +def get_macosx_target_ver(): +    """Return the version of macOS for which we are building. + +    The target version defaults to the version in sysconfig latched at time +    the Python interpreter was built, unless overridden by an environment +    variable. If neither source has a value, then None is returned""" + +    syscfg_ver = get_macosx_target_ver_from_syscfg() +    env_ver = os.environ.get(MACOSX_VERSION_VAR) + +    if env_ver: +        # Validate overridden version against sysconfig version, if have both. +        # Ensure that the deployment target of the build process is not less +        # than 10.3 if the interpreter was built for 10.3 or later.  This +        # ensures extension modules are built with correct compatibility +        # values, specifically LDSHARED which can use +        # '-undefined dynamic_lookup' which only works on >= 10.3. 
+        if ( +            syscfg_ver +            and split_version(syscfg_ver) >= [10, 3] +            and split_version(env_ver) < [10, 3] +        ): +            my_msg = ( +                '$' + MACOSX_VERSION_VAR + ' mismatch: ' +                'now "%s" but "%s" during configure; ' +                'must use 10.3 or later' % (env_ver, syscfg_ver) +            ) +            raise DistutilsPlatformError(my_msg) +        return env_ver +    return syscfg_ver + + +def split_version(s): +    """Convert a dot-separated string into a list of numbers for comparisons""" +    return [int(n) for n in s.split('.')] + + +def convert_path(pathname): +    """Return 'pathname' as a name that will work on the native filesystem, +    i.e. split it on '/' and put it back together again using the current +    directory separator.  Needed because filenames in the setup script are +    always supplied in Unix style, and have to be converted to the local +    convention before we can actually use them in the filesystem.  Raises +    ValueError on non-Unix-ish systems if 'pathname' either starts or +    ends with a slash. +    """ +    if os.sep == '/': +        return pathname +    if not pathname: +        return pathname +    if pathname[0] == '/': +        raise ValueError("path '%s' cannot be absolute" % pathname) +    if pathname[-1] == '/': +        raise ValueError("path '%s' cannot end with '/'" % pathname) + +    paths = pathname.split('/') +    while '.' in paths: +        paths.remove('.') +    if not paths: +        return os.curdir +    return os.path.join(*paths) + + +# convert_path () + + +def change_root(new_root, pathname): +    """Return 'pathname' with 'new_root' prepended.  If 'pathname' is +    relative, this is equivalent to "os.path.join(new_root,pathname)". +    Otherwise, it requires making 'pathname' relative and then joining the +    two, which is tricky on DOS/Windows and Mac OS. +    """ +    if os.name == 'posix': +        if not os.path.isabs(pathname): +            return os.path.join(new_root, pathname) +        else: +            return os.path.join(new_root, pathname[1:]) + +    elif os.name == 'nt': +        (drive, path) = os.path.splitdrive(pathname) +        if path[0] == '\\': +            path = path[1:] +        return os.path.join(new_root, path) + +    raise DistutilsPlatformError(f"nothing known about platform '{os.name}'") + + +@functools.lru_cache() +def check_environ(): +    """Ensure that 'os.environ' has all the environment variables we +    guarantee that users can use in config files, command-line options, +    etc.  Currently this includes: +      HOME - user's home directory (Unix only) +      PLAT - description of the current platform, including hardware +             and OS (see 'get_platform()') +    """ +    if os.name == 'posix' and 'HOME' not in os.environ: +        try: +            import pwd + +            os.environ['HOME'] = pwd.getpwuid(os.getuid())[5] +        except (ImportError, KeyError): +            # bpo-10496: if the current user identifier doesn't exist in the +            # password database, do nothing +            pass + +    if 'PLAT' not in os.environ: +        os.environ['PLAT'] = get_platform() + + +def subst_vars(s, local_vars): +    """ +    Perform variable substitution on 'string'. +    Variables are indicated by format-style braces ("{var}"). +    Variable is substituted by the value found in the 'local_vars' +    dictionary or in 'os.environ' if it's not in 'local_vars'. 
+    'os.environ' is first checked/augmented to guarantee that it contains +    certain values: see 'check_environ()'.  Raise ValueError for any +    variables not found in either 'local_vars' or 'os.environ'. +    """ +    check_environ() +    lookup = dict(os.environ) +    lookup.update((name, str(value)) for name, value in local_vars.items()) +    try: +        return _subst_compat(s).format_map(lookup) +    except KeyError as var: +        raise ValueError(f"invalid variable {var}") + + +def _subst_compat(s): +    """ +    Replace shell/Perl-style variable substitution with +    format-style. For compatibility. +    """ + +    def _subst(match): +        return f'{{{match.group(1)}}}' + +    repl = re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s) +    if repl != s: +        import warnings + +        warnings.warn( +            "shell/Perl-style substitions are deprecated", +            DeprecationWarning, +        ) +    return repl + + +def grok_environment_error(exc, prefix="error: "): +    # Function kept for backward compatibility. +    # Used to try clever things with EnvironmentErrors, +    # but nowadays str(exception) produces good messages. +    return prefix + str(exc) + + +# Needed by 'split_quoted()' +_wordchars_re = _squote_re = _dquote_re = None + + +def _init_regex(): +    global _wordchars_re, _squote_re, _dquote_re +    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace) +    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'") +    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"') + + +def split_quoted(s): +    """Split a string up according to Unix shell-like rules for quotes and +    backslashes.  In short: words are delimited by spaces, as long as those +    spaces are not escaped by a backslash, or inside a quoted string. +    Single and double quotes are equivalent, and the quote characters can +    be backslash-escaped.  The backslash is stripped from any two-character +    escape sequence, leaving only the escaped character.  The quote +    characters are stripped from any quoted string.  Returns a list of +    words. +    """ + +    # This is a nice algorithm for splitting up a single string, since it +    # doesn't require character-by-character examination.  It was a little +    # bit of a brain-bender to get it working right, though... 
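An illustrative aside, not part of the vendored file: for a made-up command line, the splitting rules described above work out to the following.

from distutils.util import split_quoted

print(split_quoted('gcc -DNAME="some value" -O2'))   # ['gcc', '-DNAME=some value', '-O2']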
+    if _wordchars_re is None: +        _init_regex() + +    s = s.strip() +    words = [] +    pos = 0 + +    while s: +        m = _wordchars_re.match(s, pos) +        end = m.end() +        if end == len(s): +            words.append(s[:end]) +            break + +        if s[end] in string.whitespace: +            # unescaped, unquoted whitespace: now +            # we definitely have a word delimiter +            words.append(s[:end]) +            s = s[end:].lstrip() +            pos = 0 + +        elif s[end] == '\\': +            # preserve whatever is being escaped; +            # will become part of the current word +            s = s[:end] + s[end + 1 :] +            pos = end + 1 + +        else: +            if s[end] == "'":  # slurp singly-quoted string +                m = _squote_re.match(s, end) +            elif s[end] == '"':  # slurp doubly-quoted string +                m = _dquote_re.match(s, end) +            else: +                raise RuntimeError("this can't happen (bad char '%c')" % s[end]) + +            if m is None: +                raise ValueError("bad string (mismatched %s quotes?)" % s[end]) + +            (beg, end) = m.span() +            s = s[:beg] + s[beg + 1 : end - 1] + s[end:] +            pos = m.end() - 2 + +        if pos >= len(s): +            words.append(s) +            break + +    return words + + +# split_quoted () + + +def execute(func, args, msg=None, verbose=0, dry_run=0): +    """Perform some action that affects the outside world (eg.  by +    writing to the filesystem).  Such actions are special because they +    are disabled by the 'dry_run' flag.  This method takes care of all +    that bureaucracy for you; all you have to do is supply the +    function to call and an argument tuple for it (to embody the +    "external action" being performed), and an optional message to +    print. +    """ +    if msg is None: +        msg = "{}{!r}".format(func.__name__, args) +        if msg[-2:] == ',)':  # correct for singleton tuple +            msg = msg[0:-2] + ')' + +    log.info(msg) +    if not dry_run: +        func(*args) + + +def strtobool(val): +    """Convert a string representation of truth to true (1) or false (0). + +    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values +    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if +    'val' is anything else. +    """ +    val = val.lower() +    if val in ('y', 'yes', 't', 'true', 'on', '1'): +        return 1 +    elif val in ('n', 'no', 'f', 'false', 'off', '0'): +        return 0 +    else: +        raise ValueError("invalid truth value {!r}".format(val)) + + +def byte_compile(  # noqa: C901 +    py_files, +    optimize=0, +    force=0, +    prefix=None, +    base_dir=None, +    verbose=1, +    dry_run=0, +    direct=None, +): +    """Byte-compile a collection of Python source files to .pyc +    files in a __pycache__ subdirectory.  'py_files' is a list +    of files to compile; any files that don't end in ".py" are silently +    skipped.  'optimize' must be one of the following: +      0 - don't optimize +      1 - normal optimization (like "python -O") +      2 - extra optimization (like "python -OO") +    If 'force' is true, all files are recompiled regardless of +    timestamps. + +    The source filename encoded in each bytecode file defaults to the +    filenames listed in 'py_files'; you can modify these with 'prefix' and +    'basedir'.  
'prefix' is a string that will be stripped off of each +    source filename, and 'base_dir' is a directory name that will be +    prepended (after 'prefix' is stripped).  You can supply either or both +    (or neither) of 'prefix' and 'base_dir', as you wish. + +    If 'dry_run' is true, doesn't actually do anything that would +    affect the filesystem. + +    Byte-compilation is either done directly in this interpreter process +    with the standard py_compile module, or indirectly by writing a +    temporary script and executing it.  Normally, you should let +    'byte_compile()' figure out to use direct compilation or not (see +    the source for details).  The 'direct' flag is used by the script +    generated in indirect mode; unless you know what you're doing, leave +    it set to None. +    """ + +    # nothing is done if sys.dont_write_bytecode is True +    if sys.dont_write_bytecode: +        raise DistutilsByteCompileError('byte-compiling is disabled.') + +    # First, if the caller didn't force us into direct or indirect mode, +    # figure out which mode we should be in.  We take a conservative +    # approach: choose direct mode *only* if the current interpreter is +    # in debug mode and optimize is 0.  If we're not in debug mode (-O +    # or -OO), we don't know which level of optimization this +    # interpreter is running with, so we can't do direct +    # byte-compilation and be certain that it's the right thing.  Thus, +    # always compile indirectly if the current interpreter is in either +    # optimize mode, or if either optimization level was requested by +    # the caller. +    if direct is None: +        direct = __debug__ and optimize == 0 + +    # "Indirect" byte-compilation: write a temporary script and then +    # run it with the appropriate flags. +    if not direct: +        try: +            from tempfile import mkstemp + +            (script_fd, script_name) = mkstemp(".py") +        except ImportError: +            from tempfile import mktemp + +            (script_fd, script_name) = None, mktemp(".py") +        log.info("writing byte-compilation script '%s'", script_name) +        if not dry_run: +            if script_fd is not None: +                script = os.fdopen(script_fd, "w") +            else: +                script = open(script_name, "w") + +            with script: +                script.write( +                    """\ +from distutils.util import byte_compile +files = [ +""" +                ) + +                # XXX would be nice to write absolute filenames, just for +                # safety's sake (script should be more robust in the face of +                # chdir'ing before running it).  But this requires abspath'ing +                # 'prefix' as well, and that breaks the hack in build_lib's +                # 'byte_compile()' method that carefully tacks on a trailing +                # slash (os.sep really) to make sure the prefix here is "just +                # right".  This whole prefix business is rather delicate -- the +                # problem is that it's really a directory, but I'm treating it +                # as a dumb string, so trailing slashes and so forth matter. 
+ +                script.write(",\n".join(map(repr, py_files)) + "]\n") +                script.write( +                    """ +byte_compile(files, optimize=%r, force=%r, +             prefix=%r, base_dir=%r, +             verbose=%r, dry_run=0, +             direct=1) +""" +                    % (optimize, force, prefix, base_dir, verbose) +                ) + +        cmd = [sys.executable] +        cmd.extend(subprocess._optim_args_from_interpreter_flags()) +        cmd.append(script_name) +        spawn(cmd, dry_run=dry_run) +        execute(os.remove, (script_name,), "removing %s" % script_name, dry_run=dry_run) + +    # "Direct" byte-compilation: use the py_compile module to compile +    # right here, right now.  Note that the script generated in indirect +    # mode simply calls 'byte_compile()' in direct mode, a weird sort of +    # cross-process recursion.  Hey, it works! +    else: +        from py_compile import compile + +        for file in py_files: +            if file[-3:] != ".py": +                # This lets us be lazy and not filter filenames in +                # the "install_lib" command. +                continue + +            # Terminology from the py_compile module: +            #   cfile - byte-compiled file +            #   dfile - purported source filename (same as 'file' by default) +            if optimize >= 0: +                opt = '' if optimize == 0 else optimize +                cfile = importlib.util.cache_from_source(file, optimization=opt) +            else: +                cfile = importlib.util.cache_from_source(file) +            dfile = file +            if prefix: +                if file[: len(prefix)] != prefix: +                    raise ValueError( +                        "invalid prefix: filename %r doesn't start with %r" +                        % (file, prefix) +                    ) +                dfile = dfile[len(prefix) :] +            if base_dir: +                dfile = os.path.join(base_dir, dfile) + +            cfile_base = os.path.basename(cfile) +            if direct: +                if force or newer(file, cfile): +                    log.info("byte-compiling %s to %s", file, cfile_base) +                    if not dry_run: +                        compile(file, cfile, dfile) +                else: +                    log.debug("skipping byte-compilation of %s to %s", file, cfile_base) + + +def rfc822_escape(header): +    """Return a version of the string escaped for inclusion in an +    RFC-822 header, by ensuring there are 8 spaces space after each newline. +    """ +    lines = header.split('\n') +    sep = '\n' + 8 * ' ' +    return sep.join(lines) diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/version.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/version.py new file mode 100644 index 0000000..e29e265 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/version.py @@ -0,0 +1,358 @@ +# +# distutils/version.py +# +# Implements multiple version numbering conventions for the +# Python Module Distribution Utilities. +# +# $Id$ +# + +"""Provides classes to represent module version numbers (one class for +each style of version numbering).  There are currently two such classes +implemented: StrictVersion and LooseVersion. 
+ +Every version number class implements the following interface: +  * the 'parse' method takes a string and parses it to some internal +    representation; if the string is an invalid version number, +    'parse' raises a ValueError exception +  * the class constructor takes an optional string argument which, +    if supplied, is passed to 'parse' +  * __str__ reconstructs the string that was passed to 'parse' (or +    an equivalent string -- ie. one that will generate an equivalent +    version number instance) +  * __repr__ generates Python code to recreate the version number instance +  * _cmp compares the current instance with either another instance +    of the same class or a string (which will be parsed to an instance +    of the same class, thus must follow the same rules) +""" + +import re +import warnings +import contextlib + + +@contextlib.contextmanager +def suppress_known_deprecation(): +    with warnings.catch_warnings(record=True) as ctx: +        warnings.filterwarnings( +            action='default', +            category=DeprecationWarning, +            message="distutils Version classes are deprecated.", +        ) +        yield ctx + + +class Version: +    """Abstract base class for version numbering classes.  Just provides +    constructor (__init__) and reproducer (__repr__), because those +    seem to be the same for all version numbering classes; and route +    rich comparisons to _cmp. +    """ + +    def __init__(self, vstring=None): +        if vstring: +            self.parse(vstring) +        warnings.warn( +            "distutils Version classes are deprecated. " +            "Use packaging.version instead.", +            DeprecationWarning, +            stacklevel=2, +        ) + +    def __repr__(self): +        return "{} ('{}')".format(self.__class__.__name__, str(self)) + +    def __eq__(self, other): +        c = self._cmp(other) +        if c is NotImplemented: +            return c +        return c == 0 + +    def __lt__(self, other): +        c = self._cmp(other) +        if c is NotImplemented: +            return c +        return c < 0 + +    def __le__(self, other): +        c = self._cmp(other) +        if c is NotImplemented: +            return c +        return c <= 0 + +    def __gt__(self, other): +        c = self._cmp(other) +        if c is NotImplemented: +            return c +        return c > 0 + +    def __ge__(self, other): +        c = self._cmp(other) +        if c is NotImplemented: +            return c +        return c >= 0 + + +# Interface for version-number classes -- must be implemented +# by the following classes (the concrete ones -- Version should +# be treated as an abstract class). 
+#    __init__ (string) - create and take same action as 'parse' +#                        (string parameter is optional) +#    parse (string)    - convert a string representation to whatever +#                        internal representation is appropriate for +#                        this style of version numbering +#    __str__ (self)    - convert back to a string; should be very similar +#                        (if not identical to) the string supplied to parse +#    __repr__ (self)   - generate Python code to recreate +#                        the instance +#    _cmp (self, other) - compare two version numbers ('other' may +#                        be an unparsed version string, or another +#                        instance of your version class) + + +class StrictVersion(Version): + +    """Version numbering for anal retentives and software idealists. +    Implements the standard interface for version number classes as +    described above.  A version number consists of two or three +    dot-separated numeric components, with an optional "pre-release" tag +    on the end.  The pre-release tag consists of the letter 'a' or 'b' +    followed by a number.  If the numeric components of two version +    numbers are equal, then one with a pre-release tag will always +    be deemed earlier (lesser) than one without. + +    The following are valid version numbers (shown in the order that +    would be obtained by sorting according to the supplied cmp function): + +        0.4       0.4.0  (these two are equivalent) +        0.4.1 +        0.5a1 +        0.5b3 +        0.5 +        0.9.6 +        1.0 +        1.0.4a3 +        1.0.4b1 +        1.0.4 + +    The following are examples of invalid version numbers: + +        1 +        2.7.2.2 +        1.3.a4 +        1.3pl1 +        1.3c4 + +    The rationale for this version numbering system will be explained +    in the distutils documentation. +    """ + +    version_re = re.compile( +        r'^(\d+) \. (\d+) (\. (\d+))? 
([ab](\d+))?$', re.VERBOSE | re.ASCII +    ) + +    def parse(self, vstring): +        match = self.version_re.match(vstring) +        if not match: +            raise ValueError("invalid version number '%s'" % vstring) + +        (major, minor, patch, prerelease, prerelease_num) = match.group(1, 2, 4, 5, 6) + +        if patch: +            self.version = tuple(map(int, [major, minor, patch])) +        else: +            self.version = tuple(map(int, [major, minor])) + (0,) + +        if prerelease: +            self.prerelease = (prerelease[0], int(prerelease_num)) +        else: +            self.prerelease = None + +    def __str__(self): + +        if self.version[2] == 0: +            vstring = '.'.join(map(str, self.version[0:2])) +        else: +            vstring = '.'.join(map(str, self.version)) + +        if self.prerelease: +            vstring = vstring + self.prerelease[0] + str(self.prerelease[1]) + +        return vstring + +    def _cmp(self, other):  # noqa: C901 +        if isinstance(other, str): +            with suppress_known_deprecation(): +                other = StrictVersion(other) +        elif not isinstance(other, StrictVersion): +            return NotImplemented + +        if self.version != other.version: +            # numeric versions don't match +            # prerelease stuff doesn't matter +            if self.version < other.version: +                return -1 +            else: +                return 1 + +        # have to compare prerelease +        # case 1: neither has prerelease; they're equal +        # case 2: self has prerelease, other doesn't; other is greater +        # case 3: self doesn't have prerelease, other does: self is greater +        # case 4: both have prerelease: must compare them! + +        if not self.prerelease and not other.prerelease: +            return 0 +        elif self.prerelease and not other.prerelease: +            return -1 +        elif not self.prerelease and other.prerelease: +            return 1 +        elif self.prerelease and other.prerelease: +            if self.prerelease == other.prerelease: +                return 0 +            elif self.prerelease < other.prerelease: +                return -1 +            else: +                return 1 +        else: +            assert False, "never get here" + + +# end class StrictVersion + + +# The rules according to Greg Stein: +# 1) a version number has 1 or more numbers separated by a period or by +#    sequences of letters. If only periods, then these are compared +#    left-to-right to determine an ordering. +# 2) sequences of letters are part of the tuple for comparison and are +#    compared lexicographically +# 3) recognize the numeric components may have leading zeroes +# +# The LooseVersion class below implements these rules: a version number +# string is split up into a tuple of integer and string components, and +# comparison is a simple tuple comparison.  This means that version +# numbers behave in a predictable and obvious way, but a way that might +# not necessarily be how people *want* version numbers to behave.  There +# wouldn't be a problem if people could stick to purely numeric version +# numbers: just split on period and compare the numbers as tuples. 
+# However, people insist on putting letters into their version numbers; +# the most common purpose seems to be: +#   - indicating a "pre-release" version +#     ('alpha', 'beta', 'a', 'b', 'pre', 'p') +#   - indicating a post-release patch ('p', 'pl', 'patch') +# but of course this can't cover all version number schemes, and there's +# no way to know what a programmer means without asking him. +# +# The problem is what to do with letters (and other non-numeric +# characters) in a version number.  The current implementation does the +# obvious and predictable thing: keep them as strings and compare +# lexically within a tuple comparison.  This has the desired effect if +# an appended letter sequence implies something "post-release": +# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002". +# +# However, if letters in a version number imply a pre-release version, +# the "obvious" thing isn't correct.  Eg. you would expect that +# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison +# implemented here, this just isn't so. +# +# Two possible solutions come to mind.  The first is to tie the +# comparison algorithm to a particular set of semantic rules, as has +# been done in the StrictVersion class above.  This works great as long +# as everyone can go along with bondage and discipline.  Hopefully a +# (large) subset of Python module programmers will agree that the +# particular flavour of bondage and discipline provided by StrictVersion +# provides enough benefit to be worth using, and will submit their +# version numbering scheme to its domination.  The free-thinking +# anarchists in the lot will never give in, though, and something needs +# to be done to accommodate them. +# +# Perhaps a "moderately strict" version class could be implemented that +# lets almost anything slide (syntactically), and makes some heuristic +# assumptions about non-digits in version number strings.  This could +# sink into special-case-hell, though; if I was as talented and +# idiosyncratic as Larry Wall, I'd go ahead and implement a class that +# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is +# just as happy dealing with things like "2g6" and "1.13++".  I don't +# think I'm smart enough to do it right though. +# +# In any case, I've coded the test suite for this module (see +# ../test/test_version.py) specifically to fail on things like comparing +# "1.2a2" and "1.2".  That's not because the *code* is doing anything +# wrong, it's because the simple, obvious design doesn't match my +# complicated, hairy expectations for real-world version numbers.  It +# would be a snap to fix the test suite to say, "Yep, LooseVersion does +# the Right Thing" (ie. the code matches the conception).  But I'd rather +# have a conception that matches common notions about version numbers. + + +class LooseVersion(Version): + +    """Version numbering for anarchists and software realists. +    Implements the standard interface for version number classes as +    described above.  A version number consists of a series of numbers, +    separated by either periods or strings of letters.  When comparing +    version numbers, the numeric components will be compared +    numerically, and the alphabetic components lexically.  
The following +    are all valid version numbers, in no particular order: + +        1.5.1 +        1.5.2b2 +        161 +        3.10a +        8.02 +        3.4j +        1996.07.12 +        3.2.pl0 +        3.1.1.6 +        2g6 +        11g +        0.960923 +        2.2beta29 +        1.13++ +        5.5.kw +        2.0b1pl0 + +    In fact, there is no such thing as an invalid version number under +    this scheme; the rules for comparison are simple and predictable, +    but may not always give the results you want (for some definition +    of "want"). +    """ + +    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE) + +    def parse(self, vstring): +        # I've given up on thinking I can reconstruct the version string +        # from the parsed tuple -- so I just store the string here for +        # use by __str__ +        self.vstring = vstring +        components = [x for x in self.component_re.split(vstring) if x and x != '.'] +        for i, obj in enumerate(components): +            try: +                components[i] = int(obj) +            except ValueError: +                pass + +        self.version = components + +    def __str__(self): +        return self.vstring + +    def __repr__(self): +        return "LooseVersion ('%s')" % str(self) + +    def _cmp(self, other): +        if isinstance(other, str): +            other = LooseVersion(other) +        elif not isinstance(other, LooseVersion): +            return NotImplemented + +        if self.version == other.version: +            return 0 +        if self.version < other.version: +            return -1 +        if self.version > other.version: +            return 1 + + +# end class LooseVersion diff --git a/venv/lib/python3.11/site-packages/setuptools/_distutils/versionpredicate.py b/venv/lib/python3.11/site-packages/setuptools/_distutils/versionpredicate.py new file mode 100644 index 0000000..6ea1192 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_distutils/versionpredicate.py @@ -0,0 +1,175 @@ +"""Module for parsing and testing package version predicate strings. +""" +import re +import distutils.version +import operator + + +re_validPackage = re.compile(r"(?i)^\s*([a-z_]\w*(?:\.[a-z_]\w*)*)(.*)", re.ASCII) +# (package) (rest) + +re_paren = re.compile(r"^\s*\((.*)\)\s*$")  # (list) inside of parentheses +re_splitComparison = re.compile(r"^\s*(<=|>=|<|>|!=|==)\s*([^\s,]+)\s*$") +# (comp) (version) + + +def splitUp(pred): +    """Parse a single version comparison. + +    Return (comparison string, StrictVersion) +    """ +    res = re_splitComparison.match(pred) +    if not res: +        raise ValueError("bad package restriction syntax: %r" % pred) +    comp, verStr = res.groups() +    with distutils.version.suppress_known_deprecation(): +        other = distutils.version.StrictVersion(verStr) +    return (comp, other) + + +compmap = { +    "<": operator.lt, +    "<=": operator.le, +    "==": operator.eq, +    ">": operator.gt, +    ">=": operator.ge, +    "!=": operator.ne, +} + + +class VersionPredicate: +    """Parse and test package version predicates. 
+
+    >>> v = VersionPredicate('pyepat.abc (>1.0, <3333.3a1, !=1555.1b3)')
+
+    The `name` attribute provides the full dotted name that is given::
+
+    >>> v.name
+    'pyepat.abc'
+
+    The str() of a `VersionPredicate` provides a normalized
+    human-readable version of the expression::
+
+    >>> print(v)
+    pyepat.abc (> 1.0, < 3333.3a1, != 1555.1b3)
+
+    The `satisfied_by()` method can be used to determine whether a given
+    version number is included in the set described by the version
+    restrictions::
+
+    >>> v.satisfied_by('1.1')
+    True
+    >>> v.satisfied_by('1.4')
+    True
+    >>> v.satisfied_by('1.0')
+    False
+    >>> v.satisfied_by('4444.4')
+    False
+    >>> v.satisfied_by('1555.1b3')
+    False
+
+    `VersionPredicate` is flexible in accepting extra whitespace::
+
+    >>> v = VersionPredicate(' pat( ==  0.1  )  ')
+    >>> v.name
+    'pat'
+    >>> v.satisfied_by('0.1')
+    True
+    >>> v.satisfied_by('0.2')
+    False
+
+    If any version numbers passed in do not conform to the
+    restrictions of `StrictVersion`, a `ValueError` is raised::
+
+    >>> v = VersionPredicate('p1.p2.p3.p4(>=1.0, <=1.3a1, !=1.2zb3)')
+    Traceback (most recent call last):
+      ...
+    ValueError: invalid version number '1.2zb3'
+
+    If the module or package name given does not conform to what's
+    allowed as a legal module or package name, `ValueError` is
+    raised::
+
+    >>> v = VersionPredicate('foo-bar')
+    Traceback (most recent call last):
+      ...
+    ValueError: expected parenthesized list: '-bar'
+
+    >>> v = VersionPredicate('foo bar (12.21)')
+    Traceback (most recent call last):
+      ...
+    ValueError: expected parenthesized list: 'bar (12.21)'
+
+    """
+
+    def __init__(self, versionPredicateStr):
+        """Parse a version predicate string."""
+        # Fields:
+        #    name:  package name
+        #    pred:  list of (comparison string, StrictVersion)
+
+        versionPredicateStr = versionPredicateStr.strip()
+        if not versionPredicateStr:
+            raise ValueError("empty package restriction")
+        match = re_validPackage.match(versionPredicateStr)
+        if not match:
+            raise ValueError("bad package name in %r" % versionPredicateStr)
+        self.name, paren = match.groups()
+        paren = paren.strip()
+        if paren:
+            match = re_paren.match(paren)
+            if not match:
+                raise ValueError("expected parenthesized list: %r" % paren)
+            str = match.groups()[0]
+            self.pred = [splitUp(aPred) for aPred in str.split(",")]
+            if not self.pred:
+                raise ValueError("empty parenthesized list in %r" % versionPredicateStr)
+        else:
+            self.pred = []
+
+    def __str__(self):
+        if self.pred:
+            seq = [cond + " " + str(ver) for cond, ver in self.pred]
+            return self.name + " (" + ", ".join(seq) + ")"
+        else:
+            return self.name
+
+    def satisfied_by(self, version):
+        """True if version is compatible with all the predicates in self.
+        The parameter version must be acceptable to the StrictVersion
+        constructor.  It may be either a string or StrictVersion.
+        """
+        for cond, ver in self.pred:
+            if not compmap[cond](version, ver):
+                return False
+        return True
+
+
+_provision_rx = None
+
+
+def split_provision(value):
+    """Return the name and optional version number of a provision.
+
+    The version number, if given, will be returned as a `StrictVersion`
+    instance, otherwise it will be `None`.
+
+    >>> split_provision('mypkg')
+    ('mypkg', None)
+    >>> split_provision(' mypkg( 1.2 ) ')
+    ('mypkg', StrictVersion ('1.2'))
+    """
+    global _provision_rx
+    if _provision_rx is None:
+        _provision_rx = re.compile(
+            r"([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)(?:\s*\(\s*([^)\s]+)\s*\))?$", re.ASCII
+        )
+    value = value.strip()
+    m = _provision_rx.match(value)
+    if not m:
+        raise ValueError("illegal provides specification: %r" % value)
+    ver = m.group(2) or None
+    if ver:
+        with distutils.version.suppress_known_deprecation():
+            ver = distutils.version.StrictVersion(ver)
+    return m.group(1), ver
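For readers skimming the two vendored modules above, a minimal usage sketch follows. It is illustrative only and not part of the committed files; it assumes the classes are importable under their stdlib distutils names (true on Python 3.11, either from the standard library or via setuptools' distutils shim), and the package name mypkg is made up.

    import warnings

    from distutils.version import LooseVersion
    from distutils.versionpredicate import VersionPredicate, split_provision

    with warnings.catch_warnings():
        # The Version classes emit a DeprecationWarning on construction.
        warnings.simplefilter("ignore", DeprecationWarning)

        pred = VersionPredicate("mypkg (>=1.0, !=1.5, <2.0)")
        print(pred.name)                 # mypkg
        print(pred.satisfied_by("1.2"))  # True
        print(pred.satisfied_by("1.5"))  # False

        print(split_provision("mypkg (1.2)"))  # ('mypkg', StrictVersion ('1.2'))

        # LooseVersion compares mixed numeric/alphabetic components as a tuple.
        print(LooseVersion("1.5.1") < LooseVersion("1.5.2b2"))  # True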
