Diffstat (limited to 'venv/lib/python3.11/site-packages/setuptools/_vendor/packaging')
22 files changed, 2928 insertions, 0 deletions
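For orientation, below is a minimal usage sketch of the vendored packaging API that this diff adds (assuming packaging 21.3 as vendored here; inside setuptools the copy is normally reached via setuptools.extern.packaging rather than imported directly, and the example below uses the plain top-level package name for readability):

    # Illustrative only: exercises the public API of the modules added in this diff.
    from packaging.markers import Marker
    from packaging.requirements import Requirement
    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    # requirements.py: parse a PEP 508 requirement into name, extras, specifier, marker.
    req = Requirement('requests[security]>=2.8.1,==2.8.*; python_version > "2.7"')
    print(req.name, sorted(req.extras), req.specifier, req.marker)

    # specifiers.py: membership tests against a version specifier set.
    spec = SpecifierSet(">=2.8.1,==2.8.*")
    print(Version("2.8.4") in spec)   # True
    print(Version("2.9.0") in spec)   # False (fails the ==2.8.* prefix match)

    # markers.py: evaluate an environment marker against the running interpreter.
    marker = Marker('python_version >= "3.6" and os_name == "posix"')
    print(marker.evaluate())          # result depends on the current environment

The _manylinux.py and _musllinux.py modules further down are not usually called directly; their platform_tags() helpers feed packaging.tags (e.g. packaging.tags.sys_tags()) to decide which manylinux/musllinux wheel tags the running interpreter supports.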
| diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__about__.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__about__.py new file mode 100644 index 0000000..3551bc2 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__about__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +__all__ = [ +    "__title__", +    "__summary__", +    "__uri__", +    "__version__", +    "__author__", +    "__email__", +    "__license__", +    "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "21.3" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD-2-Clause or Apache-2.0" +__copyright__ = "2014-2019 %s" % __author__ diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__init__.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__init__.py new file mode 100644 index 0000000..3c50c5d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__init__.py @@ -0,0 +1,25 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from .__about__ import ( +    __author__, +    __copyright__, +    __email__, +    __license__, +    __summary__, +    __title__, +    __uri__, +    __version__, +) + +__all__ = [ +    "__title__", +    "__summary__", +    "__uri__", +    "__version__", +    "__author__", +    "__email__", +    "__license__", +    "__copyright__", +] diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-311.pycBinary files differ new file mode 100644 index 0000000..a3c748c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__about__.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pycBinary files differ new file mode 100644 index 0000000..7da354c --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/__init__.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pycBinary files differ new file mode 100644 index 0000000..18f5613 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pycBinary files differ new file mode 100644 index 0000000..5284f6a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc diff --git 
a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pycBinary files differ new file mode 100644 index 0000000..e813336 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/_structures.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pycBinary files differ new file mode 100644 index 0000000..aa44949 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/markers.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pycBinary files differ new file mode 100644 index 0000000..63e418f --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/requirements.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pycBinary files differ new file mode 100644 index 0000000..66e8935 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pycBinary files differ new file mode 100644 index 0000000..eff3ace --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/tags.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pycBinary files differ new file mode 100644 index 0000000..f591d4d --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/utils.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pycBinary files differ new file mode 100644 index 0000000..12eff75 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/__pycache__/version.cpython-311.pyc diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_manylinux.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_manylinux.py new file mode 100644 index 0000000..4c379aa --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_manylinux.py @@ -0,0 +1,301 @@ +import collections +import functools +import os +import re +import struct +import sys +import warnings +from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple + + +# Python does not provide platform information at sufficient granularity to +# identify the architecture of the running executable in some cases, so we +# determine it dynamically by reading the information from the running +# process. 
This only applies on Linux, which uses the ELF format. +class _ELFFileHeader: +    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header +    class _InvalidELFFileHeader(ValueError): +        """ +        An invalid ELF file header was found. +        """ + +    ELF_MAGIC_NUMBER = 0x7F454C46 +    ELFCLASS32 = 1 +    ELFCLASS64 = 2 +    ELFDATA2LSB = 1 +    ELFDATA2MSB = 2 +    EM_386 = 3 +    EM_S390 = 22 +    EM_ARM = 40 +    EM_X86_64 = 62 +    EF_ARM_ABIMASK = 0xFF000000 +    EF_ARM_ABI_VER5 = 0x05000000 +    EF_ARM_ABI_FLOAT_HARD = 0x00000400 + +    def __init__(self, file: IO[bytes]) -> None: +        def unpack(fmt: str) -> int: +            try: +                data = file.read(struct.calcsize(fmt)) +                result: Tuple[int, ...] = struct.unpack(fmt, data) +            except struct.error: +                raise _ELFFileHeader._InvalidELFFileHeader() +            return result[0] + +        self.e_ident_magic = unpack(">I") +        if self.e_ident_magic != self.ELF_MAGIC_NUMBER: +            raise _ELFFileHeader._InvalidELFFileHeader() +        self.e_ident_class = unpack("B") +        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}: +            raise _ELFFileHeader._InvalidELFFileHeader() +        self.e_ident_data = unpack("B") +        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}: +            raise _ELFFileHeader._InvalidELFFileHeader() +        self.e_ident_version = unpack("B") +        self.e_ident_osabi = unpack("B") +        self.e_ident_abiversion = unpack("B") +        self.e_ident_pad = file.read(7) +        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H" +        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I" +        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q" +        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q +        self.e_type = unpack(format_h) +        self.e_machine = unpack(format_h) +        self.e_version = unpack(format_i) +        self.e_entry = unpack(format_p) +        self.e_phoff = unpack(format_p) +        self.e_shoff = unpack(format_p) +        self.e_flags = unpack(format_i) +        self.e_ehsize = unpack(format_h) +        self.e_phentsize = unpack(format_h) +        self.e_phnum = unpack(format_h) +        self.e_shentsize = unpack(format_h) +        self.e_shnum = unpack(format_h) +        self.e_shstrndx = unpack(format_h) + + +def _get_elf_header() -> Optional[_ELFFileHeader]: +    try: +        with open(sys.executable, "rb") as f: +            elf_header = _ELFFileHeader(f) +    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader): +        return None +    return elf_header + + +def _is_linux_armhf() -> bool: +    # hard-float ABI can be detected from the ELF header of the running +    # process +    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf +    elf_header = _get_elf_header() +    if elf_header is None: +        return False +    result = elf_header.e_ident_class == elf_header.ELFCLASS32 +    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB +    result &= elf_header.e_machine == elf_header.EM_ARM +    result &= ( +        elf_header.e_flags & elf_header.EF_ARM_ABIMASK +    ) == elf_header.EF_ARM_ABI_VER5 +    result &= ( +        elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD +    ) == elf_header.EF_ARM_ABI_FLOAT_HARD +    return result + + +def _is_linux_i686() -> bool: +    elf_header = _get_elf_header() +    if elf_header is None: +   
     return False +    result = elf_header.e_ident_class == elf_header.ELFCLASS32 +    result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB +    result &= elf_header.e_machine == elf_header.EM_386 +    return result + + +def _have_compatible_abi(arch: str) -> bool: +    if arch == "armv7l": +        return _is_linux_armhf() +    if arch == "i686": +        return _is_linux_i686() +    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"} + + +# If glibc ever changes its major version, we need to know what the last +# minor version was, so we can build the complete list of all versions. +# For now, guess what the highest minor version might be, assume it will +# be 50 for testing. Once this actually happens, update the dictionary +# with the actual value. +_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50) + + +class _GLibCVersion(NamedTuple): +    major: int +    minor: int + + +def _glibc_version_string_confstr() -> Optional[str]: +    """ +    Primary implementation of glibc_version_string using os.confstr. +    """ +    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely +    # to be broken or missing. This strategy is used in the standard library +    # platform module. +    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183 +    try: +        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17". +        version_string = os.confstr("CS_GNU_LIBC_VERSION") +        assert version_string is not None +        _, version = version_string.split() +    except (AssertionError, AttributeError, OSError, ValueError): +        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... +        return None +    return version + + +def _glibc_version_string_ctypes() -> Optional[str]: +    """ +    Fallback implementation of glibc_version_string using ctypes. +    """ +    try: +        import ctypes +    except ImportError: +        return None + +    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen +    # manpage says, "If filename is NULL, then the returned handle is for the +    # main program". This way we can let the linker do the work to figure out +    # which libc our process is actually using. +    # +    # We must also handle the special case where the executable is not a +    # dynamically linked executable. This can occur when using musl libc, +    # for example. In this situation, dlopen() will error, leading to an +    # OSError. Interestingly, at least in the case of musl, there is no +    # errno set on the OSError. The single string argument used to construct +    # OSError comes from libc itself and is therefore not portable to +    # hard code here. In any case, failure to call dlopen() means we +    # can proceed, so we bail on our attempt. +    try: +        process_namespace = ctypes.CDLL(None) +    except OSError: +        return None + +    try: +        gnu_get_libc_version = process_namespace.gnu_get_libc_version +    except AttributeError: +        # Symbol doesn't exist -> therefore, we are not linked to +        # glibc. 
+        return None + +    # Call gnu_get_libc_version, which returns a string like "2.5" +    gnu_get_libc_version.restype = ctypes.c_char_p +    version_str: str = gnu_get_libc_version() +    # py2 / py3 compatibility: +    if not isinstance(version_str, str): +        version_str = version_str.decode("ascii") + +    return version_str + + +def _glibc_version_string() -> Optional[str]: +    """Returns glibc version string, or None if not using glibc.""" +    return _glibc_version_string_confstr() or _glibc_version_string_ctypes() + + +def _parse_glibc_version(version_str: str) -> Tuple[int, int]: +    """Parse glibc version. + +    We use a regexp instead of str.split because we want to discard any +    random junk that might come after the minor version -- this might happen +    in patched/forked versions of glibc (e.g. Linaro's version of glibc +    uses version strings like "2.20-2014.11"). See gh-3588. +    """ +    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str) +    if not m: +        warnings.warn( +            "Expected glibc version with 2 components major.minor," +            " got: %s" % version_str, +            RuntimeWarning, +        ) +        return -1, -1 +    return int(m.group("major")), int(m.group("minor")) + + +@functools.lru_cache() +def _get_glibc_version() -> Tuple[int, int]: +    version_str = _glibc_version_string() +    if version_str is None: +        return (-1, -1) +    return _parse_glibc_version(version_str) + + +# From PEP 513, PEP 600 +def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool: +    sys_glibc = _get_glibc_version() +    if sys_glibc < version: +        return False +    # Check for presence of _manylinux module. +    try: +        import _manylinux  # noqa +    except ImportError: +        return True +    if hasattr(_manylinux, "manylinux_compatible"): +        result = _manylinux.manylinux_compatible(version[0], version[1], arch) +        if result is not None: +            return bool(result) +        return True +    if version == _GLibCVersion(2, 5): +        if hasattr(_manylinux, "manylinux1_compatible"): +            return bool(_manylinux.manylinux1_compatible) +    if version == _GLibCVersion(2, 12): +        if hasattr(_manylinux, "manylinux2010_compatible"): +            return bool(_manylinux.manylinux2010_compatible) +    if version == _GLibCVersion(2, 17): +        if hasattr(_manylinux, "manylinux2014_compatible"): +            return bool(_manylinux.manylinux2014_compatible) +    return True + + +_LEGACY_MANYLINUX_MAP = { +    # CentOS 7 w/ glibc 2.17 (PEP 599) +    (2, 17): "manylinux2014", +    # CentOS 6 w/ glibc 2.12 (PEP 571) +    (2, 12): "manylinux2010", +    # CentOS 5 w/ glibc 2.5 (PEP 513) +    (2, 5): "manylinux1", +} + + +def platform_tags(linux: str, arch: str) -> Iterator[str]: +    if not _have_compatible_abi(arch): +        return +    # Oldest glibc to be supported regardless of architecture is (2, 17). +    too_old_glibc2 = _GLibCVersion(2, 16) +    if arch in {"x86_64", "i686"}: +        # On x86/i686 also oldest glibc to be supported is (2, 5). +        too_old_glibc2 = _GLibCVersion(2, 4) +    current_glibc = _GLibCVersion(*_get_glibc_version()) +    glibc_max_list = [current_glibc] +    # We can assume compatibility across glibc major versions. 
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636 +    # +    # Build a list of maximum glibc versions so that we can +    # output the canonical list of all glibc from current_glibc +    # down to too_old_glibc2, including all intermediary versions. +    for glibc_major in range(current_glibc.major - 1, 1, -1): +        glibc_minor = _LAST_GLIBC_MINOR[glibc_major] +        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor)) +    for glibc_max in glibc_max_list: +        if glibc_max.major == too_old_glibc2.major: +            min_minor = too_old_glibc2.minor +        else: +            # For other glibc major versions oldest supported is (x, 0). +            min_minor = -1 +        for glibc_minor in range(glibc_max.minor, min_minor, -1): +            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor) +            tag = "manylinux_{}_{}".format(*glibc_version) +            if _is_compatible(tag, arch, glibc_version): +                yield linux.replace("linux", tag) +            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags. +            if glibc_version in _LEGACY_MANYLINUX_MAP: +                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version] +                if _is_compatible(legacy_tag, arch, glibc_version): +                    yield linux.replace("linux", legacy_tag) diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_musllinux.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_musllinux.py new file mode 100644 index 0000000..8ac3059 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_musllinux.py @@ -0,0 +1,136 @@ +"""PEP 656 support. + +This module implements logic to detect if the currently running Python is +linked against musl, and what musl version is used. +""" + +import contextlib +import functools +import operator +import os +import re +import struct +import subprocess +import sys +from typing import IO, Iterator, NamedTuple, Optional, Tuple + + +def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]: +    return struct.unpack(fmt, f.read(struct.calcsize(fmt))) + + +def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: +    """Detect musl libc location by parsing the Python executable. + +    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca +    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html +    """ +    f.seek(0) +    try: +        ident = _read_unpacked(f, "16B") +    except struct.error: +        return None +    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF. +        return None +    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version. + +    try: +        # e_fmt: Format for program header. +        # p_fmt: Format for section header. +        # p_idx: Indexes to find p_type, p_offset, and p_filesz. +        e_fmt, p_fmt, p_idx = { +            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit. +            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit. +        }[ident[4]] +    except KeyError: +        return None +    else: +        p_get = operator.itemgetter(*p_idx) + +    # Find the interpreter section and return its content. 
+    try: +        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) +    except struct.error: +        return None +    for i in range(e_phnum + 1): +        f.seek(e_phoff + e_phentsize * i) +        try: +            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) +        except struct.error: +            return None +        if p_type != 3:  # Not PT_INTERP. +            continue +        f.seek(p_offset) +        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") +        if "musl" not in interpreter: +            return None +        return interpreter +    return None + + +class _MuslVersion(NamedTuple): +    major: int +    minor: int + + +def _parse_musl_version(output: str) -> Optional[_MuslVersion]: +    lines = [n for n in (n.strip() for n in output.splitlines()) if n] +    if len(lines) < 2 or lines[0][:4] != "musl": +        return None +    m = re.match(r"Version (\d+)\.(\d+)", lines[1]) +    if not m: +        return None +    return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2))) + + +@functools.lru_cache() +def _get_musl_version(executable: str) -> Optional[_MuslVersion]: +    """Detect currently-running musl runtime version. + +    This is done by checking the specified executable's dynamic linking +    information, and invoking the loader to parse its output for a version +    string. If the loader is musl, the output would be something like:: + +        musl libc (x86_64) +        Version 1.2.2 +        Dynamic Program Loader +    """ +    with contextlib.ExitStack() as stack: +        try: +            f = stack.enter_context(open(executable, "rb")) +        except OSError: +            return None +        ld = _parse_ld_musl_from_elf(f) +    if not ld: +        return None +    proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) +    return _parse_musl_version(proc.stderr) + + +def platform_tags(arch: str) -> Iterator[str]: +    """Generate musllinux tags compatible to the current platform. + +    :param arch: Should be the part of platform tag after the ``linux_`` +        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a +        prerequisite for the current platform to be musllinux-compatible. + +    :returns: An iterator of compatible musllinux tags. +    """ +    sys_musl = _get_musl_version(sys.executable) +    if sys_musl is None:  # Python not dynamically linked against musl. +        return +    for minor in range(sys_musl.minor, -1, -1): +        yield f"musllinux_{sys_musl.major}_{minor}_{arch}" + + +if __name__ == "__main__":  # pragma: no cover +    import sysconfig + +    plat = sysconfig.get_platform() +    assert plat.startswith("linux-"), "not linux" + +    print("plat:", plat) +    print("musl:", _get_musl_version(sys.executable)) +    print("tags:", end=" ") +    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])): +        print(t, end="\n      ") diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_structures.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_structures.py new file mode 100644 index 0000000..90a6465 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/_structures.py @@ -0,0 +1,61 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ + +class InfinityType: +    def __repr__(self) -> str: +        return "Infinity" + +    def __hash__(self) -> int: +        return hash(repr(self)) + +    def __lt__(self, other: object) -> bool: +        return False + +    def __le__(self, other: object) -> bool: +        return False + +    def __eq__(self, other: object) -> bool: +        return isinstance(other, self.__class__) + +    def __gt__(self, other: object) -> bool: +        return True + +    def __ge__(self, other: object) -> bool: +        return True + +    def __neg__(self: object) -> "NegativeInfinityType": +        return NegativeInfinity + + +Infinity = InfinityType() + + +class NegativeInfinityType: +    def __repr__(self) -> str: +        return "-Infinity" + +    def __hash__(self) -> int: +        return hash(repr(self)) + +    def __lt__(self, other: object) -> bool: +        return True + +    def __le__(self, other: object) -> bool: +        return True + +    def __eq__(self, other: object) -> bool: +        return isinstance(other, self.__class__) + +    def __gt__(self, other: object) -> bool: +        return False + +    def __ge__(self, other: object) -> bool: +        return False + +    def __neg__(self: object) -> InfinityType: +        return Infinity + + +NegativeInfinity = NegativeInfinityType() diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/markers.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/markers.py new file mode 100644 index 0000000..eb0541b --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/markers.py @@ -0,0 +1,304 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import operator +import os +import platform +import sys +from typing import Any, Callable, Dict, List, Optional, Tuple, Union + +from setuptools.extern.pyparsing import (  # noqa: N817 +    Forward, +    Group, +    Literal as L, +    ParseException, +    ParseResults, +    QuotedString, +    ZeroOrMore, +    stringEnd, +    stringStart, +) + +from .specifiers import InvalidSpecifier, Specifier + +__all__ = [ +    "InvalidMarker", +    "UndefinedComparison", +    "UndefinedEnvironmentName", +    "Marker", +    "default_environment", +] + +Operator = Callable[[str, str], bool] + + +class InvalidMarker(ValueError): +    """ +    An invalid marker was found, users should refer to PEP 508. +    """ + + +class UndefinedComparison(ValueError): +    """ +    An invalid operation was attempted on a value that doesn't support it. +    """ + + +class UndefinedEnvironmentName(ValueError): +    """ +    A name was attempted to be used that does not exist inside of the +    environment. 
+    """ + + +class Node: +    def __init__(self, value: Any) -> None: +        self.value = value + +    def __str__(self) -> str: +        return str(self.value) + +    def __repr__(self) -> str: +        return f"<{self.__class__.__name__}('{self}')>" + +    def serialize(self) -> str: +        raise NotImplementedError + + +class Variable(Node): +    def serialize(self) -> str: +        return str(self) + + +class Value(Node): +    def serialize(self) -> str: +        return f'"{self}"' + + +class Op(Node): +    def serialize(self) -> str: +        return str(self) + + +VARIABLE = ( +    L("implementation_version") +    | L("platform_python_implementation") +    | L("implementation_name") +    | L("python_full_version") +    | L("platform_release") +    | L("platform_version") +    | L("platform_machine") +    | L("platform_system") +    | L("python_version") +    | L("sys_platform") +    | L("os_name") +    | L("os.name")  # PEP-345 +    | L("sys.platform")  # PEP-345 +    | L("platform.version")  # PEP-345 +    | L("platform.machine")  # PEP-345 +    | L("platform.python_implementation")  # PEP-345 +    | L("python_implementation")  # undocumented setuptools legacy +    | L("extra")  # PEP-508 +) +ALIASES = { +    "os.name": "os_name", +    "sys.platform": "sys_platform", +    "platform.version": "platform_version", +    "platform.machine": "platform_machine", +    "platform.python_implementation": "platform_python_implementation", +    "python_implementation": "platform_python_implementation", +} +VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0]))) + +VERSION_CMP = ( +    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") +MARKER_OP.setParseAction(lambda s, l, t: Op(t[0])) + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]: +    if isinstance(results, ParseResults): +        return [_coerce_parse_result(i) for i in results] +    else: +        return results + + +def _format_marker( +    marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True +) -> str: + +    assert isinstance(marker, (list, tuple, str)) + +    # Sometimes we have a structure like [[...]] which is a single item list +    # where the single item is itself it's own list. In that case we want skip +    # the rest of this function so that we don't get extraneous () on the +    # outside. 
+    if ( +        isinstance(marker, list) +        and len(marker) == 1 +        and isinstance(marker[0], (list, tuple)) +    ): +        return _format_marker(marker[0]) + +    if isinstance(marker, list): +        inner = (_format_marker(m, first=False) for m in marker) +        if first: +            return " ".join(inner) +        else: +            return "(" + " ".join(inner) + ")" +    elif isinstance(marker, tuple): +        return " ".join([m.serialize() for m in marker]) +    else: +        return marker + + +_operators: Dict[str, Operator] = { +    "in": lambda lhs, rhs: lhs in rhs, +    "not in": lambda lhs, rhs: lhs not in rhs, +    "<": operator.lt, +    "<=": operator.le, +    "==": operator.eq, +    "!=": operator.ne, +    ">=": operator.ge, +    ">": operator.gt, +} + + +def _eval_op(lhs: str, op: Op, rhs: str) -> bool: +    try: +        spec = Specifier("".join([op.serialize(), rhs])) +    except InvalidSpecifier: +        pass +    else: +        return spec.contains(lhs) + +    oper: Optional[Operator] = _operators.get(op.serialize()) +    if oper is None: +        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.") + +    return oper(lhs, rhs) + + +class Undefined: +    pass + + +_undefined = Undefined() + + +def _get_env(environment: Dict[str, str], name: str) -> str: +    value: Union[str, Undefined] = environment.get(name, _undefined) + +    if isinstance(value, Undefined): +        raise UndefinedEnvironmentName( +            f"{name!r} does not exist in evaluation environment." +        ) + +    return value + + +def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool: +    groups: List[List[bool]] = [[]] + +    for marker in markers: +        assert isinstance(marker, (list, tuple, str)) + +        if isinstance(marker, list): +            groups[-1].append(_evaluate_markers(marker, environment)) +        elif isinstance(marker, tuple): +            lhs, op, rhs = marker + +            if isinstance(lhs, Variable): +                lhs_value = _get_env(environment, lhs.value) +                rhs_value = rhs.value +            else: +                lhs_value = lhs.value +                rhs_value = _get_env(environment, rhs.value) + +            groups[-1].append(_eval_op(lhs_value, op, rhs_value)) +        else: +            assert marker in ["and", "or"] +            if marker == "or": +                groups.append([]) + +    return any(all(item) for item in groups) + + +def format_full_version(info: "sys._version_info") -> str: +    version = "{0.major}.{0.minor}.{0.micro}".format(info) +    kind = info.releaselevel +    if kind != "final": +        version += kind[0] + str(info.serial) +    return version + + +def default_environment() -> Dict[str, str]: +    iver = format_full_version(sys.implementation.version) +    implementation_name = sys.implementation.name +    return { +        "implementation_name": implementation_name, +        "implementation_version": iver, +        "os_name": os.name, +        "platform_machine": platform.machine(), +        "platform_release": platform.release(), +        "platform_system": platform.system(), +        "platform_version": platform.version(), +        "python_full_version": platform.python_version(), +        "platform_python_implementation": platform.python_implementation(), +        "python_version": ".".join(platform.python_version_tuple()[:2]), +        "sys_platform": sys.platform, +    } + + +class Marker: +    def __init__(self, marker: str) -> None: +        
try: +            self._markers = _coerce_parse_result(MARKER.parseString(marker)) +        except ParseException as e: +            raise InvalidMarker( +                f"Invalid marker: {marker!r}, parse error at " +                f"{marker[e.loc : e.loc + 8]!r}" +            ) + +    def __str__(self) -> str: +        return _format_marker(self._markers) + +    def __repr__(self) -> str: +        return f"<Marker('{self}')>" + +    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool: +        """Evaluate a marker. + +        Return the boolean from evaluating the given marker against the +        environment. environment is an optional argument to override all or +        part of the determined environment. + +        The environment is determined from the current Python process. +        """ +        current_environment = default_environment() +        if environment is not None: +            current_environment.update(environment) + +        return _evaluate_markers(self._markers, current_environment) diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/requirements.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/requirements.py new file mode 100644 index 0000000..0d93231 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/requirements.py @@ -0,0 +1,146 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import re +import string +import urllib.parse +from typing import List, Optional as TOptional, Set + +from setuptools.extern.pyparsing import (  # noqa +    Combine, +    Literal as L, +    Optional, +    ParseException, +    Regex, +    Word, +    ZeroOrMore, +    originalTextFor, +    stringEnd, +    stringStart, +) + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): +    """ +    An invalid requirement was found, users should refer to PEP 508. 
+    """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r"[^ ]+")("url") +URL = AT + URI + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine( +    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False +)("_raw_spec") +_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "") + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( +    lambda s, l, t: Marker(s[t._original_start : t._original_end]) +) +MARKER_SEPARATOR = SEMICOLON +MARKER = MARKER_SEPARATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd +# setuptools.extern.pyparsing isn't thread safe during initialization, so we do it eagerly, see +# issue #104 +REQUIREMENT.parseString("x[]") + + +class Requirement: +    """Parse a requirement. + +    Parse a given requirement string into its parts, such as name, specifier, +    URL, and extras. Raises InvalidRequirement on a badly-formed requirement +    string. +    """ + +    # TODO: Can we test whether something is contained within a requirement? +    #       If so how do we do that? Do we need to test against the _name_ of +    #       the thing as well as the version? What about the markers? +    # TODO: Can we normalize the name and extra name? 
+ +    def __init__(self, requirement_string: str) -> None: +        try: +            req = REQUIREMENT.parseString(requirement_string) +        except ParseException as e: +            raise InvalidRequirement( +                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}' +            ) + +        self.name: str = req.name +        if req.url: +            parsed_url = urllib.parse.urlparse(req.url) +            if parsed_url.scheme == "file": +                if urllib.parse.urlunparse(parsed_url) != req.url: +                    raise InvalidRequirement("Invalid URL given") +            elif not (parsed_url.scheme and parsed_url.netloc) or ( +                not parsed_url.scheme and not parsed_url.netloc +            ): +                raise InvalidRequirement(f"Invalid URL: {req.url}") +            self.url: TOptional[str] = req.url +        else: +            self.url = None +        self.extras: Set[str] = set(req.extras.asList() if req.extras else []) +        self.specifier: SpecifierSet = SpecifierSet(req.specifier) +        self.marker: TOptional[Marker] = req.marker if req.marker else None + +    def __str__(self) -> str: +        parts: List[str] = [self.name] + +        if self.extras: +            formatted_extras = ",".join(sorted(self.extras)) +            parts.append(f"[{formatted_extras}]") + +        if self.specifier: +            parts.append(str(self.specifier)) + +        if self.url: +            parts.append(f"@ {self.url}") +            if self.marker: +                parts.append(" ") + +        if self.marker: +            parts.append(f"; {self.marker}") + +        return "".join(parts) + +    def __repr__(self) -> str: +        return f"<Requirement('{self}')>" diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/specifiers.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..0e218a6 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/specifiers.py @@ -0,0 +1,802 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import abc +import functools +import itertools +import re +import warnings +from typing import ( +    Callable, +    Dict, +    Iterable, +    Iterator, +    List, +    Optional, +    Pattern, +    Set, +    Tuple, +    TypeVar, +    Union, +) + +from .utils import canonicalize_version +from .version import LegacyVersion, Version, parse + +ParsedVersion = Union[Version, LegacyVersion] +UnparsedVersion = Union[Version, LegacyVersion, str] +VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion) +CallableOperator = Callable[[ParsedVersion, str], bool] + + +class InvalidSpecifier(ValueError): +    """ +    An invalid specifier was found, users should refer to PEP 440. +    """ + + +class BaseSpecifier(metaclass=abc.ABCMeta): +    @abc.abstractmethod +    def __str__(self) -> str: +        """ +        Returns the str representation of this Specifier like object. This +        should be representative of the Specifier itself. +        """ + +    @abc.abstractmethod +    def __hash__(self) -> int: +        """ +        Returns a hash value for this Specifier like object. 
+        """ + +    @abc.abstractmethod +    def __eq__(self, other: object) -> bool: +        """ +        Returns a boolean representing whether or not the two Specifier like +        objects are equal. +        """ + +    @abc.abstractproperty +    def prereleases(self) -> Optional[bool]: +        """ +        Returns whether or not pre-releases as a whole are allowed by this +        specifier. +        """ + +    @prereleases.setter +    def prereleases(self, value: bool) -> None: +        """ +        Sets whether or not pre-releases as a whole are allowed by this +        specifier. +        """ + +    @abc.abstractmethod +    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool: +        """ +        Determines if the given item is contained within this specifier. +        """ + +    @abc.abstractmethod +    def filter( +        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None +    ) -> Iterable[VersionTypeVar]: +        """ +        Takes an iterable of items and filters them so that only items which +        are contained within this specifier are allowed in it. +        """ + + +class _IndividualSpecifier(BaseSpecifier): + +    _operators: Dict[str, str] = {} +    _regex: Pattern[str] + +    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: +        match = self._regex.search(spec) +        if not match: +            raise InvalidSpecifier(f"Invalid specifier: '{spec}'") + +        self._spec: Tuple[str, str] = ( +            match.group("operator").strip(), +            match.group("version").strip(), +        ) + +        # Store whether or not this Specifier should accept prereleases +        self._prereleases = prereleases + +    def __repr__(self) -> str: +        pre = ( +            f", prereleases={self.prereleases!r}" +            if self._prereleases is not None +            else "" +        ) + +        return f"<{self.__class__.__name__}({str(self)!r}{pre})>" + +    def __str__(self) -> str: +        return "{}{}".format(*self._spec) + +    @property +    def _canonical_spec(self) -> Tuple[str, str]: +        return self._spec[0], canonicalize_version(self._spec[1]) + +    def __hash__(self) -> int: +        return hash(self._canonical_spec) + +    def __eq__(self, other: object) -> bool: +        if isinstance(other, str): +            try: +                other = self.__class__(str(other)) +            except InvalidSpecifier: +                return NotImplemented +        elif not isinstance(other, self.__class__): +            return NotImplemented + +        return self._canonical_spec == other._canonical_spec + +    def _get_operator(self, op: str) -> CallableOperator: +        operator_callable: CallableOperator = getattr( +            self, f"_compare_{self._operators[op]}" +        ) +        return operator_callable + +    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion: +        if not isinstance(version, (LegacyVersion, Version)): +            version = parse(version) +        return version + +    @property +    def operator(self) -> str: +        return self._spec[0] + +    @property +    def version(self) -> str: +        return self._spec[1] + +    @property +    def prereleases(self) -> Optional[bool]: +        return self._prereleases + +    @prereleases.setter +    def prereleases(self, value: bool) -> None: +        self._prereleases = value + +    def __contains__(self, item: str) -> bool: +        return self.contains(item) + +    def contains( 
+        self, item: UnparsedVersion, prereleases: Optional[bool] = None +    ) -> bool: + +        # Determine if prereleases are to be allowed or not. +        if prereleases is None: +            prereleases = self.prereleases + +        # Normalize item to a Version or LegacyVersion, this allows us to have +        # a shortcut for ``"2.0" in Specifier(">=2") +        normalized_item = self._coerce_version(item) + +        # Determine if we should be supporting prereleases in this specifier +        # or not, if we do not support prereleases than we can short circuit +        # logic if this version is a prereleases. +        if normalized_item.is_prerelease and not prereleases: +            return False + +        # Actually do the comparison to determine if this item is contained +        # within this Specifier or not. +        operator_callable: CallableOperator = self._get_operator(self.operator) +        return operator_callable(normalized_item, self.version) + +    def filter( +        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None +    ) -> Iterable[VersionTypeVar]: + +        yielded = False +        found_prereleases = [] + +        kw = {"prereleases": prereleases if prereleases is not None else True} + +        # Attempt to iterate over all the values in the iterable and if any of +        # them match, yield them. +        for version in iterable: +            parsed_version = self._coerce_version(version) + +            if self.contains(parsed_version, **kw): +                # If our version is a prerelease, and we were not set to allow +                # prereleases, then we'll store it for later in case nothing +                # else matches this specifier. +                if parsed_version.is_prerelease and not ( +                    prereleases or self.prereleases +                ): +                    found_prereleases.append(version) +                # Either this is not a prerelease, or we should have been +                # accepting prereleases from the beginning. +                else: +                    yielded = True +                    yield version + +        # Now that we've iterated over everything, determine if we've yielded +        # any values, and if we have not and we have any prereleases stored up +        # then we will go ahead and yield the prereleases. +        if not yielded and found_prereleases: +            for version in found_prereleases: +                yield version + + +class LegacySpecifier(_IndividualSpecifier): + +    _regex_str = r""" +        (?P<operator>(==|!=|<=|>=|<|>)) +        \s* +        (?P<version> +            [^,;\s)]* # Since this is a "legacy" specifier, and the version +                      # string can be just about anything, we match everything +                      # except for whitespace, a semi-colon for marker support, +                      # a closing paren since versions can be enclosed in +                      # them, and a comma since it's a version separator. 
+        ) +        """ + +    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + +    _operators = { +        "==": "equal", +        "!=": "not_equal", +        "<=": "less_than_equal", +        ">=": "greater_than_equal", +        "<": "less_than", +        ">": "greater_than", +    } + +    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None: +        super().__init__(spec, prereleases) + +        warnings.warn( +            "Creating a LegacyVersion has been deprecated and will be " +            "removed in the next major release", +            DeprecationWarning, +        ) + +    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion: +        if not isinstance(version, LegacyVersion): +            version = LegacyVersion(str(version)) +        return version + +    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool: +        return prospective == self._coerce_version(spec) + +    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool: +        return prospective != self._coerce_version(spec) + +    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool: +        return prospective <= self._coerce_version(spec) + +    def _compare_greater_than_equal( +        self, prospective: LegacyVersion, spec: str +    ) -> bool: +        return prospective >= self._coerce_version(spec) + +    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool: +        return prospective < self._coerce_version(spec) + +    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool: +        return prospective > self._coerce_version(spec) + + +def _require_version_compare( +    fn: Callable[["Specifier", ParsedVersion, str], bool] +) -> Callable[["Specifier", ParsedVersion, str], bool]: +    @functools.wraps(fn) +    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool: +        if not isinstance(prospective, Version): +            return False +        return fn(self, prospective, spec) + +    return wrapped + + +class Specifier(_IndividualSpecifier): + +    _regex_str = r""" +        (?P<operator>(~=|==|!=|<=|>=|<|>|===)) +        (?P<version> +            (?: +                # The identity operators allow for an escape hatch that will +                # do an exact string match of the version you wish to install. +                # This will not be parsed by PEP 440 and we cannot determine +                # any semantic meaning from it. This operator is discouraged +                # but included entirely as an escape hatch. +                (?<====)  # Only match for the identity operator +                \s* +                [^\s]*    # We just match everything, except for whitespace +                          # since we are only testing for strict identity. +            ) +            | +            (?: +                # The (non)equality operators allow for wild card and local +                # versions to be specified so we have to define these two +                # operators separately to enable that. +                (?<===|!=)            # Only match for equals and not equals + +                \s* +                v? +                (?:[0-9]+!)?          # epoch +                [0-9]+(?:\.[0-9]+)*   # release +                (?:                   # pre release +                    [-_\.]? +                    (a|b|c|rc|alpha|beta|pre|preview) +                    [-_\.]? 
+                    [0-9]* +                )? +                (?:                   # post release +                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) +                )? + +                # You cannot use a wild card and a dev or local version +                # together so group them with a | and make them optional. +                (?: +                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release +                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local +                    | +                    \.\*  # Wild card syntax of .* +                )? +            ) +            | +            (?: +                # The compatible operator requires at least two digits in the +                # release segment. +                (?<=~=)               # Only match for the compatible operator + +                \s* +                v? +                (?:[0-9]+!)?          # epoch +                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *) +                (?:                   # pre release +                    [-_\.]? +                    (a|b|c|rc|alpha|beta|pre|preview) +                    [-_\.]? +                    [0-9]* +                )? +                (?:                                   # post release +                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) +                )? +                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release +            ) +            | +            (?: +                # All other operators only allow a sub set of what the +                # (non)equality operators do. Specifically they do not allow +                # local versions to be specified nor do they allow the prefix +                # matching wild cards. +                (?<!==|!=|~=)         # We have special cases for these +                                      # operators so we want to make sure they +                                      # don't match here. + +                \s* +                v? +                (?:[0-9]+!)?          # epoch +                [0-9]+(?:\.[0-9]+)*   # release +                (?:                   # pre release +                    [-_\.]? +                    (a|b|c|rc|alpha|beta|pre|preview) +                    [-_\.]? +                    [0-9]* +                )? +                (?:                                   # post release +                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) +                )? +                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release +            ) +        ) +        """ + +    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + +    _operators = { +        "~=": "compatible", +        "==": "equal", +        "!=": "not_equal", +        "<=": "less_than_equal", +        ">=": "greater_than_equal", +        "<": "less_than", +        ">": "greater_than", +        "===": "arbitrary", +    } + +    @_require_version_compare +    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool: + +        # Compatible releases have an equivalent combination of >= and ==. That +        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to +        # implement this in terms of the other specifiers instead of +        # implementing it ourselves. The only thing we need to do is construct +        # the other specifiers. + +        # We want everything but the last item in the version, but we want to +        # ignore suffix segments. 
+        prefix = ".".join( +            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1] +        ) + +        # Add the prefix notation to the end of our string +        prefix += ".*" + +        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")( +            prospective, prefix +        ) + +    @_require_version_compare +    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool: + +        # We need special logic to handle prefix matching +        if spec.endswith(".*"): +            # In the case of prefix matching we want to ignore local segment. +            prospective = Version(prospective.public) +            # Split the spec out by dots, and pretend that there is an implicit +            # dot in between a release segment and a pre-release segment. +            split_spec = _version_split(spec[:-2])  # Remove the trailing .* + +            # Split the prospective version out by dots, and pretend that there +            # is an implicit dot in between a release segment and a pre-release +            # segment. +            split_prospective = _version_split(str(prospective)) + +            # Shorten the prospective version to be the same length as the spec +            # so that we can determine if the specifier is a prefix of the +            # prospective version or not. +            shortened_prospective = split_prospective[: len(split_spec)] + +            # Pad out our two sides with zeros so that they both equal the same +            # length. +            padded_spec, padded_prospective = _pad_version( +                split_spec, shortened_prospective +            ) + +            return padded_prospective == padded_spec +        else: +            # Convert our spec string into a Version +            spec_version = Version(spec) + +            # If the specifier does not have a local segment, then we want to +            # act as if the prospective version also does not have a local +            # segment. +            if not spec_version.local: +                prospective = Version(prospective.public) + +            return prospective == spec_version + +    @_require_version_compare +    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool: +        return not self._compare_equal(prospective, spec) + +    @_require_version_compare +    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool: + +        # NB: Local version identifiers are NOT permitted in the version +        # specifier, so local version labels can be universally removed from +        # the prospective version. +        return Version(prospective.public) <= Version(spec) + +    @_require_version_compare +    def _compare_greater_than_equal( +        self, prospective: ParsedVersion, spec: str +    ) -> bool: + +        # NB: Local version identifiers are NOT permitted in the version +        # specifier, so local version labels can be universally removed from +        # the prospective version. +        return Version(prospective.public) >= Version(spec) + +    @_require_version_compare +    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool: + +        # Convert our spec to a Version instance, since we'll want to work with +        # it as a version. +        spec = Version(spec_str) + +        # Check to see if the prospective version is less than the spec +        # version. 
If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1 should
+        # not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # less than the spec version *and* not a pre-release of the same
+        # version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
+
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec_str)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not, we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept
+        # post-release versions for the version mentioned in the specifier
+        # (e.g. >3.1 should not match 3.1.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is both
+        # greater than the spec version *and* not a post-release or local version
+        # of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
+        return str(prospective).lower() == str(spec).lower()
+
+    @property
+    def prereleases(self) -> bool:
+
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators and, if they are, whether they include an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*; if it does, we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
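# Illustrating the behaviour described above, assuming the standalone
# `packaging` distribution is importable: a pre-release named in an inclusive
# specifier switches pre-release acceptance on, while the exclusive < and >
# comparisons refuse pre-/post-releases of the exact version they mention.
from packaging.specifiers import Specifier

assert Specifier(">=1.0a1").prereleases is True   # explicit pre-release in the spec
assert Specifier(">=1.0").prereleases is False

assert not Specifier("<3.1").contains("3.1.dev0", prereleases=True)
assert Specifier("<3.1").contains("3.0.dev0", prereleases=True)
assert not Specifier(">3.1").contains("3.1.post0")
assert Specifier(">3.1").contains("3.2.post0")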
+            if parse(version).is_prerelease: +                return True + +        return False + +    @prereleases.setter +    def prereleases(self, value: bool) -> None: +        self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version: str) -> List[str]: +    result: List[str] = [] +    for item in version.split("."): +        match = _prefix_regex.search(item) +        if match: +            result.extend(match.groups()) +        else: +            result.append(item) +    return result + + +def _is_not_suffix(segment: str) -> bool: +    return not any( +        segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post") +    ) + + +def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]: +    left_split, right_split = [], [] + +    # Get the release segment of our versions +    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) +    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + +    # Get the rest of our versions +    left_split.append(left[len(left_split[0]) :]) +    right_split.append(right[len(right_split[0]) :]) + +    # Insert our padding +    left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0]))) +    right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0]))) + +    return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split))) + + +class SpecifierSet(BaseSpecifier): +    def __init__( +        self, specifiers: str = "", prereleases: Optional[bool] = None +    ) -> None: + +        # Split on , to break each individual specifier into it's own item, and +        # strip each item to remove leading/trailing whitespace. +        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + +        # Parsed each individual specifier, attempting first to make it a +        # Specifier and falling back to a LegacySpecifier. +        parsed: Set[_IndividualSpecifier] = set() +        for specifier in split_specifiers: +            try: +                parsed.add(Specifier(specifier)) +            except InvalidSpecifier: +                parsed.add(LegacySpecifier(specifier)) + +        # Turn our parsed specifiers into a frozen set and save them for later. +        self._specs = frozenset(parsed) + +        # Store our prereleases value so we can use it later to determine if +        # we accept prereleases or not. 
+        self._prereleases = prereleases + +    def __repr__(self) -> str: +        pre = ( +            f", prereleases={self.prereleases!r}" +            if self._prereleases is not None +            else "" +        ) + +        return f"<SpecifierSet({str(self)!r}{pre})>" + +    def __str__(self) -> str: +        return ",".join(sorted(str(s) for s in self._specs)) + +    def __hash__(self) -> int: +        return hash(self._specs) + +    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet": +        if isinstance(other, str): +            other = SpecifierSet(other) +        elif not isinstance(other, SpecifierSet): +            return NotImplemented + +        specifier = SpecifierSet() +        specifier._specs = frozenset(self._specs | other._specs) + +        if self._prereleases is None and other._prereleases is not None: +            specifier._prereleases = other._prereleases +        elif self._prereleases is not None and other._prereleases is None: +            specifier._prereleases = self._prereleases +        elif self._prereleases == other._prereleases: +            specifier._prereleases = self._prereleases +        else: +            raise ValueError( +                "Cannot combine SpecifierSets with True and False prerelease " +                "overrides." +            ) + +        return specifier + +    def __eq__(self, other: object) -> bool: +        if isinstance(other, (str, _IndividualSpecifier)): +            other = SpecifierSet(str(other)) +        elif not isinstance(other, SpecifierSet): +            return NotImplemented + +        return self._specs == other._specs + +    def __len__(self) -> int: +        return len(self._specs) + +    def __iter__(self) -> Iterator[_IndividualSpecifier]: +        return iter(self._specs) + +    @property +    def prereleases(self) -> Optional[bool]: + +        # If we have been given an explicit prerelease modifier, then we'll +        # pass that through here. +        if self._prereleases is not None: +            return self._prereleases + +        # If we don't have any specifiers, and we don't have a forced value, +        # then we'll just return None since we don't know if this should have +        # pre-releases or not. +        if not self._specs: +            return None + +        # Otherwise we'll see if any of the given specifiers accept +        # prereleases, if any of them do we'll return True, otherwise False. +        return any(s.prereleases for s in self._specs) + +    @prereleases.setter +    def prereleases(self, value: bool) -> None: +        self._prereleases = value + +    def __contains__(self, item: UnparsedVersion) -> bool: +        return self.contains(item) + +    def contains( +        self, item: UnparsedVersion, prereleases: Optional[bool] = None +    ) -> bool: + +        # Ensure that our item is a Version or LegacyVersion instance. +        if not isinstance(item, (LegacyVersion, Version)): +            item = parse(item) + +        # Determine if we're forcing a prerelease or not, if we're not forcing +        # one for this particular filter call, then we'll use whatever the +        # SpecifierSet thinks for whether or not we should support prereleases. +        if prereleases is None: +            prereleases = self.prereleases + +        # We can determine if we're going to allow pre-releases by looking to +        # see if any of the underlying items supports them. 
If none of them do,
+        # and this item is a pre-release, then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc; however, it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True; this is an explicit design decision.
+        return all(s.contains(item, prereleases=prereleases) for s in self._specs)
+
+    def filter(
+        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
+    ) -> Iterable[VersionTypeVar]:
+
+        # Determine if we're forcing a prerelease or not. If we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one; this will act as a logical AND amongst
+        # the specifiers.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iterable
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases, and which will filter out LegacyVersion in general.
+        else:
+            filtered: List[VersionTypeVar] = []
+            found_prereleases: List[VersionTypeVar] = []
+
+            item: UnparsedVersion
+            parsed_version: Union[Version, LegacyVersion]
+
+            for item in iterable:
+                # Ensure that we have some kind of Version class for this item.
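# A small sketch of the "no specifiers" branch implemented here, assuming the
# standalone `packaging` distribution is importable: pre-releases are filtered
# out while final releases exist, but are kept when they are all there is.
from packaging.specifiers import SpecifierSet

empty = SpecifierSet("")
# The final release wins; the lone pre-release is dropped.
assert list(empty.filter(["1.0", "1.1a1"])) == ["1.0"]
# Nothing but pre-releases is available, so they are returned instead.
assert list(empty.filter(["1.1a1", "1.1b2"])) == ["1.1a1", "1.1b2"]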
+                if not isinstance(item, (LegacyVersion, Version)): +                    parsed_version = parse(item) +                else: +                    parsed_version = item + +                # Filter out any item which is parsed as a LegacyVersion +                if isinstance(parsed_version, LegacyVersion): +                    continue + +                # Store any item which is a pre-release for later unless we've +                # already found a final version or we are accepting prereleases +                if parsed_version.is_prerelease and not prereleases: +                    if not filtered: +                        found_prereleases.append(item) +                else: +                    filtered.append(item) + +            # If we've found no items except for pre-releases, then we'll go +            # ahead and use the pre-releases +            if not filtered and found_prereleases and prereleases is None: +                return found_prereleases + +            return filtered diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/tags.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/tags.py new file mode 100644 index 0000000..9a3d25a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/tags.py @@ -0,0 +1,487 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import logging +import platform +import sys +import sysconfig +from importlib.machinery import EXTENSION_SUFFIXES +from typing import ( +    Dict, +    FrozenSet, +    Iterable, +    Iterator, +    List, +    Optional, +    Sequence, +    Tuple, +    Union, +    cast, +) + +from . import _manylinux, _musllinux + +logger = logging.getLogger(__name__) + +PythonVersion = Sequence[int] +MacVersion = Tuple[int, int] + +INTERPRETER_SHORT_NAMES: Dict[str, str] = { +    "python": "py",  # Generic. +    "cpython": "cp", +    "pypy": "pp", +    "ironpython": "ip", +    "jython": "jy", +} + + +_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32 + + +class Tag: +    """ +    A representation of the tag triple for a wheel. + +    Instances are considered immutable and thus are hashable. Equality checking +    is also supported. +    """ + +    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"] + +    def __init__(self, interpreter: str, abi: str, platform: str) -> None: +        self._interpreter = interpreter.lower() +        self._abi = abi.lower() +        self._platform = platform.lower() +        # The __hash__ of every single element in a Set[Tag] will be evaluated each time +        # that a set calls its `.disjoint()` method, which may be called hundreds of +        # times when scanning a page of links for packages with tags matching that +        # Set[Tag]. Pre-computing the value here produces significant speedups for +        # downstream consumers. +        self._hash = hash((self._interpreter, self._abi, self._platform)) + +    @property +    def interpreter(self) -> str: +        return self._interpreter + +    @property +    def abi(self) -> str: +        return self._abi + +    @property +    def platform(self) -> str: +        return self._platform + +    def __eq__(self, other: object) -> bool: +        if not isinstance(other, Tag): +            return NotImplemented + +        return ( +            (self._hash == other._hash)  # Short-circuit ASAP for perf reasons. 
+            and (self._platform == other._platform) +            and (self._abi == other._abi) +            and (self._interpreter == other._interpreter) +        ) + +    def __hash__(self) -> int: +        return self._hash + +    def __str__(self) -> str: +        return f"{self._interpreter}-{self._abi}-{self._platform}" + +    def __repr__(self) -> str: +        return f"<{self} @ {id(self)}>" + + +def parse_tag(tag: str) -> FrozenSet[Tag]: +    """ +    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. + +    Returning a set is required due to the possibility that the tag is a +    compressed tag set. +    """ +    tags = set() +    interpreters, abis, platforms = tag.split("-") +    for interpreter in interpreters.split("."): +        for abi in abis.split("."): +            for platform_ in platforms.split("."): +                tags.add(Tag(interpreter, abi, platform_)) +    return frozenset(tags) + + +def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]: +    value = sysconfig.get_config_var(name) +    if value is None and warn: +        logger.debug( +            "Config variable '%s' is unset, Python ABI tag may be incorrect", name +        ) +    return value + + +def _normalize_string(string: str) -> str: +    return string.replace(".", "_").replace("-", "_") + + +def _abi3_applies(python_version: PythonVersion) -> bool: +    """ +    Determine if the Python version supports abi3. + +    PEP 384 was first implemented in Python 3.2. +    """ +    return len(python_version) > 1 and tuple(python_version) >= (3, 2) + + +def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]: +    py_version = tuple(py_version)  # To allow for version comparison. +    abis = [] +    version = _version_nodot(py_version[:2]) +    debug = pymalloc = ucs4 = "" +    with_debug = _get_config_var("Py_DEBUG", warn) +    has_refcount = hasattr(sys, "gettotalrefcount") +    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled +    # extension modules is the best option. +    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692 +    has_ext = "_d.pyd" in EXTENSION_SUFFIXES +    if with_debug or (with_debug is None and (has_refcount or has_ext)): +        debug = "d" +    if py_version < (3, 8): +        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn) +        if with_pymalloc or with_pymalloc is None: +            pymalloc = "m" +        if py_version < (3, 3): +            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn) +            if unicode_size == 4 or ( +                unicode_size is None and sys.maxunicode == 0x10FFFF +            ): +                ucs4 = "u" +    elif debug: +        # Debug builds can also load "normal" extension modules. +        # We can also assume no UCS-4 or pymalloc requirement. +        abis.append(f"cp{version}") +    abis.insert( +        0, +        "cp{version}{debug}{pymalloc}{ucs4}".format( +            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4 +        ), +    ) +    return abis + + +def cpython_tags( +    python_version: Optional[PythonVersion] = None, +    abis: Optional[Iterable[str]] = None, +    platforms: Optional[Iterable[str]] = None, +    *, +    warn: bool = False, +) -> Iterator[Tag]: +    """ +    Yields the tags for a CPython interpreter. 
+ +    The tags consist of: +    - cp<python_version>-<abi>-<platform> +    - cp<python_version>-abi3-<platform> +    - cp<python_version>-none-<platform> +    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2. + +    If python_version only specifies a major version then user-provided ABIs and +    the 'none' ABItag will be used. + +    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at +    their normal position and not at the beginning. +    """ +    if not python_version: +        python_version = sys.version_info[:2] + +    interpreter = f"cp{_version_nodot(python_version[:2])}" + +    if abis is None: +        if len(python_version) > 1: +            abis = _cpython_abis(python_version, warn) +        else: +            abis = [] +    abis = list(abis) +    # 'abi3' and 'none' are explicitly handled later. +    for explicit_abi in ("abi3", "none"): +        try: +            abis.remove(explicit_abi) +        except ValueError: +            pass + +    platforms = list(platforms or platform_tags()) +    for abi in abis: +        for platform_ in platforms: +            yield Tag(interpreter, abi, platform_) +    if _abi3_applies(python_version): +        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) +    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) + +    if _abi3_applies(python_version): +        for minor_version in range(python_version[1] - 1, 1, -1): +            for platform_ in platforms: +                interpreter = "cp{version}".format( +                    version=_version_nodot((python_version[0], minor_version)) +                ) +                yield Tag(interpreter, "abi3", platform_) + + +def _generic_abi() -> Iterator[str]: +    abi = sysconfig.get_config_var("SOABI") +    if abi: +        yield _normalize_string(abi) + + +def generic_tags( +    interpreter: Optional[str] = None, +    abis: Optional[Iterable[str]] = None, +    platforms: Optional[Iterable[str]] = None, +    *, +    warn: bool = False, +) -> Iterator[Tag]: +    """ +    Yields the tags for a generic interpreter. + +    The tags consist of: +    - <interpreter>-<abi>-<platform> + +    The "none" ABI will be added if it was not explicitly provided. +    """ +    if not interpreter: +        interp_name = interpreter_name() +        interp_version = interpreter_version(warn=warn) +        interpreter = "".join([interp_name, interp_version]) +    if abis is None: +        abis = _generic_abi() +    platforms = list(platforms or platform_tags()) +    abis = list(abis) +    if "none" not in abis: +        abis.append("none") +    for abi in abis: +        for platform_ in platforms: +            yield Tag(interpreter, abi, platform_) + + +def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: +    """ +    Yields Python versions in descending order. + +    After the latest version, the major-only version will be yielded, and then +    all previous versions of that major version. 
+    """ +    if len(py_version) > 1: +        yield f"py{_version_nodot(py_version[:2])}" +    yield f"py{py_version[0]}" +    if len(py_version) > 1: +        for minor in range(py_version[1] - 1, -1, -1): +            yield f"py{_version_nodot((py_version[0], minor))}" + + +def compatible_tags( +    python_version: Optional[PythonVersion] = None, +    interpreter: Optional[str] = None, +    platforms: Optional[Iterable[str]] = None, +) -> Iterator[Tag]: +    """ +    Yields the sequence of tags that are compatible with a specific version of Python. + +    The tags consist of: +    - py*-none-<platform> +    - <interpreter>-none-any  # ... if `interpreter` is provided. +    - py*-none-any +    """ +    if not python_version: +        python_version = sys.version_info[:2] +    platforms = list(platforms or platform_tags()) +    for version in _py_interpreter_range(python_version): +        for platform_ in platforms: +            yield Tag(version, "none", platform_) +    if interpreter: +        yield Tag(interpreter, "none", "any") +    for version in _py_interpreter_range(python_version): +        yield Tag(version, "none", "any") + + +def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str: +    if not is_32bit: +        return arch + +    if arch.startswith("ppc"): +        return "ppc" + +    return "i386" + + +def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]: +    formats = [cpu_arch] +    if cpu_arch == "x86_64": +        if version < (10, 4): +            return [] +        formats.extend(["intel", "fat64", "fat32"]) + +    elif cpu_arch == "i386": +        if version < (10, 4): +            return [] +        formats.extend(["intel", "fat32", "fat"]) + +    elif cpu_arch == "ppc64": +        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2? +        if version > (10, 5) or version < (10, 4): +            return [] +        formats.append("fat64") + +    elif cpu_arch == "ppc": +        if version > (10, 6): +            return [] +        formats.extend(["fat32", "fat"]) + +    if cpu_arch in {"arm64", "x86_64"}: +        formats.append("universal2") + +    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}: +        formats.append("universal") + +    return formats + + +def mac_platforms( +    version: Optional[MacVersion] = None, arch: Optional[str] = None +) -> Iterator[str]: +    """ +    Yields the platform tags for a macOS system. + +    The `version` parameter is a two-item tuple specifying the macOS version to +    generate platform tags for. The `arch` parameter is the CPU architecture to +    generate platform tags for. Both parameters default to the appropriate value +    for the current system. +    """ +    version_str, _, cpu_arch = platform.mac_ver() +    if version is None: +        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) +    else: +        version = version +    if arch is None: +        arch = _mac_arch(cpu_arch) +    else: +        arch = arch + +    if (10, 0) <= version and version < (11, 0): +        # Prior to Mac OS 11, each yearly release of Mac OS bumped the +        # "minor" version number.  The major version was always 10. 
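# A quick sketch of the tag expansion this loop produces, assuming the
# standalone `packaging` distribution is importable. The version/arch pair
# below is hypothetical; by default both are read from the running system.
from packaging.tags import mac_platforms

tags = list(mac_platforms(version=(10, 15), arch="x86_64"))
assert tags[0] == "macosx_10_15_x86_64"
# Older 10.x targets are also yielded, down to 10.0, in several binary formats.
assert "macosx_10_9_intel" in tags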
+        for minor_version in range(version[1], -1, -1): +            compat_version = 10, minor_version +            binary_formats = _mac_binary_formats(compat_version, arch) +            for binary_format in binary_formats: +                yield "macosx_{major}_{minor}_{binary_format}".format( +                    major=10, minor=minor_version, binary_format=binary_format +                ) + +    if version >= (11, 0): +        # Starting with Mac OS 11, each yearly release bumps the major version +        # number.   The minor versions are now the midyear updates. +        for major_version in range(version[0], 10, -1): +            compat_version = major_version, 0 +            binary_formats = _mac_binary_formats(compat_version, arch) +            for binary_format in binary_formats: +                yield "macosx_{major}_{minor}_{binary_format}".format( +                    major=major_version, minor=0, binary_format=binary_format +                ) + +    if version >= (11, 0): +        # Mac OS 11 on x86_64 is compatible with binaries from previous releases. +        # Arm64 support was introduced in 11.0, so no Arm binaries from previous +        # releases exist. +        # +        # However, the "universal2" binary format can have a +        # macOS version earlier than 11.0 when the x86_64 part of the binary supports +        # that version of macOS. +        if arch == "x86_64": +            for minor_version in range(16, 3, -1): +                compat_version = 10, minor_version +                binary_formats = _mac_binary_formats(compat_version, arch) +                for binary_format in binary_formats: +                    yield "macosx_{major}_{minor}_{binary_format}".format( +                        major=compat_version[0], +                        minor=compat_version[1], +                        binary_format=binary_format, +                    ) +        else: +            for minor_version in range(16, 3, -1): +                compat_version = 10, minor_version +                binary_format = "universal2" +                yield "macosx_{major}_{minor}_{binary_format}".format( +                    major=compat_version[0], +                    minor=compat_version[1], +                    binary_format=binary_format, +                ) + + +def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]: +    linux = _normalize_string(sysconfig.get_platform()) +    if is_32bit: +        if linux == "linux_x86_64": +            linux = "linux_i686" +        elif linux == "linux_aarch64": +            linux = "linux_armv7l" +    _, arch = linux.split("_", 1) +    yield from _manylinux.platform_tags(linux, arch) +    yield from _musllinux.platform_tags(arch) +    yield linux + + +def _generic_platforms() -> Iterator[str]: +    yield _normalize_string(sysconfig.get_platform()) + + +def platform_tags() -> Iterator[str]: +    """ +    Provides the platform tags for this installation. +    """ +    if platform.system() == "Darwin": +        return mac_platforms() +    elif platform.system() == "Linux": +        return _linux_platforms() +    else: +        return _generic_platforms() + + +def interpreter_name() -> str: +    """ +    Returns the name of the running interpreter. +    """ +    name = sys.implementation.name +    return INTERPRETER_SHORT_NAMES.get(name) or name + + +def interpreter_version(*, warn: bool = False) -> str: +    """ +    Returns the version of the running interpreter. 
+    """ +    version = _get_config_var("py_version_nodot", warn=warn) +    if version: +        version = str(version) +    else: +        version = _version_nodot(sys.version_info[:2]) +    return version + + +def _version_nodot(version: PythonVersion) -> str: +    return "".join(map(str, version)) + + +def sys_tags(*, warn: bool = False) -> Iterator[Tag]: +    """ +    Returns the sequence of tag triples for the running interpreter. + +    The order of the sequence corresponds to priority order for the +    interpreter, from most to least important. +    """ + +    interp_name = interpreter_name() +    if interp_name == "cp": +        yield from cpython_tags(warn=warn) +    else: +        yield from generic_tags() + +    if interp_name == "pp": +        yield from compatible_tags(interpreter="pp3") +    else: +        yield from compatible_tags() diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/utils.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/utils.py new file mode 100644 index 0000000..bab11b8 --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/utils.py @@ -0,0 +1,136 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +import re +from typing import FrozenSet, NewType, Tuple, Union, cast + +from .tags import Tag, parse_tag +from .version import InvalidVersion, Version + +BuildTag = Union[Tuple[()], Tuple[int, str]] +NormalizedName = NewType("NormalizedName", str) + + +class InvalidWheelFilename(ValueError): +    """ +    An invalid wheel filename was found, users should refer to PEP 427. +    """ + + +class InvalidSdistFilename(ValueError): +    """ +    An invalid sdist filename was found, users should refer to the packaging user guide. +    """ + + +_canonicalize_regex = re.compile(r"[-_.]+") +# PEP 427: The build number must start with a digit. +_build_tag_regex = re.compile(r"(\d+)(.*)") + + +def canonicalize_name(name: str) -> NormalizedName: +    # This is taken from PEP 503. +    value = _canonicalize_regex.sub("-", name).lower() +    return cast(NormalizedName, value) + + +def canonicalize_version(version: Union[Version, str]) -> str: +    """ +    This is very similar to Version.__str__, but has one subtle difference +    with the way it handles the release segment. 
+    """ +    if isinstance(version, str): +        try: +            parsed = Version(version) +        except InvalidVersion: +            # Legacy versions cannot be normalized +            return version +    else: +        parsed = version + +    parts = [] + +    # Epoch +    if parsed.epoch != 0: +        parts.append(f"{parsed.epoch}!") + +    # Release segment +    # NB: This strips trailing '.0's to normalize +    parts.append(re.sub(r"(\.0)+$", "", ".".join(str(x) for x in parsed.release))) + +    # Pre-release +    if parsed.pre is not None: +        parts.append("".join(str(x) for x in parsed.pre)) + +    # Post-release +    if parsed.post is not None: +        parts.append(f".post{parsed.post}") + +    # Development release +    if parsed.dev is not None: +        parts.append(f".dev{parsed.dev}") + +    # Local version segment +    if parsed.local is not None: +        parts.append(f"+{parsed.local}") + +    return "".join(parts) + + +def parse_wheel_filename( +    filename: str, +) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]: +    if not filename.endswith(".whl"): +        raise InvalidWheelFilename( +            f"Invalid wheel filename (extension must be '.whl'): {filename}" +        ) + +    filename = filename[:-4] +    dashes = filename.count("-") +    if dashes not in (4, 5): +        raise InvalidWheelFilename( +            f"Invalid wheel filename (wrong number of parts): {filename}" +        ) + +    parts = filename.split("-", dashes - 2) +    name_part = parts[0] +    # See PEP 427 for the rules on escaping the project name +    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None: +        raise InvalidWheelFilename(f"Invalid project name: {filename}") +    name = canonicalize_name(name_part) +    version = Version(parts[1]) +    if dashes == 5: +        build_part = parts[2] +        build_match = _build_tag_regex.match(build_part) +        if build_match is None: +            raise InvalidWheelFilename( +                f"Invalid build number: {build_part} in '{filename}'" +            ) +        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2))) +    else: +        build = () +    tags = parse_tag(parts[-1]) +    return (name, version, build, tags) + + +def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]: +    if filename.endswith(".tar.gz"): +        file_stem = filename[: -len(".tar.gz")] +    elif filename.endswith(".zip"): +        file_stem = filename[: -len(".zip")] +    else: +        raise InvalidSdistFilename( +            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):" +            f" {filename}" +        ) + +    # We are requiring a PEP 440 version, which cannot contain dashes, +    # so we split on the last dash. +    name_part, sep, version_part = file_stem.rpartition("-") +    if not sep: +        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}") + +    name = canonicalize_name(name_part) +    version = Version(version_part) +    return (name, version) diff --git a/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/version.py b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/version.py new file mode 100644 index 0000000..de9a09a --- /dev/null +++ b/venv/lib/python3.11/site-packages/setuptools/_vendor/packaging/version.py @@ -0,0 +1,504 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +import collections +import itertools +import re +import warnings +from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union + +from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType + +__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] + +InfiniteTypes = Union[InfinityType, NegativeInfinityType] +PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] +SubLocalType = Union[InfiniteTypes, int, str] +LocalType = Union[ +    NegativeInfinityType, +    Tuple[ +        Union[ +            SubLocalType, +            Tuple[SubLocalType, str], +            Tuple[NegativeInfinityType, SubLocalType], +        ], +        ..., +    ], +] +CmpKey = Tuple[ +    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType +] +LegacyCmpKey = Tuple[int, Tuple[str, ...]] +VersionComparisonMethod = Callable[ +    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool +] + +_Version = collections.namedtuple( +    "_Version", ["epoch", "release", "dev", "pre", "post", "local"] +) + + +def parse(version: str) -> Union["LegacyVersion", "Version"]: +    """ +    Parse the given version string and return either a :class:`Version` object +    or a :class:`LegacyVersion` object depending on if the given version is +    a valid PEP 440 version or a legacy version. +    """ +    try: +        return Version(version) +    except InvalidVersion: +        return LegacyVersion(version) + + +class InvalidVersion(ValueError): +    """ +    An invalid version was found, users should refer to PEP 440. +    """ + + +class _BaseVersion: +    _key: Union[CmpKey, LegacyCmpKey] + +    def __hash__(self) -> int: +        return hash(self._key) + +    # Please keep the duplicated `isinstance` check +    # in the six comparisons hereunder +    # unless you find a way to avoid adding overhead function calls. 
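# Since every comparison below defers to the precomputed `_key` tuple, sorting
# Version objects is plain tuple ordering. A minimal sketch, assuming the
# standalone `packaging` distribution is importable:
from packaging.version import Version

ordered = sorted(Version(v) for v in ("1.0.post1", "1.0", "1.0rc1", "1.0.dev0"))
assert [str(v) for v in ordered] == ["1.0.dev0", "1.0rc1", "1.0", "1.0.post1"]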
+    def __lt__(self, other: "_BaseVersion") -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key < other._key + +    def __le__(self, other: "_BaseVersion") -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key <= other._key + +    def __eq__(self, other: object) -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key == other._key + +    def __ge__(self, other: "_BaseVersion") -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key >= other._key + +    def __gt__(self, other: "_BaseVersion") -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key > other._key + +    def __ne__(self, other: object) -> bool: +        if not isinstance(other, _BaseVersion): +            return NotImplemented + +        return self._key != other._key + + +class LegacyVersion(_BaseVersion): +    def __init__(self, version: str) -> None: +        self._version = str(version) +        self._key = _legacy_cmpkey(self._version) + +        warnings.warn( +            "Creating a LegacyVersion has been deprecated and will be " +            "removed in the next major release", +            DeprecationWarning, +        ) + +    def __str__(self) -> str: +        return self._version + +    def __repr__(self) -> str: +        return f"<LegacyVersion('{self}')>" + +    @property +    def public(self) -> str: +        return self._version + +    @property +    def base_version(self) -> str: +        return self._version + +    @property +    def epoch(self) -> int: +        return -1 + +    @property +    def release(self) -> None: +        return None + +    @property +    def pre(self) -> None: +        return None + +    @property +    def post(self) -> None: +        return None + +    @property +    def dev(self) -> None: +        return None + +    @property +    def local(self) -> None: +        return None + +    @property +    def is_prerelease(self) -> bool: +        return False + +    @property +    def is_postrelease(self) -> bool: +        return False + +    @property +    def is_devrelease(self) -> bool: +        return False + + +_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) + +_legacy_version_replacement_map = { +    "pre": "c", +    "preview": "c", +    "-": "final-", +    "rc": "c", +    "dev": "@", +} + + +def _parse_version_parts(s: str) -> Iterator[str]: +    for part in _legacy_version_component_re.split(s): +        part = _legacy_version_replacement_map.get(part, part) + +        if not part or part == ".": +            continue + +        if part[:1] in "0123456789": +            # pad for numeric comparison +            yield part.zfill(8) +        else: +            yield "*" + part + +    # ensure that alpha/beta/candidate are before final +    yield "*final" + + +def _legacy_cmpkey(version: str) -> LegacyCmpKey: + +    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch +    # greater than or equal to 0. This will effectively put the LegacyVersion, +    # which uses the defacto standard originally implemented by setuptools, +    # as before all PEP 440 versions. +    epoch = -1 + +    # This scheme is taken from pkg_resources.parse_version setuptools prior to +    # it's adoption of the packaging library. 
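# Because the legacy key hardcodes an epoch of -1, anything that fails to parse
# as PEP 440 sorts before every valid release. A minimal sketch, assuming the
# standalone `packaging` distribution is importable (constructing the
# LegacyVersion emits the DeprecationWarning mentioned earlier):
from packaging.version import LegacyVersion, Version, parse

legacy = parse("1.0.x")                # not valid PEP 440, so a LegacyVersion
assert isinstance(legacy, LegacyVersion)
assert legacy < Version("0.0.1")       # legacy versions sort before any PEP 440 release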
+    parts: List[str] = [] +    for part in _parse_version_parts(version.lower()): +        if part.startswith("*"): +            # remove "-" before a prerelease tag +            if part < "*final": +                while parts and parts[-1] == "*final-": +                    parts.pop() + +            # remove trailing zeros from each series of numeric parts +            while parts and parts[-1] == "00000000": +                parts.pop() + +        parts.append(part) + +    return epoch, tuple(parts) + + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" +    v? +    (?: +        (?:(?P<epoch>[0-9]+)!)?                           # epoch +        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment +        (?P<pre>                                          # pre-release +            [-_\.]? +            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) +            [-_\.]? +            (?P<pre_n>[0-9]+)? +        )? +        (?P<post>                                         # post release +            (?:-(?P<post_n1>[0-9]+)) +            | +            (?: +                [-_\.]? +                (?P<post_l>post|rev|r) +                [-_\.]? +                (?P<post_n2>[0-9]+)? +            ) +        )? +        (?P<dev>                                          # dev release +            [-_\.]? +            (?P<dev_l>dev) +            [-_\.]? +            (?P<dev_n>[0-9]+)? +        )? +    ) +    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version +""" + + +class Version(_BaseVersion): + +    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE) + +    def __init__(self, version: str) -> None: + +        # Validate the version and parse it into pieces +        match = self._regex.search(version) +        if not match: +            raise InvalidVersion(f"Invalid version: '{version}'") + +        # Store the parsed out pieces of the version +        self._version = _Version( +            epoch=int(match.group("epoch")) if match.group("epoch") else 0, +            release=tuple(int(i) for i in match.group("release").split(".")), +            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")), +            post=_parse_letter_version( +                match.group("post_l"), match.group("post_n1") or match.group("post_n2") +            ), +            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")), +            local=_parse_local_version(match.group("local")), +        ) + +        # Generate a key which will be used for sorting +        self._key = _cmpkey( +            self._version.epoch, +            self._version.release, +            self._version.pre, +            self._version.post, +            self._version.dev, +            self._version.local, +        ) + +    def __repr__(self) -> str: +        return f"<Version('{self}')>" + +    def __str__(self) -> str: +        parts = [] + +        # Epoch +        if self.epoch != 0: +            parts.append(f"{self.epoch}!") + +        # Release segment +        parts.append(".".join(str(x) for x in self.release)) + +        # Pre-release +        if self.pre is not None: +            parts.append("".join(str(x) for x in self.pre)) + +        # Post-release +        if self.post is not None: +            parts.append(f".post{self.post}") + +        # Development release +        if self.dev is not None: +            
parts.append(f".dev{self.dev}") + +        # Local version segment +        if self.local is not None: +            parts.append(f"+{self.local}") + +        return "".join(parts) + +    @property +    def epoch(self) -> int: +        _epoch: int = self._version.epoch +        return _epoch + +    @property +    def release(self) -> Tuple[int, ...]: +        _release: Tuple[int, ...] = self._version.release +        return _release + +    @property +    def pre(self) -> Optional[Tuple[str, int]]: +        _pre: Optional[Tuple[str, int]] = self._version.pre +        return _pre + +    @property +    def post(self) -> Optional[int]: +        return self._version.post[1] if self._version.post else None + +    @property +    def dev(self) -> Optional[int]: +        return self._version.dev[1] if self._version.dev else None + +    @property +    def local(self) -> Optional[str]: +        if self._version.local: +            return ".".join(str(x) for x in self._version.local) +        else: +            return None + +    @property +    def public(self) -> str: +        return str(self).split("+", 1)[0] + +    @property +    def base_version(self) -> str: +        parts = [] + +        # Epoch +        if self.epoch != 0: +            parts.append(f"{self.epoch}!") + +        # Release segment +        parts.append(".".join(str(x) for x in self.release)) + +        return "".join(parts) + +    @property +    def is_prerelease(self) -> bool: +        return self.dev is not None or self.pre is not None + +    @property +    def is_postrelease(self) -> bool: +        return self.post is not None + +    @property +    def is_devrelease(self) -> bool: +        return self.dev is not None + +    @property +    def major(self) -> int: +        return self.release[0] if len(self.release) >= 1 else 0 + +    @property +    def minor(self) -> int: +        return self.release[1] if len(self.release) >= 2 else 0 + +    @property +    def micro(self) -> int: +        return self.release[2] if len(self.release) >= 3 else 0 + + +def _parse_letter_version( +    letter: str, number: Union[str, bytes, SupportsInt] +) -> Optional[Tuple[str, int]]: + +    if letter: +        # We consider there to be an implicit 0 in a pre-release if there is +        # not a numeral associated with it. +        if number is None: +            number = 0 + +        # We normalize any letters to their lower case form +        letter = letter.lower() + +        # We consider some words to be alternate spellings of other words and +        # in those cases we want to normalize the spellings to our preferred +        # spelling. +        if letter == "alpha": +            letter = "a" +        elif letter == "beta": +            letter = "b" +        elif letter in ["c", "pre", "preview"]: +            letter = "rc" +        elif letter in ["rev", "r"]: +            letter = "post" + +        return letter, int(number) +    if not letter and number: +        # We assume if we are given a number, but we are not given a letter +        # then this is using the implicit post release syntax (e.g. 1.0-1) +        letter = "post" + +        return letter, int(number) + +    return None + + +_local_version_separators = re.compile(r"[\._-]") + + +def _parse_local_version(local: str) -> Optional[LocalType]: +    """ +    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). 
+    """ +    if local is not None: +        return tuple( +            part.lower() if not part.isdigit() else int(part) +            for part in _local_version_separators.split(local) +        ) +    return None + + +def _cmpkey( +    epoch: int, +    release: Tuple[int, ...], +    pre: Optional[Tuple[str, int]], +    post: Optional[Tuple[str, int]], +    dev: Optional[Tuple[str, int]], +    local: Optional[Tuple[SubLocalType]], +) -> CmpKey: + +    # When we compare a release version, we want to compare it with all of the +    # trailing zeros removed. So we'll use a reverse the list, drop all the now +    # leading zeros until we come to something non zero, then take the rest +    # re-reverse it back into the correct order and make it a tuple and use +    # that for our sorting key. +    _release = tuple( +        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release)))) +    ) + +    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. +    # We'll do this by abusing the pre segment, but we _only_ want to do this +    # if there is not a pre or a post segment. If we have one of those then +    # the normal sorting rules will handle this case correctly. +    if pre is None and post is None and dev is not None: +        _pre: PrePostDevType = NegativeInfinity +    # Versions without a pre-release (except as noted above) should sort after +    # those with one. +    elif pre is None: +        _pre = Infinity +    else: +        _pre = pre + +    # Versions without a post segment should sort before those with one. +    if post is None: +        _post: PrePostDevType = NegativeInfinity + +    else: +        _post = post + +    # Versions without a development segment should sort after those with one. +    if dev is None: +        _dev: PrePostDevType = Infinity + +    else: +        _dev = dev + +    if local is None: +        # Versions without a local segment should sort before those with one. +        _local: LocalType = NegativeInfinity +    else: +        # Versions with a local segment need that segment parsed to implement +        # the sorting rules in PEP440. +        # - Alpha numeric segments sort before numeric segments +        # - Alpha numeric segments sort lexicographically +        # - Numeric segments sort numerically +        # - Shorter versions sort before longer versions when the prefixes +        #   match exactly +        _local = tuple( +            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local +        ) + +    return epoch, _release, _pre, _post, _dev, _local | 
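# The local-segment rules encoded above can be observed through the public
# Version class. A minimal sketch, assuming the standalone `packaging`
# distribution is importable:
from packaging.version import Version

assert Version("1.0+2") > Version("1.0+abc")      # numeric parts sort after alphanumeric ones
assert Version("1.0+10") > Version("1.0+9")       # numeric parts compare numerically
assert Version("1.0+abc.2") > Version("1.0+abc")  # longer local wins on a shared prefix
assert Version("1.0+anything") > Version("1.0")   # any local segment sorts after none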

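# A short sketch of how the modules added in this commit are typically combined
# by an installer, assuming the standalone `packaging` distribution is
# importable; the wheel filename below is hypothetical. The wheel's tags are
# parsed with utils.parse_wheel_filename and intersected with the interpreter's
# supported tags from tags.sys_tags.
from packaging.tags import sys_tags
from packaging.utils import parse_wheel_filename

name, version, build, wheel_tags = parse_wheel_filename("example_pkg-1.0-py3-none-any.whl")
assert str(name) == "example-pkg" and str(version) == "1.0" and build == ()

# A wheel is installable on this interpreter if any of its tags is supported.
supported = set(sys_tags())
print("compatible:", not wheel_tags.isdisjoint(supported))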