BATOSAY Shell
Server IP : 170.10.162.208  /  Your IP : 216.73.216.181
Web Server : LiteSpeed
System : Linux altar19.supremepanel19.com 4.18.0-553.69.1.lve.el8.x86_64 #1 SMP Wed Aug 13 19:53:59 UTC 2025 x86_64
User : deltahospital (1806)
PHP Version : 7.4.33
Disable Function : NONE
MySQL : OFF  |  cURL : ON  |  WGET : ON  |  Perl : ON  |  Python : ON  |  Sudo : OFF  |  Pkexec : OFF
Directory :  /home/deltahospital/.cagefs/tmp/

Current File : /home/deltahospital/.cagefs/tmp/phpeZgHsp
"""Generate and work with PEP 425 Compatibility Tags."""
from __future__ import absolute_import

import distutils.util
import logging
import platform
import re
import sys
import sysconfig
import warnings
from collections import OrderedDict

import pip._internal.utils.glibc
from pip._internal.utils.compat import get_extension_suffixes
from pip._internal.utils.typing import MYPY_CHECK_RUNNING

if MYPY_CHECK_RUNNING:
    from typing import (
        Tuple, Callable, List, Optional, Union, Dict, Set
    )

    Pep425Tag = Tuple[str, str, str]

logger = logging.getLogger(__name__)

_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)')
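
# Illustrative note (not in the original source): this pattern splits a macOS
# platform tag into (name, major, minor, arch). For example,
# _osx_arch_pat.match('macosx_10_9_x86_64').groups() yields
# ('macosx', '10', '9', 'x86_64').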


def get_config_var(var):
    # type: (str) -> Optional[str]
    try:
        return sysconfig.get_config_var(var)
    except IOError as e:  # Issue #1074
        warnings.warn("{}".format(e), RuntimeWarning)
        return None


def get_abbr_impl():
    # type: () -> str
    """Return abbreviated implementation name."""
    if hasattr(sys, 'pypy_version_info'):
        pyimpl = 'pp'
    elif sys.platform.startswith('java'):
        pyimpl = 'jy'
    elif sys.platform == 'cli':
        pyimpl = 'ip'
    else:
        pyimpl = 'cp'
    return pyimpl


def version_info_to_nodot(version_info):
    # type: (Tuple[int, ...]) -> str
    # Only use up to the first two numbers.
    return ''.join(map(str, version_info[:2]))


def get_impl_ver():
    # type: () -> str
    """Return implementation version."""
    impl_ver = get_config_var("py_version_nodot")
    if not impl_ver or get_abbr_impl() == 'pp':
        impl_ver = ''.join(map(str, get_impl_version_info()))
    return impl_ver


def get_impl_version_info():
    # type: () -> Tuple[int, ...]
    """Return sys.version_info-like tuple for use in decrementing the minor
    version."""
    if get_abbr_impl() == 'pp':
        # as per https://github.com/pypa/pip/issues/2882
        # attrs exist only on pypy
        return (sys.version_info[0],
                sys.pypy_version_info.major,  # type: ignore
                sys.pypy_version_info.minor)  # type: ignore
    else:
        return sys.version_info[0], sys.version_info[1]


def get_impl_tag():
    # type: () -> str
    """
    Returns the Tag for this specific implementation.
    """
    return "{}{}".format(get_abbr_impl(), get_impl_ver())


def get_flag(var, fallback, expected=True, warn=True):
    # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool
    """Use a fallback method for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = get_config_var(var)
    if val is None:
        if warn:
            logger.debug("Config variable '%s' is unset, Python ABI tag may "
                         "be incorrect", var)
        return fallback()
    return val == expected


def get_abi_tag():
    # type: () -> Optional[str]
    """Return the ABI tag based on SOABI (if available) or emulate SOABI
    (CPython 2, PyPy)."""
    soabi = get_config_var('SOABI')
    impl = get_abbr_impl()
    abi = None  # type: Optional[str]

    if not soabi and impl in {'cp', 'pp'} and hasattr(sys, 'maxunicode'):
        d = ''
        m = ''
        u = ''
        is_cpython = (impl == 'cp')
        if get_flag(
                'Py_DEBUG', lambda: hasattr(sys, 'gettotalrefcount'),
                warn=is_cpython):
            d = 'd'
        if sys.version_info < (3, 8) and get_flag(
                'WITH_PYMALLOC', lambda: is_cpython, warn=is_cpython):
            m = 'm'
        if sys.version_info < (3, 3) and get_flag(
                'Py_UNICODE_SIZE', lambda: sys.maxunicode == 0x10ffff,
                expected=4, warn=is_cpython):
            u = 'u'
        abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u)
    elif soabi and soabi.startswith('cpython-'):
        abi = 'cp' + soabi.split('-')[1]
    elif soabi:
        abi = soabi.replace('.', '_').replace('-', '_')

    return abi
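
# Illustrative example (not part of the original module): on a CPython 3.7
# build whose SOABI is 'cpython-37m-x86_64-linux-gnu' this returns 'cp37m',
# while a CPython 2.7 UCS-4 build without SOABI falls into the emulation
# branch and typically yields 'cp27mu' (pymalloc 'm' plus wide-unicode 'u').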


def _is_running_32bit():
    # type: () -> bool
    return sys.maxsize == 2147483647


def get_platform():
    # type: () -> str
    """Return our platform name 'win32', 'linux_x86_64'"""
    if sys.platform == 'darwin':
        # distutils.util.get_platform() returns the release based on the value
        # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may
        # be significantly older than the user's current machine.
        release, _, machine = platform.mac_ver()
        split_ver = release.split('.')

        if machine == "x86_64" and _is_running_32bit():
            machine = "i386"
        elif machine == "ppc64" and _is_running_32bit():
            machine = "ppc"

        return 'macosx_{}_{}_{}'.format(split_ver[0], split_ver[1], machine)

    # XXX remove distutils dependency
    result = distutils.util.get_platform().replace('.', '_').replace('-', '_')
    if result == "linux_x86_64" and _is_running_32bit():
        # 32 bit Python program (running on a 64 bit Linux): pip should only
        # install and run 32 bit compiled extensions in that case.
        result = "linux_i686"

    return result
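
# Illustrative example (not part of the original module): a 64-bit CPython on
# Linux x86_64 returns 'linux_x86_64', a 32-bit interpreter on the same
# machine reports 'linux_i686', and a 64-bit build on macOS 10.14 returns
# 'macosx_10_14_x86_64'.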


def is_linux_armhf():
    # type: () -> bool
    if get_platform() != "linux_armv7l":
        return False
    # hard-float ABI can be detected from the ELF header of the running
    # process
    try:
        with open(sys.executable, 'rb') as f:
            elf_header_raw = f.read(40)  # read 40 first bytes of ELF header
    except (IOError, OSError, TypeError):
        return False
    if elf_header_raw is None or len(elf_header_raw) < 40:
        return False
    if isinstance(elf_header_raw, str):
        elf_header = [ord(c) for c in elf_header_raw]
    else:
        elf_header = [b for b in elf_header_raw]
    result = elf_header[0:4] == [0x7f, 0x45, 0x4c, 0x46]  # ELF magic number
    result &= elf_header[4:5] == [1]  # 32-bit ELF
    result &= elf_header[5:6] == [1]  # little-endian
    result &= elf_header[18:20] == [0x28, 0]  # ARM machine
    result &= elf_header[39:40] == [5]  # ARM EABIv5
    result &= (elf_header[37:38][0] & 4) == 4  # EF_ARM_ABI_FLOAT_HARD
    return result


def is_manylinux1_compatible():
    # type: () -> bool
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux1_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 5 uses glibc 2.5.
    return pip._internal.utils.glibc.have_compatible_glibc(2, 5)


def is_manylinux2010_compatible():
    # type: () -> bool
    # Only Linux, and only x86-64 / i686
    if get_platform() not in {"linux_x86_64", "linux_i686"}:
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux2010_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 6 uses glibc 2.12.
    return pip._internal.utils.glibc.have_compatible_glibc(2, 12)


def is_manylinux2014_compatible():
    # type: () -> bool
    # Only Linux, and only supported architectures
    platform = get_platform()
    if platform not in {"linux_x86_64", "linux_i686", "linux_aarch64",
                        "linux_armv7l", "linux_ppc64", "linux_ppc64le",
                        "linux_s390x"}:
        return False

    # Check for the hard-float ABI when running on linux_armv7l, so that we do
    # not install a hard-float ABI wheel into a soft-float ABI environment.
    if platform == "linux_armv7l" and not is_linux_armhf():
        return False

    # Check for presence of _manylinux module
    try:
        import _manylinux
        return bool(_manylinux.manylinux2014_compatible)
    except (ImportError, AttributeError):
        # Fall through to heuristic check below
        pass

    # Check glibc version. CentOS 7 uses glibc 2.17.
    return pip._internal.utils.glibc.have_compatible_glibc(2, 17)


def get_darwin_arches(major, minor, machine):
    # type: (int, int, str) -> List[str]
    """Return a list of supported arches (including group arches) for
    the given major, minor and machine architecture of a macOS machine.
    """
    arches = []

    def _supports_arch(major, minor, arch):
        # type: (int, int, str) -> bool
        # Looking at the application support for macOS versions in the chart
        # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears
        # our timeline looks roughly like:
        #
        # 10.0 - Introduces ppc support.
        # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64
        #        and x86_64 support is CLI only, and cannot be used for GUI
        #        applications.
        # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications.
        # 10.6 - Drops support for ppc64
        # 10.7 - Drops support for ppc
        #
        # Given that we do not know if we're installing a CLI or a GUI
        # application, we must be conservative and assume it might be a GUI
        # application and behave as if ppc64 and x86_64 support did not occur
        # until 10.5.
        #
        # Note: The above information is taken from the "Application support"
        #       column in the chart, not the "Processor support" column, since
        #       we care about which instruction sets an application can use,
        #       not which processors the OS supports.
        if arch == 'ppc':
            return (major, minor) <= (10, 5)
        if arch == 'ppc64':
            return (major, minor) == (10, 5)
        if arch == 'i386':
            return (major, minor) >= (10, 4)
        if arch == 'x86_64':
            return (major, minor) >= (10, 5)
        if arch in groups:
            for garch in groups[arch]:
                if _supports_arch(major, minor, garch):
                    return True
        return False

    groups = OrderedDict([
        ("fat", ("i386", "ppc")),
        ("intel", ("x86_64", "i386")),
        ("fat64", ("x86_64", "ppc64")),
        ("fat32", ("x86_64", "i386", "ppc")),
    ])  # type: Dict[str, Tuple[str, ...]]

    if _supports_arch(major, minor, machine):
        arches.append(machine)

    for garch in groups:
        if machine in groups[garch] and _supports_arch(major, minor, garch):
            arches.append(garch)

    arches.append('universal')

    return arches
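
# Worked example (not part of the original module): for a 64-bit Python on
# macOS 10.9, get_darwin_arches(10, 9, 'x86_64') returns
# ['x86_64', 'intel', 'fat64', 'fat32', 'universal']: the specific arch first,
# then every group arch containing a supported member, then the catch-all
# 'universal'.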


def get_all_minor_versions_as_strings(version_info):
    # type: (Tuple[int, ...]) -> List[str]
    versions = []
    major = version_info[:-1]
    # Support all previous minor Python versions.
    for minor in range(version_info[-1], -1, -1):
        versions.append(''.join(map(str, major + (minor,))))
    return versions
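
# Worked example (not part of the original module): for version_info (3, 7)
# this returns ['37', '36', '35', '34', '33', '32', '31', '30'], i.e. the
# current minor version first, counting down to X.0.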


def get_supported(
    versions=None,  # type: Optional[List[str]]
    noarch=False,  # type: bool
    platform=None,  # type: Optional[str]
    impl=None,  # type: Optional[str]
    abi=None  # type: Optional[str]
):
    # type: (...) -> List[Pep425Tag]
    """Return a list of supported tags for each version specified in
    `versions`.

    :param versions: a list of string versions, of the form ["33", "32"],
        or None. The first version will be assumed to support our ABI.
    :param platform: specify the exact platform you want valid
        tags for, or None. If None, use the local system platform.
    :param impl: specify the exact implementation you want valid
        tags for, or None. If None, use the local interpreter impl.
    :param abi: specify the exact abi you want valid
        tags for, or None. If None, use the local interpreter abi.
    """
    supported = []

    # Versions must be given with respect to the preference
    if versions is None:
        version_info = get_impl_version_info()
        versions = get_all_minor_versions_as_strings(version_info)

    impl = impl or get_abbr_impl()

    abis = []  # type: List[str]

    abi = abi or get_abi_tag()
    if abi:
        abis[0:0] = [abi]

    abi3s = set()  # type: Set[str]
    for suffix in get_extension_suffixes():
        if suffix.startswith('.abi'):
            abi3s.add(suffix.split('.', 2)[1])

    abis.extend(sorted(list(abi3s)))

    abis.append('none')

    if not noarch:
        arch = platform or get_platform()
        arch_prefix, arch_sep, arch_suffix = arch.partition('_')
        if arch.startswith('macosx'):
            # support macosx-10.6-intel on macosx-10.9-x86_64
            match = _osx_arch_pat.match(arch)
            if match:
                name, major, minor, actual_arch = match.groups()
                tpl = '{}_{}_%i_%s'.format(name, major)
                arches = []
                for m in reversed(range(int(minor) + 1)):
                    for a in get_darwin_arches(int(major), m, actual_arch):
                        arches.append(tpl % (m, a))
            else:
                # arch pattern didn't match (?!)
                arches = [arch]
        elif arch_prefix == 'manylinux2014':
            arches = [arch]
            # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
            # with the exception of wheels depending on ncurses. PEP 599 states
            # manylinux1/manylinux2010 wheels should be considered
            # manylinux2014 wheels:
            # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
            if arch_suffix in {'i686', 'x86_64'}:
                arches.append('manylinux2010' + arch_sep + arch_suffix)
                arches.append('manylinux1' + arch_sep + arch_suffix)
        elif arch_prefix == 'manylinux2010':
            # manylinux1 wheels run on most manylinux2010 systems with the
            # exception of wheels depending on ncurses. PEP 571 states
            # manylinux1 wheels should be considered manylinux2010 wheels:
            # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
            arches = [arch, 'manylinux1' + arch_sep + arch_suffix]
        elif platform is None:
            arches = []
            if is_manylinux2014_compatible():
                arches.append('manylinux2014' + arch_sep + arch_suffix)
            if is_manylinux2010_compatible():
                arches.append('manylinux2010' + arch_sep + arch_suffix)
            if is_manylinux1_compatible():
                arches.append('manylinux1' + arch_sep + arch_suffix)
            arches.append(arch)
        else:
            arches = [arch]

        # Current version, current API (built specifically for our Python):
        for abi in abis:
            for arch in arches:
                supported.append(('%s%s' % (impl, versions[0]), abi, arch))

        # abi3 modules compatible with older versions of Python
        for version in versions[1:]:
            # abi3 was introduced in Python 3.2
            if version in {'31', '30'}:
                break
            for abi in abi3s:   # empty set if not Python 3
                for arch in arches:
                    supported.append(("%s%s" % (impl, version), abi, arch))

        # Has binaries, does not use the Python API:
        for arch in arches:
            supported.append(('py%s' % (versions[0][0]), 'none', arch))

    # No abi / arch, but requires our implementation:
    supported.append(('%s%s' % (impl, versions[0]), 'none', 'any'))
    # Tagged specifically as being cross-version compatible
    # (with just the major version specified)
    supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))

    # No abi / arch, generic Python
    for i, version in enumerate(versions):
        supported.append(('py%s' % (version,), 'none', 'any'))
        if i == 0:
            supported.append(('py%s' % (version[0]), 'none', 'any'))

    return supported
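
# Illustrative sketch (not part of the original module; the exact list depends
# on the interpreter, ABI and platform): on a CPython 3.7 Linux x86_64 system
# the returned list typically starts with the most specific combinations, e.g.
# ('cp37', 'cp37m', 'manylinux1_x86_64') and ('cp37', 'cp37m', 'linux_x86_64'),
# and finishes with the generic tags ('py37', 'none', 'any'),
# ('py3', 'none', 'any'), ('py36', 'none', 'any'), ... down to
# ('py30', 'none', 'any').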


implementation_tag = get_impl_tag()
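
# (The code above corresponds to pip's pep425tags module; the source that
# follows appears to be a second module, pip's wheel installation support,
# concatenated into the same dump.)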
"""
Support for installing and building the "wheel" binary package format.
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
# mypy: disallow-untyped-defs=False

from __future__ import absolute_import

import collections
import compileall
import csv
import hashlib
import logging
import os.path
import re
import shutil
import stat
import sys
import warnings
from base64 import urlsafe_b64encode
from email.parser import Parser

from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six import StringIO

from pip._internal import pep425tags
from pip._internal.exceptions import (
    InstallationError,
    InvalidWheelFilename,
    UnsupportedWheel,
)
from pip._internal.locations import distutils_scheme, get_major_minor_version
from pip._internal.models.link import Link
from pip._internal.utils.logging import indent_log
from pip._internal.utils.marker_files import has_delete_marker_file
from pip._internal.utils.misc import captured_stdout, ensure_dir, read_chunks
from pip._internal.utils.setuptools_build import make_setuptools_shim_args
from pip._internal.utils.subprocess import (
    LOG_DIVIDER,
    call_subprocess,
    format_command_args,
    runner_with_spinner_message,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.ui import open_spinner
from pip._internal.utils.unpacking import unpack_file
from pip._internal.utils.urls import path_to_url

if MYPY_CHECK_RUNNING:
    from typing import (
        Dict, List, Optional, Sequence, Mapping, Tuple, IO, Text, Any,
        Iterable, Callable, Set,
    )
    from pip._vendor.packaging.requirements import Requirement
    from pip._internal.req.req_install import InstallRequirement
    from pip._internal.operations.prepare import (
        RequirementPreparer
    )
    from pip._internal.cache import WheelCache
    from pip._internal.pep425tags import Pep425Tag

    InstalledCSVRow = Tuple[str, ...]

    BinaryAllowedPredicate = Callable[[InstallRequirement], bool]


VERSION_COMPATIBLE = (1, 0)


logger = logging.getLogger(__name__)


def normpath(src, p):
    return os.path.relpath(src, p).replace(os.path.sep, '/')


def hash_file(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[Any, int]
    """Return (hash, length) for path using hashlib.sha256()"""
    h = hashlib.sha256()
    length = 0
    with open(path, 'rb') as f:
        for block in read_chunks(f, size=blocksize):
            length += len(block)
            h.update(block)
    return (h, length)  # type: ignore


def rehash(path, blocksize=1 << 20):
    # type: (str, int) -> Tuple[str, str]
    """Return (encoded_digest, length) for path using hashlib.sha256()"""
    h, length = hash_file(path, blocksize)
    digest = 'sha256=' + urlsafe_b64encode(
        h.digest()
    ).decode('latin1').rstrip('=')
    # unicode/str python2 issues
    return (digest, str(length))  # type: ignore
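
# Illustrative example (not part of the original module): for an empty file,
# rehash() returns ('sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU', '0'),
# i.e. the urlsafe-base64 SHA-256 digest with '=' padding stripped (the form
# used in a wheel's RECORD file) plus the file length as a string.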


def open_for_csv(name, mode):
    # type: (str, Text) -> IO
    if sys.version_info[0] < 3:
        nl = {}  # type: Dict[str, Any]
        bin = 'b'
    else:
        nl = {'newline': ''}  # type: Dict[str, Any]
        bin = ''
    return open(name, mode + bin, **nl)


def replace_python_tag(wheelname, new_tag):
    # type: (str, str) -> str
    """Replace the Python tag in a wheel file name with a new value.
    """
    parts = wheelname.split('-')
    parts[-3] = new_tag
    return '-'.join(parts)
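
# Worked example (not part of the original module):
# replace_python_tag('pip-19.3-py2.py3-none-any.whl', 'py3') splits the name
# on '-', overwrites the third-from-last component (the python tag) and
# returns 'pip-19.3-py3-none-any.whl'.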


def fix_script(path):
    # type: (str) -> Optional[bool]
    """Replace #!python with #!/path/to/python
    Return True if file was changed."""
    # XXX RECORD hashes will need to be updated
    if os.path.isfile(path):
        with open(path, 'rb') as script:
            firstline = script.readline()
            if not firstline.startswith(b'#!python'):
                return False
            exename = sys.executable.encode(sys.getfilesystemencoding())
            firstline = b'#!' + exename + os.linesep.encode("ascii")
            rest = script.read()
        with open(path, 'wb') as script:
            script.write(firstline)
            script.write(rest)
        return True
    return None
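
# Illustrative example (not part of the original module): a script whose first
# line is b'#!python' has that line rewritten to the absolute interpreter path
# taken from sys.executable (e.g. '#!/usr/bin/python3'); scripts with any
# other shebang are left untouched and the function returns False.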


dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
                                \.dist-info$""", re.VERBOSE)


def root_is_purelib(name, wheeldir):
    # type: (str, str) -> bool
    """
    Return True if the extracted wheel in wheeldir should go into purelib.
    """
    name_folded = name.replace("-", "_")
    for item in os.listdir(wheeldir):
        match = dist_info_re.match(item)
        if match and match.group('name') == name_folded:
            with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel:
                for line in wheel:
                    line = line.lower().rstrip()
                    if line == "root-is-purelib: true":
                        return True
    return False
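
# Illustrative example (not part of the original module): for a wheel
# extracted into wheeldir containing 'foo_bar-1.0.dist-info/WHEEL' (a
# hypothetical name) with the line 'Root-Is-Purelib: true',
# root_is_purelib('foo-bar', wheeldir) returns True; any other value, or a
# missing line, yields False.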


def get_entrypoints(filename):
    # type: (str) -> Tuple[Dict[str, str], Dict[str, str]]
    if not os.path.exists(filename):
        return {}, {}

    # This is done because you can pass a string to entry_points wrappers which
    # means that they may or may not be valid INI files. The attempt here is to
    # strip leading and trailing whitespace in order to make them valid INI
    # files.
    with open(filename) as fp:
        data = StringIO()
        for line in fp:
            data.write(line.strip())
            data.write("\n")
        data.seek(0)

    # get the entry points and then the script names
    entry_points = pkg_resources.EntryPoint.parse_map(data)
    console = entry_points.get('console_scripts', {})
    gui = entry_points.get('gui_scripts', {})

    def _split_ep(s):
        """get the string representation of EntryPoint, remove space and split
        on '='"""
        return str(s).replace(" ", "").split("=")

    # convert the EntryPoint objects into strings with module:function
    console = dict(_split_ep(v) for v in console.values())
    gui = dict(_split_ep(v) for v in gui.values())
    return console, gui
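
# Illustrative example (not part of the original module): given an
# entry_points.txt containing the hypothetical section
#
#   [console_scripts]
#   foo = foo.cli:main
#
# get_entrypoints() returns ({'foo': 'foo.cli:main'}, {}): a dict of console
# scripts and a dict of GUI scripts, each mapping script name to
# 'module:function'.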


def message_about_scripts_not_on_PATH(scripts):
    # type: (Sequence[str]) -> Optional[str]
    """Determine if any scripts are not on PATH and format a warning.

    Returns a warning message if one or more scripts are not on PATH,
    otherwise None.
    """
    if not scripts:
        return None

    # Group scripts by the path they were installed in
    grouped_by_dir = collections.defaultdict(set)  # type: Dict[str, Set[str]]
    for destfile in scripts:
        parent_dir = os.path.dirname(destfile)
        script_name = os.path.basename(destfile)
        grouped_by_dir[parent_dir].add(script_name)

    # We don't want to warn for directories that are on PATH.
    not_warn_dirs = [
        os.path.normcase(i).rstrip(os.sep) for i in
        os.environ.get("PATH", "").split(os.pathsep)
    ]
    # If an executable sits with sys.executable, we don't warn for it.
    #     This covers the case of venv invocations without activating the venv.
    not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
    warn_for = {
        parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
        if os.path.normcase(parent_dir) not in not_warn_dirs
    }  # type: Dict[str, Set[str]]
    if not warn_for:
        return None

    # Format a message
    msg_lines = []
    for parent_dir, dir_scripts in warn_for.items():
        sorted_scripts = sorted(dir_scripts)  # type: List[str]
        if len(sorted_scripts) == 1:
            start_text = "script {} is".format(sorted_scripts[0])
        else:
            start_text = "scripts {} are".format(
                ", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
            )

        msg_lines.append(
            "The {} installed in '{}' which is not on PATH."
            .format(start_text, parent_dir)
        )

    last_line_fmt = (
        "Consider adding {} to PATH or, if you prefer "
        "to suppress this warning, use --no-warn-script-location."
    )
    if len(msg_lines) == 1:
        msg_lines.append(last_line_fmt.format("this directory"))
    else:
        msg_lines.append(last_line_fmt.format("these directories"))

    # Returns the formatted multiline message
    return "\n".join(msg_lines)


def sorted_outrows(outrows):
    # type: (Iterable[InstalledCSVRow]) -> List[InstalledCSVRow]
    """
    Return the given rows of a RECORD file in sorted order.

    Each row is a 3-tuple (path, hash, size) and corresponds to a record of
    a RECORD file (see PEP 376 and PEP 427 for details).  For the rows
    passed to this function, the size can be an integer (as an int or a str)
    or the empty string.
    """
    # Normally, there should only be one row per path, in which case the
    # second and third elements don't come into play when sorting.
    # However, in cases in the wild where a path might happen to occur twice,
    # we don't want the sort operation to trigger an error (but still want
    # determinism).  Since the third element can be an int or string, we
    # coerce each element to a string to avoid a TypeError in this case.
    # For additional background, see--
    # https://github.com/pypa/pip/issues/5868
    return sorted(outrows, key=lambda row: tuple(str(x) for x in row))
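
# Worked example (not part of the original module): with a duplicated path and
# mixed size types, sorted_outrows([('a.py', 'h', 20), ('a.py', 'h', '10')])
# coerces every element to str for the sort key and returns
# [('a.py', 'h', '10'), ('a.py', 'h', 20)], whereas a naive sort would raise
# TypeError on Python 3 when comparing 20 with '10'.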


def get_csv_rows_for_installed(
    old_csv_rows,  # type: Iterable[List[str]]
    installed,  # type: Dict[str, str]
    changed,  # type: set
    generated,  # type: List[str]
    lib_dir,  # type: str
):
    # type: (...) -> List[InstalledCSVRow]
    """
    :param installed: A map from archive RECORD path to installation RECORD
        path.
    """
    installed_rows = []  # type: List[InstalledCSVRow]
    for row in old_csv_rows:
        if len(row) > 3:
            logger.warning(
                'RECORD line has more than three elements: {}'.format(row)
            )
        # Make a copy because we are mutating the row.
        row = list(row)
        old_path = row[0]
        new_path = installed.pop(old_path, old_path)
        row[0] = new_path
        if new_path in changed:
            digest, length = rehash(new_path)
            row[1] = digest
            row[2] = length
        installed_rows.append(tuple(row))
    for f in generated:
        digest, length = rehash(f)
        installed_rows.append((normpath(f, lib_dir), digest, str(length)))
    for f in installed:
        installed_rows.append((installed[f], '', ''))
    return installed_rows


class MissingCallableSuffix(Exception):
    pass


def _raise_for_invalid_entrypoint(specification):
    entry = get_export_entry(specification)
    if entry is not None and entry.suffix is None:
        raise MissingCallableSuffix(str(entry))


class PipScriptMaker(ScriptMaker):
    def make(self, specification, options=None):
        _raise_for_invalid_entrypoint(specification)
        return super(PipScriptMaker, self).make(specification, options)


def move_wheel_files(
    name,  # type: str
    req,  # type: Requirement
    wheeldir,  # type: str
    user=False,  # type: bool
    home=None,  # type: Optional[str]
    root=None,  # type: Optional[str]
    pycompile=True,  # type: bool
    scheme=None,  # type: Optional[Mapping[str, str]]
    isolated=False,  # type: bool
    prefix=None,  # type: Optional[str]
    warn_script_location=True  # type: bool
):
    # type: (...) -> None
    """Install a wheel"""
    # TODO: Investigate and break this up.
    # TODO: Look into moving this into a dedicated class for representing an
    #       installation.

    if not scheme:
        scheme = distutils_scheme(
            name, user=user, home=home, root=root, isolated=isolated,
            prefix=prefix,
        )

    if root_is_purelib(name, wheeldir):
        lib_dir = scheme['purelib']
    else:
        lib_dir = scheme['platlib']

    info_dir = []  # type: List[str]
    data_dirs = []
    source = wheeldir.rstrip(os.path.sep) + os.path.sep

    # Record details of the files moved
    #   installed = files copied from the wheel to the destination
    #   changed = files changed while installing (scripts #! line typically)
    #   generated = files newly generated during the install (script wrappers)
    installed = {}  # type: Dict[str, str]
    changed = set()
    generated = []  # type: List[str]

    # Compile all of the pyc files that we're going to be installing
    if pycompile:
        with captured_stdout() as stdout:
            with warnings.catch_warnings():
                warnings.filterwarnings('ignore')
                compileall.compile_dir(source, force=True, quiet=True)
        logger.debug(stdout.getvalue())

    def record_installed(srcfile, destfile, modified=False):
        """Map archive RECORD paths to installation RECORD paths."""
        oldpath = normpath(srcfile, wheeldir)
        newpath = normpath(destfile, lib_dir)
        installed[oldpath] = newpath
        if modified:
            changed.add(destfile)

    def clobber(source, dest, is_base, fixer=None, filter=None):
        ensure_dir(dest)  # common for the 'include' path

        for dir, subdirs, files in os.walk(source):
            basedir = dir[len(source):].lstrip(os.path.sep)
            destdir = os.path.join(dest, basedir)
            if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'):
                continue
            for s in subdirs:
                destsubdir = os.path.join(dest, basedir, s)
                if is_base and basedir == '' and destsubdir.endswith('.data'):
                    data_dirs.append(s)
                    continue
                elif (is_base and
                        s.endswith('.dist-info') and
                        canonicalize_name(s).startswith(
                            canonicalize_name(req.name))):
                    assert not info_dir, ('Multiple .dist-info directories: ' +
                                          destsubdir + ', ' +
                                          ', '.join(info_dir))
                    info_dir.append(destsubdir)
            for f in files:
                # Skip unwanted files
                if filter and filter(f):
                    continue
                srcfile = os.path.join(dir, f)
                destfile = os.path.join(dest, basedir, f)
                # directory creation is lazy and after the file filtering above
                # to ensure we don't install empty dirs; empty dirs can't be
                # uninstalled.
                ensure_dir(destdir)

                # copyfile (called below) truncates the destination if it
                # exists and then writes the new contents. This is fine in most
                # cases, but can cause a segfault if pip has loaded a shared
                # object (e.g. from pyopenssl through its vendored urllib3)
                # Since the shared object is mmap'd an attempt to call a
                # symbol in it will then cause a segfault. Unlinking the file
                # allows writing of new contents while allowing the process to
                # continue to use the old copy.
                if os.path.exists(destfile):
                    os.unlink(destfile)

                # We use copyfile (not move, copy, or copy2) to be extra sure
                # that we are not moving directories over (copyfile fails for
                # directories) as well as to ensure that we are not copying
                # over any metadata because we want more control over what
                # metadata we actually copy over.
                shutil.copyfile(srcfile, destfile)

                # Copy over the metadata for the file, currently this only
                # includes the atime and mtime.
                st = os.stat(srcfile)
                if hasattr(os, "utime"):
                    os.utime(destfile, (st.st_atime, st.st_mtime))

                # If our file is executable, then make our destination file
                # executable.
                if os.access(srcfile, os.X_OK):
                    st = os.stat(srcfile)
                    permissions = (
                        st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
                    )
                    os.chmod(destfile, permissions)
