init
This commit is contained in:
commit
38355d2442
9083 changed files with 1225834 additions and 0 deletions
|
|
@ -0,0 +1 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,118 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from abc import ABCMeta
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
from virtualenv.util.path import Path
|
||||
from virtualenv.util.six import ensure_str, ensure_text
|
||||
|
||||
from ..seeder import Seeder
|
||||
from ..wheels import Version
|
||||
|
||||
# Whether the periodic (every 14 days) wheel update is on unless the user opts out.
PERIODIC_UPDATE_ON_BY_DEFAULT = True


@add_metaclass(ABCMeta)
class BaseEmbed(Seeder):
    """Base class for seeders that install the embedded seed wheels (pip/setuptools/wheel)."""

    def __init__(self, options):
        """
        :param options: parsed CLI options; ``no_seed`` inverted becomes the enabled flag
        """
        super(BaseEmbed, self).__init__(options, enabled=options.no_seed is False)

        self.download = options.download
        # only keep extra search dirs that actually exist, resolved to absolute paths
        self.extra_search_dir = [i.resolve() for i in options.extra_search_dir if i.exists()]

        self.pip_version = options.pip
        self.setuptools_version = options.setuptools
        self.wheel_version = options.wheel

        self.no_pip = options.no_pip
        self.no_setuptools = options.no_setuptools
        self.no_wheel = options.no_wheel
        self.app_data = options.app_data
        self.periodic_update = not options.no_periodic_update

        # if every distribution is disabled there is nothing to seed
        if not self.distribution_to_versions():
            self.enabled = False

    @classmethod
    def distributions(cls):
        """Return the mapping of seed distribution name to its default version source."""
        return {
            "pip": Version.bundle,
            "setuptools": Version.bundle,
            "wheel": Version.bundle,
        }

    def distribution_to_versions(self):
        """Return distribution -> requested version, for distributions not disabled via --no-<dist>."""
        return {
            distribution: getattr(self, "{}_version".format(distribution))
            for distribution in self.distributions()
            if getattr(self, "no_{}".format(distribution)) is False
        }

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        """Register CLI flags controlling download, search dirs, per-distribution versions and updates."""
        group = parser.add_mutually_exclusive_group()
        group.add_argument(
            "--no-download",
            "--never-download",
            dest="download",
            action="store_false",
            help="pass to disable download of the latest {} from PyPI".format("/".join(cls.distributions())),
            default=True,
        )
        group.add_argument(
            "--download",
            dest="download",
            action="store_true",
            help="pass to enable download of the latest {} from PyPI".format("/".join(cls.distributions())),
            default=False,
        )
        parser.add_argument(
            "--extra-search-dir",
            metavar="d",
            type=Path,
            nargs="+",
            help="a path containing wheels to extend the internal wheel list (can be set 1+ times)",
            default=[],
        )
        for distribution, default in cls.distributions().items():
            parser.add_argument(
                "--{}".format(distribution),
                dest=distribution,
                metavar="version",
                help="version of {} to install as seed: embed, bundle or exact version".format(distribution),
                default=default,
            )
        for distribution in cls.distributions():
            parser.add_argument(
                "--no-{}".format(distribution),
                dest="no_{}".format(distribution),
                action="store_true",
                help="do not install {}".format(distribution),
                default=False,
            )
        parser.add_argument(
            "--no-periodic-update",
            dest="no_periodic_update",
            action="store_true",
            help="disable the periodic (once every 14 days) update of the embedded wheels",
            default=not PERIODIC_UPDATE_ON_BY_DEFAULT,
        )

    def __unicode__(self):
        # build a human readable summary like "ClassName(download=True, pip=latest, ...)"
        result = self.__class__.__name__
        result += "("
        if self.extra_search_dir:
            result += "extra_search_dir={},".format(", ".join(ensure_text(str(i)) for i in self.extra_search_dir))
        result += "download={},".format(self.download)
        for distribution in self.distributions():
            if getattr(self, "no_{}".format(distribution)):
                continue
            result += " {}{},".format(
                distribution,
                "={}".format(getattr(self, "{}_version".format(distribution), None) or "latest"),
            )
        # drop trailing comma before closing the parenthesis
        return result[:-1] + ")"

    def __repr__(self):
        return ensure_str(self.__unicode__())
|
||||
|
|
@ -0,0 +1,57 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
from contextlib import contextmanager
|
||||
|
||||
from virtualenv.discovery.cached_py_info import LogCmd
|
||||
from virtualenv.seed.embed.base_embed import BaseEmbed
|
||||
from virtualenv.util.subprocess import Popen
|
||||
|
||||
from ..wheels import Version, get_wheel, pip_wheel_env_run
|
||||
|
||||
|
||||
class PipInvoke(BaseEmbed):
    """Seed a virtual environment by invoking ``pip install`` inside it."""

    def __init__(self, options):
        super(PipInvoke, self).__init__(options)

    def run(self, creator):
        """Run ``pip install`` within the created environment for the requested seed wheels."""
        if not self.enabled:
            return
        for_py_version = creator.interpreter.version_release_str
        with self.get_pip_install_cmd(creator.exe, for_py_version) as cmd:
            env = pip_wheel_env_run(self.extra_search_dir, self.app_data, self.env)
            self._execute(cmd, env)

    @staticmethod
    def _execute(cmd, env):
        """Run the pip command; raise RuntimeError on a non-zero exit code."""
        logging.debug("pip seed by running: %s", LogCmd(cmd, env))
        process = Popen(cmd, env=env)
        process.communicate()
        if process.returncode != 0:
            raise RuntimeError("failed seed with code {}".format(process.returncode))
        return process

    @contextmanager
    def get_pip_install_cmd(self, exe, for_py_version):
        """Yield the full pip install command line, resolving each seed wheel beforehand."""
        cmd = [str(exe), "-m", "pip", "-q", "install", "--only-binary", ":all:", "--disable-pip-version-check"]
        if not self.download:
            cmd.append("--no-index")
        folders = set()
        for dist, version in self.distribution_to_versions().items():
            wheel = get_wheel(
                distribution=dist,
                version=version,
                for_py_version=for_py_version,
                search_dirs=self.extra_search_dir,
                download=False,
                app_data=self.app_data,
                do_periodic_update=self.periodic_update,
                env=self.env,
            )
            if wheel is None:
                raise RuntimeError("could not get wheel for distribution {}".format(dist))
            folders.add(str(wheel.path.parent))
            cmd.append(Version.as_pip_req(dist, wheel.version))
        # point pip at every folder containing a resolved wheel
        for folder in sorted(folders):
            cmd.extend(["--find-links", str(folder)])
        yield cmd
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,200 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import zipfile
|
||||
from abc import ABCMeta, abstractmethod
|
||||
from itertools import chain
|
||||
from tempfile import mkdtemp
|
||||
|
||||
from distlib.scripts import ScriptMaker, enquote_executable
|
||||
from six import PY3, add_metaclass
|
||||
|
||||
from virtualenv.util import ConfigParser
|
||||
from virtualenv.util.path import Path, safe_delete
|
||||
from virtualenv.util.six import ensure_text
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class PipInstall(object):
    """Install a wheel into a virtual environment via an extracted "install image" folder.

    Subclasses decide how image files reach the environment (copy vs symlink) by
    implementing :meth:`_sync` and :meth:`_fix_records`.
    """

    def __init__(self, wheel, creator, image_folder):
        self._wheel = wheel
        self._creator = creator
        self._image_dir = image_folder
        self._extracted = False
        self.__dist_info = None  # lazily discovered .dist-info folder within the image
        self._console_entry_points = None  # lazily parsed console_scripts mapping

    @abstractmethod
    def _sync(self, src, dst):
        """Transfer one top-level image entry into the environment (copy or symlink)."""
        raise NotImplementedError

    def install(self, version_info):
        """Install the built image into the environment and generate console scripts."""
        self._extracted = True
        self._uninstall_previous_version()
        # sync image
        for filename in self._image_dir.iterdir():
            into = self._creator.purelib / filename.name
            self._sync(filename, into)
        # generate console executables
        consoles = set()
        script_dir = self._creator.script_dir
        for name, module in self._console_scripts.items():
            consoles.update(self._create_console_entry_point(name, module, script_dir, version_info))
        logging.debug("generated console scripts %s", " ".join(i.name for i in consoles))

    def build_image(self):
        """Extract the wheel into the image folder and amend its metadata."""
        # 1. first extract the wheel
        logging.debug("build install image for %s to %s", self._wheel.name, self._image_dir)
        with zipfile.ZipFile(str(self._wheel)) as zip_ref:
            self._shorten_path_if_needed(zip_ref)
            zip_ref.extractall(str(self._image_dir))
            self._extracted = True
        # 2. now add additional files not present in the distribution
        new_files = self._generate_new_files()
        # 3. finally fix the records file
        self._fix_records(new_files)

    def _shorten_path_if_needed(self, zip_ref):
        """On Windows switch to the 8.3 short path if extraction would exceed MAX_PATH."""
        if os.name == "nt":
            to_folder = str(self._image_dir)
            # https://docs.microsoft.com/en-us/windows/win32/fileio/maximum-file-path-limitation
            zip_max_len = max(len(i) for i in zip_ref.namelist())
            path_len = zip_max_len + len(to_folder)
            if path_len > 260:
                self._image_dir.mkdir(exist_ok=True)  # to get a short path must exist

                from virtualenv.util.path import get_short_path_name

                to_folder = get_short_path_name(to_folder)
                self._image_dir = Path(to_folder)

    def _records_text(self, files):
        """Render RECORD lines (path,,) for *files*, relative to the image dir."""
        record_data = "\n".join(
            "{},,".format(os.path.relpath(ensure_text(str(rec)), ensure_text(str(self._image_dir)))) for rec in files
        )
        return record_data

    def _generate_new_files(self):
        """Create INSTALLER/marker files and console scripts; return the set of new paths."""
        new_files = set()
        installer = self._dist_info / "INSTALLER"
        installer.write_text("pip\n")
        new_files.add(installer)
        # inject a no-op root element, as workaround for bug in https://github.com/pypa/pip/issues/7226
        marker = self._image_dir / "{}.virtualenv".format(self._dist_info.stem)
        marker.write_text("")
        new_files.add(marker)
        folder = mkdtemp()
        try:
            # generate console scripts into a throwaway folder just to learn their names/paths
            to_folder = Path(folder)
            rel = os.path.relpath(ensure_text(str(self._creator.script_dir)), ensure_text(str(self._creator.purelib)))
            version_info = self._creator.interpreter.version_info
            for name, module in self._console_scripts.items():
                new_files.update(
                    Path(os.path.normpath(ensure_text(str(self._image_dir / rel / i.name))))
                    for i in self._create_console_entry_point(name, module, to_folder, version_info)
                )
        finally:
            safe_delete(folder)
        return new_files

    @property
    def _dist_info(self):
        """The extracted wheel's ``.dist-info`` folder; raises if none is present."""
        if self._extracted is False:
            return None  # pragma: no cover
        if self.__dist_info is None:
            files = []
            for filename in self._image_dir.iterdir():
                files.append(filename.name)
                if filename.suffix == ".dist-info":
                    self.__dist_info = filename
                    break
            else:
                msg = "no .dist-info at {}, has {}".format(self._image_dir, ", ".join(files))  # pragma: no cover
                raise RuntimeError(msg)  # pragma: no cover
        return self.__dist_info

    @abstractmethod
    def _fix_records(self, extra_record_data):
        """Update the RECORD file with the extra entries created outside the wheel."""
        raise NotImplementedError

    @property
    def _console_scripts(self):
        """Parse console_scripts from entry_points.txt; version-suffixed names are collapsed."""
        if self._extracted is False:
            return None  # pragma: no cover
        if self._console_entry_points is None:
            self._console_entry_points = {}
            entry_points = self._dist_info / "entry_points.txt"
            if entry_points.exists():
                parser = ConfigParser.ConfigParser()
                with entry_points.open() as file_handler:
                    # readfp was renamed to read_file in Python 3
                    reader = getattr(parser, "read_file" if PY3 else "readfp")
                    reader(file_handler)
                if "console_scripts" in parser.sections():
                    for name, value in parser.items("console_scripts"):
                        # strip trailing version qualifiers, e.g. "pip3.8" -> "pip"
                        match = re.match(r"(.*?)-?\d\.?\d*", name)
                        if match:
                            name = match.groups(1)[0]
                        self._console_entry_points[name] = value
        return self._console_entry_points

    def _create_console_entry_point(self, name, value, to_folder, version_info):
        """Generate the console script(s) for one entry point; return created paths."""
        result = []
        maker = ScriptMakerCustom(to_folder, version_info, self._creator.exe, name)
        specification = "{} = {}".format(name, value)
        new_files = maker.make(specification)
        result.extend(Path(i) for i in new_files)
        return result

    def _uninstall_previous_version(self):
        """Remove any already-installed copy of this distribution from the environment."""
        dist_name = self._dist_info.stem.split("-")[0]
        in_folders = chain.from_iterable([i.iterdir() for i in {self._creator.purelib, self._creator.platlib}])
        paths = (p for p in in_folders if p.stem.split("-")[0] == dist_name and p.suffix == ".dist-info" and p.is_dir())
        existing_dist = next(paths, None)
        if existing_dist is not None:
            self._uninstall_dist(existing_dist)

    @staticmethod
    def _uninstall_dist(dist):
        """Delete a distribution's files based on its top_level.txt and RECORD metadata."""
        dist_base = dist.parent
        logging.debug("uninstall existing distribution %s from %s", dist.stem, dist_base)

        top_txt = dist / "top_level.txt"  # add top level packages at folder level
        paths = {dist.parent / i.strip() for i in top_txt.read_text().splitlines()} if top_txt.exists() else set()
        paths.add(dist)  # add the dist-info folder itself

        base_dirs, record = paths.copy(), dist / "RECORD"  # collect entries in record that we did not register yet
        for name in (i.split(",")[0] for i in record.read_text().splitlines()) if record.exists() else ():
            path = dist_base / name
            if not any(p in base_dirs for p in path.parents):  # only add if not already added as a base dir
                paths.add(path)

        for path in sorted(paths):  # actually remove stuff in a stable order
            if path.exists():
                if path.is_dir() and not path.is_symlink():
                    safe_delete(path)
                else:
                    path.unlink()

    def clear(self):
        """Delete the install image folder, if present."""
        if self._image_dir.exists():
            safe_delete(self._image_dir)

    def has_image(self):
        """Return True when the image folder exists and contains at least one entry."""
        return self._image_dir.exists() and next(self._image_dir.iterdir()) is not None
|
||||
|
||||
|
||||
class ScriptMakerCustom(ScriptMaker):
    """distlib ScriptMaker tuned to emit name, nameX and nameX.Y console script variants."""

    def __init__(self, target_dir, version_info, executable, name):
        super(ScriptMakerCustom, self).__init__(None, str(target_dir))
        self.clobber = True  # overwrite
        self.set_mode = True  # ensure they are executable
        self.executable = enquote_executable(str(executable))
        self.version_info = version_info.major, version_info.minor
        self.variants = {"", "X", "X.Y"}
        self._name = name

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        # also emit the "name<major>.<minor>" variant alongside distlib's defaults
        names.add("{}{}.{}".format(self._name, *self.version_info))
        super(ScriptMakerCustom, self)._write_script(names, shebang, script_bytes, filenames, ext)
|
||||
|
|
@ -0,0 +1,35 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
|
||||
from virtualenv.util.path import Path, copy
|
||||
from virtualenv.util.six import ensure_text
|
||||
|
||||
from .base import PipInstall
|
||||
|
||||
|
||||
class CopyPipInstall(PipInstall):
    """Install the wheel image by copying its files into the environment."""

    def _sync(self, src, dst):
        copy(src, dst)

    def _generate_new_files(self):
        # create the pyc files
        new_files = super(CopyPipInstall, self)._generate_new_files()
        new_files.update(self._cache_files())
        return new_files

    def _cache_files(self):
        """Yield expected bytecode-cache paths (.pyc files and __pycache__ dirs) for the image."""
        version = self._creator.interpreter.version_info
        py_c_ext = ".{}-{}{}.pyc".format(self._creator.interpreter.implementation.lower(), version.major, version.minor)
        for root, dirs, files in os.walk(ensure_text(str(self._image_dir)), topdown=True):
            root_path = Path(root)
            for name in files:
                if name.endswith(".py"):
                    yield root_path / "{}{}".format(name[:-3], py_c_ext)
            for name in dirs:
                yield root_path / name / "__pycache__"

    def _fix_records(self, new_files):
        # append the extra entries to the existing RECORD file
        extra_record_data_str = self._records_text(new_files)
        with open(ensure_text(str(self._dist_info / "RECORD")), "ab") as file_handler:
            file_handler.write(extra_record_data_str.encode("utf-8"))
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
from stat import S_IREAD, S_IRGRP, S_IROTH
|
||||
|
||||
from virtualenv.util.path import safe_delete, set_tree
|
||||
from virtualenv.util.six import ensure_text
|
||||
from virtualenv.util.subprocess import Popen
|
||||
|
||||
from .base import PipInstall
|
||||
|
||||
|
||||
class SymlinkPipInstall(PipInstall):
    """Install the wheel image by symlinking its top-level entries into the environment."""

    def _sync(self, src, dst):
        src_str = ensure_text(str(src))
        dest_str = ensure_text(str(dst))
        os.symlink(src_str, dest_str)

    def _generate_new_files(self):
        # create the pyc files, as the build image will be R/O
        process = Popen(
            [ensure_text(str(self._creator.exe)), "-m", "compileall", ensure_text(str(self._image_dir))],
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
        )
        process.communicate()
        # the root pyc is shared, so we'll not symlink that - but still add the pyc files to the RECORD for close
        root_py_cache = self._image_dir / "__pycache__"
        new_files = set()
        if root_py_cache.exists():
            new_files.update(root_py_cache.iterdir())
            new_files.add(root_py_cache)
            safe_delete(root_py_cache)
        core_new_files = super(SymlinkPipInstall, self)._generate_new_files()
        # remove files that are within the image folder deeper than one level (as these will be not linked directly)
        for file in core_new_files:
            try:
                rel = file.relative_to(self._image_dir)
                if len(rel.parts) > 1:
                    continue
            except ValueError:
                # file lives outside the image dir - keep it
                pass
            new_files.add(file)
        return new_files

    def _fix_records(self, new_files):
        # rewrite RECORD from scratch with every top-level image entry plus the new files
        new_files.update(i for i in self._image_dir.iterdir())
        extra_record_data_str = self._records_text(sorted(new_files, key=str))
        with open(ensure_text(str(self._dist_info / "RECORD")), "wb") as file_handler:
            file_handler.write(extra_record_data_str.encode("utf-8"))

    def build_image(self):
        super(SymlinkPipInstall, self).build_image()
        # protect the image by making it read only
        set_tree(self._image_dir, S_IREAD | S_IRGRP | S_IROTH)

    def clear(self):
        if self._image_dir.exists():
            safe_delete(self._image_dir)
        super(SymlinkPipInstall, self).clear()
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
"""Bootstrap"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import traceback
|
||||
from contextlib import contextmanager
|
||||
from subprocess import CalledProcessError
|
||||
from threading import Lock, Thread
|
||||
|
||||
from virtualenv.info import fs_supports_symlink
|
||||
from virtualenv.seed.embed.base_embed import BaseEmbed
|
||||
from virtualenv.seed.wheels import get_wheel
|
||||
from virtualenv.util.path import Path
|
||||
|
||||
from .pip_install.copy import CopyPipInstall
|
||||
from .pip_install.symlink import SymlinkPipInstall
|
||||
|
||||
|
||||
class FromAppData(BaseEmbed):
    """Seed the environment from wheel install images cached in the application data folder."""

    def __init__(self, options):
        super(FromAppData, self).__init__(options)
        self.symlinks = options.symlink_app_data

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        super(FromAppData, cls).add_parser_arguments(parser, interpreter, app_data)
        # symlinking only offered when app data is persistent and the fs supports symlinks
        can_symlink = app_data.transient is False and fs_supports_symlink()
        parser.add_argument(
            "--symlink-app-data",
            dest="symlink_app_data",
            action="store_true" if can_symlink else "store_false",
            help="{} symlink the python packages from the app-data folder (requires seed pip>=19.3)".format(
                "" if can_symlink else "not supported - ",
            ),
            default=False,
        )

    def run(self, creator):
        """Acquire the seed wheels, then install each one concurrently via its install image."""
        if not self.enabled:
            return
        with self._get_seed_wheels(creator) as name_to_whl:
            pip_version = name_to_whl["pip"].version_tuple if "pip" in name_to_whl else None
            installer_class = self.installer_class(pip_version)
            exceptions = {}

            def _install(name, wheel):
                # worker run per distribution; failures are collected rather than raised
                try:
                    logging.debug("install %s from wheel %s via %s", name, wheel, installer_class.__name__)
                    key = Path(installer_class.__name__) / wheel.path.stem
                    wheel_img = self.app_data.wheel_image(creator.interpreter.version_release_str, key)
                    installer = installer_class(wheel.path, creator, wheel_img)
                    parent = self.app_data.lock / wheel_img.parent
                    # serialize image building across processes sharing the app data folder
                    with parent.non_reentrant_lock_for_key(wheel_img.name):
                        if not installer.has_image():
                            installer.build_image()
                    installer.install(creator.interpreter.version_info)
                except Exception:  # noqa
                    exceptions[name] = sys.exc_info()

            threads = list(Thread(target=_install, args=(n, w)) for n, w in name_to_whl.items())
            for thread in threads:
                thread.start()
            for thread in threads:
                thread.join()
            if exceptions:
                messages = ["failed to build image {} because:".format(", ".join(exceptions.keys()))]
                for value in exceptions.values():
                    exc_type, exc_value, exc_traceback = value
                    messages.append("".join(traceback.format_exception(exc_type, exc_value, exc_traceback)))
                raise RuntimeError("\n".join(messages))

    @contextmanager
    def _get_seed_wheels(self, creator):
        """Resolve all requested seed wheels in parallel; yield distribution -> wheel."""
        name_to_whl, lock, fail = {}, Lock(), {}

        def _get(distribution, version):
            for_py_version = creator.interpreter.version_release_str
            failure, result = None, None
            # fallback to download in case the exact version is not available
            for download in [True] if self.download else [False, True]:
                failure = None
                try:
                    result = get_wheel(
                        distribution=distribution,
                        version=version,
                        for_py_version=for_py_version,
                        search_dirs=self.extra_search_dir,
                        download=download,
                        app_data=self.app_data,
                        do_periodic_update=self.periodic_update,
                        env=self.env,
                    )
                    if result is not None:
                        break
                except Exception as exception:  # noqa
                    logging.exception("fail")
                    failure = exception
            if failure:
                if isinstance(failure, CalledProcessError):
                    msg = "failed to download {}".format(distribution)
                    if version is not None:
                        msg += " version {}".format(version)
                    msg += ", pip download exit code {}".format(failure.returncode)
                    output = failure.output if sys.version_info < (3, 5) else (failure.output + failure.stderr)
                    if output:
                        msg += "\n"
                        msg += output
                else:
                    msg = repr(failure)
                logging.error(msg)
                with lock:
                    fail[distribution] = version
            else:
                with lock:
                    name_to_whl[distribution] = result

        threads = list(
            Thread(target=_get, args=(distribution, version))
            for distribution, version in self.distribution_to_versions().items()
        )
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()
        if fail:
            raise RuntimeError("seed failed due to failing to download wheels {}".format(", ".join(fail.keys())))
        yield name_to_whl

    def installer_class(self, pip_version_tuple):
        """Pick symlink-based installation when enabled and supported, else copy-based."""
        if self.symlinks and pip_version_tuple:
            # symlink support requires pip 19.3+
            if pip_version_tuple >= (19, 3):
                return SymlinkPipInstall
        return CopyPipInstall

    def __unicode__(self):
        base = super(FromAppData, self).__unicode__()
        msg = ", via={}, app_data_dir={}".format("symlink" if self.symlinks else "copy", self.app_data)
        # splice the extra info just before the closing parenthesis
        return base[:-1] + msg + base[-1]
|
||||
40
.venv/lib/python3.8/site-packages/virtualenv/seed/seeder.py
Normal file
40
.venv/lib/python3.8/site-packages/virtualenv/seed/seeder.py
Normal file
|
|
@ -0,0 +1,40 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from abc import ABCMeta, abstractmethod
|
||||
|
||||
from six import add_metaclass
|
||||
|
||||
|
||||
@add_metaclass(ABCMeta)
class Seeder(object):
    """A seeder will install some seed packages into a virtual environment."""

    # noinspection PyUnusedLocal
    def __init__(self, options, enabled):
        """
        :param options: the parsed options as defined within :meth:`add_parser_arguments`
        :param enabled: a flag whether the seeder is enabled or not
        """
        self.enabled = enabled
        self.env = options.env

    @classmethod
    def add_parser_arguments(cls, parser, interpreter, app_data):
        """
        Add CLI arguments for this seed mechanisms.

        :param parser: the CLI parser
        :param app_data: the application data folder
        :param interpreter: the interpreter this virtual environment is based of
        """
        raise NotImplementedError

    @abstractmethod
    def run(self, creator):
        """Perform the seed operation.

        :param creator: the creator (based of :class:`virtualenv.create.creator.Creator`) we used to create this \
        virtual environment
        """
        raise NotImplementedError
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from .acquire import get_wheel, pip_wheel_env_run
|
||||
from .util import Version, Wheel
|
||||
|
||||
__all__ = (
|
||||
"get_wheel",
|
||||
"pip_wheel_env_run",
|
||||
"Version",
|
||||
"Wheel",
|
||||
)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,128 @@
|
|||
"""Bootstrap"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from operator import eq, lt
|
||||
|
||||
from virtualenv.util.path import Path
|
||||
from virtualenv.util.six import ensure_str
|
||||
from virtualenv.util.subprocess import Popen, subprocess
|
||||
|
||||
from .bundle import from_bundle
|
||||
from .periodic_update import add_wheel_to_update_log
|
||||
from .util import Version, Wheel, discover_wheels
|
||||
|
||||
|
||||
def get_wheel(distribution, version, for_py_version, search_dirs, download, app_data, do_periodic_update, env):
    """
    Get a wheel with the given distribution-version-for_py_version trio, by using the extra search dir + download
    """
    # not all wheels are compatible with all python versions, so we need to py version qualify it
    wheel = None

    if not download or version != Version.bundle:
        # 1. acquire from bundle
        wheel = from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env)

    if download and wheel is None and version != Version.embed:
        # 2. download from the internet
        wheel = download_wheel(
            distribution=distribution,
            version_spec=Version.as_version_spec(version),
            for_py_version=for_py_version,
            search_dirs=search_dirs,
            app_data=app_data,
            to_folder=app_data.house,
            env=env,
        )
        # remember the download so the periodic updater can take it into account
        if wheel is not None and app_data.can_update:
            add_wheel_to_update_log(wheel, for_py_version, app_data)

    return wheel
|
||||
|
||||
|
||||
def download_wheel(distribution, version_spec, for_py_version, search_dirs, app_data, to_folder, env):
    """Download a wheel matching *version_spec* for *for_py_version* into *to_folder* via pip.

    :raises subprocess.CalledProcessError: when the pip download sub-process fails
    """
    to_download = "{}{}".format(distribution, version_spec or "")
    logging.debug("download wheel %s %s to %s", to_download, for_py_version, to_folder)
    cmd = [
        sys.executable,
        "-m",
        "pip",
        "download",
        "--progress-bar",
        "off",
        "--disable-pip-version-check",
        "--only-binary=:all:",
        "--no-deps",
        "--python-version",
        for_py_version,
        "-d",
        str(to_folder),
        to_download,
    ]
    # pip has no interface in python - must be a new sub-process
    env = pip_wheel_env_run(search_dirs, app_data, env)
    process = Popen(cmd, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
    out, err = process.communicate()
    if process.returncode != 0:
        kwargs = {"output": out}
        # CalledProcessError grew a stderr keyword only in Python 3.5
        if sys.version_info < (3, 5):
            kwargs["output"] += err
        else:
            kwargs["stderr"] = err
        raise subprocess.CalledProcessError(process.returncode, cmd, **kwargs)
    result = _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out)
    logging.debug("downloaded wheel %s", result.name)
    return result
|
||||
|
||||
|
||||
def _find_downloaded_wheel(distribution, version_spec, for_py_version, to_folder, out):
    """Locate the wheel pip just downloaded by parsing its stdout; scan the folder as fallback."""
    markers = ("Saved ", "File was already downloaded ")
    for raw_line in out.splitlines():
        stripped = raw_line.lstrip()
        matched = next((m for m in markers if stripped.startswith(m)), None)
        if matched is not None:
            return Wheel(Path(stripped[len(matched) :]).absolute())
    # if for some reason the output does not match fallback to latest version with that spec
    return find_compatible_in_house(distribution, version_spec, for_py_version, to_folder)
|
||||
|
||||
|
||||
def find_compatible_in_house(distribution, version_spec, for_py_version, in_folder):
    """Pick the first wheel in *in_folder* satisfying *version_spec* ("<x" or "==x"), or None.

    :raises ValueError: when *version_spec* uses an unsupported operator
    """
    wheels = discover_wheels(in_folder, distribution, None, for_py_version)
    start, end = 0, len(wheels)
    if version_spec is not None:
        if version_spec.startswith("<"):
            from_pos, op = 1, lt
        elif version_spec.startswith("=="):
            from_pos, op = 2, eq
        else:
            raise ValueError(version_spec)
        version = Wheel.as_version_tuple(version_spec[from_pos:])
        # index of the first wheel whose version satisfies the comparison, else past-the-end
        start = next((at for at, w in enumerate(wheels) if op(w.version_tuple, version)), len(wheels))

    return None if start == end else wheels[start]
|
||||
|
||||
|
||||
def pip_wheel_env_run(search_dirs, app_data, env):
    """Build the environment for running pip as a module off the embedded pip wheel.

    :raises RuntimeError: when no embedded pip wheel can be located
    """
    for_py_version = "{}.{}".format(*sys.version_info[0:2])
    env = env.copy()
    env.update(
        {
            ensure_str(k): str(v)  # python 2 requires these to be string only (non-unicode)
            for k, v in {"PIP_USE_WHEEL": "1", "PIP_USER": "0", "PIP_NO_INPUT": "1"}.items()
        },
    )
    wheel = get_wheel(
        distribution="pip",
        version=None,
        for_py_version=for_py_version,
        search_dirs=search_dirs,
        download=False,
        app_data=app_data,
        do_periodic_update=False,
        env=env,
    )
    if wheel is None:
        raise RuntimeError("could not find the embedded pip")
    # run pip directly off its wheel - a pip wheel is importable as-is
    env[str("PYTHONPATH")] = str(wheel.path)
    return env
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from ..wheels.embed import get_embed_wheel
|
||||
from .periodic_update import periodic_update
|
||||
from .util import Version, Wheel, discover_wheels
|
||||
|
||||
|
||||
def from_bundle(distribution, version, for_py_version, search_dirs, app_data, do_periodic_update, env):
    """
    Load the bundled wheel to a cache directory.
    """
    # 1. acquire the embedded wheel shipped with virtualenv
    of_version = Version.of_version(version)
    wheel = load_embed_wheel(app_data, distribution, for_py_version, of_version)

    if version != Version.embed:
        # 2. check if we have upgraded embed
        if app_data.can_update:
            wheel = periodic_update(
                distribution, of_version, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env
            )

        # 3. acquire from extra search dir
        found_wheel = from_dir(distribution, of_version, for_py_version, search_dirs)
        if found_wheel is not None:
            # prefer the search-dir wheel when it is newer than what we have so far
            if wheel is None:
                wheel = found_wheel
            elif found_wheel.version_tuple > wheel.version_tuple:
                wheel = found_wheel
    return wheel
|
||||
|
||||
|
||||
def load_embed_wheel(app_data, distribution, for_py_version, version):
    """Return the embedded wheel extracted into *app_data*, or ``None`` when absent or version-mismatched."""
    wheel = get_embed_wheel(distribution, for_py_version)
    if wheel is None:
        return None
    if version is not None and version != wheel.version:
        return None  # if the requested version does not match the embedded one, ignore it
    # extract the wheel into the application data house so later runs can reuse it
    with app_data.ensure_extracted(wheel.path, lambda: app_data.house) as wheel_path:
        return Wheel(wheel_path)
|
||||
|
||||
|
||||
def from_dir(distribution, version, for_py_version, directories):
    """
    Load a compatible wheel from a given folder.
    """
    for directory in directories:
        # discover_wheels sorts newest-first, so the first hit is the best match
        candidates = discover_wheels(directory, distribution, version, for_py_version)
        if candidates:
            return candidates[0]
    return None
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from virtualenv.seed.wheels.util import Wheel
|
||||
from virtualenv.util.path import Path
|
||||
|
||||
# Directory holding the wheel files that ship inside the virtualenv package.
BUNDLE_FOLDER = Path(__file__).absolute().parent
# Mapping of python "major.minor" -> {distribution name -> bundled wheel filename}.
# Older pythons pin older releases because newer pip/setuptools dropped their support.
BUNDLE_SUPPORT = {
    "3.11": {
        "pip": "pip-22.0.4-py3-none-any.whl",
        "setuptools": "setuptools-61.0.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.10": {
        "pip": "pip-22.0.4-py3-none-any.whl",
        "setuptools": "setuptools-61.0.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.9": {
        "pip": "pip-22.0.4-py3-none-any.whl",
        "setuptools": "setuptools-61.0.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.8": {
        "pip": "pip-22.0.4-py3-none-any.whl",
        "setuptools": "setuptools-61.0.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.7": {
        "pip": "pip-22.0.4-py3-none-any.whl",
        "setuptools": "setuptools-61.0.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.6": {
        "pip": "pip-21.3.1-py3-none-any.whl",
        "setuptools": "setuptools-59.6.0-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "3.5": {
        "pip": "pip-20.3.4-py2.py3-none-any.whl",
        "setuptools": "setuptools-50.3.2-py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
    "2.7": {
        "pip": "pip-20.3.4-py2.py3-none-any.whl",
        "setuptools": "setuptools-44.1.1-py2.py3-none-any.whl",
        "wheel": "wheel-0.37.1-py2.py3-none-any.whl",
    },
}
# Highest python version with a dedicated entry; used as the fallback for unknown versions.
MAX = "3.11"
|
||||
|
||||
|
||||
def get_embed_wheel(distribution, for_py_version):
    """Return the bundled wheel for *distribution* on *for_py_version* (falls back to the MAX python entry)."""
    support = BUNDLE_SUPPORT.get(for_py_version, {}) or BUNDLE_SUPPORT[MAX]
    return Wheel.from_path(BUNDLE_FOLDER / support.get(distribution))
|
||||
|
||||
|
||||
# Explicit public API of the embed package.
__all__ = (
    "get_embed_wheel",
    "BUNDLE_SUPPORT",
    "MAX",
    "BUNDLE_FOLDER",
)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -0,0 +1,428 @@
|
|||
"""
|
||||
Periodically update bundled versions.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import ssl
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from itertools import groupby
|
||||
from shutil import copy2
|
||||
from textwrap import dedent
|
||||
from threading import Thread
|
||||
|
||||
from six.moves.urllib.error import URLError
|
||||
from six.moves.urllib.request import urlopen
|
||||
|
||||
from virtualenv.app_data import AppDataDiskFolder
|
||||
from virtualenv.info import PY2
|
||||
from virtualenv.util.path import Path
|
||||
from virtualenv.util.subprocess import CREATE_NO_WINDOW, Popen
|
||||
|
||||
from ..wheels.embed import BUNDLE_SUPPORT
|
||||
from ..wheels.util import Wheel
|
||||
|
||||
if PY2:
    # on Python 2 datetime.strptime throws the error below if the import did not trigger on main thread
    # "Failed to import _strptime because the import lock is held by another thread"
    # so eagerly import it here (best effort) before any worker thread calls strptime
    try:
        import _strptime  # noqa
    except ImportError:  # pragma: no cov
        pass  # pragma: no cov
|
||||
|
||||
|
||||
GRACE_PERIOD_CI = timedelta(hours=1)  # prevent version switch in the middle of a CI run
# a periodically-found release must age this long before it is adopted (early patches assumed buggy)
GRACE_PERIOD_MINOR = timedelta(days=28)
# how often the background update check is due
UPDATE_PERIOD = timedelta(days=14)
# retry delay after an update attempt that started but never completed
UPDATE_ABORTED_DELAY = timedelta(hours=1)
||||
|
||||
|
||||
def periodic_update(distribution, of_version, for_py_version, wheel, search_dirs, app_data, do_periodic_update, env):
    """
    Return the wheel to use for *distribution*, consulting the periodic-update log kept in *app_data*.

    :param distribution: distribution name (e.g. ``"pip"``)
    :param of_version: exact version requested, or ``None`` for "newest acceptable"
    :param for_py_version: target python as ``"major.minor"``
    :param wheel: the wheel resolved so far (may be ``None``)
    :param search_dirs: extra directories forwarded to a triggered background update
    :param app_data: application data folder holding the wheelhouse and the update log
    :param do_periodic_update: when true, may first spawn a background update process
    :param env: environment variables forwarded to a triggered update
    :return: the (possibly newer) wheel to use
    """
    if do_periodic_update:
        handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env)

    now = datetime.now()

    def _update_wheel(ver):
        # the updated wheel file is cached in the app data house next to the update log
        updated_wheel = Wheel(app_data.house / ver.filename)
        # NOTE(review): updated_wheel is always truthy here, so the "periodically " prefix
        # is always emitted - confirm whether this was meant to test u_log.periodic instead
        logging.debug("using %supdated wheel %s", "periodically " if updated_wheel else "", updated_wheel)
        return updated_wheel

    u_log = UpdateLog.from_app_data(app_data, distribution, for_py_version)
    if of_version is None:
        # entries are grouped by (major, minor) of their wheel version
        for _, group in groupby(u_log.versions, key=lambda v: v.wheel.version_tuple[0:2]):
            # use only latest patch version per minor, earlier assumed to be buggy
            all_patches = list(group)
            # if any patch of this minor is already usable, siblings skip the minor grace period
            ignore_grace_period_minor = any(version for version in all_patches if version.use(now))
            for version in all_patches:
                if wheel is not None and Path(version.filename).name == wheel.name:
                    # the wheel we already have is the newest acceptable one - keep it
                    return wheel
                if version.use(now, ignore_grace_period_minor):
                    return _update_wheel(version)
    else:
        # exact version requested - take it from the log if recorded
        for version in u_log.versions:
            if version.wheel.version == of_version:
                return _update_wheel(version)

    return wheel
|
||||
|
||||
|
||||
def handle_auto_update(distribution, for_py_version, wheel, search_dirs, app_data, env):
    """Spawn a background periodic update for *distribution* when the update log says one is due."""
    embed_update_log = app_data.embed_update_log(distribution, for_py_version)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    if not u_log.needs_update:
        return
    # record the attempt before spawning, so concurrent runs do not trigger twice
    u_log.periodic = True
    u_log.started = datetime.now()
    embed_update_log.write(u_log.to_dict())
    trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, periodic=True, env=env)
|
||||
|
||||
|
||||
def add_wheel_to_update_log(wheel, for_py_version, app_data):
    """Record a newly obtained *wheel* in the per-distribution update log (no-op when already listed)."""
    embed_update_log = app_data.embed_update_log(wheel.distribution, for_py_version)
    logging.debug("adding %s information to %s", wheel.name, embed_update_log.file)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    if any(entry.filename == wheel.name for entry in u_log.versions):
        logging.warning("%s already present in %s", wheel.name, embed_update_log.file)
        return
    # we don't need a release date for sources other than "periodic"
    entry = NewVersion(wheel.name, datetime.now(), None, "download")
    u_log.versions.append(entry)  # always write at the end for proper updates
    embed_update_log.write(u_log.to_dict())
|
||||
|
||||
|
||||
# Canonical timestamp format used to persist datetimes in the JSON update log.
DATETIME_FMT = "%Y-%m-%dT%H:%M:%S.%fZ"


def dump_datetime(value):
    """Serialize *value* to the log timestamp format; ``None`` passes through unchanged."""
    if value is None:
        return None
    return value.strftime(DATETIME_FMT)


def load_datetime(value):
    """Parse a timestamp produced by :func:`dump_datetime`; ``None`` passes through unchanged."""
    if value is None:
        return None
    return datetime.strptime(value, DATETIME_FMT)
|
||||
|
||||
|
||||
class NewVersion(object):
    """A single wheel release known to the update machinery, as persisted in the update log."""

    def __init__(self, filename, found_date, release_date, source):
        self.filename = filename  # wheel file name, e.g. "pip-22.0.4-py3-none-any.whl"
        self.found_date = found_date  # when this version was first seen locally
        self.release_date = release_date  # upstream release date (may be None when the PyPI lookup failed)
        self.source = source  # how it was obtained: "periodic", "manual" or "download"

    @classmethod
    def from_dict(cls, dictionary):
        """Rebuild an instance from its :meth:`to_dict` representation."""
        return cls(
            filename=dictionary["filename"],
            found_date=load_datetime(dictionary["found_date"]),
            release_date=load_datetime(dictionary["release_date"]),
            source=dictionary["source"],
        )

    def to_dict(self):
        """Return a JSON-serializable representation of this entry."""
        return {
            "filename": self.filename,
            "release_date": dump_datetime(self.release_date),
            "found_date": dump_datetime(self.found_date),
            "source": self.source,
        }

    def use(self, now, ignore_grace_period_minor=False, ignore_grace_period_ci=False):
        """Return True when this version may be used at time *now*, honoring the grace periods."""
        if self.source == "manual":
            return True  # explicitly requested versions are always trusted
        elif self.source == "periodic":
            if self.found_date < now - GRACE_PERIOD_CI or ignore_grace_period_ci:
                if not ignore_grace_period_minor:
                    compare_from = self.release_date or self.found_date
                    return now - compare_from >= GRACE_PERIOD_MINOR
                return True
        return False

    def __repr__(self):
        # bug fix: the closing parenthesis used to come right after filename,
        # producing the malformed "NewVersion(filename=x), found_date=..., source=...)"
        return "{}(filename={}, found_date={}, release_date={}, source={})".format(
            self.__class__.__name__,
            self.filename,
            self.found_date,
            self.release_date,
            self.source,
        )

    def __eq__(self, other):
        return type(self) == type(other) and all(
            getattr(self, k) == getattr(other, k) for k in ["filename", "release_date", "found_date", "source"]
        )

    def __ne__(self, other):
        return not (self == other)

    @property
    def wheel(self):
        # parse distribution/version information out of the recorded filename
        return Wheel(Path(self.filename))
|
||||
|
||||
|
||||
class UpdateLog(object):
    """Persistent record of periodic-update activity for one distribution/python combination."""

    def __init__(self, started, completed, versions, periodic):
        self.started = started  # when the last update attempt began (None if never)
        self.completed = completed  # when the last attempt finished successfully (None if never)
        self.versions = versions  # list of NewVersion entries known so far
        self.periodic = periodic  # whether the last attempt was triggered periodically

    @classmethod
    def from_dict(cls, dictionary):
        """Build an instance from the raw JSON dictionary (``None`` means an empty log)."""
        data = {} if dictionary is None else dictionary
        entries = [NewVersion.from_dict(raw) for raw in data.get("versions", [])]
        return cls(
            load_datetime(data.get("started")),
            load_datetime(data.get("completed")),
            entries,
            data.get("periodic"),
        )

    @classmethod
    def from_app_data(cls, app_data, distribution, for_py_version):
        """Load the update log stored in *app_data* for the given distribution."""
        return cls.from_dict(app_data.embed_update_log(distribution, for_py_version).read())

    def to_dict(self):
        """Return a JSON-serializable representation of the log."""
        return {
            "started": dump_datetime(self.started),
            "completed": dump_datetime(self.completed),
            "periodic": self.periodic,
            "versions": [entry.to_dict() for entry in self.versions],
        }

    @property
    def needs_update(self):
        """True when a new background update attempt should be made now."""
        now = datetime.now()
        if self.completed is not None and now - self.completed <= UPDATE_PERIOD:
            return False  # a recent successful run means nothing to do yet
        return self._check_start(now)

    def _check_start(self, now):
        # a started-but-unfinished attempt blocks retries until UPDATE_ABORTED_DELAY elapses
        return self.started is None or now - self.started > UPDATE_ABORTED_DELAY
|
||||
|
||||
|
||||
def trigger_update(distribution, for_py_version, wheel, search_dirs, app_data, env, periodic):
    """
    Spawn a child process that runs :func:`do_update` for *distribution*.

    The child re-invokes the current interpreter with an inline script so the update
    can outlive (and not block) the virtualenv creation that triggered it.
    """
    wheel_path = None if wheel is None else str(wheel.path)
    cmd = [
        sys.executable,
        "-c",
        dedent(
            """
        from virtualenv.report import setup_report, MAX_LEVEL
        from virtualenv.seed.wheels.periodic_update import do_update
        setup_report(MAX_LEVEL, show_pid=True)
        do_update({!r}, {!r}, {!r}, {!r}, {!r}, {!r})
        """,
        )
        .strip()
        .format(distribution, for_py_version, wheel_path, str(app_data), [str(p) for p in search_dirs], periodic),
    ]
    # escape hatch: run the update inline (blocking, output shown) when this env var is set
    debug = env.get(str("_VIRTUALENV_PERIODIC_UPDATE_INLINE")) == str("1")
    pipe = None if debug else subprocess.PIPE
    kwargs = {"stdout": pipe, "stderr": pipe}
    if not debug and sys.platform == "win32":
        # avoid flashing a console window for the background child on Windows
        kwargs["creationflags"] = CREATE_NO_WINDOW
    process = Popen(cmd, **kwargs)
    logging.info(
        "triggered periodic upgrade of %s%s (for python %s) via background process having PID %d",
        distribution,
        "" if wheel is None else "=={}".format(wheel.version),
        for_py_version,
        process.pid,
    )
    if debug:
        # only in inline/debug mode do we wait for the child; normally it is
        # deliberately left running so it stays a background process
        process.communicate()
|
||||
|
||||
|
||||
def do_update(distribution, for_py_version, embed_filename, app_data, search_dirs, periodic):
    """
    Run one update pass for *distribution* on *for_py_version*.

    Thin wrapper around :func:`_run_do_update` that always logs the outcome,
    even when the update raises.

    :return: the list of newly discovered versions (``None`` when the run failed)
    """
    versions = None
    try:
        versions = _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs)
    finally:
        logging.debug("done %s %s with %s", distribution, for_py_version, versions)
    return versions
|
||||
|
||||
|
||||
def _run_do_update(app_data, distribution, embed_filename, for_py_version, periodic, search_dirs):
    """
    Download newer releases of *distribution* into the wheelhouse and rewrite the update log.

    Walks releases newest-first (by repeatedly downloading with a ``<last_version`` spec)
    until it finds one that is immediately usable or reaches the embedded version, then
    persists the merged version list.

    :return: the list of :class:`NewVersion` entries discovered during this run
    """
    from virtualenv.seed.wheels import acquire

    # normalize arguments - this may run in a fresh subprocess where everything arrives as plain strings
    wheel_filename = None if embed_filename is None else Path(embed_filename)
    embed_version = None if wheel_filename is None else Wheel(wheel_filename).version_tuple
    app_data = AppDataDiskFolder(app_data) if isinstance(app_data, str) else app_data
    search_dirs = [Path(p) if isinstance(p, str) else p for p in search_dirs]
    wheelhouse = app_data.house
    embed_update_log = app_data.embed_update_log(distribution, for_py_version)
    u_log = UpdateLog.from_dict(embed_update_log.read())
    now = datetime.now()

    # split the log into entries produced by the updater itself vs entries from other sources
    update_versions, other_versions = [], []
    for version in u_log.versions:
        if version.source in {"periodic", "manual"}:
            update_versions.append(version)
        else:
            other_versions.append(version)

    if periodic:
        source = "periodic"
    else:
        source = "manual"
        # mark the most recent one as source "manual"
        if update_versions:
            update_versions[0].source = source

    # make sure the embedded wheel file itself is available in the wheelhouse
    if wheel_filename is not None:
        dest = wheelhouse / wheel_filename.name
        if not dest.exists():
            copy2(str(wheel_filename), str(wheelhouse))
    last, last_version, versions, filenames = None, None, [], set()
    # keep downloading strictly older releases until one is immediately usable
    while last is None or not last.use(now, ignore_grace_period_ci=True):
        download_time = datetime.now()
        dest = acquire.download_wheel(
            distribution=distribution,
            version_spec=None if last_version is None else "<{}".format(last_version),
            for_py_version=for_py_version,
            search_dirs=search_dirs,
            app_data=app_data,
            to_folder=wheelhouse,
            env=os.environ,
        )
        # stop when nothing was downloaded, or we hit a version the log already has
        if dest is None or (update_versions and update_versions[0].filename == dest.name):
            break
        release_date = release_date_for_wheel_path(dest.path)
        last = NewVersion(filename=dest.path.name, release_date=release_date, found_date=download_time, source=source)
        logging.info("detected %s in %s", last, datetime.now() - download_time)
        versions.append(last)
        filenames.add(last.filename)
        last_wheel = last.wheel
        last_version = last_wheel.version
        if embed_version is not None:
            if embed_version >= last_wheel.version_tuple:  # stop download if we reach the embed version
                break
    u_log.periodic = periodic
    if not u_log.periodic:
        u_log.started = now
    # update other_versions by removing version we just found
    other_versions = [version for version in other_versions if version.filename not in filenames]
    u_log.versions = versions + update_versions + other_versions
    u_log.completed = datetime.now()
    embed_update_log.write(u_log.to_dict())
    return versions
|
||||
|
||||
|
||||
def release_date_for_wheel_path(dest):
    """Return the upstream release date for the wheel at *dest*, or ``None`` when it cannot be determined."""
    wheel = Wheel(dest)
    # the most accurate is to ask PyPI - e.g. https://pypi.org/pypi/pip/json,
    # see https://warehouse.pypa.io/api-reference/json/ for more details
    content = _pypi_get_distribution_info_cached(wheel.distribution)
    if content is None:
        return None
    try:
        upload_time = content["releases"][wheel.version][0]["upload_time"]
        return datetime.strptime(upload_time, "%Y-%m-%dT%H:%M:%S")
    except Exception as exception:
        logging.error("could not load release date %s because %r", content, exception)
        return None
|
||||
|
||||
|
||||
def _request_context():
|
||||
yield None
|
||||
# fallback to non verified HTTPS (the information we request is not sensitive, so fallback)
|
||||
yield ssl._create_unverified_context() # noqa
|
||||
|
||||
|
||||
# Per-process memoization of PyPI JSON metadata, keyed by distribution name.
_PYPI_CACHE = {}


def _pypi_get_distribution_info_cached(distribution):
    """Fetch (at most once per process) and return the PyPI JSON metadata for *distribution*."""
    try:
        return _PYPI_CACHE[distribution]
    except KeyError:
        _PYPI_CACHE[distribution] = _pypi_get_distribution_info(distribution)
    return _PYPI_CACHE[distribution]
|
||||
|
||||
|
||||
def _pypi_get_distribution_info(distribution):
    """
    Download ``https://pypi.org/pypi/<distribution>/json`` and return the parsed payload.

    Tries each SSL context produced by :func:`_request_context` in order;
    returns ``None`` when every attempt fails.
    """
    content, url = None, "https://pypi.org/pypi/{}/json".format(distribution)
    try:
        for context in _request_context():
            try:
                with urlopen(url, context=context) as file_handler:
                    content = json.load(file_handler)
                break  # success - stop trying further SSL contexts
            except URLError as exception:
                # likely a certificate problem - log and retry with the next context
                logging.error("failed to access %s because %r", url, exception)
    except Exception as exception:
        # any other failure (bad JSON, socket error, ...) aborts entirely
        logging.error("failed to access %s because %r", url, exception)
    return content
|
||||
|
||||
|
||||
def manual_upgrade(app_data, env):
    """Upgrade every bundled distribution for every supported python version, one worker thread each."""
    workers = []
    for for_py_version, distribution_to_package in BUNDLE_SUPPORT.items():
        # load extra search dir for the given for_py
        for distribution in distribution_to_package:
            worker = Thread(target=_run_manual_upgrade, args=(app_data, distribution, for_py_version, env))
            worker.start()
            workers.append(worker)
    # wait for every upgrade worker to finish before returning
    for worker in workers:
        worker.join()
|
||||
|
||||
|
||||
def _run_manual_upgrade(app_data, distribution, for_py_version, env):
    """Worker body for :func:`manual_upgrade`: upgrade one distribution for one python version."""
    start = datetime.now()
    from .bundle import from_bundle

    # resolve the wheel currently in use, without triggering a periodic update of its own
    current = from_bundle(
        distribution=distribution,
        version=None,
        for_py_version=for_py_version,
        search_dirs=[],
        app_data=app_data,
        do_periodic_update=False,
        env=env,
    )
    logging.warning(
        "upgrade %s for python %s with current %s",
        distribution,
        for_py_version,
        "" if current is None else current.name,
    )
    # NOTE(review): assumes from_bundle found a wheel; current.path would raise if current is None - confirm
    versions = do_update(
        distribution=distribution,
        for_py_version=for_py_version,
        embed_filename=current.path,
        app_data=app_data,
        search_dirs=[],
        periodic=False,
    )
    # two-stage formatting: the {} is filled here, the %s placeholders by logging below
    msg = "upgraded %s for python %s in %s {}".format(
        "new entries found:\n%s" if versions else "no new versions found",
    )
    args = [
        distribution,
        for_py_version,
        datetime.now() - start,
    ]
    if versions:
        args.append("\n".join("\t{}".format(v) for v in versions))
    logging.warning(msg, *args)
|
||||
|
||||
|
||||
# Explicit public API of this module.
__all__ = (
    "add_wheel_to_update_log",
    "periodic_update",
    "do_update",
    "manual_upgrade",
    "NewVersion",
    "UpdateLog",
    "load_datetime",
    "dump_datetime",
    "trigger_update",
    "release_date_for_wheel_path",
)
|
||||
116
.venv/lib/python3.8/site-packages/virtualenv/seed/wheels/util.py
Normal file
116
.venv/lib/python3.8/site-packages/virtualenv/seed/wheels/util.py
Normal file
|
|
@ -0,0 +1,116 @@
|
|||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from operator import attrgetter
|
||||
from zipfile import ZipFile
|
||||
|
||||
from virtualenv.util.six import ensure_text
|
||||
|
||||
|
||||
class Wheel(object):
    """Thin wrapper around a wheel file path that parses metadata out of its filename."""

    def __init__(self, path):
        # https://www.python.org/dev/peps/pep-0427/#file-name-convention
        # The wheel filename is {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl
        self.path = path
        self._parts = path.stem.split("-")

    @classmethod
    def from_path(cls, path):
        """Return a Wheel for *path*, or ``None`` when the filename is not a plausible wheel name."""
        if path is not None and path.suffix == ".whl" and len(path.stem.split("-")) >= 5:
            return cls(path)
        return None

    @property
    def distribution(self):
        # first filename component per PEP 427
        return self._parts[0]

    @property
    def version(self):
        # second filename component per PEP 427
        return self._parts[1]

    @property
    def version_tuple(self):
        return self.as_version_tuple(self.version)

    @staticmethod
    def as_version_tuple(version):
        """
        Convert up to the first three dotted numeric components of *version* into a tuple of ints.

        Stops at the first non-numeric component; raises ValueError when none are numeric.
        """
        result = []
        for part in version.split(".")[0:3]:
            try:
                result.append(int(part))
            except ValueError:
                break
        if not result:
            raise ValueError(version)
        return tuple(result)

    @property
    def name(self):
        return self.path.name

    def support_py(self, py_version):
        """Return True when this wheel's Requires-Python metadata accepts *py_version* ("major.minor")."""
        name = "{}.dist-info/METADATA".format("-".join(self.path.stem.split("-")[0:2]))
        with ZipFile(ensure_text(str(self.path)), "r") as zip_file:
            metadata = zip_file.read(name).decode("utf-8")
        marker = "Requires-Python:"
        requires = next((i[len(marker) :] for i in metadata.splitlines() if i.startswith(marker)), None)
        if requires is None:  # if it does not specify a python requires the assumption is compatible
            return True
        py_version_int = tuple(int(i) for i in py_version.split("."))
        for require in (i.strip() for i in requires.split(",")):
            # https://www.python.org/dev/peps/pep-0345/#version-specifiers
            # two-character operators come first so "<=" is not matched as "<"
            for operator, check in [
                ("!=", lambda v: py_version_int != v),
                ("==", lambda v: py_version_int == v),
                ("<=", lambda v: py_version_int <= v),
                (">=", lambda v: py_version_int >= v),
                ("<", lambda v: py_version_int < v),
                (">", lambda v: py_version_int > v),
            ]:
                if require.startswith(operator):
                    ver_str = require[len(operator) :].strip()
                    # "*" wildcard components map to None (only meaningful for ==/!= comparisons)
                    version = tuple((int(i) if i != "*" else None) for i in ver_str.split("."))[0:2]
                    if not check(version):
                        return False
                    break
        return True

    def __repr__(self):
        return "{}({})".format(self.__class__.__name__, self.path)

    def __str__(self):
        return str(self.path)
|
||||
|
||||
|
||||
def discover_wheels(from_folder, distribution, version, for_py_version):
    """Return wheels in *from_folder* matching distribution/version/python, newest version first."""
    matches = []
    for candidate in from_folder.iterdir():
        wheel = Wheel.from_path(candidate)
        if wheel is None or wheel.distribution != distribution:
            continue
        if version is not None and wheel.version != version:
            continue
        if wheel.support_py(for_py_version):
            matches.append(wheel)
    matches.sort(key=attrgetter("version_tuple", "distribution"), reverse=True)
    return matches
|
||||
|
||||
|
||||
class Version:
    """Helpers for version strings that may also be one of the special non-version markers below."""

    #: the version bundled with virtualenv
    bundle = "bundle"
    #: the version embedded within the virtualenv package
    embed = "embed"
    #: custom version handlers
    non_version = (
        bundle,
        embed,
    )

    @staticmethod
    def of_version(value):
        """Return *value* when it is a real version string, ``None`` for the special markers."""
        if value in Version.non_version:
            return None
        return value

    @staticmethod
    def as_pip_req(distribution, version):
        """Build a pip requirement string such as ``pip==21.0``."""
        return "{}{}".format(distribution, Version.as_version_spec(version))

    @staticmethod
    def as_version_spec(version):
        """Return ``==<version>`` for real versions, an empty string for the special markers."""
        of_version = Version.of_version(version)
        if of_version is None:
            return ""
        return "=={}".format(of_version)
|
||||
Loading…
Add table
Add a link
Reference in a new issue