# req_install.py
  1. # The following comment should be removed at some point in the future.
  2. # mypy: strict-optional=False
  3. from __future__ import absolute_import
  4. import logging
  5. import os
  6. import shutil
  7. import sys
  8. import uuid
  9. import zipfile
  10. from pip._vendor import pkg_resources, six
  11. from pip._vendor.packaging.requirements import Requirement
  12. from pip._vendor.packaging.utils import canonicalize_name
  13. from pip._vendor.packaging.version import Version
  14. from pip._vendor.packaging.version import parse as parse_version
  15. from pip._vendor.pep517.wrappers import Pep517HookCaller
  16. from pip._internal.build_env import NoOpBuildEnvironment
  17. from pip._internal.exceptions import InstallationError
  18. from pip._internal.locations import get_scheme
  19. from pip._internal.models.link import Link
  20. from pip._internal.operations.build.metadata import generate_metadata
  21. from pip._internal.operations.build.metadata_legacy import \
  22. generate_metadata as generate_metadata_legacy
  23. from pip._internal.operations.install.editable_legacy import \
  24. install_editable as install_editable_legacy
  25. from pip._internal.operations.install.legacy import LegacyInstallFailure
  26. from pip._internal.operations.install.legacy import install as install_legacy
  27. from pip._internal.operations.install.wheel import install_wheel
  28. from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
  29. from pip._internal.req.req_uninstall import UninstallPathSet
  30. from pip._internal.utils.deprecation import deprecated
  31. from pip._internal.utils.direct_url_helpers import direct_url_from_link
  32. from pip._internal.utils.hashes import Hashes
  33. from pip._internal.utils.logging import indent_log
  34. from pip._internal.utils.misc import (
  35. ask_path_exists,
  36. backup_dir,
  37. display_path,
  38. dist_in_site_packages,
  39. dist_in_usersite,
  40. get_distribution,
  41. get_installed_version,
  42. hide_url,
  43. redact_auth_from_url,
  44. )
  45. from pip._internal.utils.packaging import get_metadata
  46. from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
  47. from pip._internal.utils.typing import MYPY_CHECK_RUNNING
  48. from pip._internal.utils.virtualenv import running_under_virtualenv
  49. from pip._internal.vcs import vcs
# These imports are only needed for type annotations; MYPY_CHECK_RUNNING is
# False at runtime, so none of them are imported when pip actually runs.
if MYPY_CHECK_RUNNING:
    from typing import (
        Any, Dict, Iterable, List, Optional, Sequence, Union,
    )
    from pip._internal.build_env import BuildEnvironment
    from pip._vendor.pkg_resources import Distribution
    from pip._vendor.packaging.specifiers import SpecifierSet
    from pip._vendor.packaging.markers import Marker
# Module-level logger, named after this module per stdlib logging convention.
logger = logging.getLogger(__name__)
  59. def _get_dist(metadata_directory):
  60. # type: (str) -> Distribution
  61. """Return a pkg_resources.Distribution for the provided
  62. metadata directory.
  63. """
  64. dist_dir = metadata_directory.rstrip(os.sep)
  65. # Build a PathMetadata object, from path to metadata. :wink:
  66. base_dir, dist_dir_name = os.path.split(dist_dir)
  67. metadata = pkg_resources.PathMetadata(base_dir, dist_dir)
  68. # Determine the correct Distribution object type.
  69. if dist_dir.endswith(".egg-info"):
  70. dist_cls = pkg_resources.Distribution
  71. dist_name = os.path.splitext(dist_dir_name)[0]
  72. else:
  73. assert dist_dir.endswith(".dist-info")
  74. dist_cls = pkg_resources.DistInfoDistribution
  75. dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]
  76. return dist_cls(
  77. base_dir,
  78. project_name=dist_name,
  79. metadata=metadata,
  80. )
class InstallRequirement(object):
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """
    def __init__(
        self,
        req,  # type: Optional[Requirement]
        comes_from,  # type: Optional[Union[str, InstallRequirement]]
        editable=False,  # type: bool
        link=None,  # type: Optional[Link]
        markers=None,  # type: Optional[Marker]
        use_pep517=None,  # type: Optional[bool]
        isolated=False,  # type: bool
        install_options=None,  # type: Optional[List[str]]
        global_options=None,  # type: Optional[List[str]]
        hash_options=None,  # type: Optional[Dict[str, List[str]]]
        constraint=False,  # type: bool
        extras=(),  # type: Iterable[str]
        user_supplied=False,  # type: bool
    ):
        # type: (...) -> None
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir = None  # type: Optional[str]
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(
                    os.path.abspath(link.file_path)
                )

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        # Whether the original link was later swapped for a wheel-cache entry;
        # flipped by callers outside this class.
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path = None  # type: Optional[str]
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            # Normalize the extras declared on the requirement itself.
            self.extras = {
                pkg_resources.safe_extra(extra) for extra in req.extras
            }
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the pkg_resources.Distribution object if this requirement
        # is already available:
        self.satisfied_by = None  # type: Optional[Distribution]
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir = None  # type: Optional[TempDirectory]
        # Set to True after successful installation
        self.install_succeeded = None  # type: Optional[bool]
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        # Set by the legacy resolver when the requirement has been downloaded
        # TODO: This introduces a strong coupling between the resolver and the
        #       requirement (the coupling was previously between the resolver
        #       and the requirement set). This should be refactored to allow
        #       the requirement to decide for itself when it has been
        #       successfully downloaded - but that is more tricky to get right,
        #       so we are making the change in stages.
        self.successfully_downloaded = False

        self.isolated = isolated
        self.build_env = NoOpBuildEnvironment()  # type: BuildEnvironment

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory = None  # type: Optional[str]

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires = None  # type: Optional[List[str]]

        # Build requirements that we will check are available
        self.requirements_to_check = []  # type: List[str]

        # The PEP 517 backend we should use to build the project
        self.pep517_backend = None  # type: Optional[Pep517HookCaller]

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517
  188. def __str__(self):
  189. # type: () -> str
  190. if self.req:
  191. s = str(self.req)
  192. if self.link:
  193. s += ' from {}'.format(redact_auth_from_url(self.link.url))
  194. elif self.link:
  195. s = redact_auth_from_url(self.link.url)
  196. else:
  197. s = '<InstallRequirement>'
  198. if self.satisfied_by is not None:
  199. s += ' in {}'.format(display_path(self.satisfied_by.location))
  200. if self.comes_from:
  201. if isinstance(self.comes_from, six.string_types):
  202. comes_from = self.comes_from # type: Optional[str]
  203. else:
  204. comes_from = self.comes_from.from_path()
  205. if comes_from:
  206. s += ' (from {})'.format(comes_from)
  207. return s
  208. def __repr__(self):
  209. # type: () -> str
  210. return '<{} object: {} editable={!r}>'.format(
  211. self.__class__.__name__, str(self), self.editable)
  212. def format_debug(self):
  213. # type: () -> str
  214. """An un-tested helper for getting state, for debugging.
  215. """
  216. attributes = vars(self)
  217. names = sorted(attributes)
  218. state = (
  219. "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
  220. )
  221. return '<{name} object: {{{state}}}>'.format(
  222. name=self.__class__.__name__,
  223. state=", ".join(state),
  224. )
  225. # Things that are valid for all kinds of requirements?
  226. @property
  227. def name(self):
  228. # type: () -> Optional[str]
  229. if self.req is None:
  230. return None
  231. return six.ensure_str(pkg_resources.safe_name(self.req.name))
    @property
    def specifier(self):
        # type: () -> SpecifierSet
        """Version specifier set of the underlying requirement."""
        return self.req.specifier
  236. @property
  237. def is_pinned(self):
  238. # type: () -> bool
  239. """Return whether I am pinned to an exact version.
  240. For example, some-package==1.2 is pinned; some-package>1.2 is not.
  241. """
  242. specifiers = self.specifier
  243. return (len(specifiers) == 1 and
  244. next(iter(specifiers)).operator in {'==', '==='})
    @property
    def installed_version(self):
        # type: () -> Optional[str]
        """Version string of the currently-installed distribution with this
        name, or None if it is not installed."""
        return get_installed_version(self.name)
  249. def match_markers(self, extras_requested=None):
  250. # type: (Optional[Iterable[str]]) -> bool
  251. if not extras_requested:
  252. # Provide an extra to safely evaluate the markers
  253. # without matching any extra
  254. extras_requested = ('',)
  255. if self.markers is not None:
  256. return any(
  257. self.markers.evaluate({'extra': extra})
  258. for extra in extras_requested)
  259. else:
  260. return True
  261. @property
  262. def has_hash_options(self):
  263. # type: () -> bool
  264. """Return whether any known-good hashes are specified as options.
  265. These activate --require-hashes mode; hashes specified as part of a
  266. URL do not.
  267. """
  268. return bool(self.hash_options)
  269. def hashes(self, trust_internet=True):
  270. # type: (bool) -> Hashes
  271. """Return a hash-comparer that considers my option- and URL-based
  272. hashes to be known-good.
  273. Hashes in URLs--ones embedded in the requirements file, not ones
  274. downloaded from an index server--are almost peers with ones from
  275. flags. They satisfy --require-hashes (whether it was implicitly or
  276. explicitly activated) but do not activate it. md5 and sha224 are not
  277. allowed in flags, which should nudge people toward good algos. We
  278. always OR all hashes together, even ones from URLs.
  279. :param trust_internet: Whether to trust URL-based (#md5=...) hashes
  280. downloaded from the internet, as by populate_link()
  281. """
  282. good_hashes = self.hash_options.copy()
  283. link = self.link if trust_internet else self.original_link
  284. if link and link.hash:
  285. good_hashes.setdefault(link.hash_name, []).append(link.hash)
  286. return Hashes(good_hashes)
  287. def from_path(self):
  288. # type: () -> Optional[str]
  289. """Format a nice indicator to show where this "comes from"
  290. """
  291. if self.req is None:
  292. return None
  293. s = str(self.req)
  294. if self.comes_from:
  295. if isinstance(self.comes_from, six.string_types):
  296. comes_from = self.comes_from
  297. else:
  298. comes_from = self.comes_from.from_path()
  299. if comes_from:
  300. s += '->' + comes_from
  301. return s
    def ensure_build_location(self, build_dir, autodelete, parallel_builds):
        # type: (str, bool, bool) -> str
        """Return (creating if necessary) the directory this requirement
        should be built in, registered for globally-managed cleanup.
        """
        assert build_dir is not None
        # Reuse a previously-created temp build dir.
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = "{}_{}".format(dir_name, uuid.uuid4().hex)

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug('Creating directory %s', build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path
    def _set_requirement(self):
        # type: () -> None
        """Set requirement after generating metadata.

        Builds ``self.req`` as ``<Name>==<Version>`` (or ``===`` when the
        version is not PEP 440 compliant) from the generated metadata.
        """
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            # Valid PEP 440 version: a regular '==' pin works.
            op = "=="
        else:
            # Legacy version string: use the arbitrary-equality '===' pin.
            op = "==="

        self.req = Requirement(
            "".join([
                self.metadata["Name"],
                op,
                self.metadata["Version"],
            ])
        )
    def warn_on_mismatching_name(self):
        # type: () -> None
        """Warn, and repoint self.req, when the generated metadata's project
        name disagrees with the name this requirement was given (e.g. via a
        stale #egg= fragment).
        """
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            'Generating metadata for package %s '
            'produced metadata for project name %s. Fix your '
            '#egg=%s fragments.',
            self.name, metadata_name, self.name
        )
        # Trust the metadata over the user-supplied name.
        self.req = Requirement(metadata_name)
    def check_if_exists(self, use_user_site):
        # type: (bool) -> None
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        # get_distribution() will resolve the entire list of requirements
        # anyway, and we've already determined that we need the requirement
        # in question, so strip the marker so that we don't try to
        # evaluate it.
        no_marker = Requirement(str(self.req))
        no_marker.marker = None

        # pkg_resources uses the canonical name to look up packages, but
        # the name passed to get_distribution is not canonicalized
        # so we have to explicitly convert it to a canonical name
        no_marker.name = canonicalize_name(no_marker.name)
        try:
            self.satisfied_by = pkg_resources.get_distribution(str(no_marker))
        except pkg_resources.DistributionNotFound:
            # Nothing installed under that name: leave satisfied_by unset.
            return
        except pkg_resources.VersionConflict:
            # Something is installed but does not satisfy the specifier.
            existing_dist = get_distribution(
                self.req.name
            )
            if use_user_site:
                if dist_in_usersite(existing_dist):
                    self.should_reinstall = True
                elif (running_under_virtualenv() and
                        dist_in_site_packages(existing_dist)):
                    # A user-site install could never shadow the virtualenv's
                    # site-packages copy, so refuse outright.
                    raise InstallationError(
                        "Will not install to the user site because it will "
                        "lack sys.path precedence to {} in {}".format(
                            existing_dist.project_name, existing_dist.location)
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable and self.satisfied_by:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
  413. # Things valid for wheels
  414. @property
  415. def is_wheel(self):
  416. # type: () -> bool
  417. if not self.link:
  418. return False
  419. return self.link.is_wheel
  420. # Things valid for sdists
  421. @property
  422. def unpacked_source_directory(self):
  423. # type: () -> str
  424. return os.path.join(
  425. self.source_dir,
  426. self.link and self.link.subdirectory_fragment or '')
    @property
    def setup_py_path(self):
        # type: () -> str
        """Path to setup.py inside the unpacked source tree."""
        assert self.source_dir, "No source dir for {}".format(self)
        setup_py = os.path.join(self.unpacked_source_directory, 'setup.py')

        # Python2 __file__ should not be unicode
        if six.PY2 and isinstance(setup_py, six.text_type):
            setup_py = setup_py.encode(sys.getfilesystemencoding())

        return setup_py
    @property
    def pyproject_toml_path(self):
        # type: () -> str
        """Path to pyproject.toml inside the unpacked source tree."""
        assert self.source_dir, "No source dir for {}".format(self)
        return make_pyproject_path(self.unpacked_source_directory)
    def load_pyproject_toml(self):
        # type: () -> None
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517,
            self.pyproject_toml_path,
            self.setup_py_path,
            str(self)
        )

        if pyproject_toml_data is None:
            # No usable PEP 517 data: take the legacy setup.py code path.
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        # The hook caller runs the backend in self.unpacked_source_directory.
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory, backend, backend_path=backend_path,
        )
    def _generate_metadata(self):
        # type: () -> str
        """Invokes metadata generator functions, with the required arguments.

        Returns the path of the directory the metadata was written to.
        """
        if not self.use_pep517:
            # Legacy path: run setup.py-based metadata generation.
            assert self.unpacked_source_directory
            return generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=self.name or "from {}".format(self.link)
            )

        # PEP 517 path: ask the configured build backend for metadata.
        assert self.pep517_backend is not None
        return generate_metadata(
            build_env=self.build_env,
            backend=self.pep517_backend,
        )
    def prepare_metadata(self):
        # type: () -> None
        """Ensure that project metadata is available.

        Under PEP 517, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir
        with indent_log():
            self.metadata_directory = self._generate_metadata()

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            # The metadata is the first place we learn the project name:
            # derive self.req from it.
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()
  498. @property
  499. def metadata(self):
  500. # type: () -> Any
  501. if not hasattr(self, '_metadata'):
  502. self._metadata = get_metadata(self.get_dist())
  503. return self._metadata
    def get_dist(self):
        # type: () -> Distribution
        """Return a pkg_resources.Distribution built from the generated
        metadata directory."""
        return _get_dist(self.metadata_directory)
    def assert_source_matches_version(self):
        # type: () -> None
        """Log a warning when the unpacked source's version does not satisfy
        this requirement's specifier (a debug message otherwise)."""
        assert self.source_dir
        version = self.metadata['version']
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                'Requested %s, but installing version %s',
                self,
                version,
            )
        else:
            logger.debug(
                'Source in %s has version %s, which satisfies requirement %s',
                display_path(self.source_dir),
                version,
                self,
            )
  524. # For both source distributions and editables
  525. def ensure_has_source_dir(
  526. self,
  527. parent_dir,
  528. autodelete=False,
  529. parallel_builds=False,
  530. ):
  531. # type: (str, bool, bool) -> None
  532. """Ensure that a source_dir is set.
  533. This will create a temporary build dir if the name of the requirement
  534. isn't known yet.
  535. :param parent_dir: The ideal pip parent_dir for the source_dir.
  536. Generally src_dir for editables and build_dir for sdists.
  537. :return: self.source_dir
  538. """
  539. if self.source_dir is None:
  540. self.source_dir = self.ensure_build_location(
  541. parent_dir,
  542. autodelete=autodelete,
  543. parallel_builds=parallel_builds,
  544. )
    # For editable installations
    def update_editable(self, obtain=True):
        # type: (bool) -> None
        """Bring the editable VCS checkout in self.source_dir up to date.

        :param obtain: when True, obtain/update the checkout via the VCS
            backend; when False, export it instead.
        """
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is "
                "unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == 'file':
            # Static paths don't get updated
            return
        # Editable VCS URLs look like "<vcs>+<real-url>".
        assert '+' in self.link.url, \
            "bad url: {self.link.url!r}".format(**locals())
        vc_type, url = self.link.url.split('+', 1)
        vcs_backend = vcs.get_backend(vc_type)
        if vcs_backend:
            if not self.link.is_vcs:
                # Deprecated spelling of a VCS requirement; warn but proceed.
                reason = (
                    "This form of VCS requirement is being deprecated: {}."
                ).format(
                    self.link.url
                )
                replacement = None
                if self.link.url.startswith("git+git@"):
                    replacement = (
                        "git+https://git@example.com/..., "
                        "git+ssh://git@example.com/..., "
                        "or the insecure git+git://git@example.com/..."
                    )
                deprecated(reason, replacement, gone_in="21.0", issue=7554)
            hidden_url = hide_url(self.link.url)
            if obtain:
                vcs_backend.obtain(self.source_dir, url=hidden_url)
            else:
                vcs_backend.export(self.source_dir, url=hidden_url)
        else:
            assert 0, (
                'Unexpected version control type (in {}): {}'.format(
                    self.link, vc_type))
    # Top-level Actions
    def uninstall(self, auto_confirm=False, verbose=False):
        # type: (bool, bool) -> Optional[UninstallPathSet]
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        :return: the UninstallPathSet that was removed, or None when the
            distribution was not installed.
        """
        assert self.req
        dist = get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info('Found existing installation: %s', dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset
  609. def _get_archive_name(self, path, parentdir, rootdir):
  610. # type: (str, str, str) -> str
  611. def _clean_zip_name(name, prefix):
  612. # type: (str, str) -> str
  613. assert name.startswith(prefix + os.path.sep), (
  614. "name {name!r} doesn't start with prefix {prefix!r}"
  615. .format(**locals())
  616. )
  617. name = name[len(prefix) + 1:]
  618. name = name.replace(os.path.sep, '/')
  619. return name
  620. path = os.path.join(parentdir, path)
  621. name = _clean_zip_name(path, rootdir)
  622. return self.name + '/' + name
    def archive(self, build_dir):
        # type: (str) -> None
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        create_archive = True
        archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            # An archive already exists; ask the user how to proceed.
            response = ask_path_exists(
                'The file {} exists. (i)gnore, (w)ipe, '
                '(b)ackup, (a)bort '.format(
                    display_path(archive_path)),
                ('i', 'w', 'b', 'a'))
            if response == 'i':
                create_archive = False
            elif response == 'w':
                logger.warning('Deleting %s', display_path(archive_path))
                os.remove(archive_path)
            elif response == 'b':
                dest_file = backup_dir(archive_path)
                logger.warning(
                    'Backing up %s to %s',
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == 'a':
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(
                os.path.abspath(self.unpacked_source_directory)
            )
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    # Write explicit directory entries so empty directories
                    # survive in the archive.
                    dir_arcname = self._get_archive_name(
                        dirname, parentdir=dirpath, rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + '/')
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, '')
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename, parentdir=dirpath, rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info('Saved %s', display_path(archive_path))
    def install(
        self,
        install_options,  # type: List[str]
        global_options=None,  # type: Optional[Sequence[str]]
        root=None,  # type: Optional[str]
        home=None,  # type: Optional[str]
        prefix=None,  # type: Optional[str]
        warn_script_location=True,  # type: bool
        use_user_site=False,  # type: bool
        pycompile=True  # type: bool
    ):
        # type: (...) -> None
        """Install this requirement, dispatching to the editable, wheel, or
        legacy setup.py code path and recording success in
        ``self.install_succeeded``.
        """
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable:
            # Editable installs go through the legacy "develop" path.
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
            )
            self.install_succeeded = True
            return

        # TODO: Why don't we do this for editable installs?

        # Extend the list of global and install options passed on to
        # the setup.py call with the ones from the requirements file.
        # Options specified in requirements file override those
        # specified on the command line, since the last option given
        # to setup.py is the one that is used.
        global_options = list(global_options) + self.global_options
        install_options = list(install_options) + self.install_options

        try:
            success = install_legacy(
                install_options=install_options,
                global_options=global_options,
                root=root,
                home=home,
                prefix=prefix,
                use_user_site=use_user_site,
                pycompile=pycompile,
                scheme=scheme,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                req_name=self.name,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
                req_description=str(self.req),
            )
        except LegacyInstallFailure as exc:
            self.install_succeeded = False
            six.reraise(*exc.parent)
        except Exception:
            # NOTE(review): an unexpected error here may have left the package
            # partially installed, so success is recorded before re-raising —
            # presumably so cleanup treats it as installed; confirm intent.
            self.install_succeeded = True
            raise

        self.install_succeeded = success
  766. def check_invalid_constraint_type(req):
  767. # type: (InstallRequirement) -> str
  768. # Check for unsupported forms
  769. problem = ""
  770. if not req.name:
  771. problem = "Unnamed requirements are not allowed as constraints"
  772. elif req.link:
  773. problem = "Links are not allowed as constraints"
  774. elif req.extras:
  775. problem = "Constraints cannot have extras"
  776. if problem:
  777. deprecated(
  778. reason=(
  779. "Constraints are only allowed to take the form of a package "
  780. "name and a version specifier. Other forms were originally "
  781. "permitted as an accident of the implementation, but were "
  782. "undocumented. The new implementation of the resolver no "
  783. "longer supports these forms."
  784. ),
  785. replacement=(
  786. "replacing the constraint with a requirement."
  787. ),
  788. # No plan yet for when the new resolver becomes default
  789. gone_in=None,
  790. issue=8210
  791. )
  792. return problem