@@ -376,109 +376,104 @@ def _download_should_save(self):
             "Could not find or access download directory '{}'"
             .format(self.download_dir))

-    def prepare_linked_requirement(
-        self,
-        req,  # type: InstallRequirement
-        parallel_builds=False,  # type: bool
-    ):
-        # type: (...) -> AbstractDistribution
-        """Prepare a requirement that would be obtained from req.link
-        """
-        assert req.link
-        link = req.link
-
-        # TODO: Breakup into smaller functions
-        if link.scheme == 'file':
-            path = link.file_path
+    def _log_preparing_link(self, req):
+        # type: (InstallRequirement) -> None
+        """Log the way the link is being prepared."""
+        if req.link.is_file:
+            path = req.link.file_path
             logger.info('Processing %s', display_path(path))
         else:
             logger.info('Collecting %s', req.req or req)

-        download_dir = self.download_dir
-        if link.is_wheel and self.wheel_download_dir:
-            # when doing 'pip wheel` we download wheels to a
-            # dedicated dir.
-            download_dir = self.wheel_download_dir
-
-        if link.is_wheel:
+    def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
+        # type: (InstallRequirement, Optional[str], bool) -> None
+        """Ensure source_dir of a linked InstallRequirement."""
+        if req.link.is_wheel:
             if download_dir:
                 # When downloading, we only unpack wheels to get
                 # metadata.
                 autodelete_unpacked = True
             else:
-                # When installing a wheel, we use the unpacked
-                # wheel.
+                # When installing a wheel, we use the unpacked wheel.
                 autodelete_unpacked = False
         else:
             # We always delete unpacked sdists after pip runs.
             autodelete_unpacked = True
+        # source_dir is only set for editable requirements.
+        assert req.source_dir is None
+        req.ensure_has_source_dir(
+            self.build_dir,
+            autodelete=autodelete_unpacked,
+            parallel_builds=parallel_builds,
+        )

-        with indent_log():
-            # Since source_dir is only set for editable requirements.
-            assert req.source_dir is None
-            req.ensure_has_source_dir(
-                self.build_dir,
-                autodelete=autodelete_unpacked,
-                parallel_builds=parallel_builds,
+        # If a checkout exists, it's unwise to keep going. Version
+        # inconsistencies are logged later, but do not fail the
+        # installation.
+        # FIXME: this won't upgrade when there's an existing
+        # package unpacked in `req.source_dir`
+        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+            raise PreviousBuildDirError(
+                "pip can't proceed with requirements '{}' due to a "
+                "pre-existing build directory ({}). This is likely "
+                "due to a previous installation that failed. pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again.".format(req, req.source_dir)
             )

-            # Now that we have the real link, we can tell what kind of
-            # requirements we have and raise some more informative errors
-            # than otherwise. (For example, we can raise VcsHashUnsupported
-            # for a VCS URL rather than HashMissing.)
-            if self.require_hashes:
-                # We could check these first 2 conditions inside
-                # unpack_url and save repetition of conditions, but then
-                # we would report less-useful error messages for
-                # unhashable requirements, complaining that there's no
-                # hash provided.
-                if link.is_vcs:
-                    raise VcsHashUnsupported()
-                elif link.is_existing_dir():
-                    raise DirectoryUrlHashUnsupported()
-                if not req.original_link and not req.is_pinned:
-                    # Unpinned packages are asking for trouble when a new
-                    # version is uploaded. This isn't a security check, but
-                    # it saves users a surprising hash mismatch in the
-                    # future.
-                    #
-                    # file:/// URLs aren't pinnable, so don't complain
-                    # about them not being pinned.
-                    raise HashUnpinned()
-
-            hashes = req.hashes(trust_internet=not self.require_hashes)
-            if self.require_hashes and not hashes:
-                # Known-good hashes are missing for this requirement, so
-                # shim it with a facade object that will provoke hash
-                # computation and then raise a HashMissing exception
-                # showing the user what the hash should be.
-                hashes = MissingHashes()
+    def _get_linked_req_hashes(self, req):
+        # type: (InstallRequirement) -> Hashes
+        # By the time this is called, the requirement's link should have
+        # been checked so we can tell what kind of requirement req is
+        # and raise some more informative errors than otherwise.
+        # (For example, we can raise VcsHashUnsupported for a VCS URL
+        # rather than HashMissing.)
+        if not self.require_hashes:
+            return req.hashes(trust_internet=True)
+
+        # We could check these first 2 conditions inside unpack_url
+        # and save repetition of conditions, but then we would
+        # report less-useful error messages for unhashable
+        # requirements, complaining that there's no hash provided.
+        if req.link.is_vcs:
+            raise VcsHashUnsupported()
+        if req.link.is_existing_dir():
+            raise DirectoryUrlHashUnsupported()
+
+        # Unpinned packages are asking for trouble when a new version
+        # is uploaded. This isn't a security check, but it saves users
+        # a surprising hash mismatch in the future.
+        # file:/// URLs aren't pinnable, so don't complain about them
+        # not being pinned.
+        if req.original_link is None and not req.is_pinned:
+            raise HashUnpinned()
+
+        # If known-good hashes are missing for this requirement,
+        # shim it with a facade object that will provoke hash
+        # computation and then raise a HashMissing exception
+        # showing the user what the hash should be.
+        return req.hashes(trust_internet=False) or MissingHashes()
+
+    def prepare_linked_requirement(self, req, parallel_builds=False):
+        # type: (InstallRequirement, bool) -> AbstractDistribution
+        """Prepare a requirement to be obtained from req.link."""
+        assert req.link
+        link = req.link
+        self._log_preparing_link(req)
+        if link.is_wheel and self.wheel_download_dir:
+            # Download wheels to a dedicated dir when doing `pip wheel`.
+            download_dir = self.wheel_download_dir
+        else:
+            download_dir = self.download_dir

+        with indent_log():
+            self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
             try:
                 local_file = unpack_url(
                     link, req.source_dir, self.downloader, download_dir,
-                    hashes=hashes,
+                    hashes=self._get_linked_req_hashes(req)
                 )
             except requests.HTTPError as exc:
-                logger.critical(
-                    'Could not install requirement %s because of error %s',
-                    req,
-                    exc,
-                )
                 raise InstallationError(
                     'Could not install requirement {} because of HTTP '
                     'error {} for URL {}'.format(req, exc, link)
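Note on the hunk above: the new return line, req.hashes(trust_internet=False) or MissingHashes(), leans on pip's Hashes objects being falsy when they carry no known-good digests, so the facade only takes over for hash-less requirements under --require-hashes. Below is a minimal sketch of that pattern with simplified stand-ins, not pip's real classes (the real ones live in pip._internal.utils.hashes):

# Simplified stand-ins, for illustration only.
class Hashes:
    def __init__(self, allowed=None):
        # Mapping of hash name -> list of allowed hex digests.
        self._allowed = allowed or {}

    def __bool__(self):
        # Falsy when no known-good digests are recorded.
        return bool(self._allowed)


class MissingHashes(Hashes):
    """Facade for "no hashes known"; pip's real one fails every
    check and reports the hash it actually computed."""


def pick_hashes(known_good):
    # The same or-else-facade fallback as the diff's return line.
    return Hashes(known_good) or MissingHashes()


assert isinstance(pick_hashes(None), MissingHashes)
assert not isinstance(pick_hashes({'sha256': ['deadbeef']}), MissingHashes)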
@@ -502,9 +497,8 @@ def prepare_linked_requirement(
                     )
                 if not os.path.exists(download_location):
                     shutil.copy(local_file.path, download_location)
-                    logger.info(
-                        'Saved %s', display_path(download_location)
-                    )
+                    download_path = display_path(download_location)
+                    logger.info('Saved %s', download_path)

         if self._download_should_save:
             # Make a .zip of the source_dir we already created.
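For reference, the autodelete choice that _ensure_link_req_src_dir now owns reduces to a small pure function. A sketch under that reading; should_autodelete_unpacked is an illustrative name, not part of this commit:

def should_autodelete_unpacked(is_wheel, download_dir):
    # type: (bool, Optional[str]) -> bool
    # Wheels unpacked only to read metadata (a download dir is set)
    # are throwaway; a wheel unpacked for installation must survive
    # until install time. Unpacked sdists are always removed after
    # the pip run.
    if is_wheel:
        return download_dir is not None
    return True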
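A side benefit of the extraction: _get_linked_req_hashes reads nothing from the preparer except require_hashes, so the hash policy can be unit-tested with cheap fakes. A test sketch under that assumption (FakeLink and FakeReq are hand-rolled stand-ins; HashUnpinned and RequirementPreparer are the real names in pip._internal at this point in history):

import types

import pytest

from pip._internal.exceptions import HashUnpinned
from pip._internal.operations.prepare import RequirementPreparer


class FakeLink:
    is_vcs = False

    def is_existing_dir(self):
        return False


class FakeReq:
    link = FakeLink()
    original_link = None  # not pinned by a requirements file
    is_pinned = False

    def hashes(self, trust_internet=True):
        return None  # no known-good hashes recorded


def test_unpinned_req_needs_hash():
    # Stub "self" carrying the only attribute the helper touches.
    preparer = types.SimpleNamespace(require_hashes=True)
    with pytest.raises(HashUnpinned):
        RequirementPreparer._get_linked_req_hashes(preparer, FakeReq())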