@@ -20,7 +20,6 @@
     InstallationError,
     MetadataInconsistent,
     NetworkConnectionError,
-    PreviousBuildDirError,
     VcsHashUnsupported,
 )
 from pip._internal.index.package_finder import PackageFinder
@@ -47,7 +46,6 @@
     display_path,
     hash_file,
     hide_url,
-    is_installable_dir,
 )
 from pip._internal.utils.temp_dir import TempDirectory
 from pip._internal.utils.unpacking import unpack_file
@@ -305,7 +303,6 @@ def _ensure_link_req_src_dir(
             # We don't need to unpack wheels, so no need for a source
             # directory.
             return
-        assert req.source_dir is None
         if req.link.is_existing_dir():
             # build local directories in-tree
             req.source_dir = req.link.file_path
@@ -318,21 +315,6 @@ def _ensure_link_req_src_dir(
             parallel_builds=parallel_builds,
         )
 
-        # If a checkout exists, it's unwise to keep going. version
-        # inconsistencies are logged later, but do not fail the
-        # installation.
-        # FIXME: this won't upgrade when there's an existing
-        # package unpacked in `req.source_dir`
-        # TODO: this check is now probably dead code
-        if is_installable_dir(req.source_dir):
-            raise PreviousBuildDirError(
-                "pip can't proceed with requirements '{}' due to a"
-                "pre-existing build directory ({}). This is likely "
-                "due to a previous installation that failed . pip is "
-                "being responsible and not assuming it can delete this. "
-                "Please delete it and try again.".format(req, req.source_dir)
-            )
-
     def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
         # By the time this is called, the requirement's link should have
         # been checked so we can tell what kind of requirements req is
@@ -479,20 +461,24 @@ def _complete_partial_requirements(
         for link, (filepath, _) in batch_download:
             logger.debug("Downloading link %s to %s", link, filepath)
             req = links_to_fully_download[link]
+            # Record the downloaded file path so wheel reqs can extract a Distribution
+            # in .get_dist().
             req.local_file_path = filepath
-            # TODO: This needs fixing for sdists
-            # This is an emergency fix for #11847, which reports that
-            # distributions get downloaded twice when metadata is loaded
-            # from a PEP 658 standalone metadata file. Setting _downloaded
-            # fixes this for wheels, but breaks the sdist case (tests
-            # test_download_metadata). As PyPI is currently only serving
-            # metadata for wheels, this is not an immediate issue.
-            # Fixing the problem properly looks like it will require a
-            # complete refactoring of the `prepare_linked_requirements_more`
-            # logic, and I haven't a clue where to start on that, so for now
-            # I have fixed the issue *just* for wheels.
-            if req.is_wheel:
-                self._downloaded[req.link.url] = filepath
+            # Record that the file is downloaded so we don't do it again in
+            # _prepare_linked_requirement().
+            self._downloaded[req.link.url] = filepath
+
+            # If this is an sdist, we need to unpack it and set the .source_dir
+            # immediately after downloading, as _prepare_linked_requirement() assumes
+            # the req is either not downloaded at all, or both downloaded and
+            # unpacked. The downloading and unpacking is typically done with
+            # unpack_url(), but we separate the downloading and unpacking steps here in
+            # order to use the BatchDownloader.
+            if not req.is_wheel:
+                hashes = self._get_linked_req_hashes(req)
+                assert filepath == _check_download_dir(req.link, temp_dir, hashes)
+                self._ensure_link_req_src_dir(req, parallel_builds)
+                unpack_file(filepath, req.source_dir)
 
         # This step is necessary to ensure all lazy wheels are processed
         # successfully by the 'download', 'wheel', and 'install' commands.