Skip to content

Commit 386d1b9

Browse files
committed
Merge pull request #1524 from qwcode/download_wheels
'pip wheel' should download wheels, when it finds them
2 parents 340d69e + eb7a31e commit 386d1b9

File tree

6 files changed

+257
-54
lines changed

6 files changed

+257
-54
lines changed

pip/commands/wheel.py

+6-5
Original file line numberDiff line numberDiff line change
@@ -165,13 +165,15 @@ def run(self, options, args):
165165
ignore_dependencies=options.ignore_dependencies,
166166
ignore_installed=True,
167167
session=session,
168+
wheel_download_dir=options.wheel_dir
168169
)
169170

171+
# make the wheelhouse
172+
if not os.path.exists(options.wheel_dir):
173+
os.makedirs(options.wheel_dir)
174+
170175
#parse args and/or requirements files
171176
for name in args:
172-
if name.endswith(".whl"):
173-
logger.notify("ignoring %s" % name)
174-
continue
175177
requirement_set.add_requirement(
176178
InstallRequirement.from_line(name, None))
177179

@@ -181,8 +183,7 @@ def run(self, options, args):
181183
finder=finder,
182184
options=options,
183185
session=session):
184-
if (req.editable
185-
or (req.name is None and req.url.endswith(".whl"))):
186+
if req.editable:
186187
logger.notify("ignoring %s" % req.url)
187188
continue
188189
requirement_set.add_requirement(req)

pip/download.py

+67-16
Original file line numberDiff line numberDiff line change
@@ -348,18 +348,6 @@ def unpack_vcs_link(link, location, only_download=False):
348348
vcs_backend.unpack(location)
349349

350350

351-
def unpack_file_url(link, location):
352-
source = url_to_path(link.url)
353-
content_type = mimetypes.guess_type(source)[0]
354-
if os.path.isdir(source):
355-
# delete the location since shutil will create it again :(
356-
if os.path.isdir(location):
357-
rmtree(location)
358-
shutil.copytree(source, location, symlinks=True)
359-
else:
360-
unpack_file(source, location, content_type, link)
361-
362-
363351
def _get_used_vcs_backend(link):
364352
for backend in vcs.backends:
365353
if link.scheme in backend.schemes:
@@ -507,7 +495,6 @@ def _copy_file(filename, location, content_type, link):
507495
shutil.move(download_location, dest_file)
508496
if copy:
509497
shutil.copy(filename, download_location)
510-
logger.indent -= 2
511498
logger.notify('Saved %s' % display_path(download_location))
512499

513500

@@ -519,11 +506,12 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
519506
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
520507
temp_location = None
521508
target_url = link.url.split('#', 1)[0]
522-
523509
already_cached = False
524510
cache_file = None
525511
cache_content_type_file = None
526512
download_hash = None
513+
514+
# If a download cache is specified, is the file cached there?
527515
if download_cache:
528516
cache_file = os.path.join(
529517
download_cache,
@@ -537,12 +525,14 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
537525
if not os.path.isdir(download_cache):
538526
create_download_cache_folder(download_cache)
539527

528+
# If a download dir is specified, is the file already downloaded there?
540529
already_downloaded = None
541530
if download_dir:
542531
already_downloaded = os.path.join(download_dir, link.filename)
543532
if not os.path.exists(already_downloaded):
544533
already_downloaded = None
545534

535+
# If already downloaded, does its hash match?
546536
if already_downloaded:
547537
temp_location = already_downloaded
548538
content_type = mimetypes.guess_type(already_downloaded)[0]
@@ -560,8 +550,7 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
560550
os.unlink(already_downloaded)
561551
already_downloaded = None
562552

563-
# We have a cached file, and we haven't already found a good downloaded
564-
# copy
553+
# If not a valid download, let's confirm the cached file is valid
565554
if already_cached and not temp_location:
566555
with open(cache_content_type_file) as fp:
567556
content_type = fp.read().strip()
@@ -582,6 +571,7 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
582571
already_cached = False
583572

584573
# We don't have either a cached or a downloaded copy
574+
# let's download to a tmp dir
585575
if not temp_location:
586576
try:
587577
resp = session.get(target_url, stream=True)
@@ -614,11 +604,72 @@ def unpack_http_url(link, location, download_cache, download_dir=None,
614604
if link.hash and link.hash_name:
615605
_check_hash(download_hash, link)
616606

607+
# a download dir is specified; let's copy the archive there
617608
if download_dir and not already_downloaded:
618609
_copy_file(temp_location, download_dir, content_type, link)
610+
611+
# unpack the archive to the build dir location. even when only downloading
612+
# archives, they have to be unpacked to parse dependencies
619613
unpack_file(temp_location, location, content_type, link)
614+
615+
# if using a download cache, cache it, if needed
620616
if cache_file and not already_cached:
621617
cache_download(cache_file, temp_location, content_type)
618+
622619
if not (already_cached or already_downloaded):
623620
os.unlink(temp_location)
621+
624622
os.rmdir(temp_dir)
623+
624+
625+
def unpack_file_url(link, location, download_dir=None):
626+
627+
link_path = url_to_path(link.url_without_fragment)
628+
already_downloaded = False
629+
630+
# If it's a url to a local directory
631+
if os.path.isdir(link_path):
632+
if os.path.isdir(location):
633+
rmtree(location)
634+
shutil.copytree(link_path, location, symlinks=True)
635+
return
636+
637+
# if link has a hash, let's confirm it matches
638+
if link.hash:
639+
link_path_hash = _get_hash_from_file(link_path, link)
640+
_check_hash(link_path_hash, link)
641+
642+
# If a download dir is specified, is the file already there and valid?
643+
if download_dir:
644+
download_path = os.path.join(download_dir, link.filename)
645+
if os.path.exists(download_path):
646+
content_type = mimetypes.guess_type(download_path)[0]
647+
logger.notify('File was already downloaded %s' % download_path)
648+
if link.hash:
649+
download_hash = _get_hash_from_file(download_path, link)
650+
try:
651+
_check_hash(download_hash, link)
652+
already_downloaded = True
653+
except HashMismatch:
654+
logger.warn(
655+
'Previously-downloaded file %s has bad hash, '
656+
're-downloading.' % link_path
657+
)
658+
os.unlink(download_path)
659+
else:
660+
already_downloaded = True
661+
662+
if already_downloaded:
663+
from_path = download_path
664+
else:
665+
from_path = link_path
666+
667+
content_type = mimetypes.guess_type(from_path)[0]
668+
669+
# unpack the archive to the build dir location. even when only downloading
670+
# archives, they have to be unpacked to parse dependencies
671+
unpack_file(from_path, location, content_type, link)
672+
673+
# a download dir is specified and not already downloaded
674+
if download_dir and not already_downloaded:
675+
_copy_file(from_path, download_dir, content_type, link)

pip/req/req_set.py

+63-21
Original file line numberDiff line numberDiff line change
@@ -9,14 +9,14 @@
99
from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled,
1010
DistributionNotFound, PreviousBuildDirError)
1111
from pip.index import Link
12-
from pip.locations import (
13-
PIP_DELETE_MARKER_FILENAME, write_delete_marker_file, build_prefix,
14-
)
12+
from pip.locations import (PIP_DELETE_MARKER_FILENAME, build_prefix,
13+
write_delete_marker_file)
1514
from pip.log import logger
1615
from pip.req.req_install import InstallRequirement
1716
from pip.util import (display_path, rmtree, dist_in_usersite, call_subprocess,
1817
_make_build_dir)
1918
from pip.vcs import vcs
19+
from pip.wheel import wheel_ext
2020

2121

2222
class Requirements(object):
@@ -53,10 +53,12 @@ def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
5353
upgrade=False, ignore_installed=False, as_egg=False,
5454
target_dir=None, ignore_dependencies=False,
5555
force_reinstall=False, use_user_site=False, session=None,
56-
pycompile=True):
56+
pycompile=True, wheel_download_dir=None):
5757
self.build_dir = build_dir
5858
self.src_dir = src_dir
5959
self.download_dir = download_dir
60+
if download_cache:
61+
download_cache = os.path.expanduser(download_cache)
6062
self.download_cache = download_cache
6163
self.upgrade = upgrade
6264
self.ignore_installed = ignore_installed
@@ -74,6 +76,7 @@ def __init__(self, build_dir, src_dir, download_dir, download_cache=None,
7476
self.target_dir = target_dir # set from --target option
7577
self.session = session or PipSession()
7678
self.pycompile = pycompile
79+
self.wheel_download_dir = wheel_download_dir
7780

7881
def __str__(self):
7982
reqs = [req for req in self.requirements.values()
@@ -209,6 +212,11 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
209212
install = True
210213
best_installed = False
211214
not_found = None
215+
216+
###############################################
217+
## Search for archive to fulfill requirement ##
218+
###############################################
219+
212220
if not self.ignore_installed and not req_to_install.editable:
213221
req_to_install.check_if_exists()
214222
if req_to_install.satisfied_by:
@@ -258,6 +266,11 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
258266
else:
259267
logger.notify('Downloading/unpacking %s' % req_to_install)
260268
logger.indent += 2
269+
270+
##################################
271+
## vcs update or unpack archive ##
272+
##################################
273+
261274
try:
262275
is_bundle = False
263276
is_wheel = False
@@ -323,9 +336,21 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
323336
assert url
324337
if url:
325338
try:
339+
340+
if (
341+
url.filename.endswith(wheel_ext)
342+
and self.wheel_download_dir
343+
):
344+
# when doing 'pip wheel'
345+
download_dir = self.wheel_download_dir
346+
do_download = True
347+
else:
348+
download_dir = self.download_dir
349+
do_download = self.is_download
326350
self.unpack_url(
327-
url, location, self.is_download,
328-
)
351+
url, location, download_dir,
352+
do_download,
353+
)
329354
except HTTPError as exc:
330355
logger.fatal(
331356
'Could not install requirement %s because '
@@ -340,7 +365,7 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
340365
unpack = False
341366
if unpack:
342367
is_bundle = req_to_install.is_bundle
343-
is_wheel = url and url.filename.endswith('.whl')
368+
is_wheel = url and url.filename.endswith(wheel_ext)
344369
if is_bundle:
345370
req_to_install.move_bundle_files(
346371
self.build_dir,
@@ -356,6 +381,11 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
356381
req_to_install.run_egg_info()
357382
if url and url.scheme in vcs.all_schemes:
358383
req_to_install.archive(self.download_dir)
384+
385+
##############################
386+
## parse wheel dependencies ##
387+
##############################
388+
359389
elif is_wheel:
360390
req_to_install.source_dir = location
361391
req_to_install.url = url.url
@@ -413,6 +443,11 @@ def prepare_files(self, finder, force_root_egg_info=False, bundle=False):
413443
req_to_install
414444
)
415445
install = False
446+
447+
##############################
448+
## parse sdist dependencies ##
449+
##############################
450+
416451
if not (is_bundle or is_wheel):
417452
## FIXME: shouldn't be globally added:
418453
finder.add_dependency_links(
@@ -503,29 +538,36 @@ def copy_to_build_dir(self, req_to_install):
503538
call_subprocess(["python", "%s/setup.py" % dest, "clean"], cwd=dest,
504539
command_desc='python setup.py clean')
505540

506-
def unpack_url(self, link, location, only_download=False):
507-
if only_download:
508-
loc = self.download_dir
509-
else:
510-
loc = location
541+
def unpack_url(self, link, location, download_dir=None,
542+
only_download=False):
543+
if download_dir is None:
544+
download_dir = self.download_dir
545+
546+
# non-editable vcs urls
511547
if is_vcs_url(link):
512-
return unpack_vcs_link(link, loc, only_download)
513-
# a local file:// index could have links with hashes
514-
elif not link.hash and is_file_url(link):
515-
return unpack_file_url(link, loc)
548+
if only_download:
549+
loc = download_dir
550+
else:
551+
loc = location
552+
unpack_vcs_link(link, loc, only_download)
553+
554+
# file urls
555+
elif is_file_url(link):
556+
unpack_file_url(link, location, download_dir)
557+
if only_download:
558+
write_delete_marker_file(location)
559+
560+
# http urls
516561
else:
517-
if self.download_cache:
518-
self.download_cache = os.path.expanduser(self.download_cache)
519-
retval = unpack_http_url(
562+
unpack_http_url(
520563
link,
521564
location,
522565
self.download_cache,
523-
self.download_dir,
566+
download_dir,
524567
self.session,
525568
)
526569
if only_download:
527570
write_delete_marker_file(location)
528-
return retval
529571

530572
def install(self, install_options, global_options=(), *args, **kwargs):
531573
"""

pip/wheel.py

+6-8
Original file line numberDiff line numberDiff line change
@@ -487,21 +487,19 @@ def build(self):
487487

488488
reqset = self.requirement_set.requirements.values()
489489

490-
#make the wheelhouse
491-
if not os.path.exists(self.wheel_dir):
492-
os.makedirs(self.wheel_dir)
490+
buildset = [req for req in reqset if not req.is_wheel]
491+
492+
if not buildset:
493+
return
493494

494495
#build the wheels
495496
logger.notify(
496497
'Building wheels for collected packages: %s' %
497-
','.join([req.name for req in reqset])
498+
','.join([req.name for req in buildset])
498499
)
499500
logger.indent += 2
500501
build_success, build_failure = [], []
501-
for req in reqset:
502-
if req.is_wheel:
503-
logger.notify("Skipping building wheel: %s", req.url)
504-
continue
502+
for req in buildset:
505503
if self._build_one(req):
506504
build_success.append(req)
507505
else:

0 commit comments

Comments
 (0)