From 6bb0d289f8e6a4109b8907f1e3b7b30e389b1b29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nguy=E1=BB=85n=20Gia=20Phong?= Date: Sun, 7 Jun 2020 16:15:05 +0700 Subject: [PATCH 1/4] Move link log from prepare_linked_requirement --- ...feb941-3e4e-4b33-bd43-8c47f67ea229.trivial | 0 src/pip/_internal/operations/prepare.py | 31 +++++++++---------- 2 files changed, 14 insertions(+), 17 deletions(-) create mode 100644 news/0cfeb941-3e4e-4b33-bd43-8c47f67ea229.trivial diff --git a/news/0cfeb941-3e4e-4b33-bd43-8c47f67ea229.trivial b/news/0cfeb941-3e4e-4b33-bd43-8c47f67ea229.trivial new file mode 100644 index 00000000000..e69de29bb2d diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index d8a4bde5ca0..3dc583cc253 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -376,29 +376,26 @@ def _download_should_save(self): "Could not find or access download directory '{}'" .format(self.download_dir)) - def prepare_linked_requirement( - self, - req, # type: InstallRequirement - parallel_builds=False, # type: bool - ): - # type: (...) 
-> AbstractDistribution - """Prepare a requirement that would be obtained from req.link - """ - assert req.link - link = req.link - - # TODO: Breakup into smaller functions - if link.scheme == 'file': - path = link.file_path + def _log_preparing_link(self, req): + # type: (InstallRequirement) -> None + """Log the way the link prepared.""" + if req.link.is_file: + path = req.link.file_path logger.info('Processing %s', display_path(path)) else: logger.info('Collecting %s', req.req or req) - download_dir = self.download_dir + def prepare_linked_requirement(self, req, parallel_builds=False): + # type: (InstallRequirement, bool) -> AbstractDistribution + """Prepare a requirement to be obtained from req.link.""" + assert req.link + link = req.link + self._log_preparing_link(req) if link.is_wheel and self.wheel_download_dir: - # when doing 'pip wheel` we download wheels to a - # dedicated dir. + # Download wheels to a dedicated dir when doing `pip wheel`. download_dir = self.wheel_download_dir + else: + download_dir = self.download_dir if link.is_wheel: if download_dir: From eb2deab2f64664553db95ff6fbd03103445dfe78 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Nguy=E1=BB=85n=20Gia=20Phong?= Date: Fri, 12 Jun 2020 11:49:09 +0700 Subject: [PATCH 2/4] Move req.source_dir ensure out --- src/pip/_internal/operations/prepare.py | 72 +++++++++++++------------ 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 3dc583cc253..b499db7bbed 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -385,6 +385,42 @@ def _log_preparing_link(self, req): else: logger.info('Collecting %s', req.req or req) + def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds): + # type: (InstallRequirement, Optional[str], bool) -> None + """Ensure source_dir of a linked InstallRequirement.""" + # Since source_dir is only set for editable requirements. 
+        if req.link.is_wheel:
+            if download_dir:
+                # When downloading, we only unpack wheels to get
+                # metadata.
+                autodelete_unpacked = True
+            else:
+                # When installing a wheel, we use the unpacked wheel.
+                autodelete_unpacked = False
+        else:
+            # We always delete unpacked sdists after pip runs.
+            autodelete_unpacked = True
+        assert req.source_dir is None
+        req.ensure_has_source_dir(
+            self.build_dir,
+            autodelete=autodelete_unpacked,
+            parallel_builds=parallel_builds,
+        )
+
+        # If a checkout exists, it's unwise to keep going. version
+        # inconsistencies are logged later, but do not fail the
+        # installation.
+        # FIXME: this won't upgrade when there's an existing
+        # package unpacked in `req.source_dir`
+        if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
+            raise PreviousBuildDirError(
+                "pip can't proceed with requirements '{}' due to a"
+                " pre-existing build directory ({}). This is likely "
+                "due to a previous installation that failed. pip is "
+                "being responsible and not assuming it can delete this. "
+                "Please delete it and try again.".format(req, req.source_dir)
+            )
+
     def prepare_linked_requirement(self, req, parallel_builds=False):
         # type: (InstallRequirement, bool) -> AbstractDistribution
         """Prepare a requirement to be obtained from req.link."""
@@ -397,42 +433,8 @@ def prepare_linked_requirement(self, req, parallel_builds=False):
         else:
             download_dir = self.download_dir
 
-        if link.is_wheel:
-            if download_dir:
-                # When downloading, we only unpack wheels to get
-                # metadata.
-                autodelete_unpacked = True
-            else:
-                # When installing a wheel, we use the unpacked
-                # wheel.
-                autodelete_unpacked = False
-        else:
-            # We always delete unpacked sdists after pip runs.
-            autodelete_unpacked = True
-
         with indent_log():
-            # Since source_dir is only set for editable requirements. 
-            assert req.source_dir is None
-            req.ensure_has_source_dir(
-                self.build_dir,
-                autodelete=autodelete_unpacked,
-                parallel_builds=parallel_builds,
-            )
-            # If a checkout exists, it's unwise to keep going. version
-            # inconsistencies are logged later, but do not fail the
-            # installation.
-            # FIXME: this won't upgrade when there's an existing
-            # package unpacked in `req.source_dir`
-            if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
-                raise PreviousBuildDirError(
-                    "pip can't proceed with requirements '{}' due to a"
-                    " pre-existing build directory ({}). This is "
-                    "likely due to a previous installation that failed"
-                    ". pip is being responsible and not assuming it "
-                    "can delete this. Please delete it and try again."
-                    .format(req, req.source_dir)
-                )
-
+        self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
         # Now that we have the real link, we can tell what kind of
         # requirements we have and raise some more informative errors
         # than otherwise. (For example, we can raise VcsHashUnsupported
From 8b5ff72a13b29d86ca4e8a5d475f46acd1637d35 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Nguy=E1=BB=85n=20Gia=20Phong?=
Date: Fri, 12 Jun 2020 11:52:17 +0700
Subject: [PATCH 3/4] Make linked req hashes an independent method

---
 src/pip/_internal/operations/prepare.py | 81 +++++++++++--------------
 1 file changed, 37 insertions(+), 44 deletions(-)

diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py
index b499db7bbed..07e8de9c729 100644
--- a/src/pip/_internal/operations/prepare.py
+++ b/src/pip/_internal/operations/prepare.py
@@ -421,6 +421,39 @@ def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
                 "Please delete it and try again.".format(req, req.source_dir)
             )
 
+    def _get_linked_req_hashes(self, req):
+        # type: (InstallRequirement) -> Hashes
+        # By the time this is called, the requirement's link should have
+        # been checked so we can tell what kind of requirements req is
+        # and raise some more 
informative errors than otherwise. + # (For example, we can raise VcsHashUnsupported for a VCS URL + # rather than HashMissing.) + if not self.require_hashes: + return req.hashes(trust_internet=True) + + # We could check these first 2 conditions inside unpack_url + # and save repetition of conditions, but then we would + # report less-useful error messages for unhashable + # requirements, complaining that there's no hash provided. + if req.link.is_vcs: + raise VcsHashUnsupported() + if req.link.is_existing_dir(): + raise DirectoryUrlHashUnsupported() + + # Unpinned packages are asking for trouble when a new version + # is uploaded. This isn't a security check, but it saves users + # a surprising hash mismatch in the future. + # file:/// URLs aren't pinnable, so don't complain about them + # not being pinned. + if req.original_link is None and not req.is_pinned: + raise HashUnpinned() + + # If known-good hashes are missing for this requirement, + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + return req.hashes(trust_internet=False) or MissingHashes() + def prepare_linked_requirement(self, req, parallel_builds=False): # type: (InstallRequirement, bool) -> AbstractDistribution """Prepare a requirement to be obtained from req.link.""" @@ -435,49 +468,11 @@ def prepare_linked_requirement(self, req, parallel_builds=False): with indent_log(): self._ensure_link_req_src_dir(req, download_dir, parallel_builds) - # Now that we have the real link, we can tell what kind of - # requirements we have and raise some more informative errors - # than otherwise. (For example, we can raise VcsHashUnsupported - # for a VCS URL rather than HashMissing.) 
- if self.require_hashes: - # We could check these first 2 conditions inside - # unpack_url and save repetition of conditions, but then - # we would report less-useful error messages for - # unhashable requirements, complaining that there's no - # hash provided. - if link.is_vcs: - raise VcsHashUnsupported() - elif link.is_existing_dir(): - raise DirectoryUrlHashUnsupported() - if not req.original_link and not req.is_pinned: - # Unpinned packages are asking for trouble when a new - # version is uploaded. This isn't a security check, but - # it saves users a surprising hash mismatch in the - # future. - # - # file:/// URLs aren't pinnable, so don't complain - # about them not being pinned. - raise HashUnpinned() - - hashes = req.hashes(trust_internet=not self.require_hashes) - if self.require_hashes and not hashes: - # Known-good hashes are missing for this requirement, so - # shim it with a facade object that will provoke hash - # computation and then raise a HashMissing exception - # showing the user what the hash should be. 
- hashes = MissingHashes() - try: local_file = unpack_url( link, req.source_dir, self.downloader, download_dir, - hashes=hashes, - ) + hashes=self._get_linked_req_hashes(req)) except requests.HTTPError as exc: - logger.critical( - 'Could not install requirement %s because of error %s', - req, - exc, - ) raise InstallationError( 'Could not install requirement {} because of HTTP ' 'error {} for URL {}'.format(req, exc, link) @@ -497,13 +492,11 @@ def prepare_linked_requirement(self, req, parallel_builds=False): logger.info('Link is a directory, ignoring download_dir') elif local_file: download_location = os.path.join( - download_dir, link.filename - ) + download_dir, link.filename) if not os.path.exists(download_location): shutil.copy(local_file.path, download_location) - logger.info( - 'Saved %s', display_path(download_location) - ) + download_path = display_path(download_location) + logger.info('Saved %s', download_path) if self._download_should_save: # Make a .zip of the source_dir we already created. 
From 343f863785bd1477c5bf2b5686577a8da7f455e0 Mon Sep 17 00:00:00 2001 From: Pradyun Gedam Date: Wed, 17 Jun 2020 23:20:55 +0530 Subject: [PATCH 4/4] Apply suggestions from code review --- src/pip/_internal/operations/prepare.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index 07e8de9c729..6f35897d16e 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -471,7 +471,8 @@ def prepare_linked_requirement(self, req, parallel_builds=False): try: local_file = unpack_url( link, req.source_dir, self.downloader, download_dir, - hashes=self._get_linked_req_hashes(req)) + hashes=self._get_linked_req_hashes(req) + ) except requests.HTTPError as exc: raise InstallationError( 'Could not install requirement {} because of HTTP ' @@ -492,7 +493,8 @@ def prepare_linked_requirement(self, req, parallel_builds=False): logger.info('Link is a directory, ignoring download_dir') elif local_file: download_location = os.path.join( - download_dir, link.filename) + download_dir, link.filename + ) if not os.path.exists(download_location): shutil.copy(local_file.path, download_location) download_path = display_path(download_location)