From 6c3db098ff0b6e537157eff53b1aba79fa7fa4b0 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Wed, 7 Jun 2023 20:58:40 +0100
Subject: [PATCH 1/9] Fix parsing of JSON index dist-info-metadata values
---
src/pip/_internal/models/link.py | 97 +++++++++++++++++++++-----------
tests/unit/test_collector.py | 37 +++++++-----
2 files changed, 89 insertions(+), 45 deletions(-)
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index e741c3283cd..ee3045166bb 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -69,18 +69,6 @@ class LinkHash:
def __post_init__(self) -> None:
assert self.name in _SUPPORTED_HASHES
- @classmethod
- def parse_pep658_hash(cls, dist_info_metadata: str) -> Optional["LinkHash"]:
- """Parse a PEP 658 data-dist-info-metadata hash."""
- if dist_info_metadata == "true":
- return None
- name, sep, value = dist_info_metadata.partition("=")
- if not sep:
- return None
- if name not in _SUPPORTED_HASHES:
- return None
- return cls(name=name, value=value)
-
@classmethod
@functools.lru_cache(maxsize=None)
def find_hash_url_fragment(cls, url: str) -> Optional["LinkHash"]:
@@ -107,6 +95,20 @@ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
return hashes.is_hash_allowed(self.name, hex_digest=self.value)
+@dataclass(frozen=True)
+class MetadataFile:
+ """Information about a core metadata file associated with a distribution."""
+
+ hashes: Optional[dict[str, str]]
+
+ # TODO: Do we care about stripping out unsupported hash methods?
+ def __init__(self, hashes: Optional[dict[str, str]]):
+ if hashes:
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
+ # We need to use this as this is a frozen dataclass
+ object.__setattr__(self, "hashes", hashes)
+
+
def _clean_url_path_part(part: str) -> str:
"""
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
@@ -179,7 +181,7 @@ class Link(KeyBasedCompareMixin):
"comes_from",
"requires_python",
"yanked_reason",
- "dist_info_metadata",
+ "metadata_file_data",
"cache_link_parsing",
"egg_fragment",
]
@@ -190,7 +192,7 @@ def __init__(
comes_from: Optional[Union[str, "IndexContent"]] = None,
requires_python: Optional[str] = None,
yanked_reason: Optional[str] = None,
- dist_info_metadata: Optional[str] = None,
+ metadata_file_data: Optional[MetadataFile] = None,
cache_link_parsing: bool = True,
hashes: Optional[Mapping[str, str]] = None,
) -> None:
@@ -208,11 +210,10 @@ def __init__(
a simple repository HTML link. If the file has been yanked but
no reason was provided, this should be the empty string. See
PEP 592 for more information and the specification.
- :param dist_info_metadata: the metadata attached to the file, or None if no such
- metadata is provided. This is the value of the "data-dist-info-metadata"
- attribute, if present, in a simple repository HTML link. This may be parsed
- into its own `Link` by `self.metadata_link()`. See PEP 658 for more
- information and the specification.
+ :param metadata_file_data: the metadata attached to the file, or None if
+ no such metadata is provided. This argument, if not None, indicates
+ that a separate metadata file exists, and also optionally supplies
+ hashes for that file.
:param cache_link_parsing: A flag that is used elsewhere to determine
whether resources retrieved from this link should be cached. PyPI
URLs should generally have this set to False, for example.
@@ -220,6 +221,10 @@ def __init__(
determine the validity of a download.
"""
+ # The comes_from, requires_python, and metadata_file_data arguments are
+ # only used by classmethods of this class, and are not used in client
+ # code directly.
+
# url can be a UNC windows share
if url.startswith("\\\\"):
url = path_to_url(url)
@@ -239,7 +244,7 @@ def __init__(
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
self.yanked_reason = yanked_reason
- self.dist_info_metadata = dist_info_metadata
+ self.metadata_file_data = metadata_file_data
super().__init__(key=url, defining_class=Link)
@@ -262,9 +267,20 @@ def from_json(
url = _ensure_quoted_url(urllib.parse.urljoin(page_url, file_url))
pyrequire = file_data.get("requires-python")
yanked_reason = file_data.get("yanked")
- dist_info_metadata = file_data.get("dist-info-metadata")
hashes = file_data.get("hashes", {})
+ # The dist-info-metadata value may be a boolean, or a dict of hashes.
+ metadata_info = file_data.get("dist-info-metadata", False)
+ if isinstance(metadata_info, dict):
+ # The file exists, and hashes have been supplied
+ metadata_file_data = MetadataFile(metadata_info)
+ elif metadata_info:
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ else:
+ # The file does not exist
+ metadata_file_data = None
+
# The Link.yanked_reason expects an empty string instead of a boolean.
if yanked_reason and not isinstance(yanked_reason, str):
yanked_reason = ""
@@ -278,7 +294,7 @@ def from_json(
requires_python=pyrequire,
yanked_reason=yanked_reason,
hashes=hashes,
- dist_info_metadata=dist_info_metadata,
+ metadata_file_data=metadata_file_data,
)
@classmethod
@@ -298,14 +314,35 @@ def from_element(
url = _ensure_quoted_url(urllib.parse.urljoin(base_url, href))
pyrequire = anchor_attribs.get("data-requires-python")
yanked_reason = anchor_attribs.get("data-yanked")
- dist_info_metadata = anchor_attribs.get("data-dist-info-metadata")
+
+ # The dist-info-metadata value may be the string "true", or a string of
+ # the form "hashname=hashval"
+ metadata_info = anchor_attribs.get("data-dist-info-metadata")
+ if metadata_info == "true":
+ # The file exists, but there are no hashes
+ metadata_file_data = MetadataFile(None)
+ elif metadata_info is None:
+ # The file does not exist
+ metadata_file_data = None
+ else:
+ # The file exists, and hashes have been supplied
+ hashname, sep, hashval = metadata_info.partition("=")
+ if sep == "=":
+ metadata_file_data = MetadataFile({hashname: hashval})
+ else:
+ # Error - data is wrong. Treat as no hashes supplied.
+ logger.debug(
+ "Index returned invalid data-dist-info-metadata value: %s",
+ metadata_info,
+ )
+ metadata_file_data = MetadataFile(None)
return cls(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
- dist_info_metadata=dist_info_metadata,
+ metadata_file_data=metadata_file_data,
)
def __str__(self) -> str:
@@ -407,17 +444,13 @@ def subdirectory_fragment(self) -> Optional[str]:
return match.group(1)
def metadata_link(self) -> Optional["Link"]:
- """Implementation of PEP 658 parsing."""
- # Note that Link.from_element() parsing the "data-dist-info-metadata" attribute
- # from an HTML anchor tag is typically how the Link.dist_info_metadata attribute
- # gets set.
- if self.dist_info_metadata is None:
+ """Return a link to the associated core metadata file (if any)."""
+ if self.metadata_file_data is None:
return None
metadata_url = f"{self.url_without_fragment}.metadata"
- metadata_link_hash = LinkHash.parse_pep658_hash(self.dist_info_metadata)
- if metadata_link_hash is None:
+ if self.metadata_file_data.hashes is None:
return Link(metadata_url)
- return Link(metadata_url, hashes=metadata_link_hash.as_dict())
+ return Link(metadata_url, hashes=self.metadata_file_data.hashes)
def as_hashes(self) -> Hashes:
return Hashes({k: [v] for k, v in self._hashes.items()})
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index e855d78e126..b3c9fcf1f5b 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -30,6 +30,7 @@
from pip._internal.models.link import (
Link,
LinkHash,
+ MetadataFile,
_clean_url_path,
_ensure_quoted_url,
)
@@ -527,7 +528,7 @@ def test_parse_links_json() -> None:
requires_python=">=3.7",
yanked_reason=None,
hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"},
- dist_info_metadata="sha512=aabdd41",
+ metadata_file_data=MetadataFile({"sha512": "aabdd41"}),
),
]
@@ -603,12 +604,12 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) -
),
],
)
-def test_parse_links__dist_info_metadata(
+def test_parse_links__metadata_file_data(
anchor_html: str,
expected: Optional[str],
hashes: Dict[str, str],
) -> None:
- link = _test_parse_links_data_attribute(anchor_html, "dist_info_metadata", expected)
+ link = _test_parse_links_data_attribute(anchor_html, "metadata_file_data", expected)
assert link._hashes == hashes
@@ -1080,17 +1081,27 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None:
@pytest.mark.parametrize(
- "dist_info_metadata, result",
+ "metadata_attrib, expected",
[
- ("sha256=aa113592bbe", LinkHash("sha256", "aa113592bbe")),
- ("sha256=", LinkHash("sha256", "")),
- ("sha500=aa113592bbe", None),
- ("true", None),
- ("", None),
- ("aa113592bbe", None),
+ ("sha256=aa113592bbe", MetadataFile({"sha256": "aa113592bbe"})),
+ ("sha256=", MetadataFile({"sha256": ""})),
+ ("sha500=aa113592bbe", MetadataFile({})),
+ ("true", MetadataFile(None)),
+ (None, None),
+ # TODO: Are these correct?
+ ("", MetadataFile(None)),
+ ("aa113592bbe", MetadataFile(None)),
],
)
-def test_pep658_hash_parsing(
- dist_info_metadata: str, result: Optional[LinkHash]
+def test_metadata_file_info_parsing_html(
+ metadata_attrib: str, expected: Optional[MetadataFile]
) -> None:
- assert LinkHash.parse_pep658_hash(dist_info_metadata) == result
+ attribs: Dict[str, Optional[str]] = {
+ "href": "something",
+ "data-dist-info-metadata": metadata_attrib,
+ }
+ page_url = "dummy_for_comes_from"
+ base_url = "https://index.url/simple"
+ link = Link.from_element(attribs, page_url, base_url)
+ assert link is not None and link.metadata_file_data == expected
+ # TODO: Do we need to do something for the JSON data?
From cc554edab8897749c87495333018754080d06781 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Wed, 7 Jun 2023 21:01:10 +0100
Subject: [PATCH 2/9] Add a news file
---
news/12042.bugfix.rst | 1 +
1 file changed, 1 insertion(+)
create mode 100644 news/12042.bugfix.rst
diff --git a/news/12042.bugfix.rst b/news/12042.bugfix.rst
new file mode 100644
index 00000000000..34d97743540
--- /dev/null
+++ b/news/12042.bugfix.rst
@@ -0,0 +1 @@
+Correctly parse ``dist-info-metadata`` values from JSON-format index data.
From 8f89997d0dad1644b258297e2e3b9cc70d44e51d Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Wed, 7 Jun 2023 21:11:34 +0100
Subject: [PATCH 3/9] Fix types to be 3.7-compatible
---
src/pip/_internal/models/link.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index ee3045166bb..9630448bcfb 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -99,10 +99,10 @@ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
class MetadataFile:
"""Information about a core metadata file associated with a distribution."""
- hashes: Optional[dict[str, str]]
+ hashes: Optional[Dict[str, str]]
# TODO: Do we care about stripping out unsupported hash methods?
- def __init__(self, hashes: Optional[dict[str, str]]):
+ def __init__(self, hashes: Optional[Dict[str, str]]):
if hashes:
hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
# We need to use this as this is a frozen dataclass
From cfb4923d5d016dc58dc4e4b896992c476f0ddce8 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Wed, 7 Jun 2023 21:21:32 +0100
Subject: [PATCH 4/9] Fix bad test data in test_parse_links_json
---
tests/unit/test_collector.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index b3c9fcf1f5b..838dd2efb88 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -492,7 +492,7 @@ def test_parse_links_json() -> None:
"url": "/files/holygrail-1.0-py3-none-any.whl",
"hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"},
"requires-python": ">=3.7",
- "dist-info-metadata": "sha512=aabdd41",
+ "dist-info-metadata": {"sha512": "aabdd41"},
},
],
}
From 93b274eee79b9c114728f0864a29751ee7698fca Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Wed, 7 Jun 2023 21:44:48 +0100
Subject: [PATCH 5/9] Missed a change to one of the tests
---
tests/unit/test_collector.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index 838dd2efb88..513e4b1347b 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -587,19 +587,19 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) -
# Test with value "true".
(
'<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="true"></a>',
- "true",
+ MetadataFile(None),
{},
),
# Test with a provided hash value.
(
'<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
- "sha256=aa113592bbe",
+ MetadataFile({"sha256": "aa113592bbe"}),
{},
),
# Test with a provided hash value for both the requirement as well as metadata.
(
'<a href="/pkg1-1.0.tar.gz#sha512=abc132409cb" data-dist-info-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
- "sha256=aa113592bbe",
+ MetadataFile({"sha256": "aa113592bbe"}),
{"sha512": "abc132409cb"},
),
],
From 232cc9dd5284fbc7554bcd291bf14e31413da78a Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Thu, 8 Jun 2023 09:52:51 +0100
Subject: [PATCH 6/9] Parse hash data before passing to MetadataFile
---
src/pip/_internal/models/link.py | 24 ++++++++++++++++--------
tests/unit/test_collector.py | 5 ++---
2 files changed, 18 insertions(+), 11 deletions(-)
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index 9630448bcfb..7b45f3f3ed4 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -101,12 +101,20 @@ class MetadataFile:
hashes: Optional[Dict[str, str]]
- # TODO: Do we care about stripping out unsupported hash methods?
- def __init__(self, hashes: Optional[Dict[str, str]]):
- if hashes:
- hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
- # We need to use this as this is a frozen dataclass
- object.__setattr__(self, "hashes", hashes)
+ def __post_init__(self) -> None:
+ if self.hashes is not None:
+ assert all(name in _SUPPORTED_HASHES for name in self.hashes)
+
+
+def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
+ # Remove any unsupported hash types from the mapping. If this leaves no
+ # supported hashes, return None
+ if hashes is None:
+ return None
+ hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
+ if len(hashes) > 0:
+ return hashes
+ return None
def _clean_url_path_part(part: str) -> str:
@@ -273,7 +281,7 @@ def from_json(
metadata_info = file_data.get("dist-info-metadata", False)
if isinstance(metadata_info, dict):
# The file exists, and hashes have been supplied
- metadata_file_data = MetadataFile(metadata_info)
+ metadata_file_data = MetadataFile(supported_hashes(metadata_info))
elif metadata_info:
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
@@ -328,7 +336,7 @@ def from_element(
# The file exists, and hashes have been supplied
hashname, sep, hashval = metadata_info.partition("=")
if sep == "=":
- metadata_file_data = MetadataFile({hashname: hashval})
+ metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
else:
# Error - data is wrong. Treat as no hashes supplied.
logger.debug(
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index 513e4b1347b..d1e68fab76f 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -1085,10 +1085,10 @@ def test_link_hash_parsing(url: str, result: Optional[LinkHash]) -> None:
[
("sha256=aa113592bbe", MetadataFile({"sha256": "aa113592bbe"})),
("sha256=", MetadataFile({"sha256": ""})),
- ("sha500=aa113592bbe", MetadataFile({})),
+ ("sha500=aa113592bbe", MetadataFile(None)),
("true", MetadataFile(None)),
(None, None),
- # TODO: Are these correct?
+ # Attribute is present but invalid
("", MetadataFile(None)),
("aa113592bbe", MetadataFile(None)),
],
@@ -1104,4 +1104,3 @@ def test_metadata_file_info_parsing_html(
base_url = "https://index.url/simple"
link = Link.from_element(attribs, page_url, base_url)
assert link is not None and link.metadata_file_data == expected
- # TODO: Do we need to do something for the JSON data?
From 5168881b438b2851ae4c9459a8c06beee2058639 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Thu, 8 Jun 2023 10:10:15 +0100
Subject: [PATCH 7/9] Implement PEP 714 - rename dist-info-metadata
---
src/pip/_internal/models/link.py | 19 +++++++++---
tests/unit/test_collector.py | 53 +++++++++++++++++++++++++++++---
2 files changed, 63 insertions(+), 9 deletions(-)
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index 7b45f3f3ed4..3cfc3e8c4fe 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -277,8 +277,13 @@ def from_json(
yanked_reason = file_data.get("yanked")
hashes = file_data.get("hashes", {})
- # The dist-info-metadata value may be a boolean, or a dict of hashes.
- metadata_info = file_data.get("dist-info-metadata", False)
+ # PEP 714: Indexes must use the name core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = file_data.get("core-metadata")
+ if metadata_info is None:
+ metadata_info = file_data.get("dist-info-metadata")
+
+ # The metadata info value may be a boolean, or a dict of hashes.
if isinstance(metadata_info, dict):
# The file exists, and hashes have been supplied
metadata_file_data = MetadataFile(supported_hashes(metadata_info))
@@ -286,7 +291,7 @@ def from_json(
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
else:
- # The file does not exist
+ # False or not present: the file does not exist
metadata_file_data = None
# The Link.yanked_reason expects an empty string instead of a boolean.
@@ -323,9 +328,13 @@ def from_element(
pyrequire = anchor_attribs.get("data-requires-python")
yanked_reason = anchor_attribs.get("data-yanked")
- # The dist-info-metadata value may be the string "true", or a string of
+ # PEP 714: Indexes must use the name data-core-metadata, but
+ # clients should support the old name as a fallback for compatibility.
+ metadata_info = anchor_attribs.get("data-core-metadata")
+ if metadata_info is None:
+ metadata_info = anchor_attribs.get("data-dist-info-metadata")
+ # The metadata info value may be the string "true", or a string of
# the form "hashname=hashval"
- metadata_info = anchor_attribs.get("data-dist-info-metadata")
if metadata_info == "true":
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
diff --git a/tests/unit/test_collector.py b/tests/unit/test_collector.py
index d1e68fab76f..5410a4afc03 100644
--- a/tests/unit/test_collector.py
+++ b/tests/unit/test_collector.py
@@ -486,7 +486,15 @@ def test_parse_links_json() -> None:
"requires-python": ">=3.7",
"dist-info-metadata": False,
},
- # Same as above, but parsing dist-info-metadata.
+ # Same as above, but parsing core-metadata.
+ {
+ "filename": "holygrail-1.0-py3-none-any.whl",
+ "url": "/files/holygrail-1.0-py3-none-any.whl",
+ "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"},
+ "requires-python": ">=3.7",
+ "core-metadata": {"sha512": "aabdd41"},
+ },
+ # Ensure fallback to dist-info-metadata works
{
"filename": "holygrail-1.0-py3-none-any.whl",
"url": "/files/holygrail-1.0-py3-none-any.whl",
@@ -494,6 +502,15 @@ def test_parse_links_json() -> None:
"requires-python": ">=3.7",
"dist-info-metadata": {"sha512": "aabdd41"},
},
+ # Ensure that core-metadata gets priority.
+ {
+ "filename": "holygrail-1.0-py3-none-any.whl",
+ "url": "/files/holygrail-1.0-py3-none-any.whl",
+ "hashes": {"sha256": "sha256 hash", "blake2b": "blake2b hash"},
+ "requires-python": ">=3.7",
+ "core-metadata": {"sha512": "aabdd41"},
+ "dist-info-metadata": {"sha512": "this_is_wrong"},
+ },
],
}
).encode("utf8")
@@ -530,6 +547,22 @@ def test_parse_links_json() -> None:
hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"},
metadata_file_data=MetadataFile({"sha512": "aabdd41"}),
),
+ Link(
+ "https://example.com/files/holygrail-1.0-py3-none-any.whl",
+ comes_from=page.url,
+ requires_python=">=3.7",
+ yanked_reason=None,
+ hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"},
+ metadata_file_data=MetadataFile({"sha512": "aabdd41"}),
+ ),
+ Link(
+ "https://example.com/files/holygrail-1.0-py3-none-any.whl",
+ comes_from=page.url,
+ requires_python=">=3.7",
+ yanked_reason=None,
+ hashes={"sha256": "sha256 hash", "blake2b": "blake2b hash"},
+ metadata_file_data=MetadataFile({"sha512": "aabdd41"}),
+ ),
]
# Ensure the metadata info can be parsed into the correct link.
@@ -586,22 +619,34 @@ def test_parse_links__yanked_reason(anchor_html: str, expected: Optional[str]) -
),
# Test with value "true".
(
- '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="true"></a>',
+ '<a href="/pkg1-1.0.tar.gz" data-core-metadata="true"></a>',
MetadataFile(None),
{},
),
# Test with a provided hash value.
(
- '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
+ '<a href="/pkg1-1.0.tar.gz" data-core-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
MetadataFile({"sha256": "aa113592bbe"}),
{},
),
# Test with a provided hash value for both the requirement as well as metadata.
(
- '<a href="/pkg1-1.0.tar.gz#sha512=abc132409cb" data-dist-info-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
+ '<a href="/pkg1-1.0.tar.gz#sha512=abc132409cb" data-core-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
MetadataFile({"sha256": "aa113592bbe"}),
{"sha512": "abc132409cb"},
),
+ # Ensure the fallback to the old name works.
+ (
+ '<a href="/pkg1-1.0.tar.gz" data-dist-info-metadata="sha256=aa113592bbe"></a>',  # noqa: E501
+ MetadataFile({"sha256": "aa113592bbe"}),
+ {},
+ ),
+ # Ensure that the data-core-metadata name gets priority.
+ (
+ '<a href="/pkg1-1.0.tar.gz" data-core-metadata="sha256=aa113592bbe" data-dist-info-metadata="sha256=invalid_value"></a>',  # noqa: E501
+ MetadataFile({"sha256": "aa113592bbe"}),
+ {},
+ ),
],
)
def test_parse_links__metadata_file_data(
From a0976d8832f52c2f14472f7b20b1cf1776a63ac8 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Tue, 27 Jun 2023 14:47:09 +0100
Subject: [PATCH 8/9] Fix lint issues
---
tests/unit/test_network_auth.py | 14 +++++++-------
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/tests/unit/test_network_auth.py b/tests/unit/test_network_auth.py
index 5dde6da57c5..e3cb772bb05 100644
--- a/tests/unit/test_network_auth.py
+++ b/tests/unit/test_network_auth.py
@@ -193,7 +193,7 @@ def test_keyring_get_password(
expect: Tuple[Optional[str], Optional[str]],
) -> None:
keyring = KeyringModuleV1()
- monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring)
auth = MultiDomainBasicAuth(
index_urls=["http://example.com/path2", "http://example.com/path3"],
keyring_provider="import",
@@ -205,7 +205,7 @@ def test_keyring_get_password(
def test_keyring_get_password_after_prompt(monkeypatch: pytest.MonkeyPatch) -> None:
keyring = KeyringModuleV1()
- monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring)
auth = MultiDomainBasicAuth(keyring_provider="import")
def ask_input(prompt: str) -> str:
@@ -221,7 +221,7 @@ def test_keyring_get_password_after_prompt_when_none(
monkeypatch: pytest.MonkeyPatch,
) -> None:
keyring = KeyringModuleV1()
- monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring)
auth = MultiDomainBasicAuth(keyring_provider="import")
def ask_input(prompt: str) -> str:
@@ -242,7 +242,7 @@ def test_keyring_get_password_username_in_index(
monkeypatch: pytest.MonkeyPatch,
) -> None:
keyring = KeyringModuleV1()
- monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring)
auth = MultiDomainBasicAuth(
index_urls=["http://user@example.com/path2", "http://example.com/path4"],
keyring_provider="import",
@@ -278,7 +278,7 @@ def test_keyring_set_password(
expect_save: bool,
) -> None:
keyring = KeyringModuleV1()
- monkeypatch.setitem(sys.modules, "keyring", keyring) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring)
auth = MultiDomainBasicAuth(prompting=True, keyring_provider="import")
monkeypatch.setattr(auth, "_get_url_and_credentials", lambda u: (u, None, None))
monkeypatch.setattr(auth, "_prompt_for_password", lambda *a: creds)
@@ -354,7 +354,7 @@ def get_credential(self, system: str, username: str) -> Optional[Credential]:
def test_keyring_get_credential(
monkeypatch: pytest.MonkeyPatch, url: str, expect: str
) -> None:
- monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2()) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", KeyringModuleV2())
auth = MultiDomainBasicAuth(
index_urls=["http://example.com/path1", "http://example.com/path2"],
keyring_provider="import",
@@ -378,7 +378,7 @@ def get_credential(self, system: str, username: str) -> None:
def test_broken_keyring_disables_keyring(monkeypatch: pytest.MonkeyPatch) -> None:
keyring_broken = KeyringModuleBroken()
- monkeypatch.setitem(sys.modules, "keyring", keyring_broken) # type: ignore[misc]
+ monkeypatch.setitem(sys.modules, "keyring", keyring_broken)
auth = MultiDomainBasicAuth(
index_urls=["http://example.com/"], keyring_provider="import"
From c7daa07f6a65c73173f623c1be34ed2956628715 Mon Sep 17 00:00:00 2001
From: Paul Moore
Date: Tue, 27 Jun 2023 14:47:39 +0100
Subject: [PATCH 9/9] Reword the check for no hashes
---
src/pip/_internal/models/link.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py
index 3cfc3e8c4fe..4453519ad02 100644
--- a/src/pip/_internal/models/link.py
+++ b/src/pip/_internal/models/link.py
@@ -112,9 +112,9 @@ def supported_hashes(hashes: Optional[Dict[str, str]]) -> Optional[Dict[str, str
if hashes is None:
return None
hashes = {n: v for n, v in hashes.items() if n in _SUPPORTED_HASHES}
- if len(hashes) > 0:
- return hashes
- return None
+ if not hashes:
+ return None
+ return hashes
def _clean_url_path_part(part: str) -> str: