Skip to content

Commit 71e44d4

Browse files
scbeddmccoyp
authored and committed
Improvements to mindependency dev_requirement conflict resolution (Azure#37669)
* During mindependency runs, dev_requirements on local relative paths are now checked for conflicts with the targeted set of minimum dependencies. * Multiple type clarifications within azure-sdk-tools. * Added tests for the new conflict resolution logic. --------- Co-authored-by: McCoy Patiño <[email protected]>
1 parent 22f081c commit 71e44d4

File tree

8 files changed

+315
-64
lines changed

8 files changed

+315
-64
lines changed

eng/tox/install_depend_packages.py

Lines changed: 38 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -5,21 +5,21 @@
55
# Licensed under the MIT License. See License.txt in the project root for license information.
66
# --------------------------------------------------------------------------------------------
77

8-
98
import argparse
109
import os
1110
import sys
1211
import logging
1312
import re
13+
1414
from subprocess import check_call
15-
from typing import TYPE_CHECKING
15+
from typing import TYPE_CHECKING, Callable, Optional
1616
from pkg_resources import parse_version, Requirement
1717
from pypi_tools.pypi import PyPIClient
1818
from packaging.specifiers import SpecifierSet
19-
from packaging.version import Version, parse
19+
from packaging.version import Version
2020

2121
from ci_tools.parsing import ParsedSetup, parse_require
22-
from ci_tools.functions import compare_python_version
22+
from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs
2323

2424
from typing import List
2525

@@ -59,20 +59,15 @@
5959
"azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
6060
"azure-identity": {"msal": "1.23.0"},
6161
"azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"},
62-
"azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}
62+
"azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"},
6363
}
6464

6565
MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {}
6666

6767
# PLATFORM SPECIFIC OVERRIDES provide additional generic (e.g. not tied to the package whose dependencies are being processed)
6868
# filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatibility
6969
PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = {
70-
">=3.12.0": {
71-
"azure-core": "1.23.1",
72-
"aiohttp": "3.8.6",
73-
"six": "1.16.0",
74-
"requests": "2.30.0"
75-
}
70+
">=3.12.0": {"azure-core": "1.23.1", "aiohttp": "3.8.6", "six": "1.16.0", "requests": "2.30.0"}
7671
}
7772

7873
PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {}
@@ -101,8 +96,15 @@ def install_dependent_packages(setup_py_file_path, dependency_type, temp_dir):
10196
override_added_packages.extend(check_pkg_against_overrides(pkg_spec))
10297

10398
logging.info("%s released packages: %s", dependency_type, released_packages)
104-
# filter released packages from dev_requirements and create a new file "new_dev_requirements.txt"
105-
dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, dependency_type)
99+
100+
additional_filter_fn = None
101+
if dependency_type == "Minimum":
102+
additional_filter_fn = handle_incompatible_minimum_dev_reqs
103+
104+
# before september 2024, filter_dev_requirements only would remove any packages present in released_packages from the dev_requirements,
105+
# then create a new file "new_dev_requirements.txt" without the problematic packages.
106+
# after september 2024, filter_dev_requirements will also check for **compatibility** with the packages being installed when filtering the dev_requirements.
107+
dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, additional_filter_fn)
106108

107109
if override_added_packages:
108110
logging.info(f"Expanding the requirement set by the packages {override_added_packages}.")
@@ -157,6 +159,7 @@ def find_released_packages(setup_py_path, dependency_type):
157159

158160
return avlble_packages
159161

162+
160163
def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]:
161164
"""
162165
Processes a target package based on an originating package (target is a dep of originating) and the versions available from pypi for the target package.
@@ -180,9 +183,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
180183
restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound]
181184

182185
if pkg_name in restrictions:
183-
versions = [
184-
v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])
185-
]
186+
versions = [v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])]
186187

187188
# lower bound package-specific
188189
if (
@@ -207,9 +208,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
207208
restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound]
208209

209210
if pkg_name in restrictions:
210-
versions = [
211-
v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])
212-
]
211+
versions = [v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])]
213212

214213
# upper bound package-specific
215214
if (
@@ -249,7 +248,6 @@ def process_requirement(req, dependency_type, orig_pkg_name):
249248

250249
# think of the various versions that come back from pypi as the top of a funnel
251250
# We apply generic overrides -> platform specific overrides -> package specific overrides
252-
253251
versions = process_bounded_versions(orig_pkg_name, pkg_name, versions)
254252

255253
# Search from lowest to latest in case of finding minimum dependency
@@ -301,17 +299,20 @@ def check_req_against_exclusion(req, req_to_exclude):
301299

302300
return req_id == req_to_exclude
303301

304-
# todo: remove when merging #37450
305-
def replace_identity(dev_requirement_line) -> str:
306-
regex = r"azure[-_]identity"
307-
308-
if re.search(regex, dev_requirement_line):
309-
return "azure-identity==1.17.0\n"
310-
else:
311-
return dev_requirement_line
312302

303+
def filter_dev_requirements(
304+
setup_py_path,
305+
released_packages,
306+
temp_dir,
307+
additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None,
308+
):
309+
"""
310+
This function takes an existing package path, a list of specific package specifiers that we have resolved, a temporary directory to write
311+
the modified dev_requirements to, and an optional additional_filter_fn that can be used to further filter the dev_requirements file if necessary.
313312
314-
def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependency_type):
313+
The function will filter out any requirements present in the dev_requirements file that are present in the released_packages list (aka are required
314+
by the package).
315+
"""
315316
# This method returns list of requirements from dev_requirements by filtering out packages in given list
316317
dev_req_path = os.path.join(os.path.dirname(setup_py_path), DEV_REQ_FILE)
317318
requirements = []
@@ -320,12 +321,13 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen
320321

321322
# filter out any package available on PyPI (released_packages)
322323
# include packages without relative reference and packages not available on PyPI
323-
released_packages = [p.split("==")[0] for p in released_packages]
324+
released_packages = [parse_require(p) for p in released_packages]
325+
released_package_names = [p.key for p in released_packages]
324326
# find prebuilt whl paths in dev requirements
325327
prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req]
326328
# filter any req if wheel is for a released package
327-
req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_packages]
328-
req_to_exclude.extend(released_packages)
329+
req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names]
330+
req_to_exclude.extend(released_package_names)
329331

330332
filtered_req = [
331333
req
@@ -334,9 +336,9 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen
334336
and not any([check_req_against_exclusion(req, i) for i in req_to_exclude])
335337
]
336338

337-
if dependency_type == "Minimum":
338-
# replace identity with the minimum version of the package
339-
filtered_req = [replace_identity(req) for req in filtered_req]
339+
if additional_filter_fn:
340+
# this filter function handles the case where a dev requirement is incompatible with the current set of targeted packages
341+
filtered_req = additional_filter_fn(setup_py_path, filtered_req, released_packages)
340342

341343
logging.info("Filtered dev requirements: %s", filtered_req)
342344

@@ -345,7 +347,7 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen
345347
# create new dev requirements file with different name for filtered requirements
346348
new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE)
347349
with open(new_dev_req_path, "w") as dev_req_file:
348-
dev_req_file.writelines(filtered_req)
350+
dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req)
349351

350352
return new_dev_req_path
351353

scripts/devops_tasks/common_tasks.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,7 @@ def is_required_version_on_pypi(package_name: str, spec: str) -> bool:
205205
versions = [str(v) for v in versions if v in specifier]
206206
except:
207207
logging.error("Package {} is not found on PyPI".format(package_name))
208-
return versions
208+
return bool(versions)
209209

210210

211211
def find_packages_missing_on_pypi(path: str) -> Iterable[str]:
Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
11
[tool.azure-sdk-build]
22
type_check_samples = false
3-
pyright = false
4-
mindependency = false
3+
pyright = false

0 commit comments

Comments
 (0)