diff --git a/eng/tox/install_depend_packages.py b/eng/tox/install_depend_packages.py
index d4831a0b9350..8b5c946e3a8b 100644
--- a/eng/tox/install_depend_packages.py
+++ b/eng/tox/install_depend_packages.py
@@ -5,21 +5,21 @@
 # Licensed under the MIT License. See License.txt in the project root for license information.
 # --------------------------------------------------------------------------------------------
-
 import argparse
 import os
 import sys
 import logging
 import re
+
 from subprocess import check_call
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Callable, Optional
 from pkg_resources import parse_version, Requirement
 from pypi_tools.pypi import PyPIClient
 from packaging.specifiers import SpecifierSet
-from packaging.version import Version, parse
+from packaging.version import Version
 from ci_tools.parsing import ParsedSetup, parse_require
-from ci_tools.functions import compare_python_version
+from ci_tools.functions import compare_python_version, handle_incompatible_minimum_dev_reqs
 from typing import List
@@ -59,7 +59,7 @@
     "azure-eventhub-checkpointstoretable": {"azure-core": "1.25.0", "azure-eventhub": "5.11.0"},
     "azure-identity": {"msal": "1.23.0"},
     "azure-core-tracing-opentelemetry": {"azure-core": "1.28.0"},
-    "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"}
+    "azure-storage-file-datalake": {"azure-storage-blob": "12.22.0"},
 }
 
 MAXIMUM_VERSION_SPECIFIC_OVERRIDES = {}
@@ -67,12 +67,7 @@
 # PLATFORM SPECIFIC OVERRIDES provide additional generic (EG not tied to the package whos dependencies are being processed)
 # filtering on a _per platform_ basis. Primarily used to limit certain packages due to platform compatbility
 PLATFORM_SPECIFIC_MINIMUM_OVERRIDES = {
-    ">=3.12.0": {
-        "azure-core": "1.23.1",
-        "aiohttp": "3.8.6",
-        "six": "1.16.0",
-        "requests": "2.30.0"
-    }
+    ">=3.12.0": {"azure-core": "1.23.1", "aiohttp": "3.8.6", "six": "1.16.0", "requests": "2.30.0"}
 }
 
 PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES = {}
@@ -101,8 +96,15 @@ def install_dependent_packages(setup_py_file_path, dependency_type, temp_dir):
         override_added_packages.extend(check_pkg_against_overrides(pkg_spec))
 
     logging.info("%s released packages: %s", dependency_type, released_packages)
-    # filter released packages from dev_requirements and create a new file "new_dev_requirements.txt"
-    dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, dependency_type)
+
+    additional_filter_fn = None
+    if dependency_type == "Minimum":
+        additional_filter_fn = handle_incompatible_minimum_dev_reqs
+
+    # before September 2024, filter_dev_requirements would only remove packages present in released_packages from
+    # the dev_requirements, then create a new file "new_dev_requirements.txt" without the problematic packages.
+    # after September 2024, filter_dev_requirements also checks for **compatibility** with the packages being
+    # installed when filtering the dev_requirements.
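+    # e.g. (hypothetical numbers): if released_packages pins azure-core==1.28.0 and dev_requirements.txt
+    # points at ../../identity/azure-identity, the Minimum pass can rewrite that line to a pinned released
+    # version such as azure-identity==1.16.0 instead of installing an incompatible local checkout.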
+    dev_req_file_path = filter_dev_requirements(setup_py_file_path, released_packages, temp_dir, additional_filter_fn)
 
     if override_added_packages:
         logging.info(f"Expanding the requirement set by the packages {override_added_packages}.")
@@ -157,6 +159,7 @@ def find_released_packages(setup_py_path, dependency_type):
 
     return avlble_packages
 
+
 def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions: List[str]) -> List[str]:
     """
     Processes a target package based on an originating package (target is a dep of originating) and the versions available from pypi for the target package.
@@ -180,9 +183,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
             restrictions = PLATFORM_SPECIFIC_MINIMUM_OVERRIDES[platform_bound]
 
             if pkg_name in restrictions:
-                versions = [
-                    v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])
-                ]
+                versions = [v for v in versions if parse_version(v) >= parse_version(restrictions[pkg_name])]
 
     # lower bound package-specific
     if (
@@ -207,9 +208,7 @@ def process_bounded_versions(originating_pkg_name: str, pkg_name: str, versions:
             restrictions = PLATFORM_SPECIFIC_MAXIMUM_OVERRIDES[platform_bound]
 
             if pkg_name in restrictions:
-                versions = [
-                    v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])
-                ]
+                versions = [v for v in versions if parse_version(v) <= parse_version(restrictions[pkg_name])]
 
     # upper bound package-specific
     if (
@@ -249,7 +248,6 @@ def process_requirement(req, dependency_type, orig_pkg_name):
 
     # think of the various versions that come back from pypi as the top of a funnel
     # We apply generic overrides -> platform specific overrides -> package specific overrides
-
     versions = process_bounded_versions(orig_pkg_name, pkg_name, versions)
 
     # Search from lowest to latest in case of finding minimum dependency
@@ -301,17 +299,20 @@ def check_req_against_exclusion(req, req_to_exclude):
     return req_id == req_to_exclude
 
 
-# todo: remove when merging #37450
-def replace_identity(dev_requirement_line) -> str:
-    regex = r"azure[-_]identity"
-
-    if re.search(regex, dev_requirement_line):
-        return "azure-identity==1.17.0\n"
-    else:
-        return dev_requirement_line
+def filter_dev_requirements(
+    setup_py_path,
+    released_packages,
+    temp_dir,
+    additional_filter_fn: Optional[Callable[[str, List[str], List[Requirement]], List[str]]] = None,
+):
+    """
+    This function takes an existing package path, a list of specific package specifiers that we have resolved, a
+    temporary directory to write the modified dev_requirements to, and an optional additional_filter_fn that can be
+    used to further filter the dev_requirements file if necessary.
 
-def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependency_type):
+    The function filters out any requirements present in the dev_requirements file that are also present in the
+    released_packages list (i.e. are required by the package).
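+
+    For example (illustrative): if released_packages contains "azure-core==1.28.0", an azure-core entry in
+    dev_requirements is dropped, and additional_filter_fn (when provided) may rewrite any surviving entries
+    that cannot coexist with azure-core==1.28.0.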
+ """ # This method returns list of requirements from dev_requirements by filtering out packages in given list dev_req_path = os.path.join(os.path.dirname(setup_py_path), DEV_REQ_FILE) requirements = [] @@ -320,12 +321,13 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen # filter out any package available on PyPI (released_packages) # include packages without relative reference and packages not available on PyPI - released_packages = [p.split("==")[0] for p in released_packages] + released_packages = [parse_require(p) for p in released_packages] + released_package_names = [p.key for p in released_packages] # find prebuilt whl paths in dev requiremente prebuilt_dev_reqs = [os.path.basename(req.replace("\n", "")) for req in requirements if os.path.sep in req] # filter any req if wheel is for a released package - req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_packages] - req_to_exclude.extend(released_packages) + req_to_exclude = [req for req in prebuilt_dev_reqs if req.split("-")[0].replace("_", "-") in released_package_names] + req_to_exclude.extend(released_package_names) filtered_req = [ req @@ -334,9 +336,9 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen and not any([check_req_against_exclusion(req, i) for i in req_to_exclude]) ] - if dependency_type == "Minimum": - # replace identity with the minimum version of the package - filtered_req = [replace_identity(req) for req in filtered_req] + if additional_filter_fn: + # this filter function handles the case where a dev requirement is incompatible with the current set of targeted packages + filtered_req = additional_filter_fn(setup_py_path, filtered_req, released_packages) logging.info("Filtered dev requirements: %s", filtered_req) @@ -345,7 +347,7 @@ def filter_dev_requirements(setup_py_path, released_packages, temp_dir, dependen # create new dev requirements file with different name for filtered requirements new_dev_req_path = os.path.join(temp_dir, NEW_DEV_REQ_FILE) with open(new_dev_req_path, "w") as dev_req_file: - dev_req_file.writelines(filtered_req) + dev_req_file.writelines(line if line.endswith("\n") else line + "\n" for line in filtered_req) return new_dev_req_path diff --git a/scripts/devops_tasks/common_tasks.py b/scripts/devops_tasks/common_tasks.py index ab6737362c8b..864bcaccc39f 100644 --- a/scripts/devops_tasks/common_tasks.py +++ b/scripts/devops_tasks/common_tasks.py @@ -205,7 +205,7 @@ def is_required_version_on_pypi(package_name: str, spec: str) -> bool: versions = [str(v) for v in versions if v in specifier] except: logging.error("Package {} is not found on PyPI".format(package_name)) - return versions + return bool(versions) def find_packages_missing_on_pypi(path: str) -> Iterable[str]: diff --git a/sdk/identity/azure-identity-broker/pyproject.toml b/sdk/identity/azure-identity-broker/pyproject.toml index cc83baa914bb..ea31fd0986d0 100644 --- a/sdk/identity/azure-identity-broker/pyproject.toml +++ b/sdk/identity/azure-identity-broker/pyproject.toml @@ -1,4 +1,3 @@ [tool.azure-sdk-build] type_check_samples = false -pyright = false -mindependency = false \ No newline at end of file +pyright = false \ No newline at end of file diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py index 882284e6d6a9..c02929bfe743 100644 --- a/tools/azure-sdk-tools/ci_tools/functions.py +++ b/tools/azure-sdk-tools/ci_tools/functions.py @@ -14,7 +14,7 @@ from 
+    return bool(versions)
 
 
 def find_packages_missing_on_pypi(path: str) -> Iterable[str]:
diff --git a/sdk/identity/azure-identity-broker/pyproject.toml b/sdk/identity/azure-identity-broker/pyproject.toml
index cc83baa914bb..ea31fd0986d0 100644
--- a/sdk/identity/azure-identity-broker/pyproject.toml
+++ b/sdk/identity/azure-identity-broker/pyproject.toml
@@ -1,4 +1,3 @@
 [tool.azure-sdk-build]
 type_check_samples = false
-pyright = false
-mindependency = false
\ No newline at end of file
+pyright = false
\ No newline at end of file
diff --git a/tools/azure-sdk-tools/ci_tools/functions.py b/tools/azure-sdk-tools/ci_tools/functions.py
index 882284e6d6a9..c02929bfe743 100644
--- a/tools/azure-sdk-tools/ci_tools/functions.py
+++ b/tools/azure-sdk-tools/ci_tools/functions.py
@@ -14,7 +14,7 @@
 from pypi_tools.pypi import PyPIClient
 import os, sys, platform, glob, re, logging
-from typing import List, Any
+from typing import List, Any, Optional
 
 INACTIVE_CLASSIFIER = "Development Status :: 7 - Inactive"
 
@@ -192,9 +192,13 @@ def discover_targeted_packages(
     :param str glob_string: The basic glob used to query packages within the repo. Defaults to "azure-*"
     :param str target_root_dir: The root directory in which globbing will begin.
-    :param str additional_contains_filter: Additional filter option. Used when needing to provide one-off filtration that doesn't merit an additional filter_type. Defaults to empty string.
-    :param str filter_type: One a string representing a filter function as a set of options. Options [ "Build", "Docs", "Regression", "Omit_management" ] Defaults to "Build".
-    :param bool compatibility_filter: Enables or disables compatibility filtering of found packages. If the invoking python executable does not match a found package's specifiers, the package will be omitted. Defaults to True.
+    :param str additional_contains_filter: Additional filter option.
+        Used when needing to provide one-off filtration that doesn't merit an additional filter_type. Defaults to empty string.
+    :param str filter_type: A string representing a filter function as a set of options.
+        Options: [ "Build", "Docs", "Regression", "Omit_management" ]. Defaults to "Build".
+    :param bool compatibility_filter: Enables or disables compatibility filtering of found packages.
+        If the invoking python executable does not match a found package's specifiers, the package will be omitted.
+        Defaults to True.
     """
 
     # glob the starting package set
@@ -269,7 +273,7 @@ def is_required_version_on_pypi(package_name, spec):
     return versions
 
 
-def get_package_from_repo(pkg_name: str, repo_root: str = None) -> ParsedSetup:
+def get_package_from_repo(pkg_name: str, repo_root: Optional[str] = None) -> Optional[ParsedSetup]:
     root_dir = discover_repo_root(repo_root)
 
     glob_path = os.path.join(root_dir, "sdk", "*", pkg_name, "setup.py")
@@ -283,7 +287,7 @@ def get_package_from_repo(pkg_name: str, repo_root: str = None) -> ParsedSetup:
 
     return None
 
 
-def get_package_from_repo_or_folder(req: str, prebuilt_wheel_dir: str = None) -> str:
+def get_package_from_repo_or_folder(req: str, prebuilt_wheel_dir: Optional[str] = None) -> Optional[str]:
     """Takes a package name and a possible prebuilt wheel directory. Attempts to resolve a wheel that matches the
     package name, and if it can't, attempts to find the package within the repo to install directly from path on disk.
@@ -293,7 +297,7 @@
 
     local_package = get_package_from_repo(req)
 
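+    # get_package_from_repo() may return None when the requirement cannot be located in the repo,
+    # so both the prebuilt-wheel lookup and the folder fallback below guard against a missing package.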
-    if prebuilt_wheel_dir and os.path.exists(prebuilt_wheel_dir):
+    if prebuilt_wheel_dir and os.path.exists(prebuilt_wheel_dir) and local_package:
         prebuilt_package = discover_prebuilt_package(prebuilt_wheel_dir, local_package.setup_filename, "wheel")
         if prebuilt_package:
             # return the first package found, there should only be a single one matching given that our prebuilt wheel directory
@@ -301,10 +305,13 @@
             # ref tox_harness replace_dev_reqs() calls
             return os.path.join(prebuilt_wheel_dir, prebuilt_package[0])
 
-    return local_package.folder
+    if local_package:
+        return local_package.folder
+    else:
+        return None
 
 
-def get_version_from_repo(pkg_name: str, repo_root: str = None) -> str:
+def get_version_from_repo(pkg_name: str, repo_root: Optional[str] = None) -> str:
     pkg_info = get_package_from_repo(pkg_name, repo_root)
 
     if pkg_info:
         # Remove dev build part if version for this package is already updated to dev build
@@ -387,7 +394,7 @@ def process_requires(setup_py_path: str, is_dev_build: bool = False):
 
     logging.info("Package requirement is updated in setup.py")
 
 
-def find_sdist(dist_dir: str, pkg_name: str, pkg_version: str) -> str:
+def find_sdist(dist_dir: str, pkg_name: str, pkg_version: str) -> Optional[str]:
     """This function attempts to look within a directory (and all subdirs therein) and find a source distribution for the targeted package and version."""
     # This function will find a sdist for given package name
     if not os.path.exists(dist_dir):
@@ -416,7 +423,9 @@
 
     return packages[0]
 
 
-def pip_install(requirements: List[str], include_dependencies: bool = True, python_executable: str = None) -> bool:
+def pip_install(
+    requirements: List[str], include_dependencies: bool = True, python_executable: Optional[str] = None
+) -> bool:
     """
     Attempts to invoke an install operation using the invoking python's pip. Empty requirements are auto-success.
     """
@@ -457,11 +466,11 @@ def pip_uninstall(requirements: List[str], python_executable: str) -> bool:
         return False
 
 
-def pip_install_requirements_file(requirements_file: str, python_executable: str = None) -> bool:
+def pip_install_requirements_file(requirements_file: str, python_executable: Optional[str] = None) -> bool:
     return pip_install(["-r", requirements_file], True, python_executable)
 
 
-def get_pip_list_output(python_executable: str = None):
+def get_pip_list_output(python_executable: Optional[str] = None):
     """Uses the invoking python executable to get the output from pip list."""
     exe = python_executable or sys.executable
 
@@ -487,7 +496,7 @@ def get_pip_list_output(python_executable: str = None):
     return collected_output
 
 
-def pytest(args: [], cwd: str = None, python_executable: str = None) -> bool:
+def pytest(args: list, cwd: Optional[str] = None, python_executable: Optional[str] = None) -> bool:
     """
     Invokes a set of tests, returns true if successful, false otherwise.
     """
@@ -526,6 +535,7 @@ def get_interpreter_compatible_tags() -> List[str]:
 
     tag_strings = output.split(os.linesep)
 
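+    # pre-initialize so `index` is always bound, even if the output is empty and the
+    # enumerate loop below never runs before the "Compatible tags" marker is found.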
""" @@ -526,6 +535,7 @@ def get_interpreter_compatible_tags() -> List[str]: tag_strings = output.split(os.linesep) + index = 0 for index, value in enumerate(tag_strings): if "Compatible tags" in value: break @@ -542,7 +552,7 @@ def check_whl_against_tags(whl_name: str, tags: List[str]) -> bool: return False -def find_whl(whl_dir: str, pkg_name: str, pkg_version: str) -> str: +def find_whl(whl_dir: str, pkg_name: str, pkg_version: str) -> Optional[str]: """This function attempts to look within a directory (and all subdirs therein) and find a wheel that matches our targeted name and version AND whose compilation is compatible with the invoking interpreter.""" if not os.path.exists(whl_dir): @@ -625,3 +635,209 @@ def discover_prebuilt_package(dist_directory: str, setup_path: str, package_type if prebuilt_package is not None: packages.append(prebuilt_package) return packages + + +def is_package_compatible( + package_name: str, + package_requirements: List[Requirement], + immutable_requirements: List[Requirement], + should_log: bool = True, +) -> bool: + """ + This function accepts a set of requirements for a package, and ensures that the package is compatible with the + immutable_requirements. + + It is factored this way because we retrieve requirements differently according to the source of the package. + If published, we can get requires() from PyPI + If locally built wheel, we can get requires() from the metadata of the package + If local relative requirement, we can get requires() from a ParsedSetup of the setup.py for the package + + :param List[Requirement] package_requirements: The dependencies of a dev_requirement file. This is the set of + requirements that we are checking compatibility for. + :param List[Requirement] immutable_requirements: A list of requirements that the other packages must be compatible + with. + """ + + for immutable_requirement in immutable_requirements: + for package_requirement in package_requirements: + if package_requirement.key == immutable_requirement.key: + # if the dev_req line has a requirement that conflicts with the immutable requirement, + # we need to resolve it. We KNOW that the immutable requirement will be of form package==version, + # so we can reliably pull out the version and check it against the specifier of the dev_req line. + try: + immutable_version = next(iter(immutable_requirement.specifier)).version + # we have no specifier set, so we don't even need to check this + except StopIteration: + continue + + if not package_requirement.specifier.contains(immutable_version): + if should_log: + logging.info( + f"Dev req dependency {package_name}'s requirement specifier of {package_requirement}" + f"is not compatible with immutable requirement {immutable_requirement}." + ) + return False + + return True + + +def resolve_compatible_package(package_name: str, immutable_requirements: List[Requirement]) -> Optional[str]: + """ + This function attempts to resolve a compatible package version for whatever set of immutable_requirements that + the package must be compatible with. + + It should only be utilized when a package is found to be incompatible with the immutable_requirements. + It will attempt to resolve the incompatibility by walking backwards through different versions of + until a compatible version is found that works with the immutable_requirements. + """ + + pypi = PyPIClient() + immovable_pkgs = {req.key: req for req in immutable_requirements} + + # Let's use a real use-case to walk through this function. 
+def resolve_compatible_package(package_name: str, immutable_requirements: List[Requirement]) -> Optional[str]:
+    """
+    This function attempts to resolve a compatible package version for whatever set of immutable_requirements that
+    the package must be compatible with.
+
+    It should only be utilized when a package is found to be incompatible with the immutable_requirements.
+    It will attempt to resolve the incompatibility by walking backwards through different versions of the package
+    until a compatible version is found that works with the immutable_requirements.
+    """
+
+    pypi = PyPIClient()
+    immovable_pkgs = {req.key: req for req in immutable_requirements}
+
+    # Let's use a real use-case to walk through this function. We're going to use the azure-ai-language-conversations
+    # package as an example.
+
+    # immovable_pkgs = the selected mindependency for azure-ai-language-conversations
+    #   -> "azure-core==1.28.0",
+    #   -> "isodate==0.6.1",
+    #   -> "typing-extensions==4.0.1",
+    # we have the following dev_reqs for azure-ai-language-conversations
+    #   -> ../azure-sdk-tools
+    #   -> ../azure-identity
+    #   -> ../azure-core
+
+    # as we walk each of the dev reqs, we check for compatibility with the immovable_packages.
+    # (this happens in is_package_compatible) if the dev req is incompatible, we need to resolve it.
+    # THIS function is what resolves it!
+
+    # since we already know that package_name is incompatible with the immovable_pkgs, we need to walk backwards
+    # through the versions of package_name checking to ensure that each version is compatible with the immovable_pkgs.
+    # if we find one that is, we will return a new requirement string for that package which will replace this dev_req line.
+    for pkg in immovable_pkgs:
+        required_package = immovable_pkgs[pkg].name
+        try:
+            required_pkg_version = next(iter(immovable_pkgs[pkg].specifier)).version
+        except StopIteration:
+            required_pkg_version = None
+
+        versions = pypi.get_ordered_versions(package_name, True)
+        versions.reverse()
+
+        # only allow prerelease versions if the dev_req we're targeting is also prerelease
+        if required_pkg_version:
+            if not Version(required_pkg_version).is_prerelease:
+                versions = [v for v in versions if not v.is_prerelease]
+
+        for version in versions:
+            version_release = pypi.project_release(package_name, version).get("info", {}).get("requires_dist", [])
+
+            if version_release:
+                requirements_for_dev_req = [Requirement(r) for r in version_release]
+
+                compatible = is_package_compatible(
+                    required_package, requirements_for_dev_req, immutable_requirements, should_log=False
+                )
+                if compatible:
+                    # we have found a compatible version. We can return this as the new requirement line for the dev_req file.
+                    return f"{package_name}=={version}"
+
+    # no changes necessary
+    return None
+
+
+def handle_incompatible_minimum_dev_reqs(
+    setup_path: str, filtered_requirement_list: List[str], packages_for_install: List[Requirement]
+) -> List[str]:
+    """
+    This function is used to handle the case where a dev requirement is incompatible with the current set of packages
+    being installed. It is used to update or remove dev_requirements that are incompatible with a targeted set of
+    packages.
+
+    :param str setup_path: The path to the setup.py file whose dev_requirements are being filtered.
+    :param List[str] filtered_requirement_list: A filtered copy of the dev_requirements.txt for the targeted package.
+        A cleansed copy of this list is returned, with incompatible requirements updated or removed.
+    :param List[Requirement] packages_for_install: A list of packages that the dev_requirements MUST be compatible with.
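+
+    For example (illustrative): given packages_for_install of [azure-core==1.28.0], a relative dev requirement
+    on azure-identity whose floor on azure-core conflicts with 1.28.0 is swapped for a pinned release resolved
+    from PyPI, while compatible lines pass through unchanged.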
+ """ + + cleansed_reqs = [] + + for dev_requirement_line in filtered_requirement_list: + cleansed_dev_requirement_line = dev_requirement_line.strip().replace("-e ", "").split("#")[0].split(";")[0] + + if cleansed_dev_requirement_line: + dev_req_package = None + dev_req_version = None + requirements_for_dev_req = [] + + # this is a locally built wheel file, ise pkginfo to get the metadata + if os.path.exists(cleansed_dev_requirement_line) and os.path.isfile(cleansed_dev_requirement_line): + logging.info( + f"We are processing a replaced relative requirement built into a wheel: {cleansed_dev_requirement_line}" + ) + import pkginfo + + try: + local_package_metadata = pkginfo.get_metadata(cleansed_dev_requirement_line) + if local_package_metadata: + dev_req_package = local_package_metadata.name + dev_req_version = local_package_metadata.version + requirements_for_dev_req = [Requirement(r) for r in local_package_metadata.requires_dist] + else: + logging.error( + f"Error while processing locally built requirement {cleansed_dev_requirement_line}. Unable to resolve metadata." + ) + cleansed_reqs.append(cleansed_dev_requirement_line) + except Exception as e: + logging.error( + f"Unable to determine metadata for locally built requirement {cleansed_dev_requirement_line}: {e}" + ) + cleansed_reqs.append(cleansed_dev_requirement_line) + continue + + # this is a relative requirement to a package path in the repo, use our ParsedSetup class to get data from setup.py or pyproject.toml + elif cleansed_dev_requirement_line.startswith("."): + logging.info(f"We are processing a relative requirement: {cleansed_dev_requirement_line}") + try: + local_package = ParsedSetup.from_path(os.path.join(setup_path, cleansed_dev_requirement_line)) + + if local_package: + dev_req_package = local_package.name + dev_req_version = local_package.version + requirements_for_dev_req = [Requirement(r) for r in local_package.requires] + else: + logging.error( + f"Error while processing relative requirement {cleansed_dev_requirement_line}. Unable to resolve metadata." + ) + cleansed_reqs.append(cleansed_dev_requirement_line) + + except Exception as e: + logging.error( + f'Unable to determine metadata for relative requirement "{cleansed_dev_requirement_line}", not modifying: {e}' + ) + cleansed_reqs.append(cleansed_dev_requirement_line) + continue + # If we got here, this has to be a standard requirement, attempt to parse it as a specifier and if unable to do so, + # simply add it to the list as a last fallback. we will log so that we can implement a fix for the edge case later. + else: + logging.info(f"We are processing a standard requirement: {cleansed_dev_requirement_line}") + cleansed_reqs.append(dev_requirement_line) + + # we understand how to parse it, so we should handle it + if dev_req_package: + if not is_package_compatible(dev_req_package, requirements_for_dev_req, packages_for_install): + new_req = resolve_compatible_package(dev_req_package, packages_for_install) + + if new_req: + cleansed_reqs.append(new_req) + else: + logging.error( + f'Found incompatible dev requirement {dev_req_package}, but unable to locate a compatible version. Not modifying the line: "{dev_requirement_line}".' 
+
+            # we understand how to parse it, so we should handle it
+            if dev_req_package:
+                if not is_package_compatible(dev_req_package, requirements_for_dev_req, packages_for_install):
+                    new_req = resolve_compatible_package(dev_req_package, packages_for_install)
+
+                    if new_req:
+                        cleansed_reqs.append(new_req)
+                    else:
+                        logging.error(
+                            f'Found incompatible dev requirement {dev_req_package}, but unable to locate a compatible version. Not modifying the line: "{dev_requirement_line}".'
+                        )
+                        cleansed_reqs.append(cleansed_dev_requirement_line)
+                else:
+                    cleansed_reqs.append(cleansed_dev_requirement_line)
+
+    return cleansed_reqs
diff --git a/tools/azure-sdk-tools/ci_tools/variables.py b/tools/azure-sdk-tools/ci_tools/variables.py
index d74417ad8f7d..0b84dae8b2b8 100644
--- a/tools/azure-sdk-tools/ci_tools/variables.py
+++ b/tools/azure-sdk-tools/ci_tools/variables.py
@@ -1,5 +1,5 @@
 import os
-
+from typing import Optional
 
 def str_to_bool(input_string: str) -> bool:
     """
@@ -15,7 +15,7 @@ def str_to_bool(input_string: str) -> bool:
     return False
 
 
-def discover_repo_root(input_repo: str = None):
+def discover_repo_root(input_repo: Optional[str] = None):
     """
     Resolves the root of the repository given a current working directory. This function should be used if a target
     repo argument is not provided. If the value of input_repo has value, that will supplant the path ascension logic.
@@ -38,7 +38,7 @@ def discover_repo_root(input_repo: str = None):
     )
 
 
-def get_artifact_directory(input_directory: str = None) -> str:
+def get_artifact_directory(input_directory: Optional[str] = None) -> str:
     """
     Resolves the root of an artifact directory that the \"sdk_build\" action will output to!
     """
@@ -49,7 +49,7 @@ def get_artifact_directory(input_directory: str = None) -> str:
     return os.getenv("SDK_ARTIFACT_DIRECTORY", os.path.join(discover_repo_root(), ".artifacts"))
 
 
-def get_log_directory(input_directory: str = None) -> str:
+def get_log_directory(input_directory: Optional[str] = None) -> str:
     """
     Resolves the location of the log directory.
     """
diff --git a/tools/azure-sdk-tools/pypi_tools/pypi.py b/tools/azure-sdk-tools/pypi_tools/pypi.py
index a4a4dcb18aef..b68e5daae8d1 100644
--- a/tools/azure-sdk-tools/pypi_tools/pypi.py
+++ b/tools/azure-sdk-tools/pypi_tools/pypi.py
@@ -1,5 +1,5 @@
 import logging
-from packaging.version import InvalidVersion, parse as Version
+from packaging.version import InvalidVersion, Version, parse
 import sys
 import pdb
 from urllib3 import Retry, PoolManager
@@ -41,13 +41,13 @@ def filter_packages_for_compatibility(self, package_name, version_set):
         # only need the packaging.specifiers import if we're actually executing this filter.
         from packaging.specifiers import InvalidSpecifier, SpecifierSet
 
-        results = []
+        results: List[Version] = []
 
         for version in version_set:
             requires_python = self.project_release(package_name, version)["info"]["requires_python"]
             if requires_python:
                 try:
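+                    # `parse` is the same callable this module previously imported as `Version`
+                    # ("parse as Version"), so runtime behavior is unchanged; the real `Version`
+                    # class is now reserved for type annotations.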
-                    if Version(".".join(map(str, sys.version_info[:3]))) in SpecifierSet(requires_python):
+                    if parse(".".join(map(str, sys.version_info[:3]))) in SpecifierSet(requires_python):
                         results.append(version)
                 except InvalidSpecifier:
                     logging.warn(f"Invalid python_requires {requires_python!r} for package {package_name}=={version}")
@@ -60,10 +60,10 @@ def filter_packages_for_compatibility(self, package_name, version_set):
 
     def get_ordered_versions(self, package_name, filter_by_compatibility=False) -> List[Version]:
         project = self.project(package_name)
 
-        versions = []
+        versions: List[Version] = []
 
         for package_version in project["releases"].keys():
             try:
-                versions.append(Version(package_version))
+                versions.append(parse(package_version))
             except InvalidVersion as e:
                 logging.warn(f"Invalid version {package_version} for package {package_name}")
                 continue
diff --git a/tools/azure-sdk-tools/setup.py b/tools/azure-sdk-tools/setup.py
index cb40aa24f1e2..3d4a803ad1e8 100644
--- a/tools/azure-sdk-tools/setup.py
+++ b/tools/azure-sdk-tools/setup.py
@@ -57,7 +57,7 @@
     extras_require={
         ":python_version>='3.5'": ["pytest-asyncio>=0.9.0"],
         ":python_version<'3.11'": ["tomli==2.0.1"],
-        "build": ["six", "setuptools", "pyparsing", "certifi", "cibuildwheel"],
+        "build": ["six", "setuptools", "pyparsing", "certifi", "cibuildwheel", "pkginfo"],
         "conda": ["beautifulsoup4"],
         "systemperf": ["aiohttp>=3.0", "requests>=2.0", "tornado==6.0.3", "httpx>=0.21", "azure-core"],
         "ghtools": ["GitPython", "PyGithub>=1.59.0", "requests>=2.0"],
diff --git a/tools/azure-sdk-tools/tests/test_conflict_resolution.py b/tools/azure-sdk-tools/tests/test_conflict_resolution.py
new file mode 100644
index 000000000000..9884f338d896
--- /dev/null
+++ b/tools/azure-sdk-tools/tests/test_conflict_resolution.py
@@ -0,0 +1,34 @@
+import pytest
+
+from unittest.mock import patch
+from tempfile import TemporaryDirectory
+from ci_tools.functions import resolve_compatible_package, is_package_compatible
+from typing import Optional, List
+from packaging.version import Version
+from pkg_resources import Requirement
+
+
+@pytest.mark.parametrize(
+    "fake_package_input_requirements, immutable_requirements, expected_result",
+    [([Requirement("sphinx==1.0.0")], [Requirement("sphinx>=1.0.0")], True),
+     ([Requirement("sphinx==1.0.0")], [Requirement("sphinx>=1.1.0")], False)],
+)
+def test_incompatible_specifier(fake_package_input_requirements, immutable_requirements, expected_result):
+    result = is_package_compatible("fake-package", fake_package_input_requirements, immutable_requirements)
+    assert result == expected_result
+
+
+def test_identity_resolution():
+    result = resolve_compatible_package(
+        "azure-identity",
+        [Requirement("azure-core>=1.28.0"), Requirement("isodate>=0.6.1"), Requirement("typing-extensions>=4.0.1")],
+    )
+    assert result == "azure-identity==1.16.0"
+
+
+def test_resolution_no_requirement():
+    result = resolve_compatible_package(
+        "azure-identity",
+        [Requirement("azure-core")],
+    )
+    assert result == "azure-identity==1.18.0"
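+
+
+def test_compatible_when_immutable_requirement_unpinned():
+    # illustrative extra case (not part of the original suite): an immutable requirement with no
+    # specifier cannot conflict, so any pinned dev requirement is reported as compatible.
+    result = is_package_compatible(
+        "fake-package",
+        [Requirement("sphinx==1.0.0")],
+        [Requirement("sphinx")],
+    )
+    assert result is True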