diff --git a/src/pip/_internal/build_env.py b/src/pip/_internal/build_env.py
index f99fdf522f7..e25f22b134a 100644
--- a/src/pip/_internal/build_env.py
+++ b/src/pip/_internal/build_env.py
@@ -14,14 +14,20 @@
 from pip import __file__ as pip_location
 from pip._internal.utils.misc import call_subprocess
 from pip._internal.utils.temp_dir import TempDirectory
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
 from pip._internal.utils.ui import open_spinner
 
+if MYPY_CHECK_RUNNING:
+    from typing import Tuple, Set, Iterable, Optional, List  # noqa: F401
+    from pip._internal.index import PackageFinder  # noqa: F401
+
 logger = logging.getLogger(__name__)
 
 
 class _Prefix:
 
     def __init__(self, path):
+        # type: (str) -> None
         self.path = path
         self.setup = False
         self.bin_dir = get_paths(
@@ -30,8 +36,8 @@ def __init__(self, path):
         )['scripts']
         # Note: prefer distutils' sysconfig to get the
         # library paths so PyPy is correctly supported.
-        purelib = get_python_lib(plat_specific=0, prefix=path)
-        platlib = get_python_lib(plat_specific=1, prefix=path)
+        purelib = get_python_lib(plat_specific=False, prefix=path)
+        platlib = get_python_lib(plat_specific=True, prefix=path)
         if purelib == platlib:
             self.lib_dirs = [purelib]
         else:
@@ -43,6 +49,7 @@ class BuildEnvironment(object):
     """
 
     def __init__(self):
+        # type: () -> None
         self._temp_dir = TempDirectory(kind="build-env")
         self._temp_dir.create()
 
@@ -51,8 +58,8 @@ def __init__(self):
             for name in ('normal', 'overlay')
         ))
 
-        self._bin_dirs = []
-        self._lib_dirs = []
+        self._bin_dirs = []  # type: List[str]
+        self._lib_dirs = []  # type: List[str]
         for prefix in reversed(list(self._prefixes.values())):
             self._bin_dirs.append(prefix.bin_dir)
             self._lib_dirs.extend(prefix.lib_dirs)
@@ -62,8 +69,8 @@ def __init__(self):
         # - prevent access to system site packages
         system_sites = {
             os.path.normcase(site) for site in (
-                get_python_lib(plat_specific=0),
-                get_python_lib(plat_specific=1),
+                get_python_lib(plat_specific=False),
+                get_python_lib(plat_specific=True),
             )
         }
         self._site_dir = os.path.join(self._temp_dir.path, 'site')
@@ -124,9 +131,11 @@ def __exit__(self, exc_type, exc_val, exc_tb):
                 os.environ[varname] = old_value
 
     def cleanup(self):
+        # type: () -> None
         self._temp_dir.cleanup()
 
     def check_requirements(self, reqs):
+        # type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
         """Return 2 sets:
             - conflicting requirements: set of (installed, wanted) reqs tuples
             - missing requirements: set of reqs
@@ -144,8 +153,15 @@ def check_requirements(self, reqs):
                                     str(e.args[1])))
         return conflicting, missing
 
-    def install_requirements(self, finder, requirements, prefix, message):
-        prefix = self._prefixes[prefix]
+    def install_requirements(
+        self,
+        finder,  # type: PackageFinder
+        requirements,  # type: Iterable[str]
+        prefix_as_string,  # type: str
+        message  # type: Optional[str]
+    ):
+        # type: (...) -> None
+        prefix = self._prefixes[prefix_as_string]
         assert not prefix.setup
         prefix.setup = True
         if not requirements:
@@ -154,7 +170,7 @@ def install_requirements(self, finder, requirements, prefix, message):
             sys.executable, os.path.dirname(pip_location), 'install',
             '--ignore-installed', '--no-user', '--prefix', prefix.path,
             '--no-warn-script-location',
-        ]
+        ]  # type: List[str]
         if logger.getEffectiveLevel() <= logging.DEBUG:
             args.append('-v')
         for format_control in ('no_binary', 'only_binary'):
diff --git a/src/pip/_internal/cli/base_command.py b/src/pip/_internal/cli/base_command.py
index 70cc6375f0c..1212cd5f447 100644
--- a/src/pip/_internal/cli/base_command.py
+++ b/src/pip/_internal/cli/base_command.py
@@ -32,7 +32,7 @@
 from pip._internal.utils.typing import MYPY_CHECK_RUNNING
 
 if MYPY_CHECK_RUNNING:
-    from typing import Optional, List, Union, Tuple, Any  # noqa: F401
+    from typing import Optional, List, Tuple, Any  # noqa: F401
     from optparse import Values  # noqa: F401
     from pip._internal.cache import WheelCache  # noqa: F401
     from pip._internal.req.req_set import RequirementSet  # noqa: F401
diff --git a/src/pip/_internal/cli/cmdoptions.py b/src/pip/_internal/cli/cmdoptions.py
index d0d5bf91698..563cd58cd31 100644
--- a/src/pip/_internal/cli/cmdoptions.py
+++ b/src/pip/_internal/cli/cmdoptions.py
@@ -114,7 +114,7 @@ def check_dist_restriction(options, check_target=False):
     dest='help',
     action='help',
     help='Show help.',
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 isolated_mode = partial(
     Option,
@@ -126,7 +126,7 @@ def check_dist_restriction(options, check_target=False):
         "Run pip in an isolated mode, ignoring environment variables and user "
         "configuration."
     ),
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 require_virtualenv = partial(
     Option,
@@ -136,7 +136,7 @@ def check_dist_restriction(options, check_target=False):
     action='store_true',
     default=False,
     help=SUPPRESS_HELP
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 verbose = partial(
     Option,
@@ -145,7 +145,7 @@ def check_dist_restriction(options, check_target=False):
     action='count',
     default=0,
     help='Give more output. Option is additive, and can be used up to 3 times.'
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 no_color = partial(
     Option,
@@ -154,7 +154,7 @@ def check_dist_restriction(options, check_target=False):
     action='store_true',
     default=False,
     help="Suppress colored output",
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 version = partial(
     Option,
@@ -162,7 +162,7 @@ def check_dist_restriction(options, check_target=False):
     dest='version',
     action='store_true',
     help='Show version and exit.',
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 quiet = partial(
     Option,
@@ -175,7 +175,7 @@ def check_dist_restriction(options, check_target=False):
         ' times (corresponding to WARNING, ERROR, and CRITICAL logging'
         ' levels).'
     ),
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 progress_bar = partial(
     Option,
@@ -188,7 +188,7 @@ def check_dist_restriction(options, check_target=False):
         'Specify type of progress to be displayed [' +
         '|'.join(BAR_TYPES.keys()) + '] (default: %default)'
     ),
-)  # type: partial[Option]
+)  # type: Callable[..., Option]
 
 log = partial(
     Option,
@@ -196,7 +196,7 @@ def check_dist_restriction(options, check_target=False):
     dest="log",
     metavar="path",
     help="Path to a verbose appending log."
-) # type: partial[Option] +) # type: Callable[..., Option] no_input = partial( Option, @@ -206,7 +206,7 @@ def check_dist_restriction(options, check_target=False): action='store_true', default=False, help=SUPPRESS_HELP -) # type: partial[Option] +) # type: Callable[..., Option] proxy = partial( Option, @@ -215,7 +215,7 @@ def check_dist_restriction(options, check_target=False): type='str', default='', help="Specify a proxy in the form [user:passwd@]proxy.server:port." -) # type: partial[Option] +) # type: Callable[..., Option] retries = partial( Option, @@ -225,7 +225,7 @@ def check_dist_restriction(options, check_target=False): default=5, help="Maximum number of retries each connection should attempt " "(default %default times).", -) # type: partial[Option] +) # type: Callable[..., Option] timeout = partial( Option, @@ -235,7 +235,7 @@ def check_dist_restriction(options, check_target=False): type='float', default=15, help='Set the socket timeout (default %default seconds).', -) # type: partial[Option] +) # type: Callable[..., Option] skip_requirements_regex = partial( Option, @@ -245,7 +245,7 @@ def check_dist_restriction(options, check_target=False): type='str', default='', help=SUPPRESS_HELP, -) # type: partial[Option] +) # type: Callable[..., Option] def exists_action(): @@ -271,7 +271,7 @@ def exists_action(): type='str', metavar='path', help="Path to alternate CA bundle.", -) # type: partial[Option] +) # type: Callable[..., Option] client_cert = partial( Option, @@ -282,7 +282,7 @@ def exists_action(): metavar='path', help="Path to SSL client certificate, a single file containing the " "private key and the certificate in PEM format.", -) # type: partial[Option] +) # type: Callable[..., Option] index_url = partial( Option, @@ -294,7 +294,7 @@ def exists_action(): "This should point to a repository compliant with PEP 503 " "(the simple repository API) or a local directory laid out " "in the same format.", -) # type: partial[Option] +) # type: Callable[..., Option] def extra_index_url(): @@ -317,7 +317,7 @@ def extra_index_url(): action='store_true', default=False, help='Ignore package index (only looking at --find-links URLs instead).', -) # type: partial[Option] +) # type: Callable[..., Option] def find_links(): @@ -355,7 +355,7 @@ def trusted_host(): action="store_true", default=False, help="Enable the processing of dependency links.", -) # type: partial[Option] +) # type: Callable[..., Option] def constraints(): @@ -406,7 +406,7 @@ def editable(): help='Directory to check out editable projects into. ' 'The default in a virtualenv is "/src". ' 'The default for global installs is "/src".' -) # type: partial[Option] +) # type: Callable[..., Option] def _get_format_control(values, option): @@ -471,7 +471,7 @@ def only_binary(): default=None, help=("Only use wheels compatible with . " "Defaults to the platform of the running system."), -) # type: partial[Option] +) # type: Callable[..., Option] python_version = partial( @@ -486,7 +486,7 @@ def only_binary(): "version (e.g. '2') can be specified to match all " "minor revs of that major version. A minor version " "(e.g. '34') can also be specified."), -) # type: partial[Option] +) # type: Callable[..., Option] implementation = partial( @@ -500,7 +500,7 @@ def only_binary(): " or 'ip'. If not specified, then the current " "interpreter implementation is used. 
Use 'py' to force " "implementation-agnostic wheels."), -) # type: partial[Option] +) # type: Callable[..., Option] abi = partial( @@ -515,7 +515,7 @@ def only_binary(): "you will need to specify --implementation, " "--platform, and --python-version when using " "this option."), -) # type: partial[Option] +) # type: Callable[..., Option] def prefer_binary(): @@ -536,7 +536,7 @@ def prefer_binary(): default=USER_CACHE_DIR, metavar="dir", help="Store the cache data in ." -) # type: partial[Option] +) # type: Callable[..., Option] def no_cache_dir_callback(option, opt, value, parser): @@ -570,7 +570,7 @@ def no_cache_dir_callback(option, opt, value, parser): action="callback", callback=no_cache_dir_callback, help="Disable the cache.", -) # type: partial[Option] +) # type: Callable[..., Option] no_deps = partial( Option, @@ -579,7 +579,7 @@ def no_cache_dir_callback(option, opt, value, parser): action='store_true', default=False, help="Don't install package dependencies.", -) # type: partial[Option] +) # type: Callable[..., Option] build_dir = partial( Option, @@ -591,7 +591,7 @@ def no_cache_dir_callback(option, opt, value, parser): 'The location of temporary directories can be controlled by setting ' 'the TMPDIR environment variable (TEMP on Windows) appropriately. ' 'When passed, build directories are not cleaned in case of failures.' -) # type: partial[Option] +) # type: Callable[..., Option] ignore_requires_python = partial( Option, @@ -599,7 +599,7 @@ def no_cache_dir_callback(option, opt, value, parser): dest='ignore_requires_python', action='store_true', help='Ignore the Requires-Python information.' -) # type: partial[Option] +) # type: Callable[..., Option] no_build_isolation = partial( Option, @@ -610,7 +610,7 @@ def no_cache_dir_callback(option, opt, value, parser): help='Disable isolation when building a modern source distribution. ' 'Build dependencies specified by PEP 518 must be already installed ' 'if this option is used.' -) # type: partial[Option] +) # type: Callable[..., Option] use_pep517 = partial( Option, @@ -642,7 +642,7 @@ def no_cache_dir_callback(option, opt, value, parser): "bin\"). Use multiple --install-option options to pass multiple " "options to setup.py install. If you are using an option with a " "directory path, be sure to use absolute path.", -) # type: partial[Option] +) # type: Callable[..., Option] global_options = partial( Option, @@ -652,7 +652,7 @@ def no_cache_dir_callback(option, opt, value, parser): metavar='options', help="Extra global options to be supplied to the setup.py " "call before the install command.", -) # type: partial[Option] +) # type: Callable[..., Option] no_clean = partial( Option, @@ -660,7 +660,7 @@ def no_cache_dir_callback(option, opt, value, parser): action='store_true', default=False, help="Don't clean up build directories." -) # type: partial[Option] +) # type: Callable[..., Option] pre = partial( Option, @@ -669,7 +669,7 @@ def no_cache_dir_callback(option, opt, value, parser): default=False, help="Include pre-release and development versions. By default, " "pip only finds stable versions.", -) # type: partial[Option] +) # type: Callable[..., Option] disable_pip_version_check = partial( Option, @@ -679,7 +679,7 @@ def no_cache_dir_callback(option, opt, value, parser): default=False, help="Don't periodically check PyPI to determine whether a new version " "of pip is available for download. 
Implied with --no-index.", -) # type: partial[Option] +) # type: Callable[..., Option] # Deprecated, Remove later @@ -689,7 +689,7 @@ def no_cache_dir_callback(option, opt, value, parser): dest='always_unzip', action='store_true', help=SUPPRESS_HELP, -) # type: partial[Option] +) # type: Callable[..., Option] def _merge_hash(option, opt_str, value, parser): @@ -697,7 +697,7 @@ def _merge_hash(option, opt_str, value, parser): """Given a value spelled "algo:digest", append the digest to a list pointed to in a dict by the algo name.""" if not parser.values.hashes: - parser.values.hashes = {} + parser.values.hashes = {} # type: ignore try: algo, digest = value.split(':', 1) except ValueError: @@ -721,7 +721,7 @@ def _merge_hash(option, opt_str, value, parser): type='string', help="Verify that the package's archive matches this " 'hash before installing. Example: --hash=sha256:abcdef...', -) # type: partial[Option] +) # type: Callable[..., Option] require_hashes = partial( @@ -733,7 +733,7 @@ def _merge_hash(option, opt_str, value, parser): help='Require a hash to check each requirement against, for ' 'repeatable installs. This option is implied when any package in a ' 'requirements file has a --hash option.', -) # type: partial[Option] +) # type: Callable[..., Option] ########## diff --git a/src/pip/_internal/commands/install.py b/src/pip/_internal/commands/install.py index 1058a56d9d6..eb0f5f1d40d 100644 --- a/src/pip/_internal/commands/install.py +++ b/src/pip/_internal/commands/install.py @@ -28,8 +28,13 @@ protect_pip_from_modification_on_windows, ) from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.wheel import WheelBuilder +if MYPY_CHECK_RUNNING: + from typing import List # noqa: F401 + from pip._internal.req import InstallRequirement # noqa: F401 + logger = logging.getLogger(__name__) @@ -499,6 +504,7 @@ def _handle_target_dir(self, target_dir, target_temp_dir, upgrade): ) def _warn_about_conflicts(self, to_install): + # type: (List[InstallRequirement]) -> None try: package_set, _dep_info = check_install_conflicts(to_install) except Exception: diff --git a/src/pip/_internal/download.py b/src/pip/_internal/download.py index 4cc0774e7cc..2e4cc38216d 100644 --- a/src/pip/_internal/download.py +++ b/src/pip/_internal/download.py @@ -48,7 +48,9 @@ from pip._internal.vcs import vcs if MYPY_CHECK_RUNNING: - from typing import Optional # noqa: F401 + from typing import Optional, Tuple, Dict, IO, Text, Union # noqa: F401 + from pip._internal.models.link import Link # noqa: F401 + from pip._internal.utils.hashes import Hashes # noqa: F401 try: import ssl # noqa @@ -139,8 +141,9 @@ def user_agent(): class MultiDomainBasicAuth(AuthBase): def __init__(self, prompting=True): + # type: (bool) -> None self.prompting = prompting - self.passwords = {} + self.passwords = {} # type: Dict[str, Tuple[Optional[str], Optional[str]]] # noqa: E501 def __call__(self, req): parsed = urllib_parse.urlparse(req.url) @@ -398,6 +401,7 @@ def request(self, method, url, *args, **kwargs): def get_file_content(url, comes_from=None, session=None): + # type: (str, Optional[str], Optional[PipSession]) -> Tuple[str, Text] """Gets the content of a file; it may be a filename, file: URL, or http: URL. Returns (location, content). Content is unicode. 
@@ -448,6 +452,7 @@ def get_file_content(url, comes_from=None, session=None): def is_url(name): + # type: (Union[str, Text]) -> bool """Returns true if the name looks like a URL""" if ':' not in name: return False @@ -456,6 +461,7 @@ def is_url(name): def url_to_path(url): + # type: (str) -> str """ Convert a file: URL to a path. """ @@ -473,6 +479,7 @@ def url_to_path(url): def path_to_url(path): + # type: (Union[str, Text]) -> str """ Convert a path to a file: URL. The path will be made absolute and have quoted path parts. @@ -483,6 +490,7 @@ def path_to_url(path): def is_archive_file(name): + # type: (str) -> bool """Return True if `name` is a considered as an archive file.""" ext = splitext(name)[1].lower() if ext in ARCHIVE_EXTENSIONS: @@ -503,14 +511,17 @@ def _get_used_vcs_backend(link): def is_vcs_url(link): + # type: (Link) -> bool return bool(_get_used_vcs_backend(link)) def is_file_url(link): + # type: (Link) -> bool return link.url.lower().startswith('file:') def is_dir_url(link): + # type: (Link) -> bool """Return whether a file:// Link points to a directory. ``link`` must not have any other scheme but file://. Call is_file_url() @@ -525,7 +536,14 @@ def _progress_indicator(iterable, *args, **kwargs): return iterable -def _download_url(resp, link, content_file, hashes, progress_bar): +def _download_url( + resp, # type: Response + link, # type: Link + content_file, # type: IO + hashes, # type: Hashes + progress_bar # type: str +): + # type: (...) -> None try: total_length = int(resp.headers['content-length']) except (ValueError, KeyError, TypeError): @@ -647,8 +665,15 @@ def _copy_file(filename, location, link): logger.info('Saved %s', display_path(download_location)) -def unpack_http_url(link, location, download_dir=None, - session=None, hashes=None, progress_bar="on"): +def unpack_http_url( + link, # type: Link + location, # type: str + download_dir=None, # type: Optional[str] + session=None, # type: Optional[PipSession] + hashes=None, # type: Optional[Hashes] + progress_bar="on" # type: str +): + # type: (...) -> None if session is None: raise TypeError( "unpack_http_url() missing 1 required keyword argument: 'session'" @@ -685,7 +710,13 @@ def unpack_http_url(link, location, download_dir=None, os.unlink(from_path) -def unpack_file_url(link, location, download_dir=None, hashes=None): +def unpack_file_url( + link, # type: Link + location, # type: str + download_dir=None, # type: Optional[str] + hashes=None # type: Optional[Hashes] +): + # type: (...) -> None """Unpack link into location. If download_dir is provided and link points to a file, make a copy @@ -798,9 +829,16 @@ def request(self, host, handler, request_body, verbose=False): raise -def unpack_url(link, location, download_dir=None, - only_download=False, session=None, hashes=None, - progress_bar="on"): +def unpack_url( + link, # type: Optional[Link] + location, # type: Optional[str] + download_dir=None, # type: Optional[str] + only_download=False, # type: bool + session=None, # type: Optional[PipSession] + hashes=None, # type: Optional[Hashes] + progress_bar="on" # type: str +): + # type: (...) -> None """Unpack link. 
If link is a VCS link: if only_download, export into download_dir and ignore location @@ -840,7 +878,14 @@ def unpack_url(link, location, download_dir=None, write_delete_marker_file(location) -def _download_http_url(link, session, temp_dir, hashes, progress_bar): +def _download_http_url( + link, # type: Link + session, # type: PipSession + temp_dir, # type: str + hashes, # type: Hashes + progress_bar # type: str +): + # type: (...) -> Tuple[str, str] """Download link url into temp_dir using provided session""" target_url = link.url.split('#', 1)[0] try: @@ -900,6 +945,7 @@ def _download_http_url(link, session, temp_dir, hashes, progress_bar): def _check_download_dir(link, download_dir, hashes): + # type: (Link, str, Hashes) -> Optional[str] """ Check download_dir for previously downloaded file with correct hash If a correct file is found return its path else None """ diff --git a/src/pip/_internal/exceptions.py b/src/pip/_internal/exceptions.py index f1ca6f36ded..38ceeea92e3 100644 --- a/src/pip/_internal/exceptions.py +++ b/src/pip/_internal/exceptions.py @@ -5,6 +5,12 @@ from pip._vendor.six import iteritems +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional # noqa: F401 + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + class PipError(Exception): """Base pip exception""" @@ -96,7 +102,7 @@ class HashError(InstallationError): typically available earlier. """ - req = None + req = None # type: Optional[InstallRequirement] head = '' def body(self): diff --git a/src/pip/_internal/index.py b/src/pip/_internal/index.py index 693c79b5158..85afea177c5 100644 --- a/src/pip/_internal/index.py +++ b/src/pip/_internal/index.py @@ -38,8 +38,23 @@ redact_password_from_url, ) from pip._internal.utils.packaging import check_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.wheel import Wheel, wheel_ext +if MYPY_CHECK_RUNNING: + from logging import Logger # noqa: F401 + from typing import (Tuple, Optional, Any, List, # noqa: F401 + Union, Callable, Set, Sequence, Iterable, + MutableMapping) + from pip._vendor.packaging.version import _BaseVersion # noqa: F401 + from pip._vendor.requests import Response # noqa: F401 + from pip._internal.req import InstallRequirement # noqa: F401 + from pip._internal.download import PipSession # noqa: F401 + + SecureOrigin = Tuple[str, str, Optional[str]] + BuildTag = Tuple[Any, ...] # possibly empty Tuple[int, str] + CandidateSortingKey = Tuple[int, _BaseVersion, BuildTag, Optional[int]] + __all__ = ['FormatControl', 'PackageFinder'] @@ -53,13 +68,14 @@ ("file", "*", None), # ssh is always secure. ("ssh", "*", "*"), -] +] # type: List[SecureOrigin] logger = logging.getLogger(__name__) def _match_vcs_scheme(url): + # type: (str) -> Optional[str] """Look for VCS schemes in the URL. Returns the matched VCS scheme, or None if there's no match. @@ -72,6 +88,7 @@ def _match_vcs_scheme(url): def _is_url_like_archive(url): + # type: (str) -> bool """Return whether the URL looks like an archive. """ filename = Link(url).filename @@ -83,12 +100,14 @@ def _is_url_like_archive(url): class _NotHTML(Exception): def __init__(self, content_type, request_desc): + # type: (str, str) -> None super(_NotHTML, self).__init__(content_type, request_desc) self.content_type = content_type self.request_desc = request_desc def _ensure_html_header(response): + # type: (Response) -> None """Check the Content-Type header to ensure the response contains HTML. 
Raises `_NotHTML` if the content type is not text/html. @@ -103,6 +122,7 @@ class _NotHTTP(Exception): def _ensure_html_response(url, session): + # type: (str, PipSession) -> None """Send a HEAD request to the URL, and ensure the response contains HTML. Raises `_NotHTTP` if the URL is not available for a HEAD request, or @@ -119,6 +139,7 @@ def _ensure_html_response(url, session): def _get_html_response(url, session): + # type: (str, PipSession) -> Response """Access an HTML page with GET, and return the response. This consists of three parts: @@ -168,13 +189,19 @@ def _get_html_response(url, session): return resp -def _handle_get_page_fail(link, reason, url, meth=None): +def _handle_get_page_fail( + link, # type: Link + reason, # type: Union[str, Exception] + meth=None # type: Optional[Callable[..., None]] +): + # type: (...) -> None if meth is None: meth = logger.debug meth("Could not fetch URL %s: %s - skipping", link, reason) def _get_html_page(link, session=None): + # type: (Link, Optional[PipSession]) -> Optional[HTMLPage] if session is None: raise TypeError( "_get_html_page() missing 1 required keyword argument: 'session'" @@ -211,19 +238,20 @@ def _get_html_page(link, session=None): link, exc.request_desc, exc.content_type, ) except requests.HTTPError as exc: - _handle_get_page_fail(link, exc, url) + _handle_get_page_fail(link, exc) except RetryError as exc: - _handle_get_page_fail(link, exc, url) + _handle_get_page_fail(link, exc) except SSLError as exc: reason = "There was a problem confirming the ssl certificate: " reason += str(exc) - _handle_get_page_fail(link, reason, url, meth=logger.info) + _handle_get_page_fail(link, reason, meth=logger.info) except requests.ConnectionError as exc: - _handle_get_page_fail(link, "connection error: %s" % exc, url) + _handle_get_page_fail(link, "connection error: %s" % exc) except requests.Timeout: - _handle_get_page_fail(link, "timed out", url) + _handle_get_page_fail(link, "timed out") else: return HTMLPage(resp.content, resp.url, resp.headers) + return None class PackageFinder(object): @@ -233,11 +261,22 @@ class PackageFinder(object): packages, by reading pages and looking for appropriate links. """ - def __init__(self, find_links, index_urls, allow_all_prereleases=False, - trusted_hosts=None, process_dependency_links=False, - session=None, format_control=None, platform=None, - versions=None, abi=None, implementation=None, - prefer_binary=False): + def __init__( + self, + find_links, # type: List[str] + index_urls, # type: List[str] + allow_all_prereleases=False, # type: bool + trusted_hosts=None, # type: Optional[Iterable[str]] + process_dependency_links=False, # type: bool + session=None, # type: Optional[PipSession] + format_control=None, # type: Optional[FormatControl] + platform=None, # type: Optional[str] + versions=None, # type: Optional[List[str]] + abi=None, # type: Optional[str] + implementation=None, # type: Optional[str] + prefer_binary=False # type: bool + ): + # type: (...) -> None """Create a PackageFinder. :param format_control: A FormatControl object or None. Used to control @@ -266,7 +305,7 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, # it and if it exists, use the normalized version. # This is deliberately conservative - it might be fine just to # blindly normalize anything starting with a ~... 
- self.find_links = [] + self.find_links = [] # type: List[str] for link in find_links: if link.startswith('~'): new_link = normalize_path(link) @@ -275,10 +314,10 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, self.find_links.append(link) self.index_urls = index_urls - self.dependency_links = [] + self.dependency_links = [] # type: List[str] # These are boring links that have already been logged somehow: - self.logged_links = set() + self.logged_links = set() # type: Set[Link] self.format_control = format_control or FormatControl(set(), set()) @@ -286,7 +325,7 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, self.secure_origins = [ ("*", host, "*") for host in (trusted_hosts if trusted_hosts else []) - ] + ] # type: List[SecureOrigin] # Do we want to allow _all_ pre-releases? self.allow_all_prereleases = allow_all_prereleases @@ -322,6 +361,7 @@ def __init__(self, find_links, index_urls, allow_all_prereleases=False, break def get_formatted_locations(self): + # type: () -> str lines = [] if self.index_urls and self.index_urls != [PyPI.simple_url]: lines.append( @@ -335,6 +375,7 @@ def get_formatted_locations(self): return "\n".join(lines) def add_dependency_links(self, links): + # type: (Iterable[str]) -> None # FIXME: this shouldn't be global list this, it should only # apply to requirements of the package that specifies the # dependency_links value @@ -351,6 +392,7 @@ def add_dependency_links(self, links): @staticmethod def _sort_locations(locations, expand_dir=False): + # type: (Sequence[str], bool) -> Tuple[List[str], List[str]] """ Sort locations into "files" (archives) and "urls", and return a pair of lists (files,urls) @@ -407,6 +449,7 @@ def sort_path(path): return files, urls def _candidate_sort_key(self, candidate): + # type: (InstallationCandidate) -> CandidateSortingKey """ Function used to generate link sort key for link tuples. The greater the return value, the more preferred it is. 
@@ -421,7 +464,7 @@ def _candidate_sort_key(self, candidate): with the same version, would have to be considered equal """ support_num = len(self.valid_tags) - build_tag = tuple() + build_tag = tuple() # type: BuildTag binary_preference = 0 if candidate.location.is_wheel: # can raise InvalidWheelFilename @@ -443,6 +486,7 @@ def _candidate_sort_key(self, candidate): return (binary_preference, candidate.version, build_tag, pri) def _validate_secure_origin(self, logger, location): + # type: (Logger, Link) -> bool # Determine if this url used a secure transport mechanism parsed = urllib_parse.urlparse(str(location)) origin = (parsed.scheme, parsed.hostname, parsed.port) @@ -474,7 +518,7 @@ def _validate_secure_origin(self, logger, location): network = ipaddress.ip_network( secure_origin[1] if isinstance(secure_origin[1], six.text_type) - else secure_origin[1].decode("utf8") + else secure_origin[1].decode("utf8") # type: ignore ) except ValueError: # We don't have both a valid address or a valid network, so @@ -514,6 +558,7 @@ def _validate_secure_origin(self, logger, location): return False def _get_index_urls_locations(self, project_name): + # type: (str) -> List[str] """Returns the locations found via self.index_urls Checks the url_name on the main (first in the list) index and @@ -536,6 +581,7 @@ def mkurl_pypi_url(url): return [mkurl_pypi_url(url) for url in self.index_urls] def find_all_candidates(self, project_name): + # type: (str) -> List[Optional[InstallationCandidate]] """Find all available InstallationCandidate for project_name This checks index_urls, find_links and dependency_links. @@ -619,6 +665,7 @@ def find_all_candidates(self, project_name): ) def find_requirement(self, req, upgrade): + # type: (InstallRequirement, bool) -> Optional[Link] """Try to find a Link matching req Expects req, an InstallRequirement and upgrade, a boolean @@ -656,7 +703,8 @@ def find_requirement(self, req, upgrade): best_candidate = None if req.satisfied_by is not None: - installed_version = parse_version(req.satisfied_by.version) + # type error fixed in mypy==0.641, remove after update + installed_version = parse_version(req.satisfied_by.version) # type: ignore # noqa: E501 else: installed_version = None @@ -718,11 +766,12 @@ def find_requirement(self, req, upgrade): return best_candidate.location def _get_pages(self, locations, project_name): + # type: (Iterable[Link], str) -> Iterable[HTMLPage] """ Yields (page, page_url) from the given locations, skipping locations that have errors. """ - seen = set() + seen = set() # type: Set[Link] for location in locations: if location in seen: continue @@ -737,12 +786,13 @@ def _get_pages(self, locations, project_name): _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') def _sort_links(self, links): + # type: (Iterable[Link]) -> List[Link] """ Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates """ eggs, no_eggs = [], [] - seen = set() + seen = set() # type: Set[Link] for link in links: if link not in seen: seen.add(link) @@ -752,7 +802,12 @@ def _sort_links(self, links): no_eggs.append(link) return no_eggs + eggs - def _package_versions(self, links, search): + def _package_versions( + self, + links, # type: Iterable[Link] + search # type: Search + ): + # type: (...) 
-> List[Optional[InstallationCandidate]] result = [] for link in self._sort_links(links): v = self._link_package_versions(link, search) @@ -761,11 +816,13 @@ def _package_versions(self, links, search): return result def _log_skipped_link(self, link, reason): + # type: (Link, str) -> None if link not in self.logged_links: logger.debug('Skipping link %s; %s', link, reason) self.logged_links.add(link) def _link_package_versions(self, link, search): + # type: (Link, Search) -> Optional[InstallationCandidate] """Return an InstallationCandidate or None""" version = None if link.egg_fragment: @@ -775,35 +832,35 @@ def _link_package_versions(self, link, search): egg_info, ext = link.splitext() if not ext: self._log_skipped_link(link, 'not a file') - return + return None if ext not in SUPPORTED_EXTENSIONS: self._log_skipped_link( link, 'unsupported archive format: %s' % ext, ) - return + return None if "binary" not in search.formats and ext == wheel_ext: self._log_skipped_link( link, 'No binaries permitted for %s' % search.supplied, ) - return + return None if "macosx10" in link.path and ext == '.zip': self._log_skipped_link(link, 'macosx10 one') - return + return None if ext == wheel_ext: try: wheel = Wheel(link.filename) except InvalidWheelFilename: self._log_skipped_link(link, 'invalid wheel filename') - return + return None if canonicalize_name(wheel.name) != search.canonical: self._log_skipped_link( link, 'wrong project name (not %s)' % search.supplied) - return + return None if not wheel.supported(self.valid_tags): self._log_skipped_link( link, 'it is not compatible with this Python') - return + return None version = wheel.version @@ -812,14 +869,14 @@ def _link_package_versions(self, link, search): self._log_skipped_link( link, 'No sources permitted for %s' % search.supplied, ) - return + return None if not version: version = _egg_info_matches(egg_info, search.canonical) if not version: self._log_skipped_link( link, 'Missing project version for %s' % search.supplied) - return + return None match = self._py_version_re.search(version) if match: @@ -828,7 +885,7 @@ def _link_package_versions(self, link, search): if py_version != sys.version[:3]: self._log_skipped_link( link, 'Python version is incorrect') - return + return None try: support_this_python = check_requires_python(link.requires_python) except specifiers.InvalidSpecifier: @@ -840,13 +897,14 @@ def _link_package_versions(self, link, search): logger.debug("The package %s is incompatible with the python " "version in use. Acceptable python versions are: %s", link, link.requires_python) - return + return None logger.debug('Found link %s, version: %s', link, version) return InstallationCandidate(search.supplied, version, link) def _find_name_version_sep(egg_info, canonical_name): + # type: (str, str) -> int """Find the separator's index based on the package's canonical name. `egg_info` must be an egg info string for the given package, and @@ -872,6 +930,7 @@ def _find_name_version_sep(egg_info, canonical_name): def _egg_info_matches(egg_info, canonical_name): + # type: (str, str) -> Optional[str] """Pull the version part out of a string. :param egg_info: The string to parse. E.g. foo-2.1 @@ -921,16 +980,19 @@ def _get_encoding_from_headers(headers): def _clean_link(url): + # type: (str) -> str """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in the link, it will be rewritten to %20 (while not over-quoting % or other characters).""" - return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url) + # type error fixed in mypy==0.641, remove after update + return _CLEAN_LINK_RE.sub(lambda match: '%%%2x' % ord(match.group(0)), url) # type: ignore # noqa: E501 class HTMLPage(object): """Represents one page, along with its URL""" def __init__(self, content, url, headers=None): + # type: (bytes, str, MutableMapping[str, str]) -> None self.content = content self.url = url self.headers = headers @@ -939,6 +1001,7 @@ def __str__(self): return redact_password_from_url(self.url) def iter_links(self): + # type: () -> Iterable[Link] """Yields all links in the page""" document = html5lib.parse( self.content, diff --git a/src/pip/_internal/models/candidate.py b/src/pip/_internal/models/candidate.py index c736de6c9af..4475458ab3d 100644 --- a/src/pip/_internal/models/candidate.py +++ b/src/pip/_internal/models/candidate.py @@ -1,6 +1,12 @@ from pip._vendor.packaging.version import parse as parse_version from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from pip._vendor.packaging.version import _BaseVersion # noqa: F401 + from pip._internal.models.link import Link # noqa: F401 + from typing import Any, Union # noqa: F401 class InstallationCandidate(KeyBasedCompareMixin): @@ -8,8 +14,9 @@ class InstallationCandidate(KeyBasedCompareMixin): """ def __init__(self, project, version, location): + # type: (Any, str, Link) -> None self.project = project - self.version = parse_version(version) + self.version = parse_version(version) # type: _BaseVersion self.location = location super(InstallationCandidate, self).__init__( @@ -18,6 +25,7 @@ def __init__(self, project, version, location): ) def __repr__(self): + # type: () -> str return "".format( self.project, self.version, self.location, ) diff --git a/src/pip/_internal/models/index.py b/src/pip/_internal/models/index.py index 870a315ed1d..ead1efbda76 100644 --- a/src/pip/_internal/models/index.py +++ b/src/pip/_internal/models/index.py @@ -6,6 +6,7 @@ class PackageIndex(object): """ def __init__(self, url, file_storage_domain): + # type: (str, str) -> None super(PackageIndex, self).__init__() self.url = url self.netloc = urllib_parse.urlsplit(url).netloc @@ -18,6 +19,7 @@ def __init__(self, url, file_storage_domain): self.file_storage_domain = file_storage_domain def _url_for_path(self, path): + # type: (str) -> str return urllib_parse.urljoin(self.url, path) diff --git a/src/pip/_internal/models/link.py b/src/pip/_internal/models/link.py index a7b803f6ee4..73b4ee9af1f 100644 --- a/src/pip/_internal/models/link.py +++ b/src/pip/_internal/models/link.py @@ -6,14 +6,20 @@ from pip._internal.download import path_to_url from pip._internal.utils.misc import redact_password_from_url, splitext from pip._internal.utils.models import KeyBasedCompareMixin +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.wheel import wheel_ext +if MYPY_CHECK_RUNNING: + from typing import Optional, Tuple, Union, Text # noqa: F401 + from pip._internal.index import HTMLPage # noqa: F401 + class Link(KeyBasedCompareMixin): """Represents a parsed link from a Package Index's simple URL """ def __init__(self, url, comes_from=None, requires_python=None): + # type: (str, Optional[Union[str, HTMLPage]], Optional[str]) -> None """ url: url of the resource pointed to (href of 
the link) @@ -54,6 +60,7 @@ def __repr__(self): @property def filename(self): + # type: () -> str _, netloc, path, _, _ = urllib_parse.urlsplit(self.url) name = posixpath.basename(path.rstrip('/')) or netloc name = urllib_parse.unquote(name) @@ -62,25 +69,31 @@ def filename(self): @property def scheme(self): + # type: () -> str return urllib_parse.urlsplit(self.url)[0] @property def netloc(self): + # type: () -> str return urllib_parse.urlsplit(self.url)[1] @property def path(self): + # type: () -> str return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2]) def splitext(self): + # type: () -> Tuple[str, str] return splitext(posixpath.basename(self.path.rstrip('/'))) @property def ext(self): + # type: () -> str return self.splitext()[1] @property def url_without_fragment(self): + # type: () -> str scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url) return urllib_parse.urlunsplit((scheme, netloc, path, query, None)) @@ -88,6 +101,7 @@ def url_without_fragment(self): @property def egg_fragment(self): + # type: () -> Optional[str] match = self._egg_fragment_re.search(self.url) if not match: return None @@ -97,6 +111,7 @@ def egg_fragment(self): @property def subdirectory_fragment(self): + # type: () -> Optional[str] match = self._subdirectory_fragment_re.search(self.url) if not match: return None @@ -108,6 +123,7 @@ def subdirectory_fragment(self): @property def hash(self): + # type: () -> Optional[str] match = self._hash_re.search(self.url) if match: return match.group(2) @@ -115,6 +131,7 @@ def hash(self): @property def hash_name(self): + # type: () -> Optional[str] match = self._hash_re.search(self.url) if match: return match.group(1) @@ -122,14 +139,17 @@ def hash_name(self): @property def show_url(self): + # type: () -> Optional[str] return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0]) @property def is_wheel(self): + # type: () -> bool return self.ext == wheel_ext @property def is_artifact(self): + # type: () -> bool """ Determines if this points to an actual artifact (e.g. a tarball) or if it points to an "abstract" thing like a path or a VCS location. diff --git a/src/pip/_internal/operations/freeze.py b/src/pip/_internal/operations/freeze.py index 4c2f348f4dd..e7cf77a00a2 100644 --- a/src/pip/_internal/operations/freeze.py +++ b/src/pip/_internal/operations/freeze.py @@ -17,17 +17,31 @@ from pip._internal.utils.misc import ( dist_is_editable, get_installed_distributions, ) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import (Iterator, Optional, List, # noqa: F401 + Container, Set, Dict, Tuple, Iterable, Union) + from pip._internal.cache import WheelCache # noqa: F401 + from pip._vendor.pkg_resources import (Distribution, # noqa: F401 + Requirement) + logger = logging.getLogger(__name__) def freeze( - requirement=None, - find_links=None, local_only=None, user_only=None, skip_regex=None, - isolated=False, - wheel_cache=None, - exclude_editable=False, - skip=()): + requirement=None, # type: Optional[List[str]] + find_links=None, # type: Optional[List[str]] + local_only=None, # type: Optional[bool] + user_only=None, # type: Optional[bool] + skip_regex=None, # type: Optional[str] + isolated=False, # type: bool + wheel_cache=None, # type: Optional[WheelCache] + exclude_editable=False, # type: bool + skip=() # type: Container[str] +): + # type: (...) 
-> Iterator[str] find_links = find_links or [] skip_match = None @@ -36,7 +50,7 @@ def freeze( for link in find_links: yield '-f %s' % link - installations = {} + installations = {} # type: Dict[str, FrozenRequirement] for dist in get_installed_distributions(local_only=local_only, skip=(), user_only=user_only): @@ -57,10 +71,10 @@ def freeze( # should only be emitted once, even if the same option is in multiple # requirements files, so we need to keep track of what has been emitted # so that we don't emit it again if it's seen again - emitted_options = set() + emitted_options = set() # type: Set[str] # keep track of which files a requirement is in so that we can # give an accurate warning if a requirement appears multiple times. - req_files = collections.defaultdict(list) + req_files = collections.defaultdict(list) # type: Dict[str, List[str]] for req_file_path in requirement: with open(req_file_path) as req_file: for line in req_file: @@ -144,6 +158,7 @@ def freeze( def get_requirement_info(dist): + # type: (Distribution) -> Tuple[Optional[Union[str, Requirement]], bool, List[str]] # noqa: E501 """ Compute and return values (req, editable, comments) for use in FrozenRequirement.from_dist(). @@ -197,6 +212,7 @@ def get_requirement_info(dist): class FrozenRequirement(object): def __init__(self, name, req, editable, comments=()): + # type: (str, Union[str, Requirement], bool, Iterable[str]) -> None self.name = name self.req = req self.editable = editable @@ -204,6 +220,7 @@ def __init__(self, name, req, editable, comments=()): @classmethod def from_dist(cls, dist): + # type: (Distribution) -> FrozenRequirement req, editable, comments = get_requirement_info(dist) if req is None: req = dist.as_requirement() diff --git a/src/pip/_internal/operations/prepare.py b/src/pip/_internal/operations/prepare.py index e0585db872b..a9e3f37b3cf 100644 --- a/src/pip/_internal/operations/prepare.py +++ b/src/pip/_internal/operations/prepare.py @@ -18,12 +18,21 @@ from pip._internal.utils.hashes import MissingHashes from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import display_path, normalize_path +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.vcs import vcs +if MYPY_CHECK_RUNNING: + from typing import Any, Optional # noqa: F401 + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from pip._internal.index import PackageFinder # noqa: F401 + from pip._internal.download import PipSession # noqa: F401 + from pip._internal.req.req_tracker import RequirementTracker # noqa: F401 + logger = logging.getLogger(__name__) def make_abstract_dist(req): + # type: (InstallRequirement) -> DistAbstraction """Factory to make an abstract dist object. 
Preconditions: Either an editable req with a source_dir, or satisfied_by or @@ -59,13 +68,16 @@ class DistAbstraction(object): """ def __init__(self, req): - self.req = req + # type: (InstallRequirement) -> None + self.req = req # type: InstallRequirement def dist(self, finder): + # type: (PackageFinder) -> Any """Return a setuptools Dist object.""" raise NotImplementedError(self.dist) def prep_for_dist(self, finder, build_isolation): + # type: (PackageFinder, bool) -> Any """Ensure that we can get a Dist for this requirement.""" raise NotImplementedError(self.dist) @@ -73,10 +85,12 @@ def prep_for_dist(self, finder, build_isolation): class IsWheel(DistAbstraction): def dist(self, finder): + # type: (PackageFinder) -> pkg_resources.Distribution return list(pkg_resources.find_distributions( self.req.source_dir))[0] def prep_for_dist(self, finder, build_isolation): + # type: (PackageFinder, bool) -> Any # FIXME:https://github.com/pypa/pip/issues/1112 pass @@ -84,6 +98,7 @@ def prep_for_dist(self, finder, build_isolation): class IsSDist(DistAbstraction): def dist(self, finder): + # type: (PackageFinder) -> pkg_resources.Distribution dist = self.req.get_dist() # FIXME: shouldn't be globally added. if finder and dist.has_metadata('dependency_links.txt'): @@ -93,6 +108,7 @@ def dist(self, finder): return dist def prep_for_dist(self, finder, build_isolation): + # type: (PackageFinder, bool) -> None # Prepare for building. We need to: # 1. Load pyproject.toml (if it exists) # 2. Set up the build environment @@ -110,7 +126,8 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): if should_isolate: # Isolate in a BuildEnvironment and install the build-time # requirements. - self.req.build_env = BuildEnvironment() + # type depends on other stubs, remove ignore later + self.req.build_env = BuildEnvironment() # type: ignore self.req.build_env.install_requirements( finder, self.req.pyproject_requires, 'overlay', "Installing build dependencies" @@ -137,7 +154,8 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): with self.req.build_env: # We need to have the env active when calling the hook. self.req.spin_message = "Getting requirements to build wheel" - reqs = self.req.pep517_backend.get_requires_for_build_wheel() + # type depends on other stubs, remove ignore later + reqs = self.req.pep517_backend.get_requires_for_build_wheel() # type: ignore # noqa: E501 conflicting, missing = self.req.build_env.check_requirements(reqs) if conflicting: _raise_conflicts("the backend dependencies", conflicting) @@ -153,9 +171,11 @@ def _raise_conflicts(conflicting_with, conflicting_reqs): class Installed(DistAbstraction): def dist(self, finder): + # type: (PackageFinder) -> pkg_resources.Distribution return self.req.satisfied_by def prep_for_dist(self, finder, build_isolation): + # type: (PackageFinder, bool) -> Any pass @@ -163,8 +183,17 @@ class RequirementPreparer(object): """Prepares a Requirement """ - def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir, - progress_bar, build_isolation, req_tracker): + def __init__( + self, + build_dir, # type: str + download_dir, # type: Optional[str] + src_dir, # type: str + wheel_download_dir, # type: Optional[str] + progress_bar, # type: str + build_isolation, # type: bool + req_tracker # type: RequirementTracker + ): + # type: (...) 
-> None super(RequirementPreparer, self).__init__() self.src_dir = src_dir @@ -194,6 +223,7 @@ def __init__(self, build_dir, download_dir, src_dir, wheel_download_dir, @property def _download_should_save(self): + # type: () -> bool # TODO: Modify to reduce indentation needed if self.download_dir: self.download_dir = expanduser(self.download_dir) @@ -206,8 +236,15 @@ def _download_should_save(self): % display_path(self.download_dir)) return False - def prepare_linked_requirement(self, req, session, finder, - upgrade_allowed, require_hashes): + def prepare_linked_requirement( + self, + req, # type: InstallRequirement + session, # type: PipSession + finder, # type: PackageFinder + upgrade_allowed, # type: bool + require_hashes # type: bool + ): + # type: (...) -> DistAbstraction """Prepare a requirement that would be obtained from req.link """ # TODO: Breakup into smaller functions @@ -325,8 +362,14 @@ def prepare_linked_requirement(self, req, session, finder, req.archive(self.download_dir) return abstract_dist - def prepare_editable_requirement(self, req, require_hashes, use_user_site, - finder): + def prepare_editable_requirement( + self, + req, # type: InstallRequirement + require_hashes, # type: bool + use_user_site, # type: bool + finder # type: PackageFinder + ): + # type: (...) -> DistAbstraction """Prepare an editable requirement """ assert req.editable, "cannot prepare a non-editable req as editable" @@ -354,6 +397,7 @@ def prepare_editable_requirement(self, req, require_hashes, use_user_site, return abstract_dist def prepare_installed_requirement(self, req, require_hashes, skip_reason): + # type: (InstallRequirement, bool, Optional[str]) -> DistAbstraction """Prepare an already-installed requirement """ assert req.satisfied_by, "req should have been satisfied but isn't" diff --git a/src/pip/_internal/pep425tags.py b/src/pip/_internal/pep425tags.py index 7062d7f5fdd..76ee06303aa 100644 --- a/src/pip/_internal/pep425tags.py +++ b/src/pip/_internal/pep425tags.py @@ -12,6 +12,11 @@ import pip._internal.utils.glibc from pip._internal.utils.compat import get_extension_suffixes +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import (Tuple, Callable, List, # noqa: F401 + Optional, Union, Dict) logger = logging.getLogger(__name__) @@ -19,6 +24,7 @@ def get_config_var(var): + # type: (str) -> Optional[str] try: return sysconfig.get_config_var(var) except IOError as e: # Issue #1074 @@ -27,6 +33,7 @@ def get_config_var(var): def get_abbr_impl(): + # type: () -> str """Return abbreviated implementation name.""" if hasattr(sys, 'pypy_version_info'): pyimpl = 'pp' @@ -40,6 +47,7 @@ def get_abbr_impl(): def get_impl_ver(): + # type: () -> str """Return implementation version.""" impl_ver = get_config_var("py_version_nodot") if not impl_ver or get_abbr_impl() == 'pp': @@ -48,17 +56,21 @@ def get_impl_ver(): def get_impl_version_info(): + # type: () -> Tuple[int, ...] """Return sys.version_info-like tuple for use in decrementing the minor version.""" if get_abbr_impl() == 'pp': # as per https://github.com/pypa/pip/issues/2882 - return (sys.version_info[0], sys.pypy_version_info.major, - sys.pypy_version_info.minor) + # attrs exist only on pypy + return (sys.version_info[0], + sys.pypy_version_info.major, # type: ignore + sys.pypy_version_info.minor) # type: ignore else: return sys.version_info[0], sys.version_info[1] def get_impl_tag(): + # type: () -> str """ Returns the Tag for this specific implementation. 
""" @@ -66,6 +78,7 @@ def get_impl_tag(): def get_flag(var, fallback, expected=True, warn=True): + # type: (str, Callable[..., bool], Union[bool, int], bool) -> bool """Use a fallback method for determining SOABI flags if the needed config var is unset or unavailable.""" val = get_config_var(var) @@ -78,6 +91,7 @@ def get_flag(var, fallback, expected=True, warn=True): def get_abi_tag(): + # type: () -> Optional[str] """Return the ABI tag based on SOABI (if available) or emulate SOABI (CPython 2, PyPy).""" soabi = get_config_var('SOABI') @@ -112,10 +126,12 @@ def get_abi_tag(): def _is_running_32bit(): + # type: () -> bool return sys.maxsize == 2147483647 def get_platform(): + # type: () -> str """Return our platform name 'win32', 'linux_x86_64'""" if sys.platform == 'darwin': # distutils.util.get_platform() returns the release based on the value @@ -142,6 +158,7 @@ def get_platform(): def is_manylinux1_compatible(): + # type: () -> bool # Only Linux, and only x86-64 / i686 if get_platform() not in {"linux_x86_64", "linux_i686"}: return False @@ -159,6 +176,7 @@ def is_manylinux1_compatible(): def is_manylinux2010_compatible(): + # type: () -> bool # Only Linux, and only x86-64 / i686 if get_platform() not in {"linux_x86_64", "linux_i686"}: return False @@ -176,12 +194,14 @@ def is_manylinux2010_compatible(): def get_darwin_arches(major, minor, machine): + # type: (int, int, str) -> List[str] """Return a list of supported arches (including group arches) for the given major, minor and machine architecture of an macOS machine. """ arches = [] def _supports_arch(major, minor, arch): + # type: (int, int, str) -> bool # Looking at the application support for macOS versions in the chart # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears # our timeline looks roughly like: @@ -222,7 +242,7 @@ def _supports_arch(major, minor, arch): ("intel", ("x86_64", "i386")), ("fat64", ("x86_64", "ppc64")), ("fat32", ("x86_64", "i386", "ppc")), - ]) + ]) # type: Dict[str, Tuple[str, ...]] if _supports_arch(major, minor, machine): arches.append(machine) @@ -236,8 +256,14 @@ def _supports_arch(major, minor, arch): return arches -def get_supported(versions=None, noarch=False, platform=None, - impl=None, abi=None): +def get_supported( + versions=None, # type: Optional[List[str]] + noarch=False, # type: bool + platform=None, # type: Optional[str] + impl=None, # type: Optional[str] + abi=None # type: Optional[str] +): + # type: (...) -> List[Tuple[str, str, str]] """Return a list of supported tags for each version specified in `versions`. @@ -263,7 +289,7 @@ def get_supported(versions=None, noarch=False, platform=None, impl = impl or get_abbr_impl() - abis = [] + abis = [] # type: List[str] abi = abi or get_abi_tag() if abi: @@ -285,11 +311,13 @@ def get_supported(versions=None, noarch=False, platform=None, # support macosx-10.6-intel on macosx-10.9-x86_64 match = _osx_arch_pat.match(arch) if match: - name, major, minor, actual_arch = match.groups() + # # https://github.com/python/mypy/issues/1174 + name, major, minor, actual_arch = match.groups() # type: ignore # noqa: E501 tpl = '{}_{}_%i_%s'.format(name, major) arches = [] for m in reversed(range(int(minor) + 1)): - for a in get_darwin_arches(int(major), m, actual_arch): + # https://github.com/python/mypy/issues/1174 + for a in get_darwin_arches(int(major), m, actual_arch): # type: ignore # noqa: E501 arches.append(tpl % (m, a)) else: # arch pattern didn't match (?!) 
diff --git a/src/pip/_internal/pyproject.py b/src/pip/_internal/pyproject.py
index c5cda41ee90..ddbc1869f5f 100644
--- a/src/pip/_internal/pyproject.py
+++ b/src/pip/_internal/pyproject.py
@@ -6,16 +6,27 @@
 from pip._vendor import pytoml, six
 
 from pip._internal.exceptions import InstallationError
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import Any, Tuple, Optional, List  # noqa: F401
 
 
 def _is_list_of_str(obj):
+    # type: (Any) -> bool
     return (
         isinstance(obj, list) and
         all(isinstance(item, six.string_types) for item in obj)
     )
 
 
-def load_pyproject_toml(use_pep517, pyproject_toml, setup_py, req_name):
+def load_pyproject_toml(
+    use_pep517,  # type: Optional[bool]
+    pyproject_toml,  # type: str
+    setup_py,  # type: str
+    req_name  # type: str
+):
+    # type: (...) -> Optional[Tuple[List[str], str, List[str]]]
     """Load the pyproject.toml file.
 
     Parameters:
@@ -123,7 +134,7 @@ def load_pyproject_toml(use_pep517, pyproject_toml, setup_py, req_name):
         ))
 
     backend = build_system.get("build-backend")
-    check = []
+    check = []  # type: List[str]
     if backend is None:
         # If the user didn't specify a backend, we assume they want to use
         # the setuptools backend. But we can't be sure they have included
diff --git a/src/pip/_internal/req/__init__.py b/src/pip/_internal/req/__init__.py
index b270498e2a1..327cedd6e5c 100644
--- a/src/pip/_internal/req/__init__.py
+++ b/src/pip/_internal/req/__init__.py
@@ -6,7 +6,10 @@
 from .req_set import RequirementSet
 from .req_file import parse_requirements
 from pip._internal.utils.logging import indent_log
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
 
+if MYPY_CHECK_RUNNING:
+    from typing import List, Iterable, Sequence  # noqa: F401
 
 __all__ = [
     "RequirementSet", "InstallRequirement",
@@ -16,8 +19,13 @@
 logger = logging.getLogger(__name__)
 
 
-def install_given_reqs(to_install, install_options, global_options=(),
-                       *args, **kwargs):
+def install_given_reqs(
+    to_install,  # type: List[InstallRequirement]
+    install_options,  # type: List[str]
+    global_options=(),  # type: Sequence[str]
+    *args, **kwargs
+):
+    # type: (...) -> List[InstallRequirement]
     """
     Install everything in the given list.
diff --git a/src/pip/_internal/req/constructors.py b/src/pip/_internal/req/constructors.py
index 640efd453f8..3fd08695a91 100644
--- a/src/pip/_internal/req/constructors.py
+++ b/src/pip/_internal/req/constructors.py
@@ -25,9 +25,16 @@
 from pip._internal.models.link import Link
 from pip._internal.req.req_install import InstallRequirement
 from pip._internal.utils.misc import is_installable_dir
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
 from pip._internal.vcs import vcs
 from pip._internal.wheel import Wheel
 
+if MYPY_CHECK_RUNNING:
+    from typing import (Optional, Tuple, Set, Any,  # noqa: F401
+                        Mapping, Union, Text)
+    from pip._internal.cache import WheelCache  # noqa: F401
+
+
 __all__ = [
     "install_req_from_editable", "install_req_from_line",
     "parse_editable"
@@ -38,6 +45,7 @@
 
 
 def _strip_extras(path):
+    # type: (str) -> Tuple[str, Optional[str]]
     m = re.match(r'^(.+)(\[[^\]]+\])$', path)
     extras = None
     if m:
@@ -50,6 +58,7 @@ def _strip_extras(path):
 
 
 def parse_editable(editable_req):
+    # type: (str) -> Tuple[Optional[str], str, Optional[Set[str]]]
     """Parses an editable requirement into:
         - a requirement name
         - an URL
@@ -115,6 +124,7 @@ def parse_editable(editable_req):
 
 
 def deduce_helpful_msg(req):
+    # type: (str) -> str
     """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.
 
@@ -135,7 +145,7 @@ def deduce_helpful_msg(req):
                    " the packages specified within it."
         except RequirementParseError:
             logger.debug("Cannot parse '%s' as requirements \
-            file" % (req), exc_info=1)
+            file" % (req), exc_info=True)
     else:
         msg += " File '%s' does not exist." % (req)
     return msg
@@ -145,9 +155,15 @@ def deduce_helpful_msg(req):
 
 
 def install_req_from_editable(
-    editable_req, comes_from=None, use_pep517=None, isolated=False,
-    options=None, wheel_cache=None, constraint=False
+    editable_req,  # type: str
+    comes_from=None,  # type: Optional[str]
+    use_pep517=None,  # type: Optional[bool]
+    isolated=False,  # type: bool
+    options=None,  # type: Optional[Mapping[Text, Any]]
+    wheel_cache=None,  # type: Optional[WheelCache]
+    constraint=False  # type: bool
 ):
+    # type: (...) -> InstallRequirement
     name, url, extras_override = parse_editable(editable_req)
     if url.startswith('file:'):
         source_dir = url_to_path(url)
@@ -175,9 +191,15 @@ def install_req_from_editable(
 
 
 def install_req_from_line(
-    name, comes_from=None, use_pep517=None, isolated=False, options=None,
-    wheel_cache=None, constraint=False
+    name,  # type: str
+    comes_from=None,  # type: Optional[Union[str, InstallRequirement]]
+    use_pep517=None,  # type: Optional[bool]
+    isolated=False,  # type: bool
+    options=None,  # type: Optional[Mapping[Text, Any]]
+    wheel_cache=None,  # type: Optional[WheelCache]
+    constraint=False  # type: bool
 ):
+    # type: (...) -> InstallRequirement
     """Creates an InstallRequirement from a name, which might be a
     requirement, directory containing 'setup.py', filename, or URL.
     """
@@ -186,24 +208,24 @@ def install_req_from_line(
     else:
         marker_sep = ';'
     if marker_sep in name:
-        name, markers = name.split(marker_sep, 1)
-        markers = markers.strip()
-        if not markers:
+        name, markers_as_string = name.split(marker_sep, 1)
+        markers_as_string = markers_as_string.strip()
+        if not markers_as_string:
             markers = None
         else:
-            markers = Marker(markers)
+            markers = Marker(markers_as_string)
     else:
         markers = None
     name = name.strip()
-    req = None
+    req_as_string = None
     path = os.path.normpath(os.path.abspath(name))
     link = None
-    extras = None
+    extras_as_string = None
 
     if is_url(name):
         link = Link(name)
     else:
-        p, extras = _strip_extras(path)
+        p, extras_as_string = _strip_extras(path)
         looks_like_dir = os.path.isdir(p) and (
             os.path.sep in name or
             (os.path.altsep is not None and os.path.altsep in name) or
@@ -234,34 +256,37 @@ def install_req_from_line(
             # wheel file
             if link.is_wheel:
                 wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
-                req = "%s==%s" % (wheel.name, wheel.version)
+                req_as_string = "%s==%s" % (wheel.name, wheel.version)
             else:
                 # set the req to the egg fragment. when it's not there, this
                 # will become an 'unnamed' requirement
-                req = link.egg_fragment
+                req_as_string = link.egg_fragment
 
     # a requirement specifier
     else:
-        req = name
+        req_as_string = name
 
-    if extras:
-        extras = Requirement("placeholder" + extras.lower()).extras
+    if extras_as_string:
+        extras = Requirement("placeholder" + extras_as_string.lower()).extras
     else:
         extras = ()
-    if req is not None:
+    if req_as_string is not None:
         try:
-            req = Requirement(req)
+            req = Requirement(req_as_string)
         except InvalidRequirement:
-            if os.path.sep in req:
+            if os.path.sep in req_as_string:
                 add_msg = "It looks like a path."
-                add_msg += deduce_helpful_msg(req)
-            elif '=' in req and not any(op in req for op in operators):
+                add_msg += deduce_helpful_msg(req_as_string)
+            elif ('=' in req_as_string and
+                  not any(op in req_as_string for op in operators)):
                 add_msg = "= is not a valid operator. Did you mean == ?"
             else:
                 add_msg = ""
             raise InstallationError(
-                "Invalid requirement: '%s'\n%s" % (req, add_msg)
+                "Invalid requirement: '%s'\n%s" % (req_as_string, add_msg)
             )
+    else:
+        req = None
 
     return InstallRequirement(
         req, comes_from, link=link, markers=markers,
@@ -273,12 +298,16 @@ def install_req_from_line(
     )
 
 
-def install_req_from_req(
-    req, comes_from=None, isolated=False, wheel_cache=None,
-    use_pep517=None
+def install_req_from_req_string(
+    req_string,  # type: str
+    comes_from=None,  # type: Optional[InstallRequirement]
+    isolated=False,  # type: bool
+    wheel_cache=None,  # type: Optional[WheelCache]
+    use_pep517=None  # type: Optional[bool]
 ):
+    # type: (...) -> InstallRequirement
     try:
-        req = Requirement(req)
+        req = Requirement(req_string)
     except InvalidRequirement:
         raise InstallationError("Invalid requirement: '%s'" % req)
diff --git a/src/pip/_internal/req/req_file.py b/src/pip/_internal/req/req_file.py
index b332f6853a1..6e7aa9d67da 100644
--- a/src/pip/_internal/req/req_file.py
+++ b/src/pip/_internal/req/req_file.py
@@ -19,6 +19,15 @@
 from pip._internal.req.constructors import (
     install_req_from_editable, install_req_from_line,
 )
+from pip._internal.utils.typing import MYPY_CHECK_RUNNING
+
+if MYPY_CHECK_RUNNING:
+    from typing import (Iterator, Tuple, Optional,  # noqa: F401
+                        List, Callable, Text)
+    from pip._internal.req import InstallRequirement  # noqa: F401
+    from pip._internal.cache import WheelCache  # noqa: F401
+    from pip._internal.index import PackageFinder  # noqa: F401
+    from pip._internal.download import PipSession  # noqa: F401
 
 __all__ = ['parse_requirements']
 
@@ -46,22 +55,30 @@
     cmdoptions.process_dependency_links,
     cmdoptions.trusted_host,
     cmdoptions.require_hashes,
-]
+]  # type: List[Callable[..., optparse.Option]]
 
 # options to be passed to requirements
 SUPPORTED_OPTIONS_REQ = [
     cmdoptions.install_options,
     cmdoptions.global_options,
     cmdoptions.hash,
-]
+]  # type: List[Callable[..., optparse.Option]]
 
 # the 'dest' string values
 SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
 
 
-def parse_requirements(filename, finder=None, comes_from=None, options=None,
-                       session=None, constraint=False, wheel_cache=None,
-                       use_pep517=None):
+def parse_requirements(
+    filename,  # type: str
+    finder=None,  # type: Optional[PackageFinder]
+    comes_from=None,  # type: Optional[str]
+    options=None,  # type: Optional[optparse.Values]
+    session=None,  # type: Optional[PipSession]
+    constraint=False,  # type: bool
+    wheel_cache=None,  # type: Optional[WheelCache]
+    use_pep517=None  # type: Optional[bool]
+):
+    # type: (...) -> Iterator[InstallRequirement]
     """Parse a requirements file and yield InstallRequirement instances.
 
     :param filename: Path or url of requirements file.
@@ -95,12 +112,13 @@ def parse_requirements(filename, finder=None, comes_from=None, options=None, def preprocess(content, options): + # type: (Text, Optional[optparse.Values]) -> Iterator[Tuple[int, Text]] """Split, filter, and join lines, and return a line iterator :param content: the content of the requirements file :param options: cli options """ - lines_enum = enumerate(content.splitlines(), start=1) + lines_enum = enumerate(content.splitlines(), start=1) # type: Iterator[Tuple[int, Text]] # noqa: E501 lines_enum = join_lines(lines_enum) lines_enum = ignore_comments(lines_enum) lines_enum = skip_regex(lines_enum, options) @@ -108,9 +126,19 @@ def preprocess(content, options): return lines_enum -def process_line(line, filename, line_number, finder=None, comes_from=None, - options=None, session=None, wheel_cache=None, - use_pep517=None, constraint=False): +def process_line( + line, # type: Text + filename, # type: str + line_number, # type: int + finder=None, # type: Optional[PackageFinder] + comes_from=None, # type: Optional[str] + options=None, # type: Optional[optparse.Values] + session=None, # type: Optional[PipSession] + wheel_cache=None, # type: Optional[WheelCache] + use_pep517=None, # type: Optional[bool] + constraint=False # type: bool +): + # type: (...) -> Iterator[InstallRequirement] """Process a single requirements line; This can result in creating/yielding requirements, or updating the finder. @@ -130,15 +158,17 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, """ parser = build_parser(line) defaults = parser.get_default_values() - defaults.index_url = None + defaults.index_url = None # type: ignore if finder: # `finder.format_control` will be updated during parsing - defaults.format_control = finder.format_control + defaults.format_control = finder.format_control # type: ignore args_str, options_str = break_args_options(line) if sys.version_info < (2, 7, 3): # Prior to 2.7.3, shlex cannot deal with unicode entries - options_str = options_str.encode('utf8') - opts, _ = parser.parse_args(shlex.split(options_str), defaults) + # https://github.com/python/mypy/issues/1174 + options_str = options_str.encode('utf8') # type: ignore + # https://github.com/python/mypy/issues/1174 + opts, _ = parser.parse_args(shlex.split(options_str), defaults) # type: ignore # noqa: E501 # preserve for the nested code path line_comes_from = '%s %s (line %s)' % ( @@ -153,8 +183,8 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, # get the options that apply to requirements req_options = {} for dest in SUPPORTED_OPTIONS_REQ_DEST: - if dest in opts.__dict__ and opts.__dict__[dest]: - req_options[dest] = opts.__dict__[dest] + if dest in opts.__dict__ and opts.__dict__[dest]: # type: ignore + req_options[dest] = opts.__dict__[dest] # type: ignore yield install_req_from_line( args_str, line_comes_from, constraint=constraint, use_pep517=use_pep517, @@ -187,16 +217,16 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, # do a join so relative paths work req_path = os.path.join(os.path.dirname(filename), req_path) # TODO: Why not use `comes_from='-r {} (line {})'` here as well? 
- parser = parse_requirements( + parsed_reqs = parse_requirements( req_path, finder, comes_from, options, session, constraint=nested_constraint, wheel_cache=wheel_cache ) - for req in parser: + for req in parsed_reqs: yield req # percolate hash-checking option upward elif opts.require_hashes: - options.require_hashes = opts.require_hashes + options.require_hashes = opts.require_hashes # type: ignore # set finder options elif finder: @@ -226,6 +256,7 @@ def process_line(line, filename, line_number, finder=None, comes_from=None, def break_args_options(line): + # type: (Text) -> Tuple[str, Text] """Break up the line into an args and options string. We only want to shlex (and then optparse) the options, not the args. args can contain markers which are corrupted by shlex. @@ -239,10 +270,11 @@ def break_args_options(line): else: args.append(token) options.pop(0) - return ' '.join(args), ' '.join(options) + return ' '.join(args), ' '.join(options) # type: ignore def build_parser(line): + # type: (Text) -> optparse.OptionParser """ Return a parser for parsing requirement lines """ @@ -259,20 +291,24 @@ def parser_exit(self, msg): # add offending line msg = 'Invalid requirement: %s\n%s' % (line, msg) raise RequirementsFileParseError(msg) - parser.exit = parser_exit + # ignore type, because mypy disallows assigning to a method, + # see https://github.com/python/mypy/issues/2427 + parser.exit = parser_exit # type: ignore return parser def join_lines(lines_enum): + # type: (Iterator[Tuple[int, Text]]) -> Iterator[Tuple[int, Text]] """Joins a line ending in '\' with the previous line (except when following comments). The joined line takes on the index of the first line. """ primary_line_number = None - new_line = [] + new_line = [] # type: List[Text] for line_number, line in lines_enum: - if not line.endswith('\\') or COMMENT_RE.match(line): - if COMMENT_RE.match(line): + # fixed in mypy==0.641 + if not line.endswith('\\') or COMMENT_RE.match(line): # type: ignore + if COMMENT_RE.match(line): # type: ignore # this ensures comments are always matched later line = ' ' + line if new_line: @@ -294,17 +330,23 @@ def join_lines(lines_enum): def ignore_comments(lines_enum): + # type: (Iterator[Tuple[int, Text]]) -> Iterator[Tuple[int, Text]] """ Strips comments and filter empty lines. """ for line_number, line in lines_enum: - line = COMMENT_RE.sub('', line) + # fixed in mypy==0.641 + line = COMMENT_RE.sub('', line) # type: ignore line = line.strip() if line: yield line_number, line -def skip_regex(lines_enum, options): +def skip_regex( + lines_enum, # type: Iterator[Tuple[int, Text]] + options # type: Optional[optparse.Values] +): + # type: (...) -> Iterator[Tuple[int, Text]] """ Skip lines that match '--skip-requirements-regex' pattern @@ -318,6 +360,7 @@ def skip_regex(lines_enum, options): def expand_env_variables(lines_enum): + # type: (Iterator[Tuple[int, Text]]) -> Iterator[Tuple[int, Text]] """Replace all environment variables that can be retrieved via `os.getenv`. The only allowed format for environment variables defined in the @@ -332,9 +375,10 @@ def expand_env_variables(lines_enum): Valid characters in variable names follow the `POSIX standard `_ and are limited to uppercase letter, digits and the `_` (underscore). 
- """ + """ for line_number, line in lines_enum: - for env_var, var_name in ENV_VAR_RE.findall(line): + # fixed in mypy==0.641 + for env_var, var_name in ENV_VAR_RE.findall(line): # type: ignore value = os.getenv(var_name) if not value: continue diff --git a/src/pip/_internal/req/req_install.py b/src/pip/_internal/req/req_install.py index 075f86e9b2c..e772b8de08f 100644 --- a/src/pip/_internal/req/req_install.py +++ b/src/pip/_internal/req/req_install.py @@ -35,10 +35,21 @@ from pip._internal.utils.packaging import get_metadata from pip._internal.utils.setuptools_build import SETUPTOOLS_SHIM from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.utils.ui import open_spinner from pip._internal.vcs import vcs from pip._internal.wheel import move_wheel_files +if MYPY_CHECK_RUNNING: + from typing import (Optional, Iterable, List, # noqa: F401 + Union, Any, Mapping, Text, Sequence) + from pip._vendor.pkg_resources import Distribution # noqa: F401 + from pip._internal.index import PackageFinder # noqa: F401 + from pip._internal.cache import WheelCache # noqa: F401 + from pip._vendor.packaging.specifiers import SpecifierSet # noqa: F401 + from pip._vendor.packaging.markers import Marker # noqa: F401 + + logger = logging.getLogger(__name__) @@ -49,10 +60,23 @@ class InstallRequirement(object): installing the said requirement. """ - def __init__(self, req, comes_from, source_dir=None, editable=False, - link=None, update=True, markers=None, use_pep517=None, - isolated=False, options=None, wheel_cache=None, - constraint=False, extras=()): + def __init__( + self, + req, # type: Optional[Requirement] + comes_from, # type: Optional[Union[str, InstallRequirement]] + source_dir=None, # type: Optional[str] + editable=False, # type: bool + link=None, # type: Optional[Link] + update=True, # type: bool + markers=None, # type: Optional[Marker] + use_pep517=None, # type: Optional[bool] + isolated=False, # type: bool + options=None, # type: Optional[Mapping[Text, Any]] + wheel_cache=None, # type: Optional[WheelCache] + constraint=False, # type: bool + extras=() # type: Iterable[str] + ): + # type: (...) -> None assert req is None or isinstance(req, Requirement), req self.req = req self.comes_from = comes_from @@ -67,7 +91,8 @@ def __init__(self, req, comes_from, source_dir=None, editable=False, if link is not None: self.link = self.original_link = link else: - self.link = self.original_link = req and req.url and Link(req.url) + # https://github.com/python/mypy/issues/5540 + self.link = self.original_link = req and req.url and Link(req.url) # type: ignore # noqa: E501 if extras: self.extras = extras @@ -80,8 +105,9 @@ def __init__(self, req, comes_from, source_dir=None, editable=False, if markers is not None: self.markers = markers else: - self.markers = req and req.marker - self._egg_info_path = None + # https://github.com/python/mypy/issues/5540 + self.markers = req and req.marker # type: ignore + self._egg_info_path = None # type: Optional[str] # This holds the pkg_resources.Distribution object if this requirement # is already available: self.satisfied_by = None @@ -92,11 +118,11 @@ def __init__(self, req, comes_from, source_dir=None, editable=False, self._temp_build_dir = TempDirectory(kind="req-build") # Used to store the global directory where the _temp_build_dir should # have been created. Cf _correct_build_location method. 
- self._ideal_build_dir = None + self._ideal_build_dir = None # type: Optional[str] # True if the editable should be updated: self.update = update # Set to True after successful installation - self.install_succeeded = None + self.install_succeeded = None # type: Optional[bool] # UninstallPathSet of uninstalled distribution (for possible rollback) self.uninstalled_pathset = None self.options = options if options else {} @@ -111,16 +137,16 @@ def __init__(self, req, comes_from, source_dir=None, editable=False, # gets stored. We need this to pass to build_wheel, so the backend # can ensure that the wheel matches the metadata (see the PEP for # details). - self.metadata_directory = None + self.metadata_directory = None # type: Optional[str] # The static build requirements (from pyproject.toml) - self.pyproject_requires = None + self.pyproject_requires = None # type: Optional[List[str]] # Build requirements that we will check are available - self.requirements_to_check = [] + self.requirements_to_check = [] # type: List[str] # The PEP 517 backend we should use to build the project - self.pep517_backend = None + self.pep517_backend = None # type: Optional[Pep517HookCaller] # Are we using PEP 517 for this requirement? # After pyproject.toml has been loaded, the only valid values are True @@ -139,7 +165,7 @@ def __str__(self): else: s = '' if self.satisfied_by is not None: - s += ' in %s' % display_path(self.satisfied_by.location) + s += ' in %s' % display_path(self.satisfied_by.location) # type: ignore # noqa: E501 if self.comes_from: if isinstance(self.comes_from, six.string_types): comes_from = self.comes_from @@ -154,6 +180,7 @@ def __repr__(self): self.__class__.__name__, str(self), self.editable) def populate_link(self, finder, upgrade, require_hashes): + # type: (PackageFinder, bool, bool) -> None """Ensure that if a link can be found for this, that it is found. Note that self.link may still be None - if Upgrade is False and the @@ -176,16 +203,19 @@ def populate_link(self, finder, upgrade, require_hashes): # Things that are valid for all kinds of requirements? @property def name(self): + # type: () -> Optional[str] if self.req is None: return None return native_str(pkg_resources.safe_name(self.req.name)) @property def specifier(self): + # type: () -> SpecifierSet return self.req.specifier @property def is_pinned(self): + # type: () -> bool """Return whether I am pinned to an exact version. For example, some-package==1.2 is pinned; some-package>1.2 is not. @@ -199,6 +229,7 @@ def installed_version(self): return get_installed_version(self.name) def match_markers(self, extras_requested=None): + # type: (Optional[Iterable[str]]) -> bool if not extras_requested: # Provide an extra to safely evaluate the markers # without matching any extra @@ -212,6 +243,7 @@ def match_markers(self, extras_requested=None): @property def has_hash_options(self): + # type: () -> bool """Return whether any known-good hashes are specified as options. These activate --require-hashes mode; hashes specified as part of a @@ -221,6 +253,7 @@ def has_hash_options(self): return bool(self.options.get('hashes', {})) def hashes(self, trust_internet=True): + # type: (bool) -> Hashes """Return a hash-comparer that considers my option- and URL-based hashes to be known-good. 
@@ -242,6 +275,7 @@ def hashes(self, trust_internet=True): return Hashes(good_hashes) def from_path(self): + # type: () -> Optional[str] """Format a nice indicator to show where this "comes from" """ if self.req is None: @@ -257,6 +291,7 @@ def from_path(self): return s def build_location(self, build_dir): + # type: (str) -> Optional[str] assert build_dir is not None if self._temp_build_dir.path is not None: return self._temp_build_dir.path @@ -284,6 +319,7 @@ def build_location(self, build_dir): return os.path.join(build_dir, name) def _correct_build_location(self): + # type: () -> None """Move self._temp_build_dir to self._ideal_build_dir/self.req.name For some requirements (e.g. a path to a directory), the name of the @@ -297,7 +333,8 @@ def _correct_build_location(self): return assert self.req is not None assert self._temp_build_dir.path - assert self._ideal_build_dir.path + # remove ignore after update to latest (0.650) mypy + assert self._ideal_build_dir is not None and self._ideal_build_dir.path # type: ignore # noqa: E501 old_location = self._temp_build_dir.path self._temp_build_dir.path = None @@ -325,6 +362,7 @@ def _correct_build_location(self): self.metadata_directory = new_meta def remove_temporary_source(self): + # type: () -> None """Remove the source files from this requirement, if they are marked for deletion""" if self.source_dir and os.path.exists( @@ -336,6 +374,7 @@ def remove_temporary_source(self): self.build_env.cleanup() def check_if_exists(self, use_user_site): + # type: (bool) -> bool """Find an installed distribution that satisfies or conflicts with this requirement, and set self.satisfied_by or self.conflicts_with appropriately. @@ -379,11 +418,21 @@ def check_if_exists(self, use_user_site): # Things valid for wheels @property def is_wheel(self): - return self.link and self.link.is_wheel - - def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, - warn_script_location=True, use_user_site=False, - pycompile=True): + # type: () -> bool + # https://github.com/python/mypy/issues/5540 + return self.link and self.link.is_wheel # type: ignore + + def move_wheel_files( + self, + wheeldir, # type: str + root=None, # type: Optional[str] + home=None, # type: Optional[str] + prefix=None, # type: Optional[str] + warn_script_location=True, # type: bool + use_user_site=False, # type: bool + pycompile=True # type: bool + ): + # type: (...) -> None move_wheel_files( self.name, self.req, wheeldir, user=use_user_site, @@ -398,12 +447,14 @@ def move_wheel_files(self, wheeldir, root=None, home=None, prefix=None, # Things valid for sdists @property def setup_py_dir(self): + # type: () -> str return os.path.join( self.source_dir, self.link and self.link.subdirectory_fragment or '') @property def setup_py(self): + # type: () -> str assert self.source_dir, "No source dir for %s" % self setup_py = os.path.join(self.setup_py_dir, 'setup.py') @@ -416,6 +467,7 @@ def setup_py(self): @property def pyproject_toml(self): + # type: () -> str assert self.source_dir, "No source dir for %s" % self pp_toml = os.path.join(self.setup_py_dir, 'pyproject.toml') @@ -427,6 +479,7 @@ def pyproject_toml(self): return pp_toml def load_pyproject_toml(self): + # type: () -> None """Load the pyproject.toml file. After calling this routine, all of the attributes related to PEP 517 @@ -467,6 +520,7 @@ def runner(cmd, cwd=None, extra_environ=None): self.pep517_backend._subprocess_runner = runner def prepare_metadata(self): + # type: () -> None """Ensure that project metadata is available. 
Under PEP 517, call the backend hook to prepare the metadata. @@ -505,6 +559,7 @@ def prepare_metadata(self): self.req = Requirement(metadata_name) def prepare_pep517_metadata(self): + # type: () -> None assert self.pep517_backend is not None metadata_dir = os.path.join( @@ -526,6 +581,7 @@ def prepare_pep517_metadata(self): self.metadata_directory = os.path.join(metadata_dir, distinfo_dir) def run_egg_info(self): + # type: () -> None if self.name: logger.debug( 'Running setup.py (path:%s) egg_info for package %s', @@ -545,7 +601,7 @@ def run_egg_info(self): # source code will be mistaken for an installed egg, causing # problems if self.editable: - egg_base_option = [] + egg_base_option = [] # type: List[str] else: egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') ensure_dir(egg_info_dir) @@ -559,6 +615,7 @@ def run_egg_info(self): @property def egg_info_path(self): + # type: () -> str if self._egg_info_path is None: if self.editable: base = self.source_dir @@ -617,6 +674,7 @@ def metadata(self): return self._metadata def get_dist(self): + # type: () -> Distribution """Return a pkg_resources.Distribution for this requirement""" if self.metadata_directory: base_dir, distinfo = os.path.split(self.metadata_directory) @@ -630,7 +688,8 @@ def get_dist(self): base_dir = os.path.dirname(egg_info) metadata = pkg_resources.PathMetadata(base_dir, egg_info) dist_name = os.path.splitext(os.path.basename(egg_info))[0] - typ = pkg_resources.Distribution + # https://github.com/python/mypy/issues/1174 + typ = pkg_resources.Distribution # type: ignore return typ( base_dir, @@ -639,6 +698,7 @@ def get_dist(self): ) def assert_source_matches_version(self): + # type: () -> None assert self.source_dir version = self.metadata['version'] if self.req.specifier and version not in self.req.specifier: @@ -671,8 +731,13 @@ def ensure_has_source_dir(self, parent_dir): return self.source_dir # For editable installations - def install_editable(self, install_options, - global_options=(), prefix=None): + def install_editable( + self, + install_options, # type: List[str] + global_options=(), # type: Sequence[str] + prefix=None # type: Optional[str] + ): + # type: (...) -> None logger.info('Running setup.py develop for %s', self.name) if self.isolated: @@ -702,6 +767,7 @@ def install_editable(self, install_options, self.install_succeeded = True def update_editable(self, obtain=True): + # type: (bool) -> None if not self.link: logger.debug( "Cannot update repository at %s; repository location is " @@ -733,6 +799,7 @@ def update_editable(self, obtain=True): # Top-level Actions def uninstall(self, auto_confirm=False, verbose=False, use_user_site=False): + # type: (bool, bool, bool) -> Optional[UninstallPathSet] """ Uninstall the distribution currently satisfying this requirement. @@ -747,7 +814,7 @@ def uninstall(self, auto_confirm=False, verbose=False, """ if not self.check_if_exists(use_user_site): logger.warning("Skipping %s as it is not installed.", self.name) - return + return None dist = self.satisfied_by or self.conflicts_with uninstalled_pathset = UninstallPathSet.from_dist(dist) @@ -765,6 +832,7 @@ def _clean_zip_name(self, name, prefix): # only used by archive. 
# TODO: Investigate if this should be kept in InstallRequirement # Seems to be used only when VCS + downloads def archive(self, build_dir): + # type: (str) -> None assert self.source_dir create_archive = True archive_name = '%s-%s.zip' % (self.name, self.metadata["version"]) @@ -800,7 +868,9 @@ def archive(self, build_dir): for dirname in dirnames: dirname = os.path.join(dirpath, dirname) name = self._clean_zip_name(dirname, dir) - zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') + # should be fixed in mypy==0.650 + # see https://github.com/python/typeshed/pull/2628 + zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') # type: ignore # noqa: E501 zipdir.external_attr = 0x1ED << 16 # 0o755 zip.writestr(zipdir, '') for filename in filenames: @@ -812,9 +882,18 @@ def archive(self, build_dir): zip.close() logger.info('Saved %s', display_path(archive_path)) - def install(self, install_options, global_options=None, root=None, - home=None, prefix=None, warn_script_location=True, - use_user_site=False, pycompile=True): + def install( + self, + install_options, # type: List[str] + global_options=None, # type: Optional[Sequence[str]] + root=None, # type: Optional[str] + home=None, # type: Optional[str] + prefix=None, # type: Optional[str] + warn_script_location=True, # type: bool + use_user_site=False, # type: bool + pycompile=True # type: bool + ): + # type: (...) -> None global_options = global_options if global_options is not None else [] if self.editable: self.install_editable( @@ -844,7 +923,8 @@ def install(self, install_options, global_options=None, root=None, self.options.get('install_options', []) if self.isolated: - global_options = global_options + ["--no-user-cfg"] + # https://github.com/python/mypy/issues/1174 + global_options = global_options + ["--no-user-cfg"] # type: ignore with TempDirectory(kind="record") as temp_dir: record_filename = os.path.join(temp_dir.path, 'install-record.txt') @@ -903,8 +983,15 @@ def prepend_root(path): with open(inst_files_path, 'w') as f: f.write('\n'.join(new_lines) + '\n') - def get_install_args(self, global_options, record_filename, root, prefix, - pycompile): + def get_install_args( + self, + global_options, # type: Sequence[str] + record_filename, # type: str + root, # type: Optional[str] + prefix, # type: Optional[str] + pycompile # type: bool + ): + # type: (...) -> List[str] install_args = [sys.executable, "-u"] install_args.append('-c') install_args.append(SETUPTOOLS_SHIM % self.setup_py) diff --git a/src/pip/_internal/req/req_set.py b/src/pip/_internal/req/req_set.py index b1983171de7..e17dfdc0179 100644 --- a/src/pip/_internal/req/req_set.py +++ b/src/pip/_internal/req/req_set.py @@ -5,26 +5,33 @@ from pip._internal.exceptions import InstallationError from pip._internal.utils.logging import indent_log +from pip._internal.utils.typing import MYPY_CHECK_RUNNING from pip._internal.wheel import Wheel +if MYPY_CHECK_RUNNING: + from typing import Optional, List, Tuple, Dict, Iterable # noqa: F401 + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + + logger = logging.getLogger(__name__) class RequirementSet(object): def __init__(self, require_hashes=False, check_supported_wheels=True): + # type: (bool, bool) -> None """Create a RequirementSet. 
""" - self.requirements = OrderedDict() + self.requirements = OrderedDict() # type: Dict[str, InstallRequirement] # noqa: E501 self.require_hashes = require_hashes self.check_supported_wheels = check_supported_wheels # Mapping of alias: real_name - self.requirement_aliases = {} - self.unnamed_requirements = [] - self.successfully_downloaded = [] - self.reqs_to_cleanup = [] + self.requirement_aliases = {} # type: Dict[str, str] + self.unnamed_requirements = [] # type: List[InstallRequirement] + self.successfully_downloaded = [] # type: List[InstallRequirement] + self.reqs_to_cleanup = [] # type: List[InstallRequirement] def __str__(self): reqs = [req for req in self.requirements.values() @@ -39,8 +46,13 @@ def __repr__(self): return ('<%s object; %d requirement(s): %s>' % (self.__class__.__name__, len(reqs), reqs_str)) - def add_requirement(self, install_req, parent_req_name=None, - extras_requested=None): + def add_requirement( + self, + install_req, # type: InstallRequirement + parent_req_name=None, # type: Optional[str] + extras_requested=None # type: Optional[Iterable[str]] + ): + # type: (...) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]] # noqa: E501 """Add install_req as a requirement to install. :param parent_req_name: The name of the requirement that needed this @@ -152,6 +164,7 @@ def add_requirement(self, install_req, parent_req_name=None, return [existing_req], existing_req def has_requirement(self, project_name): + # type: (str) -> bool name = project_name.lower() if (name in self.requirements and not self.requirements[name].constraint or @@ -162,10 +175,12 @@ def has_requirement(self, project_name): @property def has_requirements(self): + # type: () -> List[InstallRequirement] return list(req for req in self.requirements.values() if not req.constraint) or self.unnamed_requirements def get_requirement(self, project_name): + # type: (str) -> InstallRequirement for name in project_name, project_name.lower(): if name in self.requirements: return self.requirements[name] @@ -174,6 +189,7 @@ def get_requirement(self, project_name): raise KeyError("No project with the name %r" % project_name) def cleanup_files(self): + # type: () -> None """Clean up files, remove builds.""" logger.debug('Cleaning up...') with indent_log(): diff --git a/src/pip/_internal/req/req_tracker.py b/src/pip/_internal/req/req_tracker.py index 0a86f4cd330..82e084a4cee 100644 --- a/src/pip/_internal/req/req_tracker.py +++ b/src/pip/_internal/req/req_tracker.py @@ -7,6 +7,12 @@ import os from pip._internal.utils.temp_dir import TempDirectory +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Set, Iterator # noqa: F401 + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from pip._internal.models.link import Link # noqa: F401 logger = logging.getLogger(__name__) @@ -14,6 +20,7 @@ class RequirementTracker(object): def __init__(self): + # type: () -> None self._root = os.environ.get('PIP_REQ_TRACKER') if self._root is None: self._temp_dir = TempDirectory(delete=False, kind='req-tracker') @@ -23,7 +30,7 @@ def __init__(self): else: self._temp_dir = None logger.debug('Re-using requirements tracker %r', self._root) - self._entries = set() + self._entries = set() # type: Set[InstallRequirement] def __enter__(self): return self @@ -32,10 +39,12 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.cleanup() def _entry_path(self, link): + # type: (Link) -> str hashed = 
hashlib.sha224(link.url_without_fragment.encode()).hexdigest() return os.path.join(self._root, hashed) def add(self, req): + # type: (InstallRequirement) -> None link = req.link info = str(req) entry_path = self._entry_path(link) @@ -54,12 +63,14 @@ def add(self, req): logger.debug('Added %s to build tracker %r', req, self._root) def remove(self, req): + # type: (InstallRequirement) -> None link = req.link self._entries.remove(req) os.unlink(self._entry_path(link)) logger.debug('Removed %s from build tracker %r', req, self._root) def cleanup(self): + # type: () -> None for req in set(self._entries): self.remove(req) remove = self._temp_dir is not None @@ -71,6 +82,7 @@ def cleanup(self): @contextlib.contextmanager def track(self, req): + # type: (InstallRequirement) -> Iterator[None] self.add(req) yield self.remove(req) diff --git a/src/pip/_internal/resolve.py b/src/pip/_internal/resolve.py index a911a348b5a..1b9d2960cc5 100644 --- a/src/pip/_internal/resolve.py +++ b/src/pip/_internal/resolve.py @@ -18,10 +18,21 @@ BestVersionAlreadyInstalled, DistributionNotFound, HashError, HashErrors, UnsupportedPythonVersion, ) -from pip._internal.req.constructors import install_req_from_req +from pip._internal.req.constructors import install_req_from_req_string from pip._internal.utils.logging import indent_log from pip._internal.utils.misc import dist_in_usersite, ensure_dir from pip._internal.utils.packaging import check_dist_requires_python +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import Optional, DefaultDict, List, Set # noqa: F401 + from pip._internal.download import PipSession # noqa: F401 + from pip._internal.req.req_install import InstallRequirement # noqa: F401 + from pip._internal.index import PackageFinder # noqa: F401 + from pip._internal.req.req_set import RequirementSet # noqa: F401 + from pip._internal.operations.prepare import (DistAbstraction, # noqa: F401, E501 + RequirementPreparer) + from pip._internal.cache import WheelCache # noqa: F401 logger = logging.getLogger(__name__) @@ -33,9 +44,22 @@ class Resolver(object): _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"} - def __init__(self, preparer, session, finder, wheel_cache, use_user_site, - ignore_dependencies, ignore_installed, ignore_requires_python, - force_reinstall, isolated, upgrade_strategy, use_pep517=None): + def __init__( + self, + preparer, # type: RequirementPreparer + session, # type: PipSession + finder, # type: PackageFinder + wheel_cache, # type: Optional[WheelCache] + use_user_site, # type: bool + ignore_dependencies, # type: bool + ignore_installed, # type: bool + ignore_requires_python, # type: bool + force_reinstall, # type: bool + isolated, # type: bool + upgrade_strategy, # type: str + use_pep517=None # type: Optional[bool] + ): + # type: (...) -> None super(Resolver, self).__init__() assert upgrade_strategy in self._allowed_strategies @@ -47,7 +71,8 @@ def __init__(self, preparer, session, finder, wheel_cache, use_user_site, # information about both sdist and wheels transparently. 
self.wheel_cache = wheel_cache - self.require_hashes = None # This is set in resolve + # This is set in resolve + self.require_hashes = None # type: Optional[bool] self.upgrade_strategy = upgrade_strategy self.force_reinstall = force_reinstall @@ -58,9 +83,10 @@ def __init__(self, preparer, session, finder, wheel_cache, use_user_site, self.use_user_site = use_user_site self.use_pep517 = use_pep517 - self._discovered_dependencies = defaultdict(list) + self._discovered_dependencies = defaultdict(list) # type: DefaultDict[str, List] # noqa: E501 def resolve(self, requirement_set): + # type: (RequirementSet) -> None """Resolve what operations need to be done As a side-effect of this method, the packages (and their dependencies) @@ -95,7 +121,7 @@ def resolve(self, requirement_set): # exceptions cannot be checked ahead of time, because # req.populate_link() needs to be called before we can make decisions # based on link type. - discovered_reqs = [] + discovered_reqs = [] # type: List[InstallRequirement] hash_errors = HashErrors() for req in chain(root_reqs, discovered_reqs): try: @@ -110,6 +136,7 @@ def resolve(self, requirement_set): raise hash_errors def _is_upgrade_allowed(self, req): + # type: (InstallRequirement) -> bool if self.upgrade_strategy == "to-satisfy-only": return False elif self.upgrade_strategy == "eager": @@ -119,6 +146,7 @@ def _is_upgrade_allowed(self, req): return req.is_direct def _set_req_to_reinstall(self, req): + # type: (InstallRequirement) -> None """ Set a requirement to be installed. """ @@ -130,6 +158,7 @@ def _set_req_to_reinstall(self, req): # XXX: Stop passing requirement_set for options def _check_skip_installed(self, req_to_install): + # type: (InstallRequirement) -> Optional[str] """Check if req_to_install should be skipped. This will check if the req is installed, and whether we should upgrade @@ -182,6 +211,7 @@ def _check_skip_installed(self, req_to_install): return None def _get_abstract_dist_for(self, req): + # type: (InstallRequirement) -> DistAbstraction """Takes a InstallRequirement and returns a single AbstractDist \ representing a prepared variant of the same. """ @@ -239,6 +269,7 @@ def _get_abstract_dist_for(self, req): return abstract_dist def _resolve_one(self, requirement_set, req_to_install): + # type: (RequirementSet, InstallRequirement) -> List[InstallRequirement] # noqa: E501 """Prepare a single requirements file. :return: A list of additional InstallRequirements to also install. @@ -266,10 +297,10 @@ def _resolve_one(self, requirement_set, req_to_install): else: raise - more_reqs = [] + more_reqs = [] # type: List[InstallRequirement] def add_req(subreq, extras_requested): - sub_install_req = install_req_from_req( + sub_install_req = install_req_from_req_string( str(subreq), req_to_install, isolated=self.isolated, @@ -328,6 +359,7 @@ def add_req(subreq, extras_requested): return more_reqs def get_installation_order(self, req_set): + # type: (RequirementSet) -> List[InstallRequirement] """Create the installation order. The installation order is topological - requirements are installed @@ -338,7 +370,7 @@ def get_installation_order(self, req_set): # installs the user specified things in the order given, except when # dependencies must come earlier to achieve topological order. 
order = [] - ordered_reqs = set() + ordered_reqs = set() # type: Set[InstallRequirement] def schedule(req): if req.satisfied_by or req in ordered_reqs: diff --git a/src/pip/_internal/utils/compat.py b/src/pip/_internal/utils/compat.py index 2ae27fd130d..2d8b3bf060e 100644 --- a/src/pip/_internal/utils/compat.py +++ b/src/pip/_internal/utils/compat.py @@ -23,8 +23,8 @@ from pip._vendor import ipaddress # type: ignore except ImportError: import ipaddr as ipaddress # type: ignore - ipaddress.ip_address = ipaddress.IPAddress - ipaddress.ip_network = ipaddress.IPNetwork + ipaddress.ip_address = ipaddress.IPAddress # type: ignore + ipaddress.ip_network = ipaddress.IPNetwork # type: ignore __all__ = [ diff --git a/src/pip/_internal/utils/encoding.py b/src/pip/_internal/utils/encoding.py index 56f60361138..d36defadba9 100644 --- a/src/pip/_internal/utils/encoding.py +++ b/src/pip/_internal/utils/encoding.py @@ -3,6 +3,11 @@ import re import sys +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import List, Tuple, Text # noqa: F401 + BOMS = [ (codecs.BOM_UTF8, 'utf8'), (codecs.BOM_UTF16, 'utf16'), @@ -11,12 +16,13 @@ (codecs.BOM_UTF32, 'utf32'), (codecs.BOM_UTF32_BE, 'utf32-be'), (codecs.BOM_UTF32_LE, 'utf32-le'), -] +] # type: List[Tuple[bytes, Text]] ENCODING_RE = re.compile(br'coding[:=]\s*([-\w.]+)') def auto_decode(data): + # type: (bytes) -> Text """Check a bytes string for a BOM to correctly detect the encoding Fallback to locale.getpreferredencoding(False) like open() on Python3""" diff --git a/src/pip/_internal/utils/hashes.py b/src/pip/_internal/utils/hashes.py index 8b909ba155c..c6df7a187f4 100644 --- a/src/pip/_internal/utils/hashes.py +++ b/src/pip/_internal/utils/hashes.py @@ -8,6 +8,18 @@ HashMismatch, HashMissing, InstallationError, ) from pip._internal.utils.misc import read_chunks +from pip._internal.utils.typing import MYPY_CHECK_RUNNING + +if MYPY_CHECK_RUNNING: + from typing import ( # noqa: F401 + Dict, List, BinaryIO, NoReturn, Iterator + ) + from pip._vendor.six import PY3 + if PY3: + from hashlib import _Hash # noqa: F401 + else: + from hashlib import _hash as _Hash # noqa: F401 + # The recommended hash algo of the moment. Change this whenever the state of # the art changes; it won't hurt backward compatibility. @@ -25,6 +37,7 @@ class Hashes(object): """ def __init__(self, hashes=None): + # type: (Dict[str, List[str]]) -> None """ :param hashes: A dict of algorithm names pointing to lists of allowed hex digests @@ -32,6 +45,7 @@ def __init__(self, hashes=None): self._allowed = {} if hashes is None else hashes def check_against_chunks(self, chunks): + # type: (Iterator[bytes]) -> None """Check good hashes against ones built from iterable of chunks of data. @@ -55,9 +69,11 @@ def check_against_chunks(self, chunks): self._raise(gots) def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn raise HashMismatch(self._allowed, gots) def check_against_file(self, file): + # type: (BinaryIO) -> None """Check good hashes against a file-like object Raise HashMismatch if none match. 
@@ -66,14 +82,17 @@ def check_against_file(self, file): return self.check_against_chunks(read_chunks(file)) def check_against_path(self, path): + # type: (str) -> None with open(path, 'rb') as file: return self.check_against_file(file) def __nonzero__(self): + # type: () -> bool """Return whether I know any known-good hashes.""" return bool(self._allowed) def __bool__(self): + # type: () -> bool return self.__nonzero__() @@ -85,10 +104,12 @@ class MissingHashes(Hashes): """ def __init__(self): + # type: () -> None """Don't offer the ``hashes`` kwarg.""" # Pass our favorite hash in to generate a "gotten hash". With the # empty list, it will never match, so an error will always raise. super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) def _raise(self, gots): + # type: (Dict[str, _Hash]) -> NoReturn raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/src/pip/_internal/utils/misc.py b/src/pip/_internal/utils/misc.py index 91f802bc4e8..1e561b416b0 100644 --- a/src/pip/_internal/utils/misc.py +++ b/src/pip/_internal/utils/misc.py @@ -35,12 +35,21 @@ from pip._internal.utils.compat import ( WINDOWS, console_to_str, expanduser, stdlib_pkgs, ) +from pip._internal.utils.typing import MYPY_CHECK_RUNNING if PY2: from io import BytesIO as StringIO else: from io import StringIO +if MYPY_CHECK_RUNNING: + from typing import (Optional, Tuple, Iterable, List, # noqa: F401 + Match, Union, Any, Mapping, Text, AnyStr, Container) + from pip._vendor.pkg_resources import Distribution # noqa: F401 + from pip._internal.models.link import Link # noqa: F401 + from pip._internal.utils.ui import SpinnerInterface # noqa: F401 + + __all__ = ['rmtree', 'display_path', 'backup_dir', 'ask', 'splitext', 'format_size', 'is_installable_dir', @@ -78,6 +87,7 @@ def ensure_dir(path): + # type: (AnyStr) -> None """os.path.makedirs without EEXIST.""" try: os.makedirs(path) @@ -87,6 +97,7 @@ def ensure_dir(path): def get_prog(): + # type: () -> str try: prog = os.path.basename(sys.argv[0]) if prog in ('__main__.py', '-c'): @@ -101,6 +112,7 @@ def get_prog(): # Retry every half second for up to 3 seconds @retry(stop_max_delay=3000, wait_fixed=500) def rmtree(dir, ignore_errors=False): + # type: (str, bool) -> None shutil.rmtree(dir, ignore_errors=ignore_errors, onerror=rmtree_errorhandler) @@ -121,6 +133,7 @@ def rmtree_errorhandler(func, path, exc_info): def display_path(path): + # type: (Union[str, Text]) -> str """Gives the display value for a given path, making it relative to cwd if possible.""" path = os.path.normcase(os.path.abspath(path)) @@ -133,6 +146,7 @@ def display_path(path): def backup_dir(dir, ext='.bak'): + # type: (str, str) -> str """Figure out the name of a directory to back up the given dir to (adding .bak, .bak2, etc)""" n = 1 @@ -144,6 +158,7 @@ def backup_dir(dir, ext='.bak'): def ask_path_exists(message, options): + # type: (str, Iterable[str]) -> str for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): if action in options: return action @@ -151,6 +166,7 @@ def ask_path_exists(message, options): def ask(message, options): + # type: (str, Iterable[str]) -> str """Ask the message interactively, with the given possible responses""" while 1: if os.environ.get('PIP_NO_INPUT'): @@ -170,6 +186,7 @@ def ask(message, options): def format_size(bytes): + # type: (float) -> str if bytes > 1000 * 1000: return '%.1fMB' % (bytes / 1000.0 / 1000) elif bytes > 10 * 1000: @@ -181,6 +198,7 @@ def format_size(bytes): def is_installable_dir(path): + # type: (str) -> bool """Is path is a 
directory containing setup.py or pyproject.toml? """ if not os.path.isdir(path): @@ -195,6 +213,7 @@ def is_installable_dir(path): def is_svn_page(html): + # type: (Union[str, Text]) -> Optional[Match[Union[str, Text]]] """ Returns true if the page appears to be the index page of an svn repository """ @@ -203,6 +222,7 @@ def is_svn_page(html): def file_contents(filename): + # type: (str) -> Text with open(filename, 'rb') as fp: return fp.read().decode('utf-8') @@ -217,6 +237,7 @@ def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): def split_leading_dir(path): + # type: (Union[str, Text]) -> List[Union[str, Text]] path = path.lstrip('/').lstrip('\\') if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path): @@ -224,10 +245,11 @@ def split_leading_dir(path): elif '\\' in path: return path.split('\\', 1) else: - return path, '' + return [path, ''] def has_leading_dir(paths): + # type: (Iterable[Union[str, Text]]) -> bool """Returns true if all the paths have the same leading path name (i.e., everything is in one subdirectory in an archive)""" common_prefix = None @@ -243,6 +265,7 @@ def has_leading_dir(paths): def normalize_path(path, resolve_symlinks=True): + # type: (str, bool) -> str """ Convert a path to its canonical, case-normalized, absolute version. @@ -256,6 +279,7 @@ def normalize_path(path, resolve_symlinks=True): def splitext(path): + # type: (str) -> Tuple[str, str] """Like os.path.splitext, but take off .tar too""" base, ext = posixpath.splitext(path) if base.lower().endswith('.tar'): @@ -265,6 +289,7 @@ def splitext(path): def renames(old, new): + # type: (str, str) -> None """Like os.renames(), but handles renaming across devices.""" # Implementation borrowed from os.renames(). head, tail = os.path.split(new) @@ -282,6 +307,7 @@ def renames(old, new): def is_local(path): + # type: (str) -> bool """ Return True if path is within sys.prefix, if we're running in a virtualenv. @@ -294,6 +320,7 @@ def is_local(path): def dist_is_local(dist): + # type: (Distribution) -> bool """ Return True if given Distribution object is installed locally (i.e. within current virtualenv). @@ -305,6 +332,7 @@ def dist_is_local(dist): def dist_in_usersite(dist): + # type: (Distribution) -> bool """ Return True if given Distribution is installed in user site. """ @@ -313,6 +341,7 @@ def dist_in_usersite(dist): def dist_in_site_packages(dist): + # type: (Distribution) -> bool """ Return True if given Distribution is installed in sysconfig.get_python_lib(). @@ -323,6 +352,7 @@ def dist_in_site_packages(dist): def dist_is_editable(dist): + # type: (Distribution) -> bool """ Return True if given Distribution is an editable install. """ @@ -338,6 +368,7 @@ def get_installed_distributions(local_only=True, include_editables=True, editables_only=False, user_only=False): + # type: (bool, Container[str], bool, bool, bool) -> List[Distribution] """ Return a list of installed Distribution objects. @@ -381,7 +412,7 @@ def editables_only_test(d): def user_test(d): return True - return [d for d in pkg_resources.working_set + return [d for d in pkg_resources.working_set # type: ignore if local_test(d) and d.key not in skip and editable_test(d) and @@ -391,6 +422,7 @@ def user_test(d): def egg_link_path(dist): + # type: (Distribution) -> Optional[str] """ Return the path for the .egg-link file if it exists, otherwise, None. 
@@ -425,9 +457,11 @@ def egg_link_path(dist): egglink = os.path.join(site, dist.project_name) + '.egg-link' if os.path.isfile(egglink): return egglink + return None def dist_location(dist): + # type: (Distribution) -> str """ Get the site-packages location of this distribution. Generally this is dist.location, except in the case of develop-installed @@ -449,6 +483,7 @@ def current_umask(): def unzip_file(filename, location, flatten=True): + # type: (str, str, bool) -> None """ Unzip the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions are @@ -494,6 +529,7 @@ def unzip_file(filename, location, flatten=True): def untar_file(filename, location): + # type: (str, str) -> None """ Untar the file (with path `filename`) to the destination `location`. All files are written based on system defaults and umask (i.e. permissions @@ -524,13 +560,14 @@ def untar_file(filename, location): for member in tar.getmembers(): fn = member.name if leading: - fn = split_leading_dir(fn)[1] + # https://github.com/python/mypy/issues/1174 + fn = split_leading_dir(fn)[1] # type: ignore path = os.path.join(location, fn) if member.isdir(): ensure_dir(path) elif member.issym(): try: - tar._extract_member(member, path) + tar._extract_member(member, path) # type: ignore except Exception as exc: # Some corrupt tar files seem to produce this # (specifically bad symlinks) @@ -555,7 +592,7 @@ def untar_file(filename, location): shutil.copyfileobj(fp, destfp) fp.close() # Update the timestamp (useful for cython compiled files) - tar.utime(member, path) + tar.utime(member, path) # type: ignore # member have any execute permissions for user/group/world? if member.mode & 0o111: # make dest file have execute for user/group/world @@ -565,7 +602,13 @@ def untar_file(filename, location): tar.close() -def unpack_file(filename, location, content_type, link): +def unpack_file( + filename, # type: str + location, # type: str + content_type, # type: Optional[str] + link # type: Optional[Link] +): + # type: (...) -> None filename = os.path.realpath(filename) if (content_type == 'application/zip' or filename.lower().endswith(ZIP_EXTENSIONS) or @@ -598,10 +641,17 @@ def unpack_file(filename, location, content_type, link): ) -def call_subprocess(cmd, show_stdout=True, cwd=None, - on_returncode='raise', - command_desc=None, - extra_environ=None, unset_environ=None, spinner=None): +def call_subprocess( + cmd, # type: List[str] + show_stdout=True, # type: bool + cwd=None, # type: Optional[str] + on_returncode='raise', # type: str + command_desc=None, # type: Optional[str] + extra_environ=None, # type: Optional[Mapping[str, Any]] + unset_environ=None, # type: Optional[Iterable[str]] + spinner=None # type: Optional[SpinnerInterface] +): + # type: (...) -> Optional[Text] """ Args: unset_environ: an iterable of environment variable names to unset @@ -709,9 +759,11 @@ def call_subprocess(cmd, show_stdout=True, cwd=None, repr(on_returncode)) if not show_stdout: return ''.join(all_output) + return None def read_text_file(filename): + # type: (str) -> str """Return the contents of *filename*. 
Try to decode the file contents with utf-8, the preferred system encoding @@ -726,13 +778,15 @@ def read_text_file(filename): encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] for enc in encodings: try: - data = data.decode(enc) + # https://github.com/python/mypy/issues/1174 + data = data.decode(enc) # type: ignore except UnicodeDecodeError: continue break assert type(data) != bytes # Latin1 should have worked. - return data + # https://github.com/python/mypy/issues/4445 + return data # type: ignore def _make_build_dir(build_dir): @@ -895,6 +949,7 @@ def split_auth_from_netloc(netloc): def redact_netloc(netloc): + # type: (str) -> str """ Replace the password in a netloc with "****", if it exists. @@ -925,6 +980,7 @@ def _get_netloc(netloc): def remove_auth_from_url(url): + # type: (str) -> str # Return a copy of url with 'username:password@' removed. # username/pass params are passed to subversion through flags # and are not recognized in the url. @@ -932,6 +988,7 @@ def remove_auth_from_url(url): def redact_password_from_url(url): + # type: (str) -> str """Replace the password in a given url with ****.""" return _transform_url(url, redact_netloc) diff --git a/src/pip/_internal/utils/ui.py b/src/pip/_internal/utils/ui.py index 6bab904ab44..0ffcea0ac7a 100644 --- a/src/pip/_internal/utils/ui.py +++ b/src/pip/_internal/utils/ui.py @@ -21,7 +21,7 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Any # noqa: F401 + from typing import Any, Iterator, IO # noqa: F401 try: from pip._vendor import colorama @@ -292,6 +292,7 @@ def DownloadProgressProvider(progress_bar, max=None): @contextlib.contextmanager def hidden_cursor(file): + # type: (IO) -> Iterator[None] # The Windows terminal does not support the hide/show cursor ANSI codes, # even via colorama. So don't even try. if WINDOWS: @@ -311,19 +312,32 @@ def hidden_cursor(file): class RateLimiter(object): def __init__(self, min_update_interval_seconds): + # type: (float) -> None self._min_update_interval_seconds = min_update_interval_seconds - self._last_update = 0 + self._last_update = 0 # type: float def ready(self): + # type: () -> bool now = time.time() delta = now - self._last_update return delta >= self._min_update_interval_seconds def reset(self): + # type: () -> None self._last_update = time.time() -class InteractiveSpinner(object): +class SpinnerInterface(object): + def spin(self): + # type: () -> None + raise NotImplementedError() + + def finish(self, final_status): + # type: (str) -> None + raise NotImplementedError() + + +class InteractiveSpinner(SpinnerInterface): def __init__(self, message, file=None, spin_chars="-\\|/", # Empirically, 8 updates/second looks nice min_update_interval_seconds=0.125): @@ -352,6 +366,7 @@ def _write(self, status): self._rate_limiter.reset() def spin(self): + # type: () -> None if self._finished: return if not self._rate_limiter.ready(): @@ -359,6 +374,7 @@ def spin(self): self._write(next(self._spin_cycle)) def finish(self, final_status): + # type: (str) -> None if self._finished: return self._write(final_status) @@ -371,8 +387,9 @@ def finish(self, final_status): # We still print updates occasionally (once every 60 seconds by default) to # act as a keep-alive for systems like Travis-CI that take lack-of-output as # an indication that a task has frozen. 
-class NonInteractiveSpinner(object): +class NonInteractiveSpinner(SpinnerInterface): def __init__(self, message, min_update_interval_seconds=60): + # type: (str, float) -> None self._message = message self._finished = False self._rate_limiter = RateLimiter(min_update_interval_seconds) @@ -384,6 +401,7 @@ def _update(self, status): logger.info("%s: %s", self._message, status) def spin(self): + # type: () -> None if self._finished: return if not self._rate_limiter.ready(): @@ -391,6 +409,7 @@ def spin(self): self._update("still running...") def finish(self, final_status): + # type: (str) -> None if self._finished: return self._update("finished with status '%s'" % (final_status,)) @@ -399,6 +418,7 @@ def finish(self, final_status): @contextlib.contextmanager def open_spinner(message): + # type: (str) -> Iterator[SpinnerInterface] # Interactive spinner goes directly to sys.stdout rather than being routed # through the logging system, but it acts like it has level INFO, # i.e. it's only displayed if we're at level INFO or better. @@ -407,7 +427,8 @@ def open_spinner(message): if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: spinner = InteractiveSpinner(message) else: - spinner = NonInteractiveSpinner(message) + # https://github.com/python/mypy/issues/1174 + spinner = NonInteractiveSpinner(message) # type: ignore try: with hidden_cursor(sys.stdout): yield spinner diff --git a/src/pip/_internal/vcs/__init__.py b/src/pip/_internal/vcs/__init__.py index c3b4416a66d..f73e9486187 100644 --- a/src/pip/_internal/vcs/__init__.py +++ b/src/pip/_internal/vcs/__init__.py @@ -16,7 +16,9 @@ from pip._internal.utils.typing import MYPY_CHECK_RUNNING if MYPY_CHECK_RUNNING: - from typing import Dict, Optional, Tuple # noqa: F401 + from typing import (Dict, Optional, Tuple, # noqa: F401 + List, Type, Any, Mapping, Text) + from pip._internal.utils.ui import SpinnerInterface # noqa: F401 __all__ = ['vcs'] @@ -86,10 +88,11 @@ def make_new(self, rev): class VcsSupport(object): - _registry = {} # type: Dict[str, VersionControl] + _registry = {} # type: Dict[str, Type[VersionControl]] schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] def __init__(self): + # type: () -> None # Register more schemes with urlparse for various version control # systems urllib_parse.uses_netloc.extend(self.schemes) @@ -103,20 +106,24 @@ def __iter__(self): @property def backends(self): + # type: () -> List[Type[VersionControl]] return list(self._registry.values()) @property def dirnames(self): + # type: () -> List[str] return [backend.dirname for backend in self.backends] @property def all_schemes(self): - schemes = [] + # type: () -> List[str] + schemes = [] # type: List[str] for backend in self.backends: schemes.extend(backend.schemes) return schemes def register(self, cls): + # type: (Type[VersionControl]) -> None if not hasattr(cls, 'name'): logger.warning('Cannot register VCS %s', cls.__name__) return @@ -125,6 +132,7 @@ def register(self, cls): logger.debug('Registered VCS backend: %s', cls.name) def unregister(self, cls=None, name=None): + # type: (Optional[Type[VersionControl]], Optional[str]) -> None if name in self._registry: del self._registry[name] elif cls in self._registry.values(): @@ -133,6 +141,7 @@ def unregister(self, cls=None, name=None): logger.warning('Cannot unregister because no class or name given') def get_backend_type(self, location): + # type: (str) -> Optional[Type[VersionControl]] """ Return the type of the version control backend if found at given location, e.g. 
@@ -402,6 +411,7 @@ def obtain(self, dest):
         self.switch(dest, url, rev_options)

     def unpack(self, location):
+        # type: (str) -> None
         """
         Clean up current location and download the url repository
         (and vcs infos) into location
@@ -431,10 +441,17 @@ def get_revision(self, location):
         """
         raise NotImplementedError

-    def run_command(self, cmd, show_stdout=True, cwd=None,
-                    on_returncode='raise',
-                    command_desc=None,
-                    extra_environ=None, spinner=None):
+    def run_command(
+        self,
+        cmd,  # type: List[str]
+        show_stdout=True,  # type: bool
+        cwd=None,  # type: Optional[str]
+        on_returncode='raise',  # type: str
+        command_desc=None,  # type: Optional[str]
+        extra_environ=None,  # type: Optional[Mapping[str, Any]]
+        spinner=None  # type: Optional[SpinnerInterface]
+    ):
+        # type: (...) -> Optional[Text]
         """
         Run a VCS subcommand
         This is simply a wrapper around call_subprocess that adds the VCS
@@ -460,6 +477,7 @@ def run_command(self, cmd, show_stdout=True, cwd=None,

     @classmethod
     def is_repository_directory(cls, path):
+        # type: (str) -> bool
         """
         Return whether a directory path is a repository directory.
         """
@@ -469,6 +487,7 @@ def is_repository_directory(cls, path):

     @classmethod
     def controls_location(cls, location):
+        # type: (str) -> bool
         """
         Check if a location is controlled by the vcs.
         It is meant to be overridden to implement smarter detection
diff --git a/src/pip/_internal/wheel.py b/src/pip/_internal/wheel.py
index a7d7ce16233..be57be4d665 100644
--- a/src/pip/_internal/wheel.py
+++ b/src/pip/_internal/wheel.py
@@ -40,7 +40,13 @@
 from pip._internal.utils.ui import open_spinner

 if MYPY_CHECK_RUNNING:
-    from typing import Dict, List, Optional  # noqa: F401
+    from typing import (Dict, List, Optional,  # noqa: F401
+                        Sequence, Mapping, Tuple, IO, Text,
+                        Any, Union, Iterable)
+    from pip._vendor.packaging.requirements import Requirement  # noqa: F401
+
+    OutRow = Tuple[str, Union[str, Text], Union[str, int]]
+


 wheel_ext = '.whl'
@@ -51,6 +57,7 @@


 def rehash(path, blocksize=1 << 20):
+    # type: (str, int) -> Tuple[Text, int]
     """Return (hash, length) for path using hashlib.sha256()"""
     h = hashlib.sha256()
     length = 0
@@ -65,16 +72,18 @@ def rehash(path, blocksize=1 << 20):


 def open_for_csv(name, mode):
+    # type: (str, Text) -> IO
     if sys.version_info[0] < 3:
-        nl = {}
+        nl = {}  # type: Dict[str, Any]
         bin = 'b'
     else:
-        nl = {'newline': ''}
+        nl = {'newline': ''}  # type: Dict[str, Any]
         bin = ''
     return open(name, mode + bin, **nl)


 def replace_python_tag(wheelname, new_tag):
+    # type: (str, str) -> str
     """Replace the Python tag in a wheel file name with a new value.
     """
     parts = wheelname.split('-')
@@ -83,6 +92,7 @@ def replace_python_tag(wheelname, new_tag):


 def fix_script(path):
+    # type: (str) -> Optional[bool]
     """Replace #!python with #!/path/to/python
     Return True if file was changed."""
     # XXX RECORD hashes will need to be updated
@@ -98,6 +108,7 @@ def fix_script(path):
             script.write(firstline)
             script.write(rest)
         return True
+    return None


 dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>.+?))?)
@@ -105,6 +116,7 @@


 def root_is_purelib(name, wheeldir):
+    # type: (str, str) -> bool
     """
     Return True if the extracted wheel in wheeldir should go into purelib.
""" @@ -121,6 +133,7 @@ def root_is_purelib(name, wheeldir): def get_entrypoints(filename): + # type: (str) -> Tuple[Dict[str, str], Dict[str, str]] if not os.path.exists(filename): return {}, {} @@ -152,7 +165,7 @@ def _split_ep(s): def message_about_scripts_not_on_PATH(scripts): - # type: (List[str]) -> Optional[str] + # type: (Sequence[str]) -> Optional[str] """Determine if any scripts are not on PATH and format a warning. Returns a warning message if one or more scripts are not on PATH, @@ -213,6 +226,7 @@ def message_about_scripts_not_on_PATH(scripts): def sorted_outrows(outrows): + # type: (Iterable[OutRow]) -> List[OutRow] """ Return the given rows of a RECORD file in sorted order. @@ -232,9 +246,20 @@ def sorted_outrows(outrows): return sorted(outrows, key=lambda row: tuple(str(x) for x in row)) -def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, - pycompile=True, scheme=None, isolated=False, prefix=None, - warn_script_location=True): +def move_wheel_files( + name, # type: str + req, # type: Requirement + wheeldir, # type: str + user=False, # type: bool + home=None, # type: Optional[str] + root=None, # type: Optional[str] + pycompile=True, # type: bool + scheme=None, # type: Optional[Mapping[str, str]] + isolated=False, # type: bool + prefix=None, # type: Optional[str] + warn_script_location=True # type: bool +): + # type: (...) -> None """Install a wheel""" if not scheme: @@ -248,7 +273,7 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, else: lib_dir = scheme['platlib'] - info_dir = [] + info_dir = [] # type: List[str] data_dirs = [] source = wheeldir.rstrip(os.path.sep) + os.path.sep @@ -258,7 +283,7 @@ def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, # generated = files newly generated during the install (script wrappers) installed = {} changed = set() - generated = [] + generated = [] # type: List[str] # Compile all of the pyc files that we're going to be installing if pycompile: @@ -416,8 +441,9 @@ def _get_script_text(entry): "import_name": entry.suffix.split(".")[0], "func": entry.suffix, } - - maker._get_script_text = _get_script_text + # ignore type, because mypy disallows assigning to a method, + # see https://github.com/python/mypy/issues/2427 + maker._get_script_text = _get_script_text # type: ignore maker.script_template = r"""# -*- coding: utf-8 -*- import re import sys @@ -528,19 +554,20 @@ def _get_script_text(entry): with open_for_csv(temp_record, 'w+') as record_out: reader = csv.reader(record_in) writer = csv.writer(record_out) - outrows = [] + outrows = [] # type: List[OutRow] for row in reader: row[0] = installed.pop(row[0], row[0]) if row[0] in changed: - row[1], row[2] = rehash(row[0]) - outrows.append(tuple(row)) + row[1], row[2] = rehash(row[0]) # type: ignore + outrows.append(tuple(row)) # type: ignore for f in generated: digest, length = rehash(f) outrows.append((normpath(f, lib_dir), digest, length)) for f in installed: outrows.append((installed[f], '', '')) # Sort to simplify testing. - for row in sorted_outrows(outrows): + # https://github.com/python/mypy/issues/1174 + for row in sorted_outrows(outrows): # type: ignore writer.writerow(row) shutil.move(temp_record, record) diff --git a/tests/unit/test_pep425tags.py b/tests/unit/test_pep425tags.py index 9b2366c27b6..03dbac87fd3 100644 --- a/tests/unit/test_pep425tags.py +++ b/tests/unit/test_pep425tags.py @@ -1,7 +1,6 @@ import sys import pytest - from mock import patch from pip._internal import pep425tags