fix some mypy issues #3203

Open · wants to merge 12 commits into base: main
99 changes: 96 additions & 3 deletions conda-lock.yml
@@ -3,9 +3,9 @@ metadata:
- url: conda-forge
used_env_vars: []
content_hash:
linux-64: edc493578310dc74fcd551d5647c4dd82fccb14c36ea1174f0f1df543ba2131a
osx-64: 3c78f8ba137712dabf12aca8d5402b7881e7c94157dfbe0ee5c5d72e908631cf
osx-arm64: b7238ce176b67715c2ba4974d42237d44b3b6955b2b361b32d7a3f2f0339228f
linux-64: d21baa4b2a90d7bd3d1632c117577893c4556731405d1cae75b05fc21e4aefc7
osx-64: 5c868044d527dd312b291d26220f73cd464d0ab52a7987ab748573f97797de1f
osx-arm64: e1a25333db7b847041e4658a228dd71f378eb57f2a75427dbbc831a8e14351c8
platforms:
- osx-arm64
- linux-64
@@ -8568,6 +8568,99 @@ package:
platform: osx-arm64
url: https://conda.anaconda.org/conda-forge/noarch/msrest-0.6.21-pyh44b312d_0.tar.bz2
version: 0.6.21
- category: main
dependencies:
__glibc: '>=2.17,<3.0.a0'
libgcc: '>=13'
mypy_extensions: '>=1.0.0'
psutil: '>=4.0'
python: '>=3.11,<3.12.0a0'
python_abi: 3.11.*
typing_extensions: '>=4.1.0'
hash:
md5: 0111eaad55bea1e607d90d4f84089f74
sha256: 460f3eb43160dc9e9a1f2aeabdf8cd809aefcf776c33ffd155f9bd2420a005c5
manager: conda
name: mypy
optional: false
platform: linux-64
url:
https://conda.anaconda.org/conda-forge/linux-64/mypy-1.13.0-py311h9ecbd09_0.conda
version: 1.13.0
- category: main
dependencies:
__osx: '>=10.13'
mypy_extensions: '>=1.0.0'
psutil: '>=4.0'
python: '>=3.11,<3.12.0a0'
python_abi: 3.11.*
typing_extensions: '>=4.1.0'
hash:
md5: 83a6e2bb0ea4940434b4d1402c92d3c4
sha256: ddca5a4235df88b876769dc191a5a41a453d2983bcb56104c0821d252e1abbb4
manager: conda
name: mypy
optional: false
platform: osx-64
url: https://conda.anaconda.org/conda-forge/osx-64/mypy-1.13.0-py311h1314207_0.conda
version: 1.13.0
- category: main
dependencies:
__osx: '>=11.0'
mypy_extensions: '>=1.0.0'
psutil: '>=4.0'
python: '>=3.11,<3.12.0a0'
python_abi: 3.11.*
typing_extensions: '>=4.1.0'
hash:
md5: 8b3f3c83db062e4970ba7a04890c83d5
sha256: 23c4fec92c5926baf3fe6ca8c0ad8b3d8ada66786d5b697d6e641d8885ac8ac6
manager: conda
name: mypy
optional: false
platform: osx-arm64
url:
https://conda.anaconda.org/conda-forge/osx-arm64/mypy-1.13.0-py311hae2e1ce_0.conda
version: 1.13.0
- category: main
dependencies:
python: '>=3.5'
hash:
md5: 4eccaeba205f0aed9ac3a9ea58568ca3
sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3
manager: conda
name: mypy_extensions
optional: false
platform: linux-64
url:
https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda
version: 1.0.0
- category: main
dependencies:
python: '>=3.5'
hash:
md5: 4eccaeba205f0aed9ac3a9ea58568ca3
sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3
manager: conda
name: mypy_extensions
optional: false
platform: osx-64
url:
https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda
version: 1.0.0
- category: main
dependencies:
python: '>=3.5'
hash:
md5: 4eccaeba205f0aed9ac3a9ea58568ca3
sha256: f240217476e148e825420c6bc3a0c0efb08c0718b7042fae960400c02af858a3
manager: conda
name: mypy_extensions
optional: false
platform: osx-arm64
url:
https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda
version: 1.0.0
- category: main
dependencies:
__glibc: '>=2.17,<3.0.a0'
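The lockfile additions above pin mypy 1.13.0 (platform-specific builds for linux-64, osx-64, and osx-arm64 against Python 3.11) plus the noarch mypy_extensions 1.0.0 it requires. As a minimal, hedged sketch of driving the newly locked checker from Python rather than the shell, the snippet below uses mypy's documented `mypy.api.run` entry point; the target package path and the flags are illustrative assumptions, not settings taken from this repository.

```python
# Illustrative sketch only: invoke mypy programmatically via its public API.
# The package path and flags are assumptions, not this repo's configuration.
from mypy import api


def run_type_check(target: str = "conda_forge_tick") -> int:
    stdout, stderr, exit_status = api.run(["--python-version", "3.11", target])
    if stdout:
        print(stdout, end="")
    if stderr:
        print(stderr, end="")
    return exit_status


if __name__ == "__main__":
    raise SystemExit(run_type_check())
```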
93 changes: 52 additions & 41 deletions conda_forge_tick/auto_tick.py
@@ -9,7 +9,7 @@
import traceback
import typing
from dataclasses import dataclass
from typing import Literal, cast
from typing import Any, Literal, cast
from urllib.error import URLError
from uuid import uuid4

@@ -70,12 +70,15 @@
)

from .migrators_types import MigrationUidTypedDict
from .models.pr_json import PullRequestData, PullRequestInfoSpecial, PullRequestState
from .models.pr_json import (
PullRequestData,
PullRequestInfoSpecial,
PullRequestState,
)

logger = logging.getLogger(__name__)

BOT_HOME_DIR: str = os.getcwd()
START_TIME = None
TIMEOUT = int(os.environ.get("TIMEOUT", 600))

# migrator runs on loop so avoid any seeds at current time should that happen
@@ -424,9 +427,9 @@

def get_spoofed_closed_pr_info() -> PullRequestInfoSpecial:
return PullRequestInfoSpecial(
id=str(uuid4()),
id=uuid4(),
merged_at="never issued",
state="closed",
state=PullRequestState.CLOSED,
)
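
A minimal, self-contained analog (a plain dataclass, not the bot's actual pydantic models in conda_forge_tick/models/pr_json.py) of why the call above now passes `uuid4()` and `PullRequestState.CLOSED` instead of `str(uuid4())` and `"closed"`: when the declared field types are a UUID and an enum member, mypy reports bare strings as incompatible argument types.

```python
# Stand-in types for illustration; the field names mirror the diff above,
# but the real models are richer pydantic classes.
from dataclasses import dataclass
from enum import Enum
from uuid import UUID, uuid4


class PullRequestState(Enum):
    OPEN = "open"
    CLOSED = "closed"


@dataclass
class PullRequestInfoSpecial:
    id: UUID
    merged_at: str
    state: PullRequestState


spoofed = PullRequestInfoSpecial(
    id=uuid4(),
    merged_at="never issued",
    state=PullRequestState.CLOSED,
)
# The previous arguments would be flagged, for example:
#   PullRequestInfoSpecial(id=str(uuid4()), merged_at="never issued", state="closed")
#   error: Argument "id" ... has incompatible type "str"; expected "UUID"
```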


@@ -437,7 +440,10 @@
rerender: bool = True,
base_branch: str = "main",
**kwargs: typing.Any,
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
) -> (
tuple[MigrationUidTypedDict, LazyJson | Literal[False]]
| tuple[Literal[False], Literal[False]]
):
"""
For a given feedstock and migration, run the migration in a temporary directory that will be deleted after the
migration is complete.
@@ -465,7 +471,10 @@
rerender: bool = True,
base_branch: str = "main",
**kwargs: typing.Any,
) -> tuple[MigrationUidTypedDict, dict] | tuple[Literal[False], Literal[False]]:
) -> (
tuple[MigrationUidTypedDict, LazyJson | Literal[False]]
| tuple[Literal[False], Literal[False]]
):
"""For a given feedstock and migration run the migration

Parameters
@@ -601,19 +610,25 @@
and pr_data.state != PullRequestState.CLOSED
and rerender_info.rerender_comment
):
if pr_data.number is None:
raise ValueError(
f"Unexpected GitHub API response: PR number is missing for PR ID {pr_data.id}."
)
git_backend.comment_on_pull_request(
repo_owner=context.git_repo_owner,
repo_name=context.git_repo_name,
pr_number=pr_data.number,
comment=rerender_info.rerender_comment,
)

pr_lazy_json: LazyJson | Literal[False]
if pr_data:
pr_lazy_json = LazyJson(
pr_lazy_json_present = LazyJson(
os.path.join("pr_json", f"{pr_data.id}.json"),
)
with pr_lazy_json as __edit_pr_lazy_json:
with pr_lazy_json_present as __edit_pr_lazy_json:
__edit_pr_lazy_json.update(**pr_data.model_dump(mode="json"))
pr_lazy_json = pr_lazy_json_present
else:
pr_lazy_json = False

@@ -624,7 +639,10 @@
context.attrs, migrator_name, is_version=is_version_migration
)

return migration_run_data["migrate_return_value"], pr_lazy_json
migrate_return_value: MigrationUidTypedDict = migration_run_data[
"migrate_return_value"
]
return migrate_return_value, pr_lazy_json


def _compute_time_per_migrator(migrators):
@@ -633,8 +651,8 @@
for migrator in tqdm.tqdm(migrators, ncols=80, desc="computing time per migrator"):
if isinstance(migrator, Version):
_num_nodes = 0
for node_name in migrator.effective_graph.nodes:
with migrator.effective_graph.nodes[node_name]["payload"] as attrs:
for node_name in migrator.effective_graph.nodes: # type: ignore[union-attr] # TODO: effective_graph can be None
with migrator.effective_graph.nodes[node_name]["payload"] as attrs: # type: ignore[union-attr] # TODO: effective_graph can be None
with attrs["version_pr_info"] as vpri:
_attempts = vpri.get("new_version_attempts", {}).get(
vpri.get("new_version", ""),
@@ -644,7 +662,7 @@
_num_nodes += 1
_num_nodes = max(
_num_nodes,
min(PR_LIMIT * 4, len(migrator.effective_graph.nodes)),
min(PR_LIMIT * 4, len(migrator.effective_graph.nodes)), # type: ignore[union-attr] # TODO: effective_graph can be None
)
num_nodes.append(_num_nodes)
else:
@@ -684,23 +702,6 @@
return num_nodes, time_per_migrator, tot_time_per_migrator


def _over_time_limit():
_now = time.time()
print(
"""\

=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>
=~> elpased time %ds (timeout %ds)
=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>=~>

"""
% (_now - START_TIME, TIMEOUT),
flush=True,
end="",
)
return _now - START_TIME > TIMEOUT


def _run_migrator_on_feedstock_branch(
attrs,
base_branch,
@@ -730,15 +731,15 @@
# if migration successful
if migrator_uid:
with attrs["pr_info"] as pri:
d = frozen_to_json_friendly(migrator_uid)
d: Any = frozen_to_json_friendly(migrator_uid)
# if we have the PR already do nothing
if d["data"] in [
existing_pr["data"] for existing_pr in pri.get("PRed", [])
]:
pass
else:
if not pr_json:
pr_json = {
pr_json = { # type: ignore[assignment] # TODO: incompatible with LazyJson
"state": "closed",
"head": {
"ref": "<this_is_not_a_branch>",
@@ -847,15 +848,15 @@
return good_prs, break_loop


def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit):
def _is_migrator_done(_mg_start, good_prs, time_per, pr_limit, start_time: float):
curr_time = time.time()
backend = github_backend()
api_req = backend.get_api_requests_left()

if curr_time - START_TIME > TIMEOUT:
if curr_time - start_time > TIMEOUT:
logger.info(
"BOT TIMEOUT: breaking after %d seconds (limit %d)",
curr_time - START_TIME,
curr_time - start_time,
TIMEOUT,
)
return True
@@ -885,7 +886,9 @@
return False


def _run_migrator(migrator, mctx, temp, time_per, git_backend: GitPlatformBackend):
def _run_migrator(
migrator, mctx, temp, time_per, git_backend: GitPlatformBackend, start_time: float
):
_mg_start = time.time()

migrator_name = get_migrator_name(migrator)
@@ -939,7 +942,9 @@
flush=True,
)

if _is_migrator_done(_mg_start, good_prs, time_per, migrator.pr_limit):
if _is_migrator_done(
_mg_start, good_prs, time_per, migrator.pr_limit, start_time
):
return 0

for node_name in possible_nodes:
@@ -956,7 +961,9 @@
):
# Don't let CI timeout, break ahead of the timeout so we make certain
# to write to the repo
if _is_migrator_done(_mg_start, good_prs, time_per, migrator.pr_limit):
if _is_migrator_done(
_mg_start, good_prs, time_per, migrator.pr_limit, start_time
):
break

base_branches = migrator.get_possible_feedstock_branches(attrs)
@@ -1201,8 +1208,7 @@


def main(ctx: CliContext) -> None:
global START_TIME
START_TIME = time.time()
start_time = time.time()

_setup_limits()
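
Threading `start_time` from main() into _run_migrator and _is_migrator_done (and dropping the module-level START_TIME together with the old _over_time_limit helper) removes a global that mypy could only see as None at module scope. A short sketch of the pattern under those assumptions; only TIMEOUT mirrors the module, the other names are placeholders:

```python
# Sketch of passing the start timestamp explicitly instead of mutating a
# module-level global; function names are placeholders.
import os
import time

TIMEOUT = int(os.environ.get("TIMEOUT", "600"))


def is_done(start_time: float, timeout: int = TIMEOUT) -> bool:
    # start_time is always a float here, so there is no Optional to narrow.
    return time.time() - start_time > timeout


def main() -> None:
    start_time = time.time()
    while not is_done(start_time):
        ...  # run one unit of work, then re-check the elapsed time
```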

@@ -1260,7 +1266,12 @@

for mg_ind, migrator in enumerate(migrators):
good_prs = _run_migrator(
migrator, mctx, temp, time_per_migrator[mg_ind], git_backend
migrator,
mctx,
temp,
time_per_migrator[mg_ind],
git_backend,
start_time=start_time,
)
if good_prs > 0:
pass
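One recurring change in this file is the spelled-out return type of run_with_tmpdir and run, `tuple[MigrationUidTypedDict, LazyJson | Literal[False]] | tuple[Literal[False], Literal[False]]`. The sketch below is a reduced, self-contained illustration of how a call site can narrow that union; Uid and Payload are stand-ins for MigrationUidTypedDict and LazyJson, and the handler function is hypothetical:

```python
# Stand-in types; only the union shape mirrors the real signature.
from typing import Literal, TypedDict


class Uid(TypedDict):
    migrator_name: str


class Payload:  # stand-in for LazyJson
    pass


def handle(
    result: tuple[Uid, Payload | Literal[False]]
    | tuple[Literal[False], Literal[False]],
) -> None:
    uid, pr_json = result
    if uid:
        # Truthiness removes Literal[False], so uid is a Uid for mypy here.
        print(uid["migrator_name"])
        if isinstance(pr_json, Payload):
            # The second element is narrowed separately; mypy does not
            # correlate the two tuple members after unpacking.
            print(type(pr_json).__name__)
    else:
        pass  # the (False, False) branch: the migration produced nothing
```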
18 changes: 12 additions & 6 deletions conda_forge_tick/container_cli.py
@@ -199,9 +199,11 @@
logger = logging.getLogger("conda_forge_tick.container")

with tempfile.TemporaryDirectory() as tmpdir:
input_fs_dir = glob.glob("/cf_feedstock_ops_dir/*-feedstock")
assert len(input_fs_dir) == 1, f"expected one feedstock, got {input_fs_dir}"
input_fs_dir = input_fs_dir[0]
input_fs_dir_list = glob.glob("/cf_feedstock_ops_dir/*-feedstock")
assert (
len(input_fs_dir_list) == 1
), f"expected one feedstock, got {input_fs_dir_list}"
input_fs_dir = input_fs_dir_list[0]
logger.debug(
f"input container feedstock dir {input_fs_dir}: {os.listdir(input_fs_dir)}"
)
@@ -253,9 +255,11 @@
logger = logging.getLogger("conda_forge_tick.container")

with tempfile.TemporaryDirectory() as tmpdir:
input_fs_dir = glob.glob("/cf_feedstock_ops_dir/*-feedstock")
assert len(input_fs_dir) == 1, f"expected one feedstock, got {input_fs_dir}"
input_fs_dir = input_fs_dir[0]
input_fs_dir_list = glob.glob("/cf_feedstock_ops_dir/*-feedstock")
assert (
len(input_fs_dir_list) == 1
), f"expected one feedstock, got {input_fs_dir_list}"
input_fs_dir = input_fs_dir_list[0]
logger.debug(
f"input container feedstock dir {input_fs_dir}: {os.listdir(input_fs_dir)}"
)
@@ -472,12 +476,14 @@
"--cbc-path", type=str, default=None, help="The path to global pinning file."
)
def parse_recipe_yaml(
log_level,
for_pinning,
platform_arch,
cbc_path,
):
return _run_bot_task(
_parse_recipe_yaml,
log_level=log_level,
existing_feedstock_node_attrs=None,
for_pinning=for_pinning,
platform_arch=platform_arch,
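Both hunks in this file apply the same fix: glob.glob returns a list, and reusing one variable name for the list and then for its single element gave that name two incompatible types. Binding the list to input_fs_dir_list, asserting its length, and indexing afterwards keeps each name at one type. A reduced sketch of the idiom; the wrapper function is hypothetical, and only the glob pattern and assert message come from the diff:

```python
# Hypothetical helper illustrating the list-vs-element split used above.
import glob


def find_feedstock_dir(pattern: str = "/cf_feedstock_ops_dir/*-feedstock") -> str:
    matches: list[str] = glob.glob(pattern)
    assert len(matches) == 1, f"expected one feedstock, got {matches}"
    return matches[0]  # always a str, so callers see a single, stable type
```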