Mark more flaky tests #10889

Merged (7 commits, Oct 6, 2023)
8 changes: 4 additions & 4 deletions .github/workflows/python-tests.yaml
@@ -444,7 +444,7 @@ jobs:
if: github.ref == 'refs/heads/main' && failure()
runs-on: ubuntu-latest
env:
- FAILURE_THRESHOLD: 4
+ FAILURE_THRESHOLD: 1
steps:
- name: Download all failure flags
uses: actions/download-artifact@v3
@@ -468,10 +468,10 @@ jobs:
if: ${{ steps.check_failure.outputs.too_many_tests_failed == 'true' }}
uses: 8398a7/action-slack@v3
with:
- author_name: Prefect OSS Unit Tests
- channel: C032SBRHBSN # This is #engineering-review
+ author_name: Prefect OSS Tests Failing on Main
+ channel: CBH18KG8G # This is #engineering
fields: message,commit,author,workflowRun
status: failure
text: "Too many unit test jobs failing: ${{ steps.check_failure.outputs.failure_count }} exceeds threshold of ${{ env.FAILURE_THRESHOLD }} jobs :oh-noo:"
text: ":warning: Unit tests are failing in Prefect's main branch. Commit author: please mark the failing tests as flaky. If they are already marked, delete them, open a GH issue, and assign it to Andrew Brookins."
env:
SLACK_WEBHOOK_URL: ${{ secrets.ENGINEERING_REVIEW_SLACK_WEBHOOK_URL }}
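The Slack step above fires when a separate check_failure step reports too_many_tests_failed, comparing the number of downloaded failure flags against FAILURE_THRESHOLD (now lowered to 1, so a single failing job is enough to notify). That step is not part of this diff; the sketch below is a hypothetical version of such a check, and the flag-file layout and exact comparison are assumptions rather than the repository's actual script.

# Hypothetical sketch of a "check_failure"-style step; not the real workflow code.
import os
from pathlib import Path

threshold = int(os.environ.get("FAILURE_THRESHOLD", "1"))

# Assume each failed test job uploaded one flag file via actions/upload-artifact.
failure_count = len(list(Path("failure-flags").glob("*")))
too_many = failure_count >= threshold

# Expose step outputs the way a GitHub Actions script step would.
with open(os.environ["GITHUB_OUTPUT"], "a") as fh:
    fh.write(f"failure_count={failure_count}\n")
    fh.write(f"too_many_tests_failed={'true' if too_many else 'false'}\n")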
8 changes: 8 additions & 0 deletions tests/cli/test_agent.py
@@ -1,12 +1,20 @@
from unittest.mock import ANY

import pytest

import prefect.cli.agent
from prefect import PrefectClient
from prefect.settings import PREFECT_AGENT_PREFETCH_SECONDS, temporary_settings
from prefect.testing.cli import invoke_and_assert
from prefect.testing.utilities import MagicMock
from prefect.utilities.asyncutils import run_sync_in_worker_thread

# All tests that invoke invoke_and_assert() can end up running our CLI command
# coroutines off the main thread. If the CLI command calls
# forward_signal_handler(), which prefect.cli.agent.start does, the test run
# will fail because only the main thread can attach signal handlers.
pytestmark = pytest.mark.flaky(max_runs=2)


def test_start_agent_with_no_args():
invoke_and_assert(
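The comment added above refers to a CPython restriction: signal handlers can only be installed from the interpreter's main thread, so signal.signal() raises ValueError anywhere else. A minimal standalone illustration of that failure mode (not part of this PR):

import signal
import threading

def install_handler():
    # signal.signal() may only be called from the main thread; a worker
    # thread attempting it gets a ValueError.
    try:
        signal.signal(signal.SIGTERM, lambda signum, frame: None)
        print("handler installed")
    except ValueError as exc:
        print(f"could not install handler: {exc}")

install_handler()  # succeeds: this runs on the main thread

worker = threading.Thread(target=install_handler)  # prints the ValueError
worker.start()
worker.join()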
1 change: 1 addition & 0 deletions tests/cli/test_cloud.py
@@ -425,6 +425,7 @@ def test_login_with_interactive_key_multiple_workspaces(respx_mock):


@pytest.mark.usefixtures("interactive_console")
@pytest.mark.flaky(max_runs=2)
def test_login_with_browser_single_workspace(respx_mock, mock_webbrowser):
foo_workspace = gen_test_workspace(account_handle="test", workspace_handle="foo")

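The max_runs keyword used in these markers matches the flaky pytest plugin, where it caps the total number of attempts before a test is reported as failed. A module-level pytestmark assignment (as in test_agent.py above) applies the marker to every test in the file, while the decorator form targets a single test. A small usage sketch, illustrative only and not taken from this PR:

import pytest

# Module-level marker: every test in this file gets up to two attempts.
pytestmark = pytest.mark.flaky(max_runs=2)


@pytest.mark.flaky(max_runs=3)
def test_occasionally_flaky():
    # One or two extra attempts are usually enough to absorb transient
    # failures such as timing jitter on a busy CI runner.
    ...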
6 changes: 6 additions & 0 deletions tests/cli/test_server.py
@@ -10,6 +10,12 @@
from prefect.testing.cli import invoke_and_assert
from prefect.testing.utilities import AsyncMock

# All tests that invoke invoke_and_assert() can end up running our CLI command
# coroutines off the main thread. If the CLI command calls
# forward_signal_handler(), which prefect.cli.server.start does, the test run
# will fail because only the main thread can attach signal handlers.
pytestmark = pytest.mark.flaky(max_runs=2)


@pytest.fixture
def mock_run_process(monkeypatch: pytest.MonkeyPatch):
8 changes: 8 additions & 0 deletions tests/cli/test_worker.py
@@ -27,6 +27,12 @@
from prefect.utilities.processutils import open_process
from prefect.workers.base import BaseJobConfiguration, BaseWorker

# All tests that invoke invoke_and_assert() can end up running our CLI command
# coroutines off the main thread. If the CLI command calls
# forward_signal_handler(), which prefect.cli.worker.start does, the test run
# will fail because only the main thread can attach signal handlers.
pytestmark = pytest.mark.flaky(max_runs=2)


class MockKubernetesWorker(BaseWorker):
type = "kubernetes"
@@ -732,6 +738,7 @@ class TestWorkerSignalForwarding:
sys.platform == "win32",
reason="SIGTERM is only used in non-Windows environments",
)
@pytest.mark.flaky(max_runs=2)
async def test_sigint_sends_sigterm(self, worker_process):
worker_process.send_signal(signal.SIGINT)
await safe_shutdown(worker_process)
@@ -751,6 +758,7 @@ async def test_sigint_sends_sigterm(self, worker_process):
sys.platform == "win32",
reason="SIGTERM is only used in non-Windows environments",
)
@pytest.mark.flaky(max_runs=2)
async def test_sigterm_sends_sigterm_directly(self, worker_process):
worker_process.send_signal(signal.SIGTERM)
await safe_shutdown(worker_process)
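The two signal tests above send SIGINT or SIGTERM to a running worker process and assert that it shuts down cleanly. The pattern being exercised is signal forwarding: a parent CLI process translates an incoming SIGINT into a graceful SIGTERM for its child. The sketch below is a generic, POSIX-only illustration of that pattern, not Prefect's forward_signal_handler implementation:

import os
import signal
import subprocess
import sys

# A long-running child process standing in for the worker.
child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(60)"])

def forward_as_sigterm(signum, frame):
    # Translate SIGINT received by this (parent) process into SIGTERM
    # for the child so it can shut down gracefully.
    child.send_signal(signal.SIGTERM)

signal.signal(signal.SIGINT, forward_as_sigterm)

# Simulate what the tests do: deliver SIGINT to the parent, then confirm
# the child exited because it received SIGTERM (return code -15 on POSIX).
os.kill(os.getpid(), signal.SIGINT)
print("child exit code:", child.wait())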
1 change: 1 addition & 0 deletions tests/docker/test_image_builds.py
@@ -118,6 +118,7 @@ def test_requires_real_dockerfile(contexts: Path):
("missing-file", "COPY failed"),
],
)
@pytest.mark.flaky(max_runs=3)
def test_raises_exception_on_bad_base_image(
contexts: Path, example_context: str, expected_error: str
):
2 changes: 2 additions & 0 deletions tests/server/models/test_orm.py
@@ -417,6 +417,7 @@ async def test_flow_run_estimated_run_time_matches_total_run_time(

assert result.scalar() == pendulum.duration(seconds=3)

@pytest.mark.flaky(max_runs=3)
async def test_flow_run_estimated_run_time_includes_current_run(
self, session, flow, db
):
@@ -609,6 +610,7 @@ async def test_flow_run_lateness_when_scheduled(self, session, flow, db):
< pendulum.duration(seconds=61)
)

@pytest.mark.flaky(max_runs=2)
async def test_flow_run_lateness_when_pending(self, session, flow, db):
dt = pendulum.now("UTC").subtract(minutes=1)
fr = await models.flow_runs.create_flow_run(
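The ORM tests above assert on elapsed wall-clock time (estimated run time and lateness windows), which drifts when the CI runner is under load, hence the retry markers. A generic illustration of that failure mode, unrelated to Prefect's models:

import time

start = time.monotonic()
time.sleep(1)  # stands in for the operation being timed
elapsed = time.monotonic() - start

# A tight bound such as `elapsed < 1.01` can fail on a busy runner because
# sleep() may oversleep; a looser bound, or a flaky retry, absorbs the jitter.
assert elapsed < 1.5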