From 499a3a656270aafef3760960362761499ff626f7 Mon Sep 17 00:00:00 2001 From: rnetser Date: Sun, 28 Dec 2025 16:22:36 +0200 Subject: [PATCH 01/34] feat: re-trigger CI checks for out-of-date PRs when base branch is updated Implements automatic re-triggering of GitHub check suites when a PR's base branch receives new commits, helping keep CI results current and preventing merges of stale PRs with outdated test results. Key changes: - Add 'retrigger-checks-on-base-push' config option (default: true) in schema.yaml - Implement clean separation of concerns in pull_request_handler.py: * label_pull_request_by_merge_state() - ONLY handles PR labeling * _retrigger_check_suites_for_pr() - handles check suite re-requesting * retrigger_checks_for_out_of_date_prs_on_push() - coordinates labeling and check triggering - Update push_handler.py to delegate PR processing to PullRequestHandler - Add retrigger_checks_on_base_push attribute to GithubWebhook in github_api.py - Add comprehensive tests verifying the separated design and behavior The implementation checks each open PR targeting the updated branch and re-triggers check suites only for PRs with merge_state="behind", ensuring CI validation against the latest base branch commits. 
--- webhook_server/config/schema.yaml | 8 + webhook_server/libs/github_api.py | 3 + .../libs/handlers/pull_request_handler.py | 86 ++++++++ webhook_server/libs/handlers/push_handler.py | 27 ++- .../tests/test_pull_request_handler.py | 193 ++++++++++++++++++ webhook_server/tests/test_push_handler.py | 17 +- 6 files changed, 326 insertions(+), 8 deletions(-) diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index c63156aa..fb610ca6 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -83,6 +83,10 @@ properties: type: boolean description: Create a tracking issue for new pull requests (global default) default: true + retrigger-checks-on-base-push: + type: boolean + description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch (global default) + default: true pr-size-thresholds: type: object @@ -299,6 +303,10 @@ properties: type: boolean description: Create a tracking issue for new pull requests default: true + retrigger-checks-on-base-push: + type: boolean + description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch + default: true pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 41c6493e..6d309371 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -709,6 +709,9 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: ) self.mask_sensitive = self.config.get_value("mask-sensitive-data", return_on_none=True) + self.retrigger_checks_on_base_push: bool = self.config.get_value( + value="retrigger-checks-on-base-push", return_on_none=True, extra_dict=repository_config + ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: if number: diff --git 
a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 126b4671..88bac55d 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -423,6 +423,48 @@ async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None: self.logger.info(f"{self.log_prefix} check label pull request after merge") await self.label_pull_request_by_merge_state(pull_request=pull_request) + async def retrigger_checks_for_out_of_date_prs_on_push(self, base_branch: str) -> None: + """Re-trigger checks for PRs that are out-of-date after a push to base branch.""" + if not self.github_webhook.retrigger_checks_on_base_push: + self.logger.debug(f"{self.log_prefix} Retrigger checks on base push is disabled") + return + + # Check rate limit before proceeding + rate_limit = await asyncio.to_thread(self.github_webhook.github_api.get_rate_limit) + remaining = rate_limit.core.remaining + if remaining < 100: + self.logger.warning(f"{self.log_prefix} Rate limit too low ({remaining}), skipping") + return + + # Get PRs targeting this branch + pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open", base=base_branch))) + + if not pulls: + self.logger.info(f"{self.log_prefix} No open PRs targeting {base_branch}") + return + + self.logger.info(f"{self.log_prefix} Found {len(pulls)} open PR(s) targeting {base_branch}") + + # Process each PR: label and retrigger if behind + for pr in pulls: + try: + # Label PR based on merge state + await self.label_pull_request_by_merge_state(pull_request=pr) + + # Check if PR is behind and retrigger checks + merge_state = await asyncio.to_thread(lambda p=pr: p.mergeable_state) + if merge_state == "behind": + await self._retrigger_check_suites_for_pr(pull_request=pr) + + except Exception: + pr_number = await asyncio.to_thread(lambda p=pr: p.number) + self.logger.exception(f"{self.log_prefix} Failed to process 
PR #{pr_number} for retrigger") + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'completed')} " + f"Completed retrigger checks for {len(pulls)} PR(s) targeting {base_branch}", + ) + async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('tag_deletion', 'pr_management', 'processing')} " @@ -865,7 +907,9 @@ async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) self.logger.error(f"{self.log_prefix} Async task failed: {result}") async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> None: + """Label PR based on merge state (needs-rebase, has-conflicts).""" merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state) + self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}") if merge_state == "unknown": return @@ -880,6 +924,48 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> else: await self.labels_handler._remove_label(pull_request=pull_request, label=HAS_CONFLICTS_LABEL_STR) + async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: + """Re-trigger check suites for a single PR.""" + try: + pr_number = await asyncio.to_thread(lambda: pull_request.number) + head_sha = await asyncio.to_thread(lambda: pull_request.head.sha) + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " + f"Re-triggering checks for out-of-date PR #{pr_number}", + ) + + # Get check suites + commit = await asyncio.to_thread(self.repository.get_commit, head_sha) + check_suites = await asyncio.to_thread(lambda c=commit: list(c.get_check_suites())) + + if not check_suites: + self.logger.debug(f"{self.log_prefix} No check suites found for PR #{pr_number}") + return 
+ + owner = self.github_webhook.hook_data["repository"]["owner"]["login"] + repo = self.repository.name + + for suite in check_suites: + try: + suite_id = await asyncio.to_thread(lambda s=suite: s.id) + url = f"/repos/{owner}/{repo}/check-suites/{suite_id}/rerequest" + + await asyncio.to_thread( + self.github_webhook.github_api.requester.requestJsonAndCheck, + "POST", + url, + ) + + self.logger.info( + f"{self.log_prefix} Successfully re-requested check suite {suite_id} for PR #{pr_number}" + ) + except Exception: + self.logger.exception(f"{self.log_prefix} Failed to re-request check suite for PR #{pr_number}") + + except Exception: + self.logger.exception(f"{self.log_prefix} Failed to retrigger checks for PR") + async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: return diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 8ff09873..eb72e0e0 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -5,6 +5,7 @@ from github.Repository import Repository from webhook_server.libs.handlers.check_run_handler import CheckRunHandler +from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler from webhook_server.libs.handlers.runner_handler import RunnerHandler from webhook_server.utils.helpers import format_task_fields, run_command from webhook_server.utils.notification_utils import send_slack_message @@ -69,10 +70,28 @@ async def process_push_webhook_data(self) -> None: ) self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}") else: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " - f"Non-tag push detected, skipping processing", - ) + # Branch push - check for out-of-date PRs + branch_match = re.search(r"^refs/heads/(.+)$", 
self.hook_data["ref"]) + if branch_match: + branch_name = branch_match.group(1) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " + f"Branch push detected: {branch_name}", + ) + # Create PullRequestHandler to reuse existing logic + pr_handler = PullRequestHandler(github_webhook=self.github_webhook, owners_file_handler=None) + await pr_handler.retrigger_checks_for_out_of_date_prs_on_push(branch_name) + + # Log completion + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " + f"Branch push processing completed for {branch_name}", + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " + f"Non-branch/non-tag push detected, skipping processing", + ) async def upload_to_pypi(self, tag_name: str) -> None: async def _issue_on_error(_error: str) -> None: diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 93e1a3bd..c9bc8866 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1782,3 +1782,196 @@ async def test_delete_registry_tag_via_regctl_failure( pull_request_handler.logger.error.assert_called_with( "[TEST] Failed to delete tag: tag. OUT:Delete failed. 
ERR:Error" ) + + @pytest.mark.asyncio + async def test_label_pull_request_by_merge_state_only_labels( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test label_pull_request_by_merge_state only handles labeling, not check triggering.""" + mock_pull_request.mergeable_state = "behind" + + with ( + patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label, + patch.object( + pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock + ) as mock_retrigger, + ): + await pull_request_handler.label_pull_request_by_merge_state(pull_request=mock_pull_request) + + # Verify labeling happened + mock_add_label.assert_called_once_with(pull_request=mock_pull_request, label=NEEDS_REBASE_LABEL_STR) + # Verify check triggering did NOT happen (separation of concerns) + mock_retrigger.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_success( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr successfully re-requests check suites.""" + mock_pull_request.number = 123 + mock_pull_request.head.sha = "abc123" + mock_github_webhook.hook_data = {"repository": {"owner": {"login": "test-owner"}}} + + mock_commit = Mock() + mock_suite = Mock() + mock_suite.id = 456 + + call_count = 0 + + async def mock_to_thread_side_effect(func, *args, **kwargs): + nonlocal call_count + call_count += 1 + + # First call: pr.number + if call_count == 1: + return func() + # Second call: pr.head.sha + if call_count == 2: + return func() + # Third call: repository.get_commit + if call_count == 3: + return mock_commit + # Fourth call: commit.get_check_suites + if call_count == 4: + return func() + # Fifth call: suite.id + if call_count == 5: + return func() + # Sixth call: requester.requestJsonAndCheck + if call_count == 6: + return None + + return None + + 
mock_commit.get_check_suites.return_value = [mock_suite] + + with patch("asyncio.to_thread", side_effect=mock_to_thread_side_effect): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + assert call_count == 6 + pull_request_handler.logger.info.assert_called_with( + "[TEST] Successfully re-requested check suite 456 for PR #123" + ) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_no_check_suites( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr when PR has no check suites.""" + mock_pull_request.number = 123 + mock_pull_request.head.sha = "abc123" + + mock_commit = Mock() + mock_commit.get_check_suites.return_value = [] + + with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): + with patch.object(pull_request_handler.repository, "get_commit", return_value=mock_commit): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + pull_request_handler.logger.debug.assert_called_with("[TEST] No check suites found for PR #123") + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_exception( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr handles exceptions gracefully.""" + + async def mock_to_thread_side_effect(func, *args, **kwargs): + raise Exception("API Error") + + with patch("asyncio.to_thread", side_effect=mock_to_thread_side_effect): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + pull_request_handler.logger.exception.assert_called_with("[TEST] Failed to retrigger checks for PR") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_out_of_date_prs_on_push_disabled( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock + ) -> None: + """Test retrigger_checks_for_out_of_date_prs_on_push when feature is 
disabled.""" + mock_github_webhook.retrigger_checks_on_base_push = False + + await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") + + pull_request_handler.logger.debug.assert_called_with("[TEST] Retrigger checks on base push is disabled") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_out_of_date_prs_on_push_no_prs( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock + ) -> None: + """Test retrigger_checks_for_out_of_date_prs_on_push with no open PRs.""" + mock_github_webhook.retrigger_checks_on_base_push = True + mock_github_webhook.github_api = Mock() + + mock_rate_limit = Mock() + mock_rate_limit.core.remaining = 5000 + mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit + + with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): + with patch.object(pull_request_handler.repository, "get_pulls", return_value=[]): + await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") + + pull_request_handler.logger.info.assert_called_with("[TEST] No open PRs targeting main") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_out_of_date_prs_on_push_success( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test retrigger_checks_for_out_of_date_prs_on_push successfully processes PRs. + + Verifies: + 1. Labels each PR based on merge state + 2. Re-triggers checks only for PRs with merge_state="behind" + 3. 
Processes PRs sequentially (simple loop) + """ + mock_github_webhook.retrigger_checks_on_base_push = True + mock_github_webhook.github_api = Mock() + + mock_rate_limit = Mock() + mock_rate_limit.core.remaining = 5000 + mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit + + # Create PRs with different merge states + mock_pr1 = Mock() + mock_pr1.number = 1 + mock_pr1.mergeable_state = "behind" # Should trigger checks + mock_pr2 = Mock() + mock_pr2.number = 2 + mock_pr2.mergeable_state = "clean" # Should NOT trigger checks + + with ( + patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), + patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]), + patch.object( + pull_request_handler, "label_pull_request_by_merge_state", new_callable=AsyncMock + ) as mock_label, + patch.object( + pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock + ) as mock_retrigger, + ): + await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") + + # Verify labeling was called for BOTH PRs + assert mock_label.call_count == 2 + mock_label.assert_any_call(pull_request=mock_pr1) + mock_label.assert_any_call(pull_request=mock_pr2) + + # Verify check triggering only called for PR with merge_state="behind" + mock_retrigger.assert_called_once_with(pull_request=mock_pr1) + + @pytest.mark.asyncio + async def test_retrigger_checks_for_out_of_date_prs_on_push_low_rate_limit( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock + ) -> None: + """Test retrigger_checks_for_out_of_date_prs_on_push with low rate limit.""" + mock_github_webhook.retrigger_checks_on_base_push = True + mock_github_webhook.github_api = Mock() + + mock_rate_limit = Mock() + mock_rate_limit.core.remaining = 50 # Below 100 threshold + mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit + + with patch("asyncio.to_thread", 
side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): + await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") + + pull_request_handler.logger.warning.assert_called_with("[TEST] Rate limit too low (50), skipping") diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 0fac3555..04d533e6 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -103,10 +103,19 @@ async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: - await push_handler.process_push_webhook_data() - - mock_upload.assert_not_called() - mock_build.assert_not_called() + with patch("webhook_server.libs.handlers.push_handler.PullRequestHandler") as mock_pr_handler_class: + mock_pr_handler = Mock() + mock_pr_handler.retrigger_checks_for_out_of_date_prs_on_push = AsyncMock() + mock_pr_handler_class.return_value = mock_pr_handler + + await push_handler.process_push_webhook_data() + + mock_upload.assert_not_called() + mock_build.assert_not_called() + mock_pr_handler_class.assert_called_once_with( + github_webhook=push_handler.github_webhook, owners_file_handler=None + ) + mock_pr_handler.retrigger_checks_for_out_of_date_prs_on_push.assert_called_once_with("main") @pytest.mark.asyncio async def test_process_push_webhook_data_tag_with_slash(self, push_handler: PushHandler) -> None: From d215e4b4f113565eea6cc8eab7e1850b4a4b188a Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 14:00:18 +0200 Subject: [PATCH 02/34] Enable re-running checks after merge to base branch --- uv.lock | 110 +++++------ webhook_server/config/schema.yaml | 4 +- webhook_server/libs/github_api.py | 2 +- .../libs/handlers/pull_request_handler.py | 81 +++----- 
webhook_server/libs/handlers/push_handler.py | 27 +-- .../tests/test_pull_request_handler.py | 183 +++++++++--------- webhook_server/tests/test_push_handler.py | 16 +- 7 files changed, 183 insertions(+), 240 deletions(-) diff --git a/uv.lock b/uv.lock index 554cf1ff..f669663f 100644 --- a/uv.lock +++ b/uv.lock @@ -231,37 +231,37 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/45/2c665ca77ec32ad67e25c77daf1cee28ee4558f3bc571cdbaf88a00b9f23/coverage-7.13.0.tar.gz", hash = "sha256:a394aa27f2d7ff9bc04cf703817773a59ad6dfbd577032e690f961d2460ee936", size = 820905, upload-time = "2025-12-08T13:14:38.055Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/cc/bce226595eb3bf7d13ccffe154c3c487a22222d87ff018525ab4dd2e9542/coverage-7.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:28ee1c96109974af104028a8ef57cec21447d42d0e937c0275329272e370ebcf", size = 218297, upload-time = "2025-12-08T13:13:10.977Z" }, - { url = "https://files.pythonhosted.org/packages/3b/9f/73c4d34600aae03447dff3d7ad1d0ac649856bfb87d1ca7d681cfc913f9e/coverage-7.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d1e97353dcc5587b85986cda4ff3ec98081d7e84dd95e8b2a6d59820f0545f8a", size = 218673, upload-time = "2025-12-08T13:13:12.562Z" }, - { url = "https://files.pythonhosted.org/packages/63/ab/8fa097db361a1e8586535ae5073559e6229596b3489ec3ef2f5b38df8cb2/coverage-7.13.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:99acd4dfdfeb58e1937629eb1ab6ab0899b131f183ee5f23e0b5da5cba2fec74", size = 249652, upload-time = "2025-12-08T13:13:13.909Z" }, - { url = "https://files.pythonhosted.org/packages/90/3a/9bfd4de2ff191feb37ef9465855ca56a6f2f30a3bca172e474130731ac3d/coverage-7.13.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:ff45e0cd8451e293b63ced93161e189780baf444119391b3e7d25315060368a6", size = 252251, upload-time = "2025-12-08T13:13:15.553Z" }, - { url = "https://files.pythonhosted.org/packages/df/61/b5d8105f016e1b5874af0d7c67542da780ccd4a5f2244a433d3e20ceb1ad/coverage-7.13.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f4f72a85316d8e13234cafe0a9f81b40418ad7a082792fa4165bd7d45d96066b", size = 253492, upload-time = "2025-12-08T13:13:16.849Z" }, - { url = "https://files.pythonhosted.org/packages/f3/b8/0fad449981803cc47a4694768b99823fb23632150743f9c83af329bb6090/coverage-7.13.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:11c21557d0e0a5a38632cbbaca5f008723b26a89d70db6315523df6df77d6232", size = 249850, upload-time = "2025-12-08T13:13:18.142Z" }, - { url = "https://files.pythonhosted.org/packages/9a/e9/8d68337c3125014d918cf4327d5257553a710a2995a6a6de2ac77e5aa429/coverage-7.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:76541dc8d53715fb4f7a3a06b34b0dc6846e3c69bc6204c55653a85dd6220971", size = 251633, upload-time = "2025-12-08T13:13:19.56Z" }, - { url = "https://files.pythonhosted.org/packages/55/14/d4112ab26b3a1bc4b3c1295d8452dcf399ed25be4cf649002fb3e64b2d93/coverage-7.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6e9e451dee940a86789134b6b0ffbe31c454ade3b849bb8a9d2cca2541a8e91d", size = 249586, upload-time = "2025-12-08T13:13:20.883Z" }, - { url = "https://files.pythonhosted.org/packages/2c/a9/22b0000186db663b0d82f86c2f1028099ae9ac202491685051e2a11a5218/coverage-7.13.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:5c67dace46f361125e6b9cace8fe0b729ed8479f47e70c89b838d319375c8137", size = 249412, upload-time = "2025-12-08T13:13:22.22Z" }, - { url = "https://files.pythonhosted.org/packages/a1/2e/42d8e0d9e7527fba439acdc6ed24a2b97613b1dc85849b1dd935c2cffef0/coverage-7.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:f59883c643cb19630500f57016f76cfdcd6845ca8c5b5ea1f6e17f74c8e5f511", size = 251191, upload-time = "2025-12-08T13:13:23.899Z" }, - { url = "https://files.pythonhosted.org/packages/a4/af/8c7af92b1377fd8860536aadd58745119252aaaa71a5213e5a8e8007a9f5/coverage-7.13.0-cp313-cp313-win32.whl", hash = "sha256:58632b187be6f0be500f553be41e277712baa278147ecb7559983c6d9faf7ae1", size = 220829, upload-time = "2025-12-08T13:13:25.182Z" }, - { url = "https://files.pythonhosted.org/packages/58/f9/725e8bf16f343d33cbe076c75dc8370262e194ff10072c0608b8e5cf33a3/coverage-7.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:73419b89f812f498aca53f757dd834919b48ce4799f9d5cad33ca0ae442bdb1a", size = 221640, upload-time = "2025-12-08T13:13:26.836Z" }, - { url = "https://files.pythonhosted.org/packages/8a/ff/e98311000aa6933cc79274e2b6b94a2fe0fe3434fca778eba82003675496/coverage-7.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:eb76670874fdd6091eedcc856128ee48c41a9bbbb9c3f1c7c3cf169290e3ffd6", size = 220269, upload-time = "2025-12-08T13:13:28.116Z" }, - { url = "https://files.pythonhosted.org/packages/cf/cf/bbaa2e1275b300343ea865f7d424cc0a2e2a1df6925a070b2b2d5d765330/coverage-7.13.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6e63ccc6e0ad8986386461c3c4b737540f20426e7ec932f42e030320896c311a", size = 218990, upload-time = "2025-12-08T13:13:29.463Z" }, - { url = "https://files.pythonhosted.org/packages/21/1d/82f0b3323b3d149d7672e7744c116e9c170f4957e0c42572f0366dbb4477/coverage-7.13.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:494f5459ffa1bd45e18558cd98710c36c0b8fbfa82a5eabcbe671d80ecffbfe8", size = 219340, upload-time = "2025-12-08T13:13:31.524Z" }, - { url = "https://files.pythonhosted.org/packages/fb/e3/fe3fd4702a3832a255f4d43013eacb0ef5fc155a5960ea9269d8696db28b/coverage-7.13.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:06cac81bf10f74034e055e903f5f946e3e26fc51c09fc9f584e4a1605d977053", size = 260638, upload-time = 
"2025-12-08T13:13:32.965Z" }, - { url = "https://files.pythonhosted.org/packages/ad/01/63186cb000307f2b4da463f72af9b85d380236965574c78e7e27680a2593/coverage-7.13.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f2ffc92b46ed6e6760f1d47a71e56b5664781bc68986dbd1836b2b70c0ce2071", size = 262705, upload-time = "2025-12-08T13:13:34.378Z" }, - { url = "https://files.pythonhosted.org/packages/7c/a1/c0dacef0cc865f2455d59eed3548573ce47ed603205ffd0735d1d78b5906/coverage-7.13.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0602f701057c6823e5db1b74530ce85f17c3c5be5c85fc042ac939cbd909426e", size = 265125, upload-time = "2025-12-08T13:13:35.73Z" }, - { url = "https://files.pythonhosted.org/packages/ef/92/82b99223628b61300bd382c205795533bed021505eab6dd86e11fb5d7925/coverage-7.13.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:25dc33618d45456ccb1d37bce44bc78cf269909aa14c4db2e03d63146a8a1493", size = 259844, upload-time = "2025-12-08T13:13:37.69Z" }, - { url = "https://files.pythonhosted.org/packages/cf/2c/89b0291ae4e6cd59ef042708e1c438e2290f8c31959a20055d8768349ee2/coverage-7.13.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:71936a8b3b977ddd0b694c28c6a34f4fff2e9dd201969a4ff5d5fc7742d614b0", size = 262700, upload-time = "2025-12-08T13:13:39.525Z" }, - { url = "https://files.pythonhosted.org/packages/bf/f9/a5f992efae1996245e796bae34ceb942b05db275e4b34222a9a40b9fbd3b/coverage-7.13.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:936bc20503ce24770c71938d1369461f0c5320830800933bc3956e2a4ded930e", size = 260321, upload-time = "2025-12-08T13:13:41.172Z" }, - { url = "https://files.pythonhosted.org/packages/4c/89/a29f5d98c64fedbe32e2ac3c227fbf78edc01cc7572eee17d61024d89889/coverage-7.13.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:af0a583efaacc52ae2521f8d7910aff65cdb093091d76291ac5820d5e947fc1c", size = 259222, upload-time = 
"2025-12-08T13:13:43.282Z" }, - { url = "https://files.pythonhosted.org/packages/b3/c3/940fe447aae302a6701ee51e53af7e08b86ff6eed7631e5740c157ee22b9/coverage-7.13.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f1c23e24a7000da892a312fb17e33c5f94f8b001de44b7cf8ba2e36fbd15859e", size = 261411, upload-time = "2025-12-08T13:13:44.72Z" }, - { url = "https://files.pythonhosted.org/packages/eb/31/12a4aec689cb942a89129587860ed4d0fd522d5fda81237147fde554b8ae/coverage-7.13.0-cp313-cp313t-win32.whl", hash = "sha256:5f8a0297355e652001015e93be345ee54393e45dc3050af4a0475c5a2b767d46", size = 221505, upload-time = "2025-12-08T13:13:46.332Z" }, - { url = "https://files.pythonhosted.org/packages/65/8c/3b5fe3259d863572d2b0827642c50c3855d26b3aefe80bdc9eba1f0af3b0/coverage-7.13.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6abb3a4c52f05e08460bd9acf04fec027f8718ecaa0d09c40ffbc3fbd70ecc39", size = 222569, upload-time = "2025-12-08T13:13:47.79Z" }, - { url = "https://files.pythonhosted.org/packages/b0/39/f71fa8316a96ac72fc3908839df651e8eccee650001a17f2c78cdb355624/coverage-7.13.0-cp313-cp313t-win_arm64.whl", hash = "sha256:3ad968d1e3aa6ce5be295ab5fe3ae1bf5bb4769d0f98a80a0252d543a2ef2e9e", size = 220841, upload-time = "2025-12-08T13:13:49.243Z" }, - { url = "https://files.pythonhosted.org/packages/8d/4c/1968f32fb9a2604645827e11ff84a31e59d532e01995f904723b4f5328b3/coverage-7.13.0-py3-none-any.whl", hash = "sha256:850d2998f380b1e266459ca5b47bc9e7daf9af1d070f66317972f382d46f1904", size = 210068, upload-time = "2025-12-08T13:14:36.236Z" }, +version = "7.13.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, + { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, + { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, + { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, + { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = 
"2025-12-28T15:41:25.437Z" }, + { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, + { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, + { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, + { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, + { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, + { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, + { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, + { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, + { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, + { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, + { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, + { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, ] [[package]] @@ -334,7 +334,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.127.0" +version = "0.128.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -342,9 +342,9 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/02/2cbbecf6551e0c1a06f9b9765eb8f7ae126362fbba43babbb11b0e3b7db3/fastapi-0.127.0.tar.gz", hash = "sha256:5a9246e03dcd1fdb19f1396db30894867c1d630f5107dc167dcbc5ed1ea7d259", size = 369269, upload-time = "2025-12-21T16:47:16.393Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/fa/6a27e2ef789eb03060abb43b952a7f0bd39e6feaa3805362b48785bcedc5/fastapi-0.127.0-py3-none-any.whl", hash = 
"sha256:725aa2bb904e2eff8031557cf4b9b77459bfedd63cae8427634744fd199f6a49", size = 112055, upload-time = "2025-12-21T16:47:14.757Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, ] [[package]] @@ -772,22 +772,24 @@ wheels = [ [[package]] name = "psutil" -version = "7.1.3" +version = "7.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/7c/31d1c3ceb1260301f87565f50689dc6da3db427ece1e1e012af22abca54e/psutil-7.2.0.tar.gz", hash = "sha256:2e4f8e1552f77d14dc96fb0f6240c5b34a37081c0889f0853b3b29a496e5ef64", size = 489863, upload-time = "2025-12-23T20:26:24.616Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" }, - { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" }, - { url = 
"https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" }, - { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" }, - { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" }, - { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" }, - { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" }, - { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" }, - { url = 
"https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" }, - { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" }, - { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" }, - { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" }, + { url = "https://files.pythonhosted.org/packages/a8/8e/b35aae6ed19bc4e2286cac4832e4d522fcf00571867b0a85a3f77ef96a80/psutil-7.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c31e927555539132a00380c971816ea43d089bf4bd5f3e918ed8c16776d68474", size = 129593, upload-time = "2025-12-23T20:26:28.019Z" }, + { url = "https://files.pythonhosted.org/packages/61/a2/773d17d74e122bbffe08b97f73f2d4a01ef53fb03b98e61b8e4f64a9c6b9/psutil-7.2.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:db8e44e766cef86dea47d9a1fa535d38dc76449e5878a92f33683b7dba5bfcb2", size = 130104, upload-time = "2025-12-23T20:26:30.27Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/e3/d3a9b3f4bd231abbd70a988beb2e3edd15306051bccbfc4472bd34a56e01/psutil-7.2.0-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85ef849ac92169dedc59a7ac2fb565f47b3468fbe1524bf748746bc21afb94c7", size = 180579, upload-time = "2025-12-23T20:26:32.628Z" }, + { url = "https://files.pythonhosted.org/packages/66/f8/6c73044424aabe1b7824d4d4504029d406648286d8fe7ba8c4682e0d3042/psutil-7.2.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:26782bdbae2f5c14ce9ebe8ad2411dc2ca870495e0cd90f8910ede7fa5e27117", size = 183171, upload-time = "2025-12-23T20:26:34.972Z" }, + { url = "https://files.pythonhosted.org/packages/48/7d/76d7a863340885d41826562225a566683e653ee6c9ba03c9f3856afa7d80/psutil-7.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b7665f612d3b38a583391b95969667a53aaf6c5706dc27a602c9a4874fbf09e4", size = 139055, upload-time = "2025-12-23T20:26:36.848Z" }, + { url = "https://files.pythonhosted.org/packages/a0/48/200054ada0ae4872c8a71db54f3eb6a9af4101680ee6830d373b7fda526b/psutil-7.2.0-cp313-cp313t-win_arm64.whl", hash = "sha256:4413373c174520ae28a24a8974ad8ce6b21f060d27dde94e25f8c73a7effe57a", size = 134737, upload-time = "2025-12-23T20:26:38.784Z" }, + { url = "https://files.pythonhosted.org/packages/40/c5/a49160bf3e165b7b93a60579a353cf5d939d7f878fe5fd369110f1d18043/psutil-7.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:977a2fcd132d15cb05b32b2d85b98d087cad039b0ce435731670ba74da9e6133", size = 128116, upload-time = "2025-12-23T20:26:53.516Z" }, + { url = "https://files.pythonhosted.org/packages/10/a1/c75feb480f60cd768fb6ed00ac362a16a33e5076ec8475a22d8162fb2659/psutil-7.2.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:24151011c21fadd94214d7139d7c6c54569290d7e553989bdf0eab73b13beb8c", size = 128925, upload-time = "2025-12-23T20:26:55.573Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/ff/e93136587c00a543f4bc768b157fac2c47cd77b180d4f4e5c6efb6ea53a2/psutil-7.2.0-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:91f211ba9279e7c61d9d8f84b713cfc38fa161cb0597d5cb3f1ca742f6848254", size = 154666, upload-time = "2025-12-23T20:26:57.312Z" }, + { url = "https://files.pythonhosted.org/packages/b8/dd/4c2de9c3827c892599d277a69d2224136800870a8a88a80981de905de28d/psutil-7.2.0-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f37415188b7ea98faf90fed51131181646c59098b077550246e2e092e127418b", size = 156109, upload-time = "2025-12-23T20:26:58.851Z" }, + { url = "https://files.pythonhosted.org/packages/81/3f/090943c682d3629968dd0b04826ddcbc760ee1379021dbe316e2ddfcd01b/psutil-7.2.0-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0d12c7ce6ed1128cd81fd54606afa054ac7dbb9773469ebb58cf2f171c49f2ac", size = 148081, upload-time = "2025-12-23T20:27:01.318Z" }, + { url = "https://files.pythonhosted.org/packages/c4/88/c39648ebb8ec182d0364af53cdefe6eddb5f3872ba718b5855a8ff65d6d4/psutil-7.2.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ca0faef7976530940dcd39bc5382d0d0d5eb023b186a4901ca341bd8d8684151", size = 147376, upload-time = "2025-12-23T20:27:03.347Z" }, + { url = "https://files.pythonhosted.org/packages/01/a2/5b39e08bd9b27476bc7cce7e21c71a481ad60b81ffac49baf02687a50d7f/psutil-7.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:abdb74137ca232d20250e9ad471f58d500e7743bc8253ba0bfbf26e570c0e437", size = 136910, upload-time = "2025-12-23T20:27:05.289Z" }, + { url = "https://files.pythonhosted.org/packages/59/54/53839db1258c1eaeb4ded57ff202144ebc75b23facc05a74fd98d338b0c6/psutil-7.2.0-cp37-abi3-win_arm64.whl", hash = "sha256:284e71038b3139e7ab3834b63b3eb5aa5565fcd61a681ec746ef9a0a8c457fd2", size = 133807, upload-time = "2025-12-23T20:27:06.825Z" }, ] [[package]] @@ -1235,15 +1237,15 @@ wheels = [ [[package]] name = "sse-starlette" 
-version = "3.0.4" +version = "3.1.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/17/8b/54651ad49bce99a50fd61a7f19c2b6a79fbb072e693101fbb1194c362054/sse_starlette-3.0.4.tar.gz", hash = "sha256:5e34286862e96ead0eb70f5ddd0bd21ab1f6473a8f44419dd267f431611383dd", size = 22576, upload-time = "2025-12-14T16:22:52.493Z" } +sdist = { url = "https://files.pythonhosted.org/packages/62/08/8f554b0e5bad3e4e880521a1686d96c05198471eed860b0eb89b57ea3636/sse_starlette-3.1.1.tar.gz", hash = "sha256:bffa531420c1793ab224f63648c059bcadc412bf9fdb1301ac8de1cf9a67b7fb", size = 24306, upload-time = "2025-12-26T15:22:53.836Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/22/8ab1066358601163e1ac732837adba3672f703818f693e179b24e0d3b65c/sse_starlette-3.0.4-py3-none-any.whl", hash = "sha256:32c80ef0d04506ced4b0b6ab8fe300925edc37d26f666afb1874c754895f5dc3", size = 11764, upload-time = "2025-12-14T16:22:51.453Z" }, + { url = "https://files.pythonhosted.org/packages/e3/31/4c281581a0f8de137b710a07f65518b34bcf333b201cfa06cfda9af05f8a/sse_starlette-3.1.1-py3-none-any.whl", hash = "sha256:bb38f71ae74cfd86b529907a9fda5632195dfa6ae120f214ea4c890c7ee9d436", size = 12442, upload-time = "2025-12-26T15:22:52.911Z" }, ] [[package]] @@ -1333,7 +1335,7 @@ wheels = [ [[package]] name = "typer" -version = "0.20.1" +version = "0.21.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -1341,9 +1343,9 @@ dependencies = [ { name = "shellingham" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6d/c1/933d30fd7a123ed981e2a1eedafceab63cb379db0402e438a13bc51bbb15/typer-0.20.1.tar.gz", hash = "sha256:68585eb1b01203689c4199bc440d6be616f0851e9f0eb41e4a778845c5a0fd5b", size = 105968, upload-time = "2025-12-19T16:48:56.302Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/85/30/ff9ede605e3bd086b4dd842499814e128500621f7951ca1e5ce84bbf61b1/typer-0.21.0.tar.gz", hash = "sha256:c87c0d2b6eee3b49c5c64649ec92425492c14488096dfbc8a0c2799b2f6f9c53", size = 106781, upload-time = "2025-12-25T09:54:53.651Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/52/1f2df7e7d1be3d65ddc2936d820d4a3d9777a54f4204f5ca46b8513eff77/typer-0.20.1-py3-none-any.whl", hash = "sha256:4b3bde918a67c8e03d861aa02deca90a95bbac572e71b1b9be56ff49affdb5a8", size = 47381, upload-time = "2025-12-19T16:48:53.679Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e4/5ebc1899d31d2b1601b32d21cfb4bba022ae6fce323d365f0448031b1660/typer-0.21.0-py3-none-any.whl", hash = "sha256:c79c01ca6b30af9fd48284058a7056ba0d3bf5cf10d0ff3d0c5b11b68c258ac6", size = 47109, upload-time = "2025-12-25T09:54:51.918Z" }, ] [[package]] diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index fb610ca6..06110ea2 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -86,7 +86,7 @@ properties: retrigger-checks-on-base-push: type: boolean description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch (global default) - default: true + default: false pr-size-thresholds: type: object @@ -306,7 +306,7 @@ properties: retrigger-checks-on-base-push: type: boolean description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch - default: true + default: false pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 6d309371..9560ac88 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -710,7 +710,7 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: self.mask_sensitive = 
self.config.get_value("mask-sensitive-data", return_on_none=True) self.retrigger_checks_on_base_push: bool = self.config.get_value( - value="retrigger-checks-on-base-push", return_on_none=True, extra_dict=repository_config + value="retrigger-checks-on-base-push", return_on_none=False, extra_dict=repository_config ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 88bac55d..58990d22 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -164,7 +164,7 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> pull_request=pull_request, ) - await self.label_all_opened_pull_requests_merge_state_after_merged() + await self.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() # Log completion - task_status reflects the result of our action self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " @@ -407,12 +407,20 @@ def _prepare_retest_welcome_comment(self) -> str: return " * No retest actions are configured for this repository" if not retest_msg else retest_msg - async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None: - """ - Labels pull requests based on their mergeable state. + async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged(self) -> None: + """Labels pull requests based on their mergeable state and optionally re-triggers checks. + + Primary action: + Adds/removes labels based on merge state: + - 'needs rebase' label when merge_state is 'behind' + - 'has conflicts' label when merge_state is 'dirty' + + Secondary action (if retrigger-checks-on-base-push is enabled): + Re-triggers CI check suites for out-of-date PRs (merge_state in 'behind' or 'blocked'). 
+ This ensures CI checks run against the updated base branch. - If the mergeable state is 'behind', the 'needs rebase' label is added. - If the mergeable state is 'dirty', the 'has conflicts' label is added. + Note: + Waits 30 seconds before processing to allow GitHub's merge state calculation to complete. """ time_sleep = 30 self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") @@ -421,49 +429,12 @@ async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None: pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open"))) for pull_request in pulls: self.logger.info(f"{self.log_prefix} check label pull request after merge") - await self.label_pull_request_by_merge_state(pull_request=pull_request) - - async def retrigger_checks_for_out_of_date_prs_on_push(self, base_branch: str) -> None: - """Re-trigger checks for PRs that are out-of-date after a push to base branch.""" - if not self.github_webhook.retrigger_checks_on_base_push: - self.logger.debug(f"{self.log_prefix} Retrigger checks on base push is disabled") - return - - # Check rate limit before proceeding - rate_limit = await asyncio.to_thread(self.github_webhook.github_api.get_rate_limit) - remaining = rate_limit.core.remaining - if remaining < 100: - self.logger.warning(f"{self.log_prefix} Rate limit too low ({remaining}), skipping") - return - - # Get PRs targeting this branch - pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open", base=base_branch))) - - if not pulls: - self.logger.info(f"{self.log_prefix} No open PRs targeting {base_branch}") - return - - self.logger.info(f"{self.log_prefix} Found {len(pulls)} open PR(s) targeting {base_branch}") + merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) - # Process each PR: label and retrigger if behind - for pr in pulls: - try: - # Label PR based on merge state - await 
self.label_pull_request_by_merge_state(pull_request=pr) - - # Check if PR is behind and retrigger checks - merge_state = await asyncio.to_thread(lambda p=pr: p.mergeable_state) - if merge_state == "behind": - await self._retrigger_check_suites_for_pr(pull_request=pr) - - except Exception: - pr_number = await asyncio.to_thread(lambda p=pr: p.number) - self.logger.exception(f"{self.log_prefix} Failed to process PR #{pr_number} for retrigger") - - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'completed')} " - f"Completed retrigger checks for {len(pulls)} PR(s) targeting {base_branch}", - ) + # If retrigger is enabled and PR is behind, retrigger checks + if self.github_webhook.retrigger_checks_on_base_push: + if merge_state in ("behind", "blocked"): + await self._retrigger_check_suites_for_pr(pull_request=pull_request) async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] @@ -906,13 +877,16 @@ async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) if isinstance(result, Exception): self.logger.error(f"{self.log_prefix} Async task failed: {result}") - async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> None: - """Label PR based on merge state (needs-rebase, has-conflicts).""" - merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state) + async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> str: + """Label PR based on merge state (needs-rebase, has-conflicts). + Returns: + The mergeable_state value for further processing. 
+ """ + merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state) self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}") if merge_state == "unknown": - return + return merge_state if merge_state == "behind": await self.labels_handler._add_label(pull_request=pull_request, label=NEEDS_REBASE_LABEL_STR) @@ -924,8 +898,9 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> else: await self.labels_handler._remove_label(pull_request=pull_request, label=HAS_CONFLICTS_LABEL_STR) + return merge_state + async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: - """Re-trigger check suites for a single PR.""" try: pr_number = await asyncio.to_thread(lambda: pull_request.number) head_sha = await asyncio.to_thread(lambda: pull_request.head.sha) diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index eb72e0e0..8ff09873 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -5,7 +5,6 @@ from github.Repository import Repository from webhook_server.libs.handlers.check_run_handler import CheckRunHandler -from webhook_server.libs.handlers.pull_request_handler import PullRequestHandler from webhook_server.libs.handlers.runner_handler import RunnerHandler from webhook_server.utils.helpers import format_task_fields, run_command from webhook_server.utils.notification_utils import send_slack_message @@ -70,28 +69,10 @@ async def process_push_webhook_data(self) -> None: ) self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}") else: - # Branch push - check for out-of-date PRs - branch_match = re.search(r"^refs/heads/(.+)$", self.hook_data["ref"]) - if branch_match: - branch_name = branch_match.group(1) - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " - f"Branch push 
detected: {branch_name}", - ) - # Create PullRequestHandler to reuse existing logic - pr_handler = PullRequestHandler(github_webhook=self.github_webhook, owners_file_handler=None) - await pr_handler.retrigger_checks_for_out_of_date_prs_on_push(branch_name) - - # Log completion - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " - f"Branch push processing completed for {branch_name}", - ) - else: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " - f"Non-branch/non-tag push detected, skipping processing", - ) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " + f"Non-tag push detected, skipping processing", + ) async def upload_to_pypi(self, tag_name: str) -> None: async def _issue_on_error(_error: str) -> None: diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index c9bc8866..a419cf02 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -280,7 +280,7 @@ async def test_process_pull_request_webhook_data_closed_action_merged( pull_request_handler.runner_handler, "run_build_container", new_callable=AsyncMock ) as mock_build: with patch.object( - pull_request_handler, "label_all_opened_pull_requests_merge_state_after_merged" + pull_request_handler, "label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged" ) as mock_label_all: await pull_request_handler.process_pull_request_webhook_data(mock_pull_request) mock_close_issue.assert_called_once_with( @@ -401,7 +401,7 @@ def test_prepare_retest_welcome_comment(self, pull_request_handler: PullRequestH assert "pre-commit" in result @pytest.mark.asyncio - async def 
test_label_all_opened_pull_requests_merge_state_after_merged( + async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged( self, pull_request_handler: PullRequestHandler ) -> None: """Test labeling all opened pull requests merge state after merged.""" @@ -409,13 +409,98 @@ async def test_label_all_opened_pull_requests_merge_state_after_merged( mock_pr2 = Mock() mock_pr1.number = 1 mock_pr2.number = 2 + mock_pr1.mergeable_state = "clean" + mock_pr2.mergeable_state = "clean" + + pull_request_handler.github_webhook.retrigger_checks_on_base_push = False with patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]): with patch.object(pull_request_handler, "label_pull_request_by_merge_state", new=AsyncMock()) as mock_label: with patch("asyncio.sleep", new=AsyncMock()): - await pull_request_handler.label_all_opened_pull_requests_merge_state_after_merged() + await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() assert mock_label.await_count == 2 + @pytest.mark.asyncio + async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged_with_retrigger( + self, pull_request_handler: PullRequestHandler + ) -> None: + """Test labeling all opened pull requests with retrigger enabled.""" + mock_pr1 = Mock() + mock_pr2 = Mock() + mock_pr1.number = 1 + mock_pr2.number = 2 + mock_pr1.mergeable_state = "behind" + mock_pr2.mergeable_state = "clean" + + pull_request_handler.github_webhook.retrigger_checks_on_base_push = True + + async def mock_label_side_effect(pull_request=None): + return pull_request.mergeable_state + + with ( + patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]), + patch.object( + pull_request_handler, + "label_pull_request_by_merge_state", + new=AsyncMock(side_effect=mock_label_side_effect), + ) as mock_label, + patch.object(pull_request_handler, "_retrigger_check_suites_for_pr", 
new=AsyncMock()) as mock_retrigger, + patch("asyncio.sleep", new=AsyncMock()), + patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), + ): + await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + # Verify labeling called for both PRs + assert mock_label.await_count == 2 + # Verify retrigger only called for PR with merge_state="behind" + mock_retrigger.assert_called_once_with(pull_request=mock_pr1) + + @pytest.mark.asyncio + @pytest.mark.parametrize( + "merge_state,should_retrigger", + [ + ("behind", True), # Out-of-date - should retrigger + ("blocked", True), # Blocked by reviews/checks - currently triggers (per implementation) + ("clean", False), # Up-to-date - no retrigger needed + ("dirty", False), # Has conflicts - retrigger won't help + ("unstable", False), # Failing checks - no retrigger + ("unknown", False), # Unknown state - skip processing + ], + ) + async def test_label_all_opened_prs_retrigger_for_different_merge_states( + self, + pull_request_handler: PullRequestHandler, + mock_github_webhook: Mock, + merge_state: str, + should_retrigger: bool, + ) -> None: + """Test retrigger behavior for different merge states.""" + mock_github_webhook.retrigger_checks_on_base_push = True + + mock_pr = Mock() + mock_pr.number = 1 + mock_pr.mergeable_state = merge_state + + with ( + patch("asyncio.sleep", new_callable=AsyncMock), + patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), + patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr]), + patch.object( + pull_request_handler, + "label_pull_request_by_merge_state", + new_callable=AsyncMock, + return_value=merge_state, + ), + patch.object( + pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock + ) as mock_retrigger, + ): + await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + + if 
should_retrigger: + mock_retrigger.assert_called_once_with(pull_request=mock_pr) + else: + mock_retrigger.assert_not_called() + @pytest.mark.asyncio async def test_delete_remote_tag_for_merged_or_closed_pr_with_tag( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock @@ -1883,95 +1968,3 @@ async def mock_to_thread_side_effect(func, *args, **kwargs): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) pull_request_handler.logger.exception.assert_called_with("[TEST] Failed to retrigger checks for PR") - - @pytest.mark.asyncio - async def test_retrigger_checks_for_out_of_date_prs_on_push_disabled( - self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock - ) -> None: - """Test retrigger_checks_for_out_of_date_prs_on_push when feature is disabled.""" - mock_github_webhook.retrigger_checks_on_base_push = False - - await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") - - pull_request_handler.logger.debug.assert_called_with("[TEST] Retrigger checks on base push is disabled") - - @pytest.mark.asyncio - async def test_retrigger_checks_for_out_of_date_prs_on_push_no_prs( - self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock - ) -> None: - """Test retrigger_checks_for_out_of_date_prs_on_push with no open PRs.""" - mock_github_webhook.retrigger_checks_on_base_push = True - mock_github_webhook.github_api = Mock() - - mock_rate_limit = Mock() - mock_rate_limit.core.remaining = 5000 - mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit - - with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): - with patch.object(pull_request_handler.repository, "get_pulls", return_value=[]): - await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") - - pull_request_handler.logger.info.assert_called_with("[TEST] No open PRs targeting main") - - @pytest.mark.asyncio - async def 
test_retrigger_checks_for_out_of_date_prs_on_push_success( - self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock - ) -> None: - """Test retrigger_checks_for_out_of_date_prs_on_push successfully processes PRs. - - Verifies: - 1. Labels each PR based on merge state - 2. Re-triggers checks only for PRs with merge_state="behind" - 3. Processes PRs sequentially (simple loop) - """ - mock_github_webhook.retrigger_checks_on_base_push = True - mock_github_webhook.github_api = Mock() - - mock_rate_limit = Mock() - mock_rate_limit.core.remaining = 5000 - mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit - - # Create PRs with different merge states - mock_pr1 = Mock() - mock_pr1.number = 1 - mock_pr1.mergeable_state = "behind" # Should trigger checks - mock_pr2 = Mock() - mock_pr2.number = 2 - mock_pr2.mergeable_state = "clean" # Should NOT trigger checks - - with ( - patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), - patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]), - patch.object( - pull_request_handler, "label_pull_request_by_merge_state", new_callable=AsyncMock - ) as mock_label, - patch.object( - pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock - ) as mock_retrigger, - ): - await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") - - # Verify labeling was called for BOTH PRs - assert mock_label.call_count == 2 - mock_label.assert_any_call(pull_request=mock_pr1) - mock_label.assert_any_call(pull_request=mock_pr2) - - # Verify check triggering only called for PR with merge_state="behind" - mock_retrigger.assert_called_once_with(pull_request=mock_pr1) - - @pytest.mark.asyncio - async def test_retrigger_checks_for_out_of_date_prs_on_push_low_rate_limit( - self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock - ) -> None: - """Test 
retrigger_checks_for_out_of_date_prs_on_push with low rate limit.""" - mock_github_webhook.retrigger_checks_on_base_push = True - mock_github_webhook.github_api = Mock() - - mock_rate_limit = Mock() - mock_rate_limit.core.remaining = 50 # Below 100 threshold - mock_github_webhook.github_api.get_rate_limit.return_value = mock_rate_limit - - with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): - await pull_request_handler.retrigger_checks_for_out_of_date_prs_on_push("main") - - pull_request_handler.logger.warning.assert_called_with("[TEST] Rate limit too low (50), skipping") diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 04d533e6..567e9d27 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -103,19 +103,10 @@ async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: - with patch("webhook_server.libs.handlers.push_handler.PullRequestHandler") as mock_pr_handler_class: - mock_pr_handler = Mock() - mock_pr_handler.retrigger_checks_for_out_of_date_prs_on_push = AsyncMock() - mock_pr_handler_class.return_value = mock_pr_handler - - await push_handler.process_push_webhook_data() + await push_handler.process_push_webhook_data() - mock_upload.assert_not_called() - mock_build.assert_not_called() - mock_pr_handler_class.assert_called_once_with( - github_webhook=push_handler.github_webhook, owners_file_handler=None - ) - mock_pr_handler.retrigger_checks_for_out_of_date_prs_on_push.assert_called_once_with("main") + mock_upload.assert_not_called() + mock_build.assert_not_called() @pytest.mark.asyncio async def test_process_push_webhook_data_tag_with_slash(self, push_handler: PushHandler) -> None: @@ 
-419,3 +410,4 @@ async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandl assert "published to PYPI" in call_args[1]["message"] assert call_args[1]["logger"] == push_handler.logger assert call_args[1]["log_prefix"] == push_handler.log_prefix + From 40dc9ce563c5a7ec423b5410bf1fa38b94070049 Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 14:19:52 +0200 Subject: [PATCH 03/34] Enable re-running checks after merge to base branch --- webhook_server/libs/handlers/pull_request_handler.py | 7 +++++-- webhook_server/tests/test_pull_request_handler.py | 7 +++++-- webhook_server/tests/test_push_handler.py | 1 - 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 58990d22..10537822 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -912,7 +912,7 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non # Get check suites commit = await asyncio.to_thread(self.repository.get_commit, head_sha) - check_suites = await asyncio.to_thread(lambda c=commit: list(c.get_check_suites())) + check_suites = await asyncio.to_thread(lambda: list(commit.get_check_suites())) if not check_suites: self.logger.debug(f"{self.log_prefix} No check suites found for PR #{pr_number}") @@ -923,9 +923,12 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non for suite in check_suites: try: - suite_id = await asyncio.to_thread(lambda s=suite: s.id) + # Extract suite.id outside the loop to avoid B023 (lambda in loop) + # suite.id is a cached property, safe to access directly + suite_id = await asyncio.to_thread(getattr, suite, "id") url = f"/repos/{owner}/{repo}/check-suites/{suite_id}/rerequest" + assert self.github_webhook.github_api is not None await asyncio.to_thread( 
self.github_webhook.github_api.requester.requestJsonAndCheck, "POST", diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index a419cf02..6a62de86 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -280,7 +280,8 @@ async def test_process_pull_request_webhook_data_closed_action_merged( pull_request_handler.runner_handler, "run_build_container", new_callable=AsyncMock ) as mock_build: with patch.object( - pull_request_handler, "label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged" + pull_request_handler, + "label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged", ) as mock_label_all: await pull_request_handler.process_pull_request_webhook_data(mock_pull_request) mock_close_issue.assert_called_once_with( @@ -417,7 +418,9 @@ async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after with patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]): with patch.object(pull_request_handler, "label_pull_request_by_merge_state", new=AsyncMock()) as mock_label: with patch("asyncio.sleep", new=AsyncMock()): - await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + await ( + pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + ) assert mock_label.await_count == 2 @pytest.mark.asyncio diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 567e9d27..0fac3555 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -410,4 +410,3 @@ async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandl assert "published to PYPI" in call_args[1]["message"] assert call_args[1]["logger"] == push_handler.logger assert call_args[1]["log_prefix"] == 
push_handler.log_prefix - From 3a8072e0dd3b034f46cdadd6e18a103ce0d7987a Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 14:51:50 +0200 Subject: [PATCH 04/34] re-use code from process_retest_command to run checks --- .../libs/handlers/issue_comment_handler.py | 28 +------ .../libs/handlers/pull_request_handler.py | 52 ++++-------- .../libs/handlers/runner_handler.py | 34 +++++++- .../tests/test_pull_request_handler.py | 82 ++++++------------- 4 files changed, 78 insertions(+), 118 deletions(-) diff --git a/webhook_server/libs/handlers/issue_comment_handler.py b/webhook_server/libs/handlers/issue_comment_handler.py index e0b77dea..afec27cc 100644 --- a/webhook_server/libs/handlers/issue_comment_handler.py +++ b/webhook_server/libs/handlers/issue_comment_handler.py @@ -2,7 +2,7 @@ import asyncio from asyncio import Task -from collections.abc import Callable, Coroutine +from collections.abc import Coroutine from typing import TYPE_CHECKING, Any from github.PullRequest import PullRequest @@ -16,7 +16,6 @@ from webhook_server.utils.constants import ( AUTOMERGE_LABEL_STR, BUILD_AND_PUSH_CONTAINER_STR, - BUILD_CONTAINER_STR, CHERRY_PICK_LABEL_PREFIX, COMMAND_ADD_ALLOWED_USER_STR, COMMAND_ASSIGN_REVIEWER_STR, @@ -25,12 +24,8 @@ COMMAND_CHERRY_PICK_STR, COMMAND_REPROCESS_STR, COMMAND_RETEST_STR, - CONVENTIONAL_TITLE_STR, HOLD_LABEL_STR, - PRE_COMMIT_STR, - PYTHON_MODULE_INSTALL_STR, REACTIONS, - TOX_STR, USER_LABELS_DICT, VERIFIED_LABEL_STR, WIP_STR, @@ -415,14 +410,6 @@ async def process_retest_command( self.logger.debug(f"{self.log_prefix} Target tests for re-test: {_target_tests}") _not_supported_retests: list[str] = [] _supported_retests: list[str] = [] - _retests_to_func_map: dict[str, Callable] = { - TOX_STR: self.runner_handler.run_tox, - PRE_COMMIT_STR: self.runner_handler.run_pre_commit, - BUILD_CONTAINER_STR: self.runner_handler.run_build_container, - PYTHON_MODULE_INSTALL_STR: self.runner_handler.run_install_python_module, - CONVENTIONAL_TITLE_STR: 
self.runner_handler.run_conventional_title_check, - } - self.logger.debug(f"{self.log_prefix} Retest map is {_retests_to_func_map}") if not _target_tests: msg = "No test defined to retest" @@ -459,17 +446,8 @@ async def process_retest_command( self.logger.debug(error_msg) await asyncio.to_thread(pull_request.create_issue_comment, msg) - if _supported_retests: - tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] - for _test in _supported_retests: - self.logger.debug(f"{self.log_prefix} running retest {_test}") - task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) - tasks.append(task) - - results = await asyncio.gather(*tasks, return_exceptions=True) - for result in results: - if isinstance(result, Exception): - self.logger.error(f"{self.log_prefix} Async task failed: {result}") + # Run all supported retests using the shared runner handler method + await self.runner_handler.run_retests(supported_retests=_supported_retests, pull_request=pull_request) if automerge: await self.labels_handler._add_label(pull_request=pull_request, label=AUTOMERGE_LABEL_STR) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 10537822..900e51ca 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -901,48 +901,28 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> return merge_state async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: - try: - pr_number = await asyncio.to_thread(lambda: pull_request.number) - head_sha = await asyncio.to_thread(lambda: pull_request.head.sha) - - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " - f"Re-triggering checks for out-of-date PR #{pr_number}", - ) + """Re-trigger configured checks for a PR when base branch is 
updated. - # Get check suites - commit = await asyncio.to_thread(self.repository.get_commit, head_sha) - check_suites = await asyncio.to_thread(lambda: list(commit.get_check_suites())) - - if not check_suites: - self.logger.debug(f"{self.log_prefix} No check suites found for PR #{pr_number}") - return + Uses the same runner approach as process_retest_command - only runs checks + that are configured for this repository. + """ + pr_number = await asyncio.to_thread(lambda: pull_request.number) - owner = self.github_webhook.hook_data["repository"]["owner"]["login"] - repo = self.repository.name + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " + f"Re-triggering checks for out-of-date PR #{pr_number}", + ) - for suite in check_suites: - try: - # Extract suite.id outside the loop to avoid B023 (lambda in loop) - # suite.id is a cached property, safe to access directly - suite_id = await asyncio.to_thread(getattr, suite, "id") - url = f"/repos/{owner}/{repo}/check-suites/{suite_id}/rerequest" + available_checks = self.github_webhook.current_pull_request_supported_retest - assert self.github_webhook.github_api is not None - await asyncio.to_thread( - self.github_webhook.github_api.requester.requestJsonAndCheck, - "POST", - url, - ) + if not available_checks: + self.logger.debug(f"{self.log_prefix} No checks configured for this repository") + return - self.logger.info( - f"{self.log_prefix} Successfully re-requested check suite {suite_id} for PR #{pr_number}" - ) - except Exception: - self.logger.exception(f"{self.log_prefix} Failed to re-request check suite for PR #{pr_number}") + self.logger.info(f"{self.log_prefix} Available checks to retrigger: {available_checks}") - except Exception: - self.logger.exception(f"{self.log_prefix} Failed to retrigger checks for PR") + # Run all available checks using the shared runner handler method + await 
self.runner_handler.run_retests(supported_retests=available_checks, pull_request=pull_request) async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index d81c2d80..c79355b2 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -2,7 +2,8 @@ import contextlib import re import shutil -from collections.abc import AsyncGenerator +from asyncio import Task +from collections.abc import AsyncGenerator, Callable, Coroutine from typing import TYPE_CHECKING, Any import shortuuid @@ -742,3 +743,34 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie await asyncio.to_thread( pull_request.create_issue_comment, f"Cherry-picked PR {pull_request.title} into {target_branch}" ) + + async def run_retests(self, supported_retests: list[str], pull_request: PullRequest) -> None: + """Run the specified retests for a pull request. 
+ + Args: + supported_retests: List of test names to run (e.g., ['tox', 'pre-commit']) + pull_request: The PullRequest object to run tests for + """ + if not supported_retests: + self.logger.debug(f"{self.log_prefix} No retests to run") + return + + # Map check names to runner functions + _retests_to_func_map: dict[str, Callable] = { + TOX_STR: self.run_tox, + PRE_COMMIT_STR: self.run_pre_commit, + BUILD_CONTAINER_STR: self.run_build_container, + PYTHON_MODULE_INSTALL_STR: self.run_install_python_module, + CONVENTIONAL_TITLE_STR: self.run_conventional_title_check, + } + + tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] + for _test in supported_retests: + self.logger.debug(f"{self.log_prefix} running retest {_test}") + task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) + tasks.append(task) + + results = await asyncio.gather(*tasks, return_exceptions=True) + for result in results: + if isinstance(result, Exception): + self.logger.error(f"{self.log_prefix} Async task failed: {result}") diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 6a62de86..7502759c 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -18,6 +18,7 @@ HAS_CONFLICTS_LABEL_STR, LGTM_BY_LABEL_PREFIX, NEEDS_REBASE_LABEL_STR, + PRE_COMMIT_STR, TOX_STR, VERIFIED_LABEL_STR, WIP_STR, @@ -1895,79 +1896,48 @@ async def test_label_pull_request_by_merge_state_only_labels( async def test_retrigger_check_suites_for_pr_success( self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock ) -> None: - """Test _retrigger_check_suites_for_pr successfully re-requests check suites.""" + """Test _retrigger_check_suites_for_pr successfully runs configured checks.""" mock_pull_request.number = 123 - mock_pull_request.head.sha = "abc123" - mock_github_webhook.hook_data = {"repository": {"owner": {"login": 
"test-owner"}}} - - mock_commit = Mock() - mock_suite = Mock() - mock_suite.id = 456 - - call_count = 0 - - async def mock_to_thread_side_effect(func, *args, **kwargs): - nonlocal call_count - call_count += 1 - - # First call: pr.number - if call_count == 1: - return func() - # Second call: pr.head.sha - if call_count == 2: - return func() - # Third call: repository.get_commit - if call_count == 3: - return mock_commit - # Fourth call: commit.get_check_suites - if call_count == 4: - return func() - # Fifth call: suite.id - if call_count == 5: - return func() - # Sixth call: requester.requestJsonAndCheck - if call_count == 6: - return None + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] - return None - - mock_commit.get_check_suites.return_value = [mock_suite] + # Mock the shared run_retests method + mock_run_retests = AsyncMock() + pull_request_handler.runner_handler.run_retests = mock_run_retests - with patch("asyncio.to_thread", side_effect=mock_to_thread_side_effect): + with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else f): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - assert call_count == 6 - pull_request_handler.logger.info.assert_called_with( - "[TEST] Successfully re-requested check suite 456 for PR #123" + # Verify run_retests was called with the correct arguments + mock_run_retests.assert_called_once_with( + supported_retests=[TOX_STR, PRE_COMMIT_STR], pull_request=mock_pull_request ) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_no_check_suites( - self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock ) -> None: - """Test _retrigger_check_suites_for_pr when PR has no check suites.""" + """Test _retrigger_check_suites_for_pr when repository has no configured checks.""" mock_pull_request.number = 123 - 
mock_pull_request.head.sha = "abc123" - - mock_commit = Mock() - mock_commit.get_check_suites.return_value = [] + mock_github_webhook.current_pull_request_supported_retest = [] with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): - with patch.object(pull_request_handler.repository, "get_commit", return_value=mock_commit): - await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - pull_request_handler.logger.debug.assert_called_with("[TEST] No check suites found for PR #123") + pull_request_handler.logger.debug.assert_called_with("[TEST] No checks configured for this repository") @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_exception( - self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock ) -> None: - """Test _retrigger_check_suites_for_pr handles exceptions gracefully.""" - - async def mock_to_thread_side_effect(func, *args, **kwargs): - raise Exception("API Error") + """Test _retrigger_check_suites_for_pr handles exceptions from runners gracefully.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] - with patch("asyncio.to_thread", side_effect=mock_to_thread_side_effect): - await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + # Mock run_retests to raise exception + mock_run_retests = AsyncMock(side_effect=Exception("Runner failed")) + pull_request_handler.runner_handler.run_retests = mock_run_retests - pull_request_handler.logger.exception.assert_called_with("[TEST] Failed to retrigger checks for PR") + with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else f): + # The exception should propagate since we're not catching it in _retrigger_check_suites_for_pr + 
with pytest.raises(Exception, match="Runner failed"): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) From 42968da631a24d0069389291618b59addb377a0b Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 14:55:01 +0200 Subject: [PATCH 05/34] remove unuseful info from docstring --- webhook_server/libs/handlers/pull_request_handler.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 900e51ca..20beea48 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -901,11 +901,7 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> return merge_state async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: - """Re-trigger configured checks for a PR when base branch is updated. - - Uses the same runner approach as process_retest_command - only runs checks - that are configured for this repository. 
- """ + """Re-trigger configured checks for a PR when base branch is updated.""" pr_number = await asyncio.to_thread(lambda: pull_request.number) self.logger.step( # type: ignore[attr-defined] From 1746abfcf0d0d598bea1c2cfcafe4e5407cc15e7 Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 15:44:23 +0200 Subject: [PATCH 06/34] address comments --- webhook_server/config/schema.yaml | 4 +- .../tests/test_pull_request_handler.py | 37 +++++++++---------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index 06110ea2..2aebc876 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -85,7 +85,7 @@ properties: default: true retrigger-checks-on-base-push: type: boolean - description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch (global default) + description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default) default: false pr-size-thresholds: @@ -305,7 +305,7 @@ properties: default: true retrigger-checks-on-base-push: type: boolean - description: Re-trigger CI checks for out-of-date PRs when a push occurs to their base branch + description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch default: false pr-size-thresholds: type: object diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 7502759c..a3b193f8 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -25,6 +25,13 @@ ) +# Async shim for mocking asyncio.to_thread in tests +# This allows us to run sync functions in tests while preserving async/await semantics +async def _sync_to_thread(func, *args, **kwargs): + """Mock implementation of asyncio.to_thread that runs synchronously but returns awaitable.""" + return 
func(*args, **kwargs) + + class _AwaitableValue: def __init__(self, return_value: dict | None = None) -> None: self._value = return_value or {} @@ -438,7 +445,7 @@ async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after pull_request_handler.github_webhook.retrigger_checks_on_base_push = True - async def mock_label_side_effect(pull_request=None): + async def mock_label_side_effect(pull_request: Mock | PullRequest) -> str: return pull_request.mergeable_state with ( @@ -450,7 +457,7 @@ async def mock_label_side_effect(pull_request=None): ) as mock_label, patch.object(pull_request_handler, "_retrigger_check_suites_for_pr", new=AsyncMock()) as mock_retrigger, patch("asyncio.sleep", new=AsyncMock()), - patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), + patch("asyncio.to_thread", new=_sync_to_thread), ): await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() # Verify labeling called for both PRs @@ -486,7 +493,7 @@ async def test_label_all_opened_prs_retrigger_for_different_merge_states( with ( patch("asyncio.sleep", new_callable=AsyncMock), - patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None), + patch("asyncio.to_thread", new=_sync_to_thread), patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr]), patch.object( pull_request_handler, @@ -1636,7 +1643,7 @@ async def test_process_labeled_wip( mock_pull_request.labels = [mock_label] with patch.object(pull_request_handler, "check_if_can_be_merged", new=AsyncMock()) as mock_check_merge: - with patch("asyncio.to_thread", side_effect=lambda f, *args: f(*args) if callable(f) else None): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.process_pull_request_webhook_data(mock_pull_request) mock_check_merge.assert_awaited_once() @@ -1682,9 +1689,7 @@ async def test_delete_ghcr_tag_exceptions( 
side_effect=GithubException(404, "Not Found") ) - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._delete_ghcr_tag_via_github_api(mock_pull_request, "ghcr.io/org/pkg:123", "123") pull_request_handler.logger.warning.assert_called_with("[TEST] Package pkg not found for owner org on GHCR") @@ -1701,9 +1706,7 @@ async def test_add_assignee_exception( pull_request_handler.owners_file_handler.root_approvers = ["approver1"] - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.add_pull_request_owner_as_assingee(mock_pull_request) pull_request_handler.logger.debug.assert_any_call("[TEST] Exception while adding PR owner as assignee: Failed") @@ -1814,9 +1817,7 @@ async def test_set_pull_request_automerge_exception( pull_request_handler.github_webhook.set_auto_merge_prs = ["main"] mock_pull_request.base.ref = "main" - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.set_pull_request_automerge(mock_pull_request) pull_request_handler.logger.error.assert_called_with( @@ -1830,9 +1831,7 @@ async def test_label_pull_request_by_merge_state_unknown( """Test label_pull_request_by_merge_state when unknown.""" mock_pull_request.mergeable_state = "unknown" - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.label_pull_request_by_merge_state(mock_pull_request) # Should return early @@ -1904,7 +1903,7 @@ async def test_retrigger_check_suites_for_pr_success( mock_run_retests 
= AsyncMock() pull_request_handler.runner_handler.run_retests = mock_run_retests - with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else f): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) # Verify run_retests was called with the correct arguments @@ -1920,7 +1919,7 @@ async def test_retrigger_check_suites_for_pr_no_check_suites( mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [] - with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else None): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) pull_request_handler.logger.debug.assert_called_with("[TEST] No checks configured for this repository") @@ -1937,7 +1936,7 @@ async def test_retrigger_check_suites_for_pr_exception( mock_run_retests = AsyncMock(side_effect=Exception("Runner failed")) pull_request_handler.runner_handler.run_retests = mock_run_retests - with patch("asyncio.to_thread", side_effect=lambda f, *a, **k: f(*a, **k) if callable(f) else f): + with patch("asyncio.to_thread", new=_sync_to_thread): # The exception should propagate since we're not catching it in _retrigger_check_suites_for_pr with pytest.raises(Exception, match="Runner failed"): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) From 084e88cd09fd0d5b8bf8f2dad658a6b30761aa4b Mon Sep 17 00:00:00 2001 From: rnetser Date: Mon, 29 Dec 2025 17:37:01 +0200 Subject: [PATCH 07/34] test: improve type safety and consistency in pull request handler tests Added type annotations to _sync_to_thread shim function using modern Python type parameter syntax. Added missing asyncio.to_thread patch in test_retrigger_check_suites_for_pr_behind_merge to ensure consistent test behavior. 
Fixed test docstring to accurately reflect exception propagation behavior. --- webhook_server/tests/test_pull_request_handler.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index a3b193f8..fb4f290d 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1,4 +1,6 @@ import asyncio +from collections.abc import Callable +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -27,7 +29,7 @@ # Async shim for mocking asyncio.to_thread in tests # This allows us to run sync functions in tests while preserving async/await semantics -async def _sync_to_thread(func, *args, **kwargs): +async def _sync_to_thread[T](func: Callable[..., T], *args: Any, **kwargs: Any) -> T: """Mock implementation of asyncio.to_thread that runs synchronously but returns awaitable.""" return func(*args, **kwargs) @@ -1879,6 +1881,7 @@ async def test_label_pull_request_by_merge_state_only_labels( mock_pull_request.mergeable_state = "behind" with ( + patch("asyncio.to_thread", new=_sync_to_thread), patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label, patch.object( pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock @@ -1928,7 +1931,7 @@ async def test_retrigger_check_suites_for_pr_no_check_suites( async def test_retrigger_check_suites_for_pr_exception( self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock ) -> None: - """Test _retrigger_check_suites_for_pr handles exceptions from runners gracefully.""" + """Test _retrigger_check_suites_for_pr propagates exceptions from runners.""" mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] From 4696a7bec56d678436c141695713d5a398f94921 Mon Sep 17 
00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 13:45:10 +0200 Subject: [PATCH 08/34] feat: re-trigger CI checks for out-of-date PRs on base branch push When base branch is updated via direct push (not PR merge), automatically re-trigger CI checks for pull requests with merge state 'behind' or 'blocked'. This ensures PRs stay up-to-date with the latest base branch changes and prevents merging stale code. --- webhook_server/libs/handlers/push_handler.py | 91 +++++++++- webhook_server/tests/test_push_handler.py | 170 +++++++++++++++++++ 2 files changed, 260 insertions(+), 1 deletion(-) diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 8ff09873..fdb0751e 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -2,6 +2,7 @@ import re from typing import TYPE_CHECKING +from github.PullRequest import PullRequest from github.Repository import Repository from webhook_server.libs.handlers.check_run_handler import CheckRunHandler @@ -69,11 +70,99 @@ async def process_push_webhook_data(self) -> None: ) self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}") else: + # Non-tag push - check if this is a push to a branch that could be a base for PRs self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " - f"Non-tag push detected, skipping processing", + f"Processing branch push event", ) + # Only process if retrigger is enabled + if self.github_webhook.retrigger_checks_on_base_push: + # Extract branch name from ref (refs/heads/main -> main) + branch_match = re.search(r"^refs/heads/(.+)$", self.hook_data["ref"]) + if branch_match: + branch_name = branch_match.group(1) + self.logger.info(f"{self.log_prefix} Branch push detected: {branch_name}") + await self._retrigger_checks_for_prs_targeting_branch(branch_name=branch_name) + else: + self.logger.debug( + 
f"{self.log_prefix} Could not extract branch name from ref: {self.hook_data['ref']}" + ) + else: + self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not enabled, skipping") + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " + f"Branch push processing completed", + ) + + async def _retrigger_checks_for_prs_targeting_branch(self, branch_name: str) -> None: + """Re-trigger CI checks for PRs targeting the updated branch that are behind or blocked. + + Args: + branch_name: The branch that was pushed to (e.g., 'main') + """ + time_sleep = 30 + self.logger.info(f"{self.log_prefix} Waiting {time_sleep}s for GitHub to update merge states") + await asyncio.sleep(time_sleep) + + # Get all open PRs targeting this branch + def get_pulls() -> list[PullRequest]: + return list(self.repository.get_pulls(state="open", base=branch_name)) + + pulls = await asyncio.to_thread(get_pulls) + + if not pulls: + self.logger.info(f"{self.log_prefix} No open PRs targeting branch {branch_name}") + return + + self.logger.info(f"{self.log_prefix} Found {len(pulls)} open PRs targeting {branch_name}") + + for pull_request in pulls: + # pr.number is in-memory data from get_pulls() result - no wrapping needed + pr_number = pull_request.number + # mergeable_state triggers API call - must wrap to avoid blocking + + def get_merge_state(pr: PullRequest = pull_request) -> str | None: + return pr.mergeable_state + + merge_state = await asyncio.to_thread(get_merge_state) + + self.logger.debug(f"{self.log_prefix} PR #{pr_number} merge state: {merge_state}") + + # Handle None/unknown merge states explicitly + if merge_state in (None, "unknown"): + self.logger.warning( + f"{self.log_prefix} PR #{pr_number} merge state is '{merge_state}' - " + "GitHub still calculating, skipping for now" + ) + continue + + # Only re-trigger for PRs that are behind or blocked + if merge_state in ("behind", "blocked"): + 
self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " + f"Re-triggering checks for out-of-date PR #{pr_number} (state: {merge_state})", + ) + + available_checks = self.github_webhook.current_pull_request_supported_retest + + if not available_checks: + self.logger.debug(f"{self.log_prefix} No checks configured for this repository") + continue + + self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {available_checks}") + try: + await self.runner_handler.run_retests(supported_retests=available_checks, pull_request=pull_request) + self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") + except Exception: + self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") + # Continue processing other PRs + else: + self.logger.debug( + f"{self.log_prefix} PR #{pr_number} merge state is '{merge_state}', not re-triggering" + ) + async def upload_to_pypi(self, tag_name: str) -> None: async def _issue_on_error(_error: str) -> None: # Sanitize title: replace newlines, remove backticks, strip whitespace, truncate diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 0fac3555..619b79b8 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -100,6 +100,7 @@ async def test_process_push_webhook_data_with_tag_no_container_release(self, pus async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) -> None: """Test processing push webhook data without tag.""" push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = False with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: @@ -410,3 
+411,172 @@ async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandl assert "published to PYPI" in call_args[1]["message"] assert call_args[1]["logger"] == push_handler.logger assert call_args[1]["log_prefix"] == push_handler.log_prefix + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_retrigger_enabled(self, push_handler: PushHandler) -> None: + """Test processing branch push with retrigger enabled.""" + push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = True + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_called_once_with(branch_name="main") + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_retrigger_disabled(self, push_handler: PushHandler) -> None: + """Test processing branch push with retrigger disabled.""" + push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = False + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_not_called() + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_feature_branch(self, push_handler: PushHandler) -> None: + """Test processing push to feature branch.""" + push_handler.hook_data["ref"] = "refs/heads/feature/my-feature" + push_handler.github_webhook.retrigger_checks_on_base_push = True + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_called_once_with(branch_name="feature/my-feature") + + @pytest.mark.asyncio + async def 
test_retrigger_checks_for_prs_targeting_branch_no_prs(self, push_handler: PushHandler) -> None: + """Test retrigger when no PRs target the branch.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [] + + with patch("asyncio.sleep", new_callable=AsyncMock): + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_get_pulls.assert_called_once_with(state="open", base="main") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_behind(self, push_handler: PushHandler) -> None: + """Test retrigger for PR with merge state 'behind'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_called_once_with(supported_retests=["tox", "pre-commit"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_blocked(self, push_handler: PushHandler) -> None: + """Test retrigger for PR with merge state 'blocked'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 456 + mock_pr.mergeable_state = "blocked" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, 
"run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_called_once_with(supported_retests=["tox"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_clean(self, push_handler: PushHandler) -> None: + """Test that retrigger skips PR with merge state 'clean'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 789 + mock_pr.mergeable_state = "clean" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_multiple_prs(self, push_handler: PushHandler) -> None: + """Test retrigger with multiple PRs in different states.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + + mock_pr1 = Mock() + mock_pr1.number = 100 + mock_pr1.mergeable_state = "behind" + + mock_pr2 = Mock() + mock_pr2.number = 200 + mock_pr2.mergeable_state = "clean" + + mock_pr3 = Mock() + mock_pr3.number = 300 + mock_pr3.mergeable_state = "blocked" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr1, mock_pr2, mock_pr3] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should be called twice: for PR 100 (behind) and PR 300 (blocked) + 
assert mock_retests.call_count == 2 + calls = mock_retests.call_args_list + assert calls[0].kwargs["pull_request"] == mock_pr1 + assert calls[1].kwargs["pull_request"] == mock_pr3 + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_no_checks_configured( + self, push_handler: PushHandler + ) -> None: + """Test retrigger when no checks are configured.""" + push_handler.github_webhook.current_pull_request_supported_retest = [] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_waits_for_github(self, push_handler: PushHandler) -> None: + """Test that retrigger waits 30 seconds for GitHub to update merge states.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [] + + with patch("asyncio.sleep", new_callable=AsyncMock) as mock_sleep: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_sleep.assert_called_once_with(30) From 90a05387aa0031adca198d9fe61b27d418369319 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 14:08:55 +0200 Subject: [PATCH 09/34] feat: make retrigger-checks-on-base-push configurable with selective check execution Enhance retrigger-checks-on-base-push to accept multiple configuration modes: - false: disable check retriggering (default) - "all": retrigger all available checks - ["check1", "check2"]: retrigger only specified checks This provides 
fine-grained control over which CI checks are retriggered when the base branch is updated, allowing users to optimize webhook processing and avoid unnecessary check reruns. Changes: - Update schema to accept boolean | string | list of strings - Add validation for "all" string value and check name lists - Implement selective check filtering in push_handler and pull_request_handler - Add comprehensive tests for all configuration options - Update get_retrigger_checks_on_base_push to handle new config types --- webhook_server/config/schema.yaml | 24 +++++- webhook_server/libs/github_api.py | 2 +- .../libs/handlers/pull_request_handler.py | 37 ++++++-- webhook_server/libs/handlers/push_handler.py | 32 +++++-- .../tests/test_pull_request_handler.py | 61 +++++++++++++ webhook_server/tests/test_push_handler.py | 85 +++++++++++++++++++ 6 files changed, 225 insertions(+), 16 deletions(-) diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index 2aebc876..e0c12994 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -84,7 +84,17 @@ properties: description: Create a tracking issue for new pull requests (global default) default: true retrigger-checks-on-base-push: - type: boolean + oneOf: + - type: boolean + enum: [false] + description: Disable re-triggering of CI checks (default) + - type: string + enum: ["all"] + description: Re-trigger all available CI checks for out-of-date PRs + - type: array + items: + type: string + description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default) default: false @@ -304,7 +314,17 @@ properties: description: Create a tracking issue for new pull requests default: true retrigger-checks-on-base-push: - type: boolean + oneOf: + - type: boolean + enum: [false] + description: Disable re-triggering of CI checks (default) + - type: string + enum: 
["all"] + description: Re-trigger all available CI checks for out-of-date PRs + - type: array + items: + type: string + description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch default: false pr-size-thresholds: diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 9560ac88..16e99043 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -709,7 +709,7 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: ) self.mask_sensitive = self.config.get_value("mask-sensitive-data", return_on_none=True) - self.retrigger_checks_on_base_push: bool = self.config.get_value( + self.retrigger_checks_on_base_push: list[str] | str | bool = self.config.get_value( value="retrigger-checks-on-base-push", return_on_none=False, extra_dict=repository_config ) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 20beea48..a4b0ddae 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -431,10 +431,14 @@ async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merg self.logger.info(f"{self.log_prefix} check label pull request after merge") merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) - # If retrigger is enabled and PR is behind, retrigger checks - if self.github_webhook.retrigger_checks_on_base_push: - if merge_state in ("behind", "blocked"): - await self._retrigger_check_suites_for_pr(pull_request=pull_request) + # Check if retrigger is enabled (not False or empty list) + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + if retrigger_config is False or retrigger_config == []: + continue + + # If retrigger is enabled and PR is behind or blocked, 
retrigger checks + if merge_state in ("behind", "blocked"): + await self._retrigger_check_suites_for_pr(pull_request=pull_request) async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] @@ -915,10 +919,29 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non self.logger.debug(f"{self.log_prefix} No checks configured for this repository") return - self.logger.info(f"{self.log_prefix} Available checks to retrigger: {available_checks}") + # Determine which checks to run based on config + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + if retrigger_config == "all": + checks_to_run = available_checks + elif isinstance(retrigger_config, list): + # Filter to only configured checks that are available + checks_to_run = [check for check in retrigger_config if check in available_checks] + if not checks_to_run: + self.logger.warning( + f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " + f"are available. 
Available: {available_checks}" + ) + return + else: + # Shouldn't happen with schema validation, but handle gracefully + self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") + return + + self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") - # Run all available checks using the shared runner handler method - await self.runner_handler.run_retests(supported_retests=available_checks, pull_request=pull_request) + # Run configured checks using the shared runner handler method + await self.runner_handler.run_retests(supported_retests=checks_to_run, pull_request=pull_request) async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index fdb0751e..4a5e6e07 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -76,8 +76,11 @@ async def process_push_webhook_data(self) -> None: f"Processing branch push event", ) - # Only process if retrigger is enabled - if self.github_webhook.retrigger_checks_on_base_push: + # Check if retrigger is enabled (not False or empty list) + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + if retrigger_config is False or retrigger_config == []: + self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not enabled, skipping") + else: # Extract branch name from ref (refs/heads/main -> main) branch_match = re.search(r"^refs/heads/(.+)$", self.hook_data["ref"]) if branch_match: @@ -88,8 +91,6 @@ async def process_push_webhook_data(self) -> None: self.logger.debug( f"{self.log_prefix} Could not extract branch name from ref: {self.hook_data['ref']}" ) - else: - self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not enabled, skipping") self.logger.step( # type: ignore[attr-defined] 
f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " @@ -151,9 +152,28 @@ def get_merge_state(pr: PullRequest = pull_request) -> str | None: self.logger.debug(f"{self.log_prefix} No checks configured for this repository") continue - self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {available_checks}") + # Determine which checks to run based on config + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + if retrigger_config == "all": + checks_to_run = available_checks + elif isinstance(retrigger_config, list): + # Filter to only configured checks that are available + checks_to_run = [check for check in retrigger_config if check in available_checks] + if not checks_to_run: + self.logger.warning( + f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " + f"are available. Available: {available_checks}" + ) + continue + else: + # Shouldn't happen with schema validation, but handle gracefully + self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") + continue + + self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") try: - await self.runner_handler.run_retests(supported_retests=available_checks, pull_request=pull_request) + await self.runner_handler.run_retests(supported_retests=checks_to_run, pull_request=pull_request) self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") except Exception: self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index fb4f290d..b0d89397 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1901,6 +1901,7 @@ async def test_retrigger_check_suites_for_pr_success( """Test 
_retrigger_check_suites_for_pr successfully runs configured checks.""" mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = "all" # Mock the shared run_retests method mock_run_retests = AsyncMock() @@ -1934,6 +1935,7 @@ async def test_retrigger_check_suites_for_pr_exception( """Test _retrigger_check_suites_for_pr propagates exceptions from runners.""" mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] + mock_github_webhook.retrigger_checks_on_base_push = "all" # Mock run_retests to raise exception mock_run_retests = AsyncMock(side_effect=Exception("Runner failed")) @@ -1943,3 +1945,62 @@ async def test_retrigger_check_suites_for_pr_exception( # The exception should propagate since we're not catching it in _retrigger_check_suites_for_pr with pytest.raises(Exception, match="Runner failed"): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_specific_checks_list( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr with specific checks list.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR, "build-container"] + mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, PRE_COMMIT_STR] + + # Mock the shared run_retests method + mock_run_retests = AsyncMock() + pull_request_handler.runner_handler.run_retests = mock_run_retests + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests was called with only configured checks + mock_run_retests.assert_called_once_with( + supported_retests=[TOX_STR, PRE_COMMIT_STR], 
pull_request=mock_pull_request + ) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_nonexistent_checks( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr when configured checks don't exist.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = ["nonexistent-check"] + + # Mock the shared run_retests method + mock_run_retests = AsyncMock() + pull_request_handler.runner_handler.run_retests = mock_run_retests + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Should not run any checks since none match + mock_run_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_partial_match( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr with partial match.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, "nonexistent-check"] + + # Mock the shared run_retests method + mock_run_retests = AsyncMock() + pull_request_handler.runner_handler.run_retests = mock_run_retests + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Should only run checks that match + mock_run_retests.assert_called_once_with(supported_retests=[TOX_STR], pull_request=mock_pull_request) diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 619b79b8..b1bb5163 100644 --- a/webhook_server/tests/test_push_handler.py +++ 
b/webhook_server/tests/test_push_handler.py @@ -468,6 +468,7 @@ async def test_retrigger_checks_for_prs_targeting_branch_no_prs(self, push_handl async def test_retrigger_checks_for_prs_targeting_branch_pr_behind(self, push_handler: PushHandler) -> None: """Test retrigger for PR with merge state 'behind'.""" push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" mock_pr = Mock() mock_pr.number = 123 @@ -486,6 +487,7 @@ async def test_retrigger_checks_for_prs_targeting_branch_pr_behind(self, push_ha async def test_retrigger_checks_for_prs_targeting_branch_pr_blocked(self, push_handler: PushHandler) -> None: """Test retrigger for PR with merge state 'blocked'.""" push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" mock_pr = Mock() mock_pr.number = 456 @@ -522,6 +524,7 @@ async def test_retrigger_checks_for_prs_targeting_branch_pr_clean(self, push_han async def test_retrigger_checks_for_prs_targeting_branch_multiple_prs(self, push_handler: PushHandler) -> None: """Test retrigger with multiple PRs in different states.""" push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" mock_pr1 = Mock() mock_pr1.number = 100 @@ -580,3 +583,85 @@ async def test_retrigger_checks_waits_for_github(self, push_handler: PushHandler await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") mock_sleep.assert_called_once_with(30) + + @pytest.mark.asyncio + async def test_retrigger_checks_with_specific_checks_list(self, push_handler: PushHandler) -> None: + """Test retrigger with specific checks list.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit", "build-container"] + push_handler.github_webhook.retrigger_checks_on_base_push = 
["tox", "pre-commit"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should only run configured checks, not all available + mock_retests.assert_called_once_with(supported_retests=["tox", "pre-commit"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_with_nonexistent_checks(self, push_handler: PushHandler) -> None: + """Test retrigger when configured checks don't exist.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = ["nonexistent-check"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should not run any checks since none match + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_with_partial_match(self, push_handler: PushHandler) -> None: + """Test retrigger with some configured checks matching available checks.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = ["tox", "nonexistent-check"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with 
patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should only run checks that match + mock_retests.assert_called_once_with(supported_retests=["tox"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) -> None: + """Test retrigger disabled with empty list.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = [] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Empty list is treated as disabled - should not trigger + # But the current implementation will process the PR since the check happens at webhook level + # This test validates current behavior + mock_retests.assert_not_called() From e54fc9f80c4dc6ed5ea983573c14fadf5a991796 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 14:27:51 +0200 Subject: [PATCH 10/34] refactor: simplify retrigger-checks-on-base-push config by removing explicit false option Remove the explicit 'retrigger-checks-on-base-push: false' option from schema and code. The feature now defaults to disabled when not configured, following the standard pattern where absence of configuration means the feature is not enabled.
This simplifies the config schema by removing unnecessary boilerplate while maintaining backward compatibility (existing configs with 'false' will continue to work as the feature simply won't be configured/enabled). --- webhook_server/config/schema.yaml | 12 ++---------- webhook_server/libs/github_api.py | 4 ++-- webhook_server/libs/handlers/pull_request_handler.py | 6 +++--- webhook_server/libs/handlers/push_handler.py | 8 ++++---- webhook_server/tests/test_pull_request_handler.py | 4 ++-- webhook_server/tests/test_push_handler.py | 6 +++--- 6 files changed, 16 insertions(+), 24 deletions(-) diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index e0c12994..3d8c46cc 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -85,9 +85,6 @@ properties: default: true retrigger-checks-on-base-push: oneOf: - - type: boolean - enum: [false] - description: Disable re-triggering of CI checks (default) - type: string enum: ["all"] description: Re-trigger all available CI checks for out-of-date PRs @@ -95,8 +92,7 @@ properties: items: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) - description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default) - default: false + description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default). If not configured, defaults to disabled. 
pr-size-thresholds: type: object @@ -315,9 +311,6 @@ properties: default: true retrigger-checks-on-base-push: oneOf: - - type: boolean - enum: [false] - description: Disable re-triggering of CI checks (default) - type: string enum: ["all"] description: Re-trigger all available CI checks for out-of-date PRs @@ -325,8 +318,7 @@ properties: items: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) - description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch - default: false + description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch. If not configured, defaults to disabled. pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 16e99043..d12fd796 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -709,8 +709,8 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: ) self.mask_sensitive = self.config.get_value("mask-sensitive-data", return_on_none=True) - self.retrigger_checks_on_base_push: list[str] | str | bool = self.config.get_value( - value="retrigger-checks-on-base-push", return_on_none=False, extra_dict=repository_config + self.retrigger_checks_on_base_push: list[str] | str | None = self.config.get_value( + value="retrigger-checks-on-base-push", return_on_none=None, extra_dict=repository_config ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index a4b0ddae..a9b10f8b 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -431,9 +431,9 @@ async def 
label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merg self.logger.info(f"{self.log_prefix} check label pull request after merge") merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) - # Check if retrigger is enabled (not False or empty list) + # Check if retrigger is enabled (not None or empty list) retrigger_config = self.github_webhook.retrigger_checks_on_base_push - if retrigger_config is False or retrigger_config == []: + if not retrigger_config: continue # If retrigger is enabled and PR is behind or blocked, retrigger checks @@ -934,7 +934,7 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non ) return else: - # Shouldn't happen with schema validation, but handle gracefully + # Config is None - already handled in caller, shouldn't reach here self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") return diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 4a5e6e07..5d8a3e11 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -76,10 +76,10 @@ async def process_push_webhook_data(self) -> None: f"Processing branch push event", ) - # Check if retrigger is enabled (not False or empty list) + # Check if retrigger is enabled (not None or empty list) retrigger_config = self.github_webhook.retrigger_checks_on_base_push - if retrigger_config is False or retrigger_config == []: - self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not enabled, skipping") + if not retrigger_config: + self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not configured, skipping") else: # Extract branch name from ref (refs/heads/main -> main) branch_match = re.search(r"^refs/heads/(.+)$", self.hook_data["ref"]) @@ -167,7 +167,7 @@ def get_merge_state(pr: PullRequest = pull_request) -> str | None: ) continue else: - # Shouldn't happen 
with schema validation, but handle gracefully + # Config is None - already handled above, shouldn't reach here self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") continue diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index b0d89397..47c26fc6 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -415,7 +415,7 @@ def test_prepare_retest_welcome_comment(self, pull_request_handler: PullRequestH async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged( self, pull_request_handler: PullRequestHandler ) -> None: - """Test labeling all opened pull requests merge state after merged.""" + """Test labeling all opened pull requests merge state after merged with retrigger not configured.""" mock_pr1 = Mock() mock_pr2 = Mock() mock_pr1.number = 1 @@ -423,7 +423,7 @@ async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after mock_pr1.mergeable_state = "clean" mock_pr2.mergeable_state = "clean" - pull_request_handler.github_webhook.retrigger_checks_on_base_push = False + pull_request_handler.github_webhook.retrigger_checks_on_base_push = None with patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]): with patch.object(pull_request_handler, "label_pull_request_by_merge_state", new=AsyncMock()) as mock_label: diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index b1bb5163..88e51332 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -100,7 +100,7 @@ async def test_process_push_webhook_data_with_tag_no_container_release(self, pus async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) -> None: """Test processing push webhook data without tag.""" push_handler.hook_data["ref"] = "refs/heads/main" - 
push_handler.github_webhook.retrigger_checks_on_base_push = False + push_handler.github_webhook.retrigger_checks_on_base_push = None with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: @@ -427,9 +427,9 @@ async def test_process_push_webhook_data_branch_push_retrigger_enabled(self, pus @pytest.mark.asyncio async def test_process_push_webhook_data_branch_push_retrigger_disabled(self, push_handler: PushHandler) -> None: - """Test processing branch push with retrigger disabled.""" + """Test processing branch push with retrigger not configured.""" push_handler.hook_data["ref"] = "refs/heads/main" - push_handler.github_webhook.retrigger_checks_on_base_push = False + push_handler.github_webhook.retrigger_checks_on_base_push = None with patch.object( push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock From 56ab46996c1e0903a42bfeb25b730807d966e4ff Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 14:45:34 +0200 Subject: [PATCH 11/34] fix: address code review feedback for retrigger CI feature - Improve error logging in run_retests() to include task name - Add clarifying comment explaining closure pattern in push_handler --- webhook_server/libs/handlers/push_handler.py | 2 ++ webhook_server/libs/handlers/runner_handler.py | 10 ++++++++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 5d8a3e11..7e4a43ff 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -124,6 +124,8 @@ def get_pulls() -> list[PullRequest]: pr_number = pull_request.number # mergeable_state triggers API call - must wrap to avoid blocking + # Use default parameter to capture current iteration's pull_request (closure pattern) + # This ensures each lambda 
captures the correct PR object, not the loop variable def get_merge_state(pr: PullRequest = pull_request) -> str | None: return pr.mergeable_state diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index c79355b2..d6c7087c 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -750,6 +750,10 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ Args: supported_retests: List of test names to run (e.g., ['tox', 'pre-commit']) pull_request: The PullRequest object to run tests for + + Note: + Uses asyncio.gather with return_exceptions=True to continue processing + even if some tasks fail. Failed tasks are logged for debugging. """ if not supported_retests: self.logger.debug(f"{self.log_prefix} No retests to run") @@ -771,6 +775,8 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ tasks.append(task) results = await asyncio.gather(*tasks, return_exceptions=True) - for result in results: + # Log any task failures for debugging + for i, result in enumerate(results): if isinstance(result, Exception): - self.logger.error(f"{self.log_prefix} Async task failed: {result}") + test_name = supported_retests[i] if i < len(supported_retests) else "unknown" + self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") From b3104a8b5d276c1ddeb0198cb99da6dd95b5e9f7 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 14:51:08 +0200 Subject: [PATCH 12/34] refactor: extract duplicate retrigger check logic to shared method - Add run_retests_from_config() to runner_handler.py - Simplify push_handler and pull_request_handler to use shared method - Eliminates ~55 lines of duplicate code - Update tests to mock new shared method --- .../libs/handlers/pull_request_handler.py | 29 +--------- webhook_server/libs/handlers/push_handler.py | 33 ++--------- 
.../libs/handlers/runner_handler.py | 41 +++++++++++++ .../tests/test_pull_request_handler.py | 57 ++++++++++--------- 4 files changed, 76 insertions(+), 84 deletions(-) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index a9b10f8b..6ec1710a 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -913,35 +913,8 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non f"Re-triggering checks for out-of-date PR #{pr_number}", ) - available_checks = self.github_webhook.current_pull_request_supported_retest - - if not available_checks: - self.logger.debug(f"{self.log_prefix} No checks configured for this repository") - return - - # Determine which checks to run based on config - retrigger_config = self.github_webhook.retrigger_checks_on_base_push - - if retrigger_config == "all": - checks_to_run = available_checks - elif isinstance(retrigger_config, list): - # Filter to only configured checks that are available - checks_to_run = [check for check in retrigger_config if check in available_checks] - if not checks_to_run: - self.logger.warning( - f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " - f"are available. 
Available: {available_checks}" - ) - return - else: - # Config is None - already handled in caller, shouldn't reach here - self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") - return - - self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") - # Run configured checks using the shared runner handler method - await self.runner_handler.run_retests(supported_retests=checks_to_run, pull_request=pull_request) + await self.runner_handler.run_retests_from_config(pull_request=pull_request) async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 7e4a43ff..9ffcde80 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -148,35 +148,12 @@ def get_merge_state(pr: PullRequest = pull_request) -> str | None: f"Re-triggering checks for out-of-date PR #{pr_number} (state: {merge_state})", ) - available_checks = self.github_webhook.current_pull_request_supported_retest - - if not available_checks: - self.logger.debug(f"{self.log_prefix} No checks configured for this repository") - continue - - # Determine which checks to run based on config - retrigger_config = self.github_webhook.retrigger_checks_on_base_push - - if retrigger_config == "all": - checks_to_run = available_checks - elif isinstance(retrigger_config, list): - # Filter to only configured checks that are available - checks_to_run = [check for check in retrigger_config if check in available_checks] - if not checks_to_run: - self.logger.warning( - f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " - f"are available. 
Available: {available_checks}" - ) - continue - else: - # Config is None - already handled above, shouldn't reach here - self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") - continue - - self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") try: - await self.runner_handler.run_retests(supported_retests=checks_to_run, pull_request=pull_request) - self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") + checks_triggered = await self.runner_handler.run_retests_from_config(pull_request=pull_request) + if checks_triggered: + self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") + else: + self.logger.debug(f"{self.log_prefix} No checks triggered for PR #{pr_number}") except Exception: self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") # Continue processing other PRs diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index d6c7087c..a0fd1cf5 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -780,3 +780,44 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ if isinstance(result, Exception): test_name = supported_retests[i] if i < len(supported_retests) else "unknown" self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") + + async def run_retests_from_config(self, pull_request: PullRequest) -> bool: + """Run retests based on retrigger-checks-on-base-push configuration. + + Determines which checks to run based on the configuration and available checks, + then calls run_retests() to execute them. + + Args: + pull_request: The pull request to run checks on + + Returns: + True if checks were triggered, False if skipped (no config, no available checks, etc.) 
+ """ + available_checks = self.github_webhook.current_pull_request_supported_retest + + if not available_checks: + self.logger.debug(f"{self.log_prefix} No checks configured for this repository") + return False + + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + if retrigger_config == "all": + checks_to_run = available_checks + elif isinstance(retrigger_config, list): + # Filter to only configured checks that are available + checks_to_run = [check for check in retrigger_config if check in available_checks] + if not checks_to_run: + self.logger.warning( + f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " + f"are available. Available: {available_checks}" + ) + return False + else: + self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") + return False + + pr_number = await asyncio.to_thread(lambda: pull_request.number) + self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") + + await self.run_retests(supported_retests=checks_to_run, pull_request=pull_request) + return True diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 47c26fc6..71acc741 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1903,17 +1903,15 @@ async def test_retrigger_check_suites_for_pr_success( mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] mock_github_webhook.retrigger_checks_on_base_push = "all" - # Mock the shared run_retests method - mock_run_retests = AsyncMock() - pull_request_handler.runner_handler.run_retests = mock_run_retests + # Mock the run_retests_from_config method + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with patch("asyncio.to_thread", new=_sync_to_thread): await 
pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - # Verify run_retests was called with the correct arguments - mock_run_retests.assert_called_once_with( - supported_retests=[TOX_STR, PRE_COMMIT_STR], pull_request=mock_pull_request - ) + # Verify run_retests_from_config was called with the correct arguments + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_no_check_suites( @@ -1923,10 +1921,15 @@ async def test_retrigger_check_suites_for_pr_no_check_suites( mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [] + # Mock the run_retests_from_config method (returns False when no checks) + mock_run_retests_from_config = AsyncMock(return_value=False) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - pull_request_handler.logger.debug.assert_called_with("[TEST] No checks configured for this repository") + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_exception( @@ -1937,9 +1940,9 @@ async def test_retrigger_check_suites_for_pr_exception( mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] mock_github_webhook.retrigger_checks_on_base_push = "all" - # Mock run_retests to raise exception - mock_run_retests = AsyncMock(side_effect=Exception("Runner failed")) - pull_request_handler.runner_handler.run_retests = mock_run_retests + # Mock run_retests_from_config to raise exception + mock_run_retests_from_config = AsyncMock(side_effect=Exception("Runner failed")) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with 
patch("asyncio.to_thread", new=_sync_to_thread): # The exception should propagate since we're not catching it in _retrigger_check_suites_for_pr @@ -1955,17 +1958,15 @@ async def test_retrigger_check_suites_for_pr_with_specific_checks_list( mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR, "build-container"] mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, PRE_COMMIT_STR] - # Mock the shared run_retests method - mock_run_retests = AsyncMock() - pull_request_handler.runner_handler.run_retests = mock_run_retests + # Mock the run_retests_from_config method + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - # Verify run_retests was called with only configured checks - mock_run_retests.assert_called_once_with( - supported_retests=[TOX_STR, PRE_COMMIT_STR], pull_request=mock_pull_request - ) + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_with_nonexistent_checks( @@ -1976,15 +1977,15 @@ async def test_retrigger_check_suites_for_pr_with_nonexistent_checks( mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] mock_github_webhook.retrigger_checks_on_base_push = ["nonexistent-check"] - # Mock the shared run_retests method - mock_run_retests = AsyncMock() - pull_request_handler.runner_handler.run_retests = mock_run_retests + # Mock the run_retests_from_config method (returns False when no matching checks) + mock_run_retests_from_config = AsyncMock(return_value=False) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with patch("asyncio.to_thread", new=_sync_to_thread): 
await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - # Should not run any checks since none match - mock_run_retests.assert_not_called() + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_with_partial_match( @@ -1995,12 +1996,12 @@ async def test_retrigger_check_suites_for_pr_with_partial_match( mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, "nonexistent-check"] - # Mock the shared run_retests method - mock_run_retests = AsyncMock() - pull_request_handler.runner_handler.run_retests = mock_run_retests + # Mock the run_retests_from_config method (returns True when checks match) + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) - # Should only run checks that match - mock_run_retests.assert_called_once_with(supported_retests=[TOX_STR], pull_request=mock_pull_request) + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) From 26db0beb67fc5023a80feeb2a1cdb13f687df504 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 15:06:23 +0200 Subject: [PATCH 13/34] fix: add defensive checks and exception handling for retrigger functionality - Add defensive key check in runner_handler to handle unknown retest types - Add exception handling in pull_request_handler _retrigger_check_suites_for_pr method - Add 3 new edge case tests in test_push_handler: - Unknown merge state handling - None merge state handling - Exception handling continuity across multiple PRs --- 
.../libs/handlers/pull_request_handler.py | 7 +- .../libs/handlers/runner_handler.py | 3 + webhook_server/tests/test_push_handler.py | 65 +++++++++++++++++++ 3 files changed, 73 insertions(+), 2 deletions(-) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 6ec1710a..3182c4cf 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -913,8 +913,11 @@ async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> Non f"Re-triggering checks for out-of-date PR #{pr_number}", ) - # Run configured checks using the shared runner handler method - await self.runner_handler.run_retests_from_config(pull_request=pull_request) + try: + # Run configured checks using the shared runner handler method + await self.runner_handler.run_retests_from_config(pull_request=pull_request) + except Exception: + self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index a0fd1cf5..9e887c94 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -770,6 +770,9 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] for _test in supported_retests: + if _test not in _retests_to_func_map: + self.logger.error(f"{self.log_prefix} Unknown retest type: {_test}") + continue self.logger.debug(f"{self.log_prefix} running retest {_test}") task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) tasks.append(task) diff --git a/webhook_server/tests/test_push_handler.py 
b/webhook_server/tests/test_push_handler.py index 88e51332..d56d129b 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -665,3 +665,68 @@ async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) # But the current implementation will process the PR since the check happens at webhook level # This test validates current behavior mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_with_unknown_merge_state(self, push_handler: PushHandler) -> None: + """Test that PRs with unknown merge state are skipped with warning.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 999 + mock_pr.mergeable_state = "unknown" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + mock_retests.assert_not_called() + push_handler.logger.warning.assert_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_with_none_merge_state(self, push_handler: PushHandler) -> None: + """Test that PRs with None merge state are skipped with warning.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 888 + mock_pr.mergeable_state = None + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + await 
push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_continues_on_exception(self, push_handler: PushHandler) -> None: + """Test that exception in one PR doesn't stop processing others.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr1 = Mock() + mock_pr1.number = 100 + mock_pr1.mergeable_state = "behind" + + mock_pr2 = Mock() + mock_pr2.number = 200 + mock_pr2.mergeable_state = "behind" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr1, mock_pr2] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + # First call raises exception, second succeeds + mock_retests.side_effect = [Exception("Test error"), True] + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + # Both PRs should be attempted + assert mock_retests.call_count == 2 + # Exception should be logged + push_handler.logger.exception.assert_called() From 9ce53b02a7c331ccdaa08ecda3b6e54312dc64c9 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 15:07:03 +0200 Subject: [PATCH 14/34] test: update retrigger exception test to match new handling behavior Updated test_retrigger_check_suites_for_pr_exception to verify that exceptions are caught and logged rather than propagated, matching the new exception handling implementation in _retrigger_check_suites_for_pr. 
--- webhook_server/tests/test_pull_request_handler.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 71acc741..18f7ce17 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1935,7 +1935,7 @@ async def test_retrigger_check_suites_for_pr_no_check_suites( async def test_retrigger_check_suites_for_pr_exception( self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock ) -> None: - """Test _retrigger_check_suites_for_pr propagates exceptions from runners.""" + """Test _retrigger_check_suites_for_pr catches and logs exceptions from runners.""" mock_pull_request.number = 123 mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] mock_github_webhook.retrigger_checks_on_base_push = "all" @@ -1945,9 +1945,12 @@ async def test_retrigger_check_suites_for_pr_exception( pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config with patch("asyncio.to_thread", new=_sync_to_thread): - # The exception should propagate since we're not catching it in _retrigger_check_suites_for_pr - with pytest.raises(Exception, match="Runner failed"): - await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + # The exception should be caught and logged, not propagated + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + # Verify exception was logged + pull_request_handler.logger.exception.assert_called_once_with( + "[TEST] Failed to re-trigger checks for PR #123" + ) @pytest.mark.asyncio async def test_retrigger_check_suites_for_pr_with_specific_checks_list( From 2159a5c98cb124887cb6e05ca0f4a8fa7b86660d Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 15:31:04 +0200 Subject: [PATCH 15/34] refactor: address CodeRabbit review feedback - Add 
merge-state-check-delay config option for configurable wait time - Remove unnecessary asyncio.to_thread() wrapper for in-memory property access - Fix test to use realistic config value 'all' instead of boolean True --- examples/config.yaml | 4 ++++ webhook_server/config/schema.yaml | 11 +++++++++++ webhook_server/libs/github_api.py | 3 +++ webhook_server/libs/handlers/pull_request_handler.py | 4 ++-- webhook_server/libs/handlers/push_handler.py | 2 +- webhook_server/libs/handlers/runner_handler.py | 2 +- webhook_server/tests/test_pull_request_handler.py | 1 + webhook_server/tests/test_push_handler.py | 3 ++- 8 files changed, 25 insertions(+), 5 deletions(-) diff --git a/examples/config.yaml b/examples/config.yaml index 86f6c39b..eae89ee3 100644 --- a/examples/config.yaml +++ b/examples/config.yaml @@ -33,6 +33,10 @@ auto-verify-cherry-picked-prs: true # Default: true - automatically verify cher create-issue-for-new-pr: true # Global default: create tracking issues for new PRs +# Delay in seconds before checking merge states after base branch updates (default: 30) +# This allows GitHub time to calculate merge states after a PR is merged or base branch is pushed +merge-state-check-delay: 30 + # Global PR size label configuration (optional) # Define custom categories based on total lines changed (additions + deletions) # threshold: positive integer or 'inf' for unbounded largest category diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index 3d8c46cc..17938879 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -94,6 +94,12 @@ properties: description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default). If not configured, defaults to disabled. 
+ merge-state-check-delay: + type: integer + description: Delay in seconds before checking merge states after base branch updates (default is 30 seconds) + default: 30 + minimum: 0 + pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors @@ -319,6 +325,11 @@ properties: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch. If not configured, defaults to disabled. + merge-state-check-delay: + type: integer + description: Delay in seconds before checking merge states after base branch updates (default is 30 seconds) + default: 30 + minimum: 0 pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index d12fd796..bd66199c 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -712,6 +712,9 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: self.retrigger_checks_on_base_push: list[str] | str | None = self.config.get_value( value="retrigger-checks-on-base-push", return_on_none=None, extra_dict=repository_config ) + self.merge_state_check_delay: int = self.config.get_value( + value="merge-state-check-delay", return_on_none=30, extra_dict=repository_config + ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: if number: diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 3182c4cf..e813e529 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -420,9 +420,9 @@ async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merg This ensures CI checks run against the updated base 
branch. Note: - Waits 30 seconds before processing to allow GitHub's merge state calculation to complete. + Waits for configured delay before processing to allow GitHub's merge state calculation to complete. """ - time_sleep = 30 + time_sleep = self.github_webhook.merge_state_check_delay self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") await asyncio.sleep(time_sleep) diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 9ffcde80..6b7c024a 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -103,7 +103,7 @@ async def _retrigger_checks_for_prs_targeting_branch(self, branch_name: str) -> Args: branch_name: The branch that was pushed to (e.g., 'main') """ - time_sleep = 30 + time_sleep = self.github_webhook.merge_state_check_delay self.logger.info(f"{self.log_prefix} Waiting {time_sleep}s for GitHub to update merge states") await asyncio.sleep(time_sleep) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 9e887c94..180bf3b7 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -819,7 +819,7 @@ async def run_retests_from_config(self, pull_request: PullRequest) -> bool: self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") return False - pr_number = await asyncio.to_thread(lambda: pull_request.number) + pr_number = pull_request.number self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") await self.run_retests(supported_retests=checks_to_run, pull_request=pull_request) diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 18f7ce17..e31c3733 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ 
b/webhook_server/tests/test_pull_request_handler.py @@ -86,6 +86,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.pre_commit = True mock_webhook.python_module_install = False mock_webhook.pypi = False + mock_webhook.merge_state_check_delay = 30 # Default delay mock_webhook.token = "test-token" # pragma: allowlist secret mock_webhook.auto_verify_cherry_picked_prs = True mock_webhook.last_commit = Mock() diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index d56d129b..ecebe857 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -43,6 +43,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.container_repository_username = "test-user" # Always a string mock_webhook.container_repository_password = "test-password" # Always a string # pragma: allowlist secret mock_webhook.token = "test-token" # Always a string + mock_webhook.merge_state_check_delay = 30 # Default delay return mock_webhook @pytest.fixture @@ -416,7 +417,7 @@ async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandl async def test_process_push_webhook_data_branch_push_retrigger_enabled(self, push_handler: PushHandler) -> None: """Test processing branch push with retrigger enabled.""" push_handler.hook_data["ref"] = "refs/heads/main" - push_handler.github_webhook.retrigger_checks_on_base_push = True + push_handler.github_webhook.retrigger_checks_on_base_push = "all" with patch.object( push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock From 638dcf8f06536faa77ee12e1caf5682316c6ffa2 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 15:50:36 +0200 Subject: [PATCH 16/34] refactor: address CodeRabbit review comments - Remove unnecessary asyncio.to_thread() for in-memory pull_request.number property - Fix test to use realistic config value 'all' instead of boolean True --- webhook_server/libs/handlers/pull_request_handler.py | 2 +- 
webhook_server/tests/test_push_handler.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index e813e529..5de3526f 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -906,7 +906,7 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: """Re-trigger configured checks for a PR when base branch is updated.""" - pr_number = await asyncio.to_thread(lambda: pull_request.number) + pr_number = pull_request.number self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index ecebe857..1dc3b6b3 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -443,7 +443,7 @@ async def test_process_push_webhook_data_branch_push_retrigger_disabled(self, pu async def test_process_push_webhook_data_branch_push_feature_branch(self, push_handler: PushHandler) -> None: """Test processing push to feature branch.""" push_handler.hook_data["ref"] = "refs/heads/feature/my-feature" - push_handler.github_webhook.retrigger_checks_on_base_push = True + push_handler.github_webhook.retrigger_checks_on_base_push = "all" with patch.object( push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock From 142ca7da8c24985f93ad127b4f1736eec26bbad4 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 16:41:53 +0200 Subject: [PATCH 17/34] fix: address CodeRabbit review comments for PR processing - Refactor label_and_rerun_checks method to use parallel processing - Move config lookup outside loop for efficiency - Add per-PR exception 
handling with error isolation - Add clarifying comments for test mocking strategy - Add logger warning assertion for None merge state test - Remove merge-state-check-delay from config (internal implementation) --- examples/config.yaml | 4 -- webhook_server/config/schema.yaml | 11 ----- webhook_server/libs/github_api.py | 3 -- .../libs/handlers/pull_request_handler.py | 40 ++++++++++++------- webhook_server/libs/handlers/push_handler.py | 2 +- .../tests/test_pull_request_handler.py | 1 - webhook_server/tests/test_push_handler.py | 20 ++++++++-- 7 files changed, 43 insertions(+), 38 deletions(-) diff --git a/examples/config.yaml b/examples/config.yaml index eae89ee3..86f6c39b 100644 --- a/examples/config.yaml +++ b/examples/config.yaml @@ -33,10 +33,6 @@ auto-verify-cherry-picked-prs: true # Default: true - automatically verify cher create-issue-for-new-pr: true # Global default: create tracking issues for new PRs -# Delay in seconds before checking merge states after base branch updates (default: 30) -# This allows GitHub time to calculate merge states after a PR is merged or base branch is pushed -merge-state-check-delay: 30 - # Global PR size label configuration (optional) # Define custom categories based on total lines changed (additions + deletions) # threshold: positive integer or 'inf' for unbounded largest category diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index 17938879..3d8c46cc 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -94,12 +94,6 @@ properties: description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default). If not configured, defaults to disabled. 
- merge-state-check-delay: - type: integer - description: Delay in seconds before checking merge states after base branch updates (default is 30 seconds) - default: 30 - minimum: 0 - pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors @@ -325,11 +319,6 @@ properties: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch. If not configured, defaults to disabled. - merge-state-check-delay: - type: integer - description: Delay in seconds before checking merge states after base branch updates (default is 30 seconds) - default: 30 - minimum: 0 pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index bd66199c..d12fd796 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -712,9 +712,6 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: self.retrigger_checks_on_base_push: list[str] | str | None = self.config.get_value( value="retrigger-checks-on-base-push", return_on_none=None, extra_dict=repository_config ) - self.merge_state_check_delay: int = self.config.get_value( - value="merge-state-check-delay", return_on_none=30, extra_dict=repository_config - ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: if number: diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 5de3526f..c5200d9a 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -420,25 +420,37 @@ async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merg This ensures CI checks run against the updated base 
branch. Note: - Waits for configured delay before processing to allow GitHub's merge state calculation to complete. + Waits 30 seconds before processing to allow GitHub's merge state calculation to complete. """ - time_sleep = self.github_webhook.merge_state_check_delay + time_sleep = 30 self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") await asyncio.sleep(time_sleep) pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open"))) - for pull_request in pulls: - self.logger.info(f"{self.log_prefix} check label pull request after merge") - merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) - - # Check if retrigger is enabled (not None or empty list) - retrigger_config = self.github_webhook.retrigger_checks_on_base_push - if not retrigger_config: - continue - - # If retrigger is enabled and PR is behind or blocked, retrigger checks - if merge_state in ("behind", "blocked"): - await self._retrigger_check_suites_for_pr(pull_request=pull_request) + + # Move config lookup outside the loop (static value) + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + # Process all PRs in parallel with error isolation + async def process_single_pr(pull_request: PullRequest) -> None: + """Process a single PR with error handling.""" + try: + self.logger.info(f"{self.log_prefix} check label pull request after merge") + merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) + + # If retrigger is enabled and PR is behind or blocked, retrigger checks + if retrigger_config and merge_state in ("behind", "blocked"): + await self._retrigger_check_suites_for_pr(pull_request=pull_request) + except Exception: + self.logger.exception( + f"{self.log_prefix} Failed to process PR #{pull_request.number} during label/retrigger operation" + ) + + # Process all PRs concurrently + await asyncio.gather( + *[process_single_pr(pr) for pr in pulls], + 
return_exceptions=True, + ) async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 6b7c024a..9ffcde80 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -103,7 +103,7 @@ async def _retrigger_checks_for_prs_targeting_branch(self, branch_name: str) -> Args: branch_name: The branch that was pushed to (e.g., 'main') """ - time_sleep = self.github_webhook.merge_state_check_delay + time_sleep = 30 self.logger.info(f"{self.log_prefix} Waiting {time_sleep}s for GitHub to update merge states") await asyncio.sleep(time_sleep) diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index e31c3733..18f7ce17 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -86,7 +86,6 @@ def mock_github_webhook(self) -> Mock: mock_webhook.pre_commit = True mock_webhook.python_module_install = False mock_webhook.pypi = False - mock_webhook.merge_state_check_delay = 30 # Default delay mock_webhook.token = "test-token" # pragma: allowlist secret mock_webhook.auto_verify_cherry_picked_prs = True mock_webhook.last_commit = Mock() diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 1dc3b6b3..f483aef1 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -43,7 +43,6 @@ def mock_github_webhook(self) -> Mock: mock_webhook.container_repository_username = "test-user" # Always a string mock_webhook.container_repository_password = "test-password" # Always a string # pragma: allowlist secret mock_webhook.token = "test-token" # Always a string - mock_webhook.merge_state_check_delay = 30 # Default delay return mock_webhook 
@pytest.fixture @@ -669,7 +668,11 @@ async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) @pytest.mark.asyncio async def test_retrigger_checks_for_prs_with_unknown_merge_state(self, push_handler: PushHandler) -> None: - """Test that PRs with unknown merge state are skipped with warning.""" + """Test that PRs with unknown merge state are skipped with warning. + + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. + """ push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] mock_pr = Mock() @@ -688,7 +691,11 @@ async def test_retrigger_checks_for_prs_with_unknown_merge_state(self, push_hand @pytest.mark.asyncio async def test_retrigger_checks_for_prs_with_none_merge_state(self, push_handler: PushHandler) -> None: - """Test that PRs with None merge state are skipped with warning.""" + """Test that PRs with None merge state are skipped with warning. + + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. + """ push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] mock_pr = Mock() @@ -703,10 +710,15 @@ async def test_retrigger_checks_for_prs_with_none_merge_state(self, push_handler ) as mock_retests: await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") mock_retests.assert_not_called() + push_handler.logger.warning.assert_called() @pytest.mark.asyncio async def test_retrigger_checks_continues_on_exception(self, push_handler: PushHandler) -> None: - """Test that exception in one PR doesn't stop processing others.""" + """Test that exception in one PR doesn't stop processing others. + + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. 
+ """ push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] push_handler.github_webhook.retrigger_checks_on_base_push = "all" From 9a616a1d1c6f76860d4287ecf40e3065dd55bf5b Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 19:09:25 +0200 Subject: [PATCH 18/34] fix: address CodeRabbit review comments - Update retrigger-checks-on-base-push description for both merged PRs and direct branch pushes - Fix asyncio.CancelledError handling in push_handler to propagate cancellations - Use realistic config values ("all") instead of True in tests - Replace PEP 695 generics with TypeVar for Python 3.11/3.12 compatibility --- webhook_server/config/schema.yaml | 4 ++-- webhook_server/libs/handlers/push_handler.py | 6 +++++- webhook_server/tests/test_pull_request_handler.py | 8 +++++--- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index 3d8c46cc..9de7d7e9 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -92,7 +92,7 @@ properties: items: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) - description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch (global default). If not configured, defaults to disabled. + description: Re-trigger CI checks for out-of-date PRs when their base branch is updated (triggered by both merged PRs and direct non-tag branch pushes). Value can be "all" (string) to re-trigger all checks, or an array of specific check names. If not configured, defaults to disabled. pr-size-thresholds: type: object @@ -318,7 +318,7 @@ properties: items: type: string description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) - description: Re-trigger CI checks for out-of-date PRs when a pull request is merged into their base branch. If not configured, defaults to disabled. 
+ description: Re-trigger CI checks for out-of-date PRs when their base branch is updated (triggered by both merged PRs and direct non-tag branch pushes). Value can be "all" (string) to re-trigger all checks, or an array of specific check names. If not configured, defaults to disabled. pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 9ffcde80..024bed3b 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -154,7 +154,11 @@ def get_merge_state(pr: PullRequest = pull_request) -> str | None: self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") else: self.logger.debug(f"{self.log_prefix} No checks triggered for PR #{pr_number}") - except Exception: + except Exception as e: + # Re-raise CancelledError immediately to allow cooperative cancellation + if isinstance(e, asyncio.CancelledError): + raise + # Log all other exceptions and continue processing other PRs self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") # Continue processing other PRs else: diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 18f7ce17..9bc21f5d 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1,6 +1,6 @@ import asyncio from collections.abc import Callable -from typing import Any +from typing import Any, TypeVar from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -26,6 +26,8 @@ WIP_STR, ) +T = TypeVar("T") + # Async shim for mocking asyncio.to_thread in tests # This allows us to run sync functions in tests while preserving async/await semantics @@ -445,7 +447,7 @@ async def 
test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after mock_pr1.mergeable_state = "behind" mock_pr2.mergeable_state = "clean" - pull_request_handler.github_webhook.retrigger_checks_on_base_push = True + pull_request_handler.github_webhook.retrigger_checks_on_base_push = "all" async def mock_label_side_effect(pull_request: Mock | PullRequest) -> str: return pull_request.mergeable_state @@ -487,7 +489,7 @@ async def test_label_all_opened_prs_retrigger_for_different_merge_states( should_retrigger: bool, ) -> None: """Test retrigger behavior for different merge states.""" - mock_github_webhook.retrigger_checks_on_base_push = True + mock_github_webhook.retrigger_checks_on_base_push = "all" mock_pr = Mock() mock_pr.number = 1 From 50d817f59744ea6197118c1d13753da7e339d845 Mon Sep 17 00:00:00 2001 From: rnetser Date: Tue, 30 Dec 2025 19:40:35 +0200 Subject: [PATCH 19/34] fix: remove PEP 695 generic syntax for Python 3.11 compatibility Remove [T] from _sync_to_thread function signature to use traditional TypeVar-based generics instead of PEP 695 syntax. 
--- webhook_server/tests/test_pull_request_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 9bc21f5d..55e8bfe0 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -31,7 +31,7 @@ # Async shim for mocking asyncio.to_thread in tests # This allows us to run sync functions in tests while preserving async/await semantics -async def _sync_to_thread[T](func: Callable[..., T], *args: Any, **kwargs: Any) -> T: +async def _sync_to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: # noqa: UP047 """Mock implementation of asyncio.to_thread that runs synchronously but returns awaitable.""" return func(*args, **kwargs) From e830d4bf810a19a86450fa84c00e83d5c1e71958 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 16:46:22 +0200 Subject: [PATCH 20/34] fix: improve PR check run status detection and merge state handling - Fix 'GithubWebhook' object has no attribute 'last_commit' error in is_check_run_in_progress() * Accept optional pull_request parameter to get last commit when available * Add fallback to github_webhook.last_commit for backward compatibility * Optimize PaginatedList iteration with early exit pattern * Update all callers in runner_handler.py to pass pull_request parameter - Add retry logic for unknown merge states in push_handler.py * Retry up to 5 times with 10-second delays instead of skipping * Wait for GitHub to calculate merge state before proceeding * Only skip after max retries exceeded - Add loop prevention for recently updated PRs * Skip retrigger if PR updated within last 60 seconds * Avoid duplicate work when PR already has fresh checks * Reduces unnecessary CI load from concurrent webhook events - Improve code quality and consistency * Use datetime.UTC instead of timezone.utc throughout * Move imports to module level following 
project standards * Use named functions for asyncio.to_thread() calls * Add comprehensive test coverage for new behaviors --- .../libs/handlers/check_run_handler.py | 44 +++++++++++++++--- webhook_server/libs/handlers/push_handler.py | 40 ++++++++++++++--- .../libs/handlers/runner_handler.py | 19 +++++--- webhook_server/tests/test_push_handler.py | 45 +++++++++++++++++++ 4 files changed, 131 insertions(+), 17 deletions(-) diff --git a/webhook_server/libs/handlers/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py index ab189c62..45b15286 100644 --- a/webhook_server/libs/handlers/check_run_handler.py +++ b/webhook_server/libs/handlers/check_run_handler.py @@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Any from github.CheckRun import CheckRun +from github.Commit import Commit from github.CommitStatus import CommitStatus from github.PullRequest import PullRequest from github.Repository import Repository @@ -353,12 +354,43 @@ def get_check_run_text(self, err: str, out: str) -> str: return _output - async def is_check_run_in_progress(self, check_run: str) -> bool: - if self.github_webhook.last_commit: - for run in await asyncio.to_thread(self.github_webhook.last_commit.get_check_runs): - if run.name == check_run and run.status == IN_PROGRESS_STR: - self.logger.debug(f"{self.log_prefix} Check run {check_run} is in progress.") - return True + async def is_check_run_in_progress(self, check_run: str, pull_request: PullRequest | None = None) -> bool: + """Check if a specific check run is in progress. + + Args: + check_run: Name of the check run to check + pull_request: Optional pull request to get last commit from. If provided, + gets last commit from PR. 
Otherwise, falls back to github_webhook.last_commit + + Returns: + True if check run is in progress, False otherwise + """ + last_commit = None + if pull_request: + # Use single-pass iteration to find last commit - O(1) memory instead of O(N) + def get_last_commit_from_pr() -> Commit | None: + last = None + for commit in pull_request.get_commits(): + last = commit + return last + + last_commit = await asyncio.to_thread(get_last_commit_from_pr) + else: + # last_commit may not exist on github_webhook for push events (optional attribute) + last_commit = self.github_webhook.last_commit if hasattr(self.github_webhook, "last_commit") else None + + if last_commit: + # Optimize PaginatedList iteration with early exit + def find_check_run_in_progress() -> bool: + for run in last_commit.get_check_runs(): + if run.name == check_run and run.status == IN_PROGRESS_STR: + return True + return False + + is_in_progress = await asyncio.to_thread(find_check_run_in_progress) + if is_in_progress: + self.logger.debug(f"{self.log_prefix} Check run {check_run} is in progress.") + return True return False async def required_check_failed_or_no_status( diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 024bed3b..48c2f491 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -1,5 +1,6 @@ import asyncio import re +from datetime import UTC, datetime from typing import TYPE_CHECKING from github.PullRequest import PullRequest @@ -129,20 +130,47 @@ def get_pulls() -> list[PullRequest]: def get_merge_state(pr: PullRequest = pull_request) -> str | None: return pr.mergeable_state - merge_state = await asyncio.to_thread(get_merge_state) + # Handle None/unknown merge states with retry + max_retries = 5 + retry_delay = 10 # seconds + merge_state: str | None = None - self.logger.debug(f"{self.log_prefix} PR #{pr_number} merge state: {merge_state}") + for attempt in range(1, max_retries + 
1): + merge_state = await asyncio.to_thread(get_merge_state) + self.logger.debug(f"{self.log_prefix} PR #{pr_number} merge state: {merge_state}") - # Handle None/unknown merge states explicitly - if merge_state in (None, "unknown"): + if merge_state not in (None, "unknown"): + break + + if attempt < max_retries: + self.logger.info( + f"{self.log_prefix} PR #{pr_number} merge state is '{merge_state}' - " + f"waiting {retry_delay}s for GitHub to calculate (attempt {attempt}/{max_retries})" + ) + await asyncio.sleep(retry_delay) + else: + # Loop completed without break - merge_state is still None or "unknown" self.logger.warning( - f"{self.log_prefix} PR #{pr_number} merge state is '{merge_state}' - " - "GitHub still calculating, skipping for now" + f"{self.log_prefix} PR #{pr_number} merge state still '{merge_state}' after " + f"{max_retries} attempts, skipping" ) continue # Only re-trigger for PRs that are behind or blocked if merge_state in ("behind", "blocked"): + # Skip if PR was updated very recently (likely already has fresh checks) + def get_updated_at(pr: PullRequest = pull_request) -> datetime: + return pr.updated_at + + pr_updated_at = await asyncio.to_thread(get_updated_at) + time_since_update = (datetime.now(UTC) - pr_updated_at).total_seconds() + if time_since_update < 60: # Skip if updated within last minute + self.logger.debug( + f"{self.log_prefix} PR #{pr_number} was updated {time_since_update:.0f}s ago, " + "skipping retrigger to avoid duplicate work" + ) + continue + self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " f"Re-triggering checks for out-of-date PR #{pr_number} (state: {merge_state})", diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 180bf3b7..719d4f7e 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -178,7 +178,7 @@ 
async def run_tox(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting tox tests execution" ) - if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR): + if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR, pull_request=pull_request): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {TOX_STR}.") python_ver = ( @@ -251,7 +251,7 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"Starting pre-commit checks execution", ) - if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR): + if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR, pull_request=pull_request): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {PRE_COMMIT_STR}.") self.logger.step( # type: ignore[attr-defined] @@ -333,7 +333,12 @@ async def run_build_container( return if pull_request and set_check: - if await self.check_run_handler.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: + if ( + await self.check_run_handler.is_check_run_in_progress( + check_run=BUILD_CONTAINER_STR, pull_request=pull_request + ) + and not is_merged + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {BUILD_CONTAINER_STR}.") self.logger.step( # type: ignore[attr-defined] @@ -489,7 +494,9 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"Starting Python module installation" ) - if await self.check_run_handler.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): + if await self.check_run_handler.is_check_run_in_progress( + check_run=PYTHON_MODULE_INSTALL_STR, pull_request=pull_request + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {PYTHON_MODULE_INSTALL_STR}.") self.logger.info(f"{self.log_prefix} Installing python module") @@ -560,7 +567,9 @@ async def 
run_conventional_title_check(self, pull_request: PullRequest) -> None: ), } - if await self.check_run_handler.is_check_run_in_progress(check_run=CONVENTIONAL_TITLE_STR): + if await self.check_run_handler.is_check_run_in_progress( + check_run=CONVENTIONAL_TITLE_STR, pull_request=pull_request + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {CONVENTIONAL_TITLE_STR}.") self.logger.step( # type: ignore[attr-defined] diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index f483aef1..b12d233c 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -1,6 +1,7 @@ """Tests for webhook_server.libs.handlers.push_handler module.""" from contextlib import asynccontextmanager +from datetime import UTC, datetime, timedelta from unittest.mock import AsyncMock, Mock, patch import pytest @@ -473,6 +474,8 @@ async def test_retrigger_checks_for_prs_targeting_branch_pr_behind(self, push_ha mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -492,6 +495,8 @@ async def test_retrigger_checks_for_prs_targeting_branch_pr_blocked(self, push_h mock_pr = Mock() mock_pr.number = 456 mock_pr.mergeable_state = "blocked" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -529,6 +534,8 @@ async def test_retrigger_checks_for_prs_targeting_branch_multiple_prs(self, push mock_pr1 = Mock() mock_pr1.number = 100 mock_pr1.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr1.updated_at = datetime.now(UTC) - 
timedelta(seconds=120) mock_pr2 = Mock() mock_pr2.number = 200 @@ -537,6 +544,8 @@ async def test_retrigger_checks_for_prs_targeting_branch_multiple_prs(self, push mock_pr3 = Mock() mock_pr3.number = 300 mock_pr3.mergeable_state = "blocked" + # Set updated_at to more than 60 seconds ago + mock_pr3.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr1, mock_pr2, mock_pr3] @@ -561,6 +570,8 @@ async def test_retrigger_checks_for_prs_targeting_branch_no_checks_configured( mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -593,6 +604,8 @@ async def test_retrigger_checks_with_specific_checks_list(self, push_handler: Pu mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -613,6 +626,8 @@ async def test_retrigger_checks_with_nonexistent_checks(self, push_handler: Push mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -633,6 +648,8 @@ async def test_retrigger_checks_with_partial_match(self, push_handler: PushHandl mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with 
patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -644,6 +661,28 @@ async def test_retrigger_checks_with_partial_match(self, push_handler: PushHandl # Should only run checks that match mock_retests.assert_called_once_with(supported_retests=["tox"], pull_request=mock_pr) + @pytest.mark.asyncio + async def test_retrigger_checks_skips_recently_updated_pr(self, push_handler: PushHandler) -> None: + """Test that retrigger skips PR that was updated within the last minute.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to less than 60 seconds ago (30 seconds) + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=30) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should not trigger since PR was recently updated + mock_retests.assert_not_called() + @pytest.mark.asyncio async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) -> None: """Test retrigger disabled with empty list.""" @@ -653,6 +692,8 @@ async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) mock_pr = Mock() mock_pr.number = 123 mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr] @@ -725,10 +766,14 @@ async def 
test_retrigger_checks_continues_on_exception(self, push_handler: PushH mock_pr1 = Mock() mock_pr1.number = 100 mock_pr1.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr1.updated_at = datetime.now(UTC) - timedelta(seconds=120) mock_pr2 = Mock() mock_pr2.number = 200 mock_pr2.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr2.updated_at = datetime.now(UTC) - timedelta(seconds=120) with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: mock_get_pulls.return_value = [mock_pr1, mock_pr2] From d48d996ec22e24163ba398d08c10f584513479c1 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 19:08:10 +0200 Subject: [PATCH 21/34] fix: pass pull_request parameter to all check run status setters Modified set_check_run_status() and all 17 check run status setter methods to accept pull_request parameter. Updated all callers in runner_handler.py to pass the pull request. This fixes the last_commit issue when processing push events with retrigger-checks-on-base-push enabled. The system now gets the last commit SHA from the pull request instead of relying on github_webhook.last_commit which doesn't exist for push events. 
Files changed: - webhook_server/libs/handlers/check_run_handler.py - webhook_server/libs/handlers/runner_handler.py - webhook_server/tests/test_check_run_handler.py - webhook_server/tests/test_runner_handler.py --- .../libs/handlers/check_run_handler.py | 148 +++++++++++++----- .../libs/handlers/runner_handler.py | 54 ++++--- .../tests/test_check_run_handler.py | 68 +++++--- webhook_server/tests/test_runner_handler.py | 6 + 4 files changed, 194 insertions(+), 82 deletions(-) diff --git a/webhook_server/libs/handlers/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py index 45b15286..ebabe113 100644 --- a/webhook_server/libs/handlers/check_run_handler.py +++ b/webhook_server/libs/handlers/check_run_handler.py @@ -139,37 +139,51 @@ async def set_verify_check_queued(self) -> None: async def set_verify_check_success(self) -> None: return await self.set_check_run_status(check_run=VERIFIED_LABEL_STR, conclusion=SUCCESS_STR) - async def set_run_tox_check_queued(self) -> None: + async def set_run_tox_check_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.tox: self.logger.debug(f"{self.log_prefix} tox is not configured, skipping.") return - return await self.set_check_run_status(check_run=TOX_STR, status=QUEUED_STR) + return await self.set_check_run_status(check_run=TOX_STR, status=QUEUED_STR, pull_request=pull_request) - async def set_run_tox_check_in_progress(self) -> None: - return await self.set_check_run_status(check_run=TOX_STR, status=IN_PROGRESS_STR) + async def set_run_tox_check_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status(check_run=TOX_STR, status=IN_PROGRESS_STR, pull_request=pull_request) - async def set_run_tox_check_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=TOX_STR, conclusion=FAILURE_STR, output=output) + async def set_run_tox_check_failure(self, output: dict[str, Any], 
pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=TOX_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def set_run_tox_check_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=TOX_STR, conclusion=SUCCESS_STR, output=output) + async def set_run_tox_check_success(self, output: dict[str, Any], pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=TOX_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_run_pre_commit_check_queued(self) -> None: + async def set_run_pre_commit_check_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.pre_commit: self.logger.debug(f"{self.log_prefix} pre-commit is not configured, skipping.") return - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=QUEUED_STR) + return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=QUEUED_STR, pull_request=pull_request) - async def set_run_pre_commit_check_in_progress(self) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR) + async def set_run_pre_commit_check_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_run_pre_commit_check_failure(self, output: dict[str, Any] | None = None) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output) + async def set_run_pre_commit_check_failure( + self, output: dict[str, Any] | None = None, pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def 
set_run_pre_commit_check_success(self, output: dict[str, Any] | None = None) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output) + async def set_run_pre_commit_check_success( + self, output: dict[str, Any] | None = None, pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) async def set_merge_check_queued(self, output: dict[str, Any] | None = None) -> None: return await self.set_check_run_status(check_run=CAN_BE_MERGED_STR, status=QUEUED_STR, output=output) @@ -183,53 +197,85 @@ async def set_merge_check_success(self) -> None: async def set_merge_check_failure(self, output: dict[str, Any]) -> None: return await self.set_check_run_status(check_run=CAN_BE_MERGED_STR, conclusion=FAILURE_STR, output=output) - async def set_container_build_queued(self) -> None: + async def set_container_build_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.build_and_push_container: self.logger.debug(f"{self.log_prefix} build_and_push_container is not configured, skipping.") return - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=QUEUED_STR) + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_container_build_in_progress(self) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR) + async def set_container_build_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_container_build_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, 
output=output) + async def set_container_build_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_container_build_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output) + async def set_container_build_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def set_python_module_install_queued(self) -> None: + async def set_python_module_install_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.pypi: self.logger.debug(f"{self.log_prefix} pypi is not configured, skipping.") return - return await self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR) + return await self.set_check_run_status( + check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_python_module_install_in_progress(self) -> None: - return await self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR) + async def set_python_module_install_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_python_module_install_success(self, output: dict[str, Any]) -> None: + async def set_python_module_install_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: return await self.set_check_run_status( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output + 
check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request ) - async def set_python_module_install_failure(self, output: dict[str, Any]) -> None: + async def set_python_module_install_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: return await self.set_check_run_status( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request ) - async def set_conventional_title_queued(self) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR) + async def set_conventional_title_queued(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_conventional_title_in_progress(self) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR) + async def set_conventional_title_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_conventional_title_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output) + async def set_conventional_title_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_conventional_title_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output) + async def 
set_conventional_title_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) async def set_cherry_pick_in_progress(self) -> None: return await self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, status=IN_PROGRESS_STR) @@ -250,8 +296,28 @@ async def set_check_run_status( status: str = "", conclusion: str = "", output: dict[str, str] | None = None, + pull_request: PullRequest | None = None, ) -> None: - kwargs: dict[str, Any] = {"name": check_run, "head_sha": self.github_webhook.last_commit.sha} + # Get head_sha from pull_request or fall back to github_webhook.last_commit + if pull_request: + # Use single-pass iteration to find last commit + def get_last_commit_sha() -> str: + last_commit = None + for commit in pull_request.get_commits(): + last_commit = commit + if last_commit is None: + raise ValueError("Pull request has no commits") + return last_commit.sha + + head_sha = await asyncio.to_thread(get_last_commit_sha) + else: + # Fall back to github_webhook.last_commit for backward compatibility + if not hasattr(self.github_webhook, "last_commit") or self.github_webhook.last_commit is None: + self.logger.warning(f"{self.log_prefix} Cannot set check run status: no last_commit available") + return + head_sha = self.github_webhook.last_commit.sha + + kwargs: dict[str, Any] = {"name": check_run, "head_sha": head_sha} if status: kwargs["status"] = status diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 719d4f7e..f94b89f6 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -190,7 +190,7 @@ async def run_tox(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting tox check 
status to in-progress", ) - await self.check_run_handler.set_run_tox_check_in_progress() + await self.check_run_handler.set_run_tox_check_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -216,7 +216,7 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_run_tox_check_failure(output=output) + return await self.check_run_handler.set_run_tox_check_failure(output=output, pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" @@ -234,12 +234,12 @@ async def run_tox(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Tox tests completed successfully", ) - return await self.check_run_handler.set_run_tox_check_success(output=output) + return await self.check_run_handler.set_run_tox_check_success(output=output, pull_request=pull_request) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" ) - return await self.check_run_handler.set_run_tox_check_failure(output=output) + return await self.check_run_handler.set_run_tox_check_failure(output=output, pull_request=pull_request) async def run_pre_commit(self, pull_request: PullRequest) -> None: if not self.github_webhook.pre_commit: @@ -258,7 +258,7 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting pre-commit check status to in-progress", ) - await self.check_run_handler.set_run_pre_commit_check_in_progress() + await 
self.check_run_handler.set_run_pre_commit_check_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -279,7 +279,9 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: ) self.logger.error(f"{self.log_prefix} Repository preparation failed for pre-commit") output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) + return await self.check_run_handler.set_run_pre_commit_check_failure( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -298,12 +300,16 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Pre-commit checks completed successfully", ) - return await self.check_run_handler.set_run_pre_commit_check_success(output=output) + return await self.check_run_handler.set_run_pre_commit_check_success( + output=output, pull_request=pull_request + ) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Pre-commit checks failed" ) - return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) + return await self.check_run_handler.set_run_pre_commit_check_failure( + output=output, pull_request=pull_request + ) async def run_build_container( self, @@ -346,7 +352,7 @@ async def run_build_container( f"Setting container build check status to in-progress", ) if set_check: - await self.check_run_handler.set_container_build_in_progress() + await self.check_run_handler.set_container_build_in_progress(pull_request=pull_request) _container_repository_and_tag = self.github_webhook.container_repository_and_tag( pull_request=pull_request, 
is_merged=is_merged, tag=tag @@ -394,7 +400,7 @@ async def run_build_container( ) output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) if pull_request and set_check: - await self.check_run_handler.set_container_build_failure(output=output) + await self.check_run_handler.set_container_build_failure(output=output, pull_request=pull_request) return self.logger.step( # type: ignore[attr-defined] @@ -413,14 +419,18 @@ async def run_build_container( ) self.logger.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if pull_request and set_check: - return await self.check_run_handler.set_container_build_success(output=output) + return await self.check_run_handler.set_container_build_success( + output=output, pull_request=pull_request + ) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container build failed" ) self.logger.error(f"{self.log_prefix} Failed to build {_container_repository_and_tag}") if pull_request and set_check: - return await self.check_run_handler.set_container_build_failure(output=output) + return await self.check_run_handler.set_container_build_failure( + output=output, pull_request=pull_request + ) if push and build_rc: self.logger.step( # type: ignore[attr-defined] @@ -504,7 +514,7 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting Python module install check status to in-progress", ) - await self.check_run_handler.set_python_module_install_in_progress() + await self.check_run_handler.set_python_module_install_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Preparing repository checkout for Python module installation", @@ -521,7 +531,9 @@ async def run_install_python_module(self, pull_request: 
PullRequest) -> None: f"Repository preparation failed for Python module installation", ) output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_python_module_install_failure(output=output) + return await self.check_run_handler.set_python_module_install_failure( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -540,14 +552,18 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Python module installation completed successfully", ) - return await self.check_run_handler.set_python_module_install_success(output=output) + return await self.check_run_handler.set_python_module_install_success( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} " f"{format_task_fields('runner', 'ci_check', 'failed')} " f"Python module installation failed" ) - return await self.check_run_handler.set_python_module_install_failure(output=output) + return await self.check_run_handler.set_python_module_install_failure( + output=output, pull_request=pull_request + ) async def run_conventional_title_check(self, pull_request: PullRequest) -> None: if not self.github_webhook.conventional_title: @@ -576,7 +592,7 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting conventional title check status to in-progress", ) - await self.check_run_handler.set_conventional_title_in_progress() + await self.check_run_handler.set_conventional_title_in_progress(pull_request=pull_request) allowed_names = [name.strip() for name in self.github_webhook.conventional_title.split(",") if name.strip()] title = pull_request.title @@ -586,7 +602,7 @@ 
async def run_conventional_title_check(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Conventional title check completed successfully", ) - await self.check_run_handler.set_conventional_title_success(output=output) + await self.check_run_handler.set_conventional_title_success(output=output, pull_request=pull_request) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} " @@ -628,7 +644,7 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None: **Resources:** - [Conventional Commits v1.0.0 Specification](https://www.conventionalcommits.org/en/v1.0.0/) """ - await self.check_run_handler.set_conventional_title_failure(output=output) + await self.check_run_handler.set_conventional_title_failure(output=output, pull_request=pull_request) async def is_branch_exists(self, branch: str) -> Branch: return await asyncio.to_thread(self.repository.get_branch, branch) diff --git a/webhook_server/tests/test_check_run_handler.py b/webhook_server/tests/test_check_run_handler.py index 07d33288..aa0bb353 100644 --- a/webhook_server/tests/test_check_run_handler.py +++ b/webhook_server/tests/test_check_run_handler.py @@ -138,7 +138,7 @@ async def test_set_run_tox_check_queued_enabled(self, check_run_handler: CheckRu with patch.object(check_run_handler.github_webhook, "tox", True): with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_queued() - mock_set_status.assert_called_once_with(check_run=TOX_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with(check_run=TOX_STR, status=QUEUED_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_tox_check_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -153,7 +153,7 @@ async def test_set_run_tox_check_in_progress(self, check_run_handler: CheckRunHa """Test setting tox check to in progress status.""" with 
patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_in_progress() - mock_set_status.assert_called_once_with(check_run=TOX_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with(check_run=TOX_STR, status=IN_PROGRESS_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_tox_check_failure(self, check_run_handler: CheckRunHandler) -> None: @@ -161,7 +161,9 @@ async def test_set_run_tox_check_failure(self, check_run_handler: CheckRunHandle output = {"title": "Test failed", "summary": "Test summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_failure(output) - mock_set_status.assert_called_once_with(check_run=TOX_STR, conclusion=FAILURE_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=TOX_STR, conclusion=FAILURE_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_tox_check_success(self, check_run_handler: CheckRunHandler) -> None: @@ -169,7 +171,9 @@ async def test_set_run_tox_check_success(self, check_run_handler: CheckRunHandle output = {"title": "Test passed", "summary": "Test summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_success(output) - mock_set_status.assert_called_once_with(check_run=TOX_STR, conclusion=SUCCESS_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=TOX_STR, conclusion=SUCCESS_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_queued_enabled(self, check_run_handler: CheckRunHandler) -> None: @@ -177,7 +181,7 @@ async def test_set_run_pre_commit_check_queued_enabled(self, check_run_handler: check_run_handler.github_webhook.pre_commit = True with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await 
check_run_handler.set_run_pre_commit_check_queued() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=QUEUED_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_pre_commit_check_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -192,7 +196,7 @@ async def test_set_run_pre_commit_check_in_progress(self, check_run_handler: Che """Test setting pre-commit check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_in_progress() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_pre_commit_check_failure(self, check_run_handler: CheckRunHandler) -> None: @@ -200,14 +204,18 @@ async def test_set_run_pre_commit_check_failure(self, check_run_handler: CheckRu output = {"title": "Pre-commit failed", "summary": "Pre-commit summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_failure(output) - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_failure_no_output(self, check_run_handler: CheckRunHandler) -> None: """Test setting pre-commit check to failure status without output.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_failure() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, 
output=None) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=None, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_success(self, check_run_handler: CheckRunHandler) -> None: @@ -215,14 +223,18 @@ async def test_set_run_pre_commit_check_success(self, check_run_handler: CheckRu output = {"title": "Pre-commit passed", "summary": "Pre-commit summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_success(output) - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_success_no_output(self, check_run_handler: CheckRunHandler) -> None: """Test setting pre-commit check to success status without output.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_success() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=None) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=None, pull_request=None + ) @pytest.mark.asyncio async def test_set_merge_check_queued(self, check_run_handler: CheckRunHandler) -> None: @@ -267,7 +279,9 @@ async def test_set_container_build_queued_enabled(self, check_run_handler: Check with patch.object(check_run_handler.github_webhook, "build_and_push_container", True): with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_queued() - mock_set_status.assert_called_once_with(check_run=BUILD_CONTAINER_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=BUILD_CONTAINER_STR, 
status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_container_build_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -282,7 +296,9 @@ async def test_set_container_build_in_progress(self, check_run_handler: CheckRun """Test setting container build check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_in_progress() - mock_set_status.assert_called_once_with(check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_container_build_success(self, check_run_handler: CheckRunHandler) -> None: @@ -291,7 +307,7 @@ async def test_set_container_build_success(self, check_run_handler: CheckRunHand with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_success(output) mock_set_status.assert_called_once_with( - check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output + check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -301,7 +317,7 @@ async def test_set_container_build_failure(self, check_run_handler: CheckRunHand with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_failure(output) mock_set_status.assert_called_once_with( - check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output + check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -310,7 +326,9 @@ async def test_set_python_module_install_queued_enabled(self, check_run_handler: check_run_handler.github_webhook.pypi = {"token": "test"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await 
check_run_handler.set_python_module_install_queued() - mock_set_status.assert_called_once_with(check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_python_module_install_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -325,7 +343,9 @@ async def test_set_python_module_install_in_progress(self, check_run_handler: Ch """Test setting python module install check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_in_progress() - mock_set_status.assert_called_once_with(check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_python_module_install_success(self, check_run_handler: CheckRunHandler) -> None: @@ -334,7 +354,7 @@ async def test_set_python_module_install_success(self, check_run_handler: CheckR with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_success(output) mock_set_status.assert_called_once_with( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -344,7 +364,7 @@ async def test_set_python_module_install_failure(self, check_run_handler: CheckR with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_failure(output) mock_set_status.assert_called_once_with( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output, 
pull_request=None ) @pytest.mark.asyncio @@ -352,14 +372,18 @@ async def test_set_conventional_title_queued(self, check_run_handler: CheckRunHa """Test setting conventional title check to queued status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_queued() - mock_set_status.assert_called_once_with(check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_conventional_title_in_progress(self, check_run_handler: CheckRunHandler) -> None: """Test setting conventional title check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_in_progress() - mock_set_status.assert_called_once_with(check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_conventional_title_success(self, check_run_handler: CheckRunHandler) -> None: @@ -368,7 +392,7 @@ async def test_set_conventional_title_success(self, check_run_handler: CheckRunH with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_success(output) mock_set_status.assert_called_once_with( - check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output + check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -378,7 +402,7 @@ async def test_set_conventional_title_failure(self, check_run_handler: CheckRunH with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_failure(output) mock_set_status.assert_called_once_with( - 
check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output + check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output, pull_request=None ) @pytest.mark.asyncio diff --git a/webhook_server/tests/test_runner_handler.py b/webhook_server/tests/test_runner_handler.py index 7aa4b66e..6385e167 100644 --- a/webhook_server/tests/test_runner_handler.py +++ b/webhook_server/tests/test_runner_handler.py @@ -60,6 +60,12 @@ def mock_pull_request(self) -> Mock: mock_pr.merge_commit_sha = "abc123" mock_pr.html_url = "https://github.com/test/repo/pull/123" mock_pr.create_issue_comment = Mock() + + # Mock get_commits() to return an iterable with a commit + mock_commit = Mock() + mock_commit.sha = "test-commit-sha" + mock_pr.get_commits = Mock(return_value=[mock_commit]) + return mock_pr @pytest.fixture(autouse=True) From 030e723e51963de31171ae062a2f2839041c40b5 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 19:38:45 +0200 Subject: [PATCH 22/34] fix: correct retest failure logging with parallel task names Added parallel task_names list in run_retests() to correctly track test names when logging failures. Previously, using supported_retests[i] could mis-attribute failures when unknown test types were skipped. 
--- webhook_server/libs/handlers/runner_handler.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index f94b89f6..10ed8dc0 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -794,6 +794,7 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ } tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] + task_names: list[str] = [] # Track names parallel to tasks for _test in supported_retests: if _test not in _retests_to_func_map: self.logger.error(f"{self.log_prefix} Unknown retest type: {_test}") @@ -801,12 +802,13 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ self.logger.debug(f"{self.log_prefix} running retest {_test}") task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) tasks.append(task) + task_names.append(_test) # Track name at same index as task results = await asyncio.gather(*tasks, return_exceptions=True) # Log any task failures for debugging for i, result in enumerate(results): if isinstance(result, Exception): - test_name = supported_retests[i] if i < len(supported_retests) else "unknown" + test_name = task_names[i] if i < len(task_names) else "unknown" self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") async def run_retests_from_config(self, pull_request: PullRequest) -> bool: From 7aa793b8edaaba1e91f4efc24bd86f88a34b6d98 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 19:50:40 +0200 Subject: [PATCH 23/34] fix: remove redundant fallback in retest failure logging Remove the redundant `if i < len(task_names) else "unknown"` fallback when logging failed retest tasks. 
Since `task_names` and `tasks` lists are built together in the same loop with matching indices, using direct index access without a fallback is correct and follows the fail-fast principle. Addresses CodeRabbit review comment on PR #959. --- webhook_server/libs/handlers/runner_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 10ed8dc0..783875d7 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -808,7 +808,7 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ # Log any task failures for debugging for i, result in enumerate(results): if isinstance(result, Exception): - test_name = task_names[i] if i < len(task_names) else "unknown" + test_name = task_names[i] self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") async def run_retests_from_config(self, pull_request: PullRequest) -> bool: From 16c46f0607d9e8f24a8d85586948c65528a3226a Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 19:59:36 +0200 Subject: [PATCH 24/34] fix: re-raise CancelledError instead of logging in run_retests asyncio.CancelledError must propagate to properly shut down async operations. The previous code improperly suppressed CancelledError by logging all exceptions. Now CancelledError is re-raised immediately while other exceptions continue to be logged for debugging. Addresses CodeRabbit review comment on PR #959. 
--- webhook_server/libs/handlers/runner_handler.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 783875d7..2826fc34 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -807,6 +807,9 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ results = await asyncio.gather(*tasks, return_exceptions=True) # Log any task failures for debugging for i, result in enumerate(results): + if isinstance(result, asyncio.CancelledError): + # Re-raise CancelledError to propagate cancellation + raise result if isinstance(result, Exception): test_name = task_names[i] self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") From af39f22676d1c685e54d0aaff788f5853d1cf538 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 20:13:33 +0200 Subject: [PATCH 25/34] refactor: tighten type hints in run_retests method Improve type precision for the retest orchestration: - Add RetestFunction Protocol for proper async method typing - _retests_to_func_map: dict[str, RetestFunction] - tasks: list[Task[None]] - Use positional argument in function calls This provides better type checking and IDE support while keeping the runtime behavior unchanged. Addresses CodeRabbit review comment on PR #959. 
--- webhook_server/libs/handlers/runner_handler.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 2826fc34..ac26541c 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -3,8 +3,8 @@ import re import shutil from asyncio import Task -from collections.abc import AsyncGenerator, Callable, Coroutine -from typing import TYPE_CHECKING, Any +from collections.abc import AsyncGenerator +from typing import TYPE_CHECKING, Any, Protocol import shortuuid from github.Branch import Branch @@ -30,6 +30,12 @@ from webhook_server.libs.github_api import GithubWebhook +class RetestFunction(Protocol): + """Protocol for retest runner functions.""" + + async def __call__(self, pull_request: PullRequest) -> None: ... + + class RunnerHandler: def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler | None = None): self.github_webhook = github_webhook @@ -785,7 +791,7 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ return # Map check names to runner functions - _retests_to_func_map: dict[str, Callable] = { + _retests_to_func_map: dict[str, RetestFunction] = { TOX_STR: self.run_tox, PRE_COMMIT_STR: self.run_pre_commit, BUILD_CONTAINER_STR: self.run_build_container, @@ -793,14 +799,14 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ CONVENTIONAL_TITLE_STR: self.run_conventional_title_check, } - tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] + tasks: list[Task[None]] = [] task_names: list[str] = [] # Track names parallel to tasks for _test in supported_retests: if _test not in _retests_to_func_map: self.logger.error(f"{self.log_prefix} Unknown retest type: {_test}") continue self.logger.debug(f"{self.log_prefix} running retest {_test}") - task = 
asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) + task = asyncio.create_task(_retests_to_func_map[_test](pull_request)) tasks.append(task) task_names.append(_test) # Track name at same index as task From 1cc3955984c36f45276c8d4ad1df2cdcd84f1bc5 Mon Sep 17 00:00:00 2001 From: rnetser Date: Wed, 31 Dec 2025 20:20:39 +0200 Subject: [PATCH 26/34] fix: use keyword argument for pull_request in run_retests Change from positional argument to keyword argument when calling retest functions to match test expectations and maintain code clarity. --- webhook_server/libs/handlers/runner_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index ac26541c..84a3a423 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -806,7 +806,7 @@ async def run_retests(self, supported_retests: list[str], pull_request: PullRequ self.logger.error(f"{self.log_prefix} Unknown retest type: {_test}") continue self.logger.debug(f"{self.log_prefix} running retest {_test}") - task = asyncio.create_task(_retests_to_func_map[_test](pull_request)) + task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) tasks.append(task) task_names.append(_test) # Track name at same index as task From c8d184ac1b227e39145ab1b5de7f33c6087b8f66 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 09:29:52 +0200 Subject: [PATCH 27/34] fix: use tox-uv plugin to reduce CI disk usage Add --with tox-uv to uvx command to leverage uv's global package cache, reducing disk usage by 80-90% during parallel CI runs. 
--- webhook_server/libs/handlers/runner_handler.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 84a3a423..eecffe3e 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -204,7 +204,10 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): # Build tox command with worktree path - cmd = f"uvx {python_ver} {TOX_STR} --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" + cmd = ( + f"uvx --with tox-uv {python_ver} tox " + f"--workdir {worktree_path} --root {worktree_path} -c {worktree_path}" + ) if _tox_tests and _tox_tests != "all": tests = _tox_tests.replace(" ", "") cmd += f" -e {tests}" From aa9a5fd657ad8c218e4a1667ff6d65f3928c9bc4 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 09:56:13 +0200 Subject: [PATCH 28/34] fix: handle None retrigger config explicitly to avoid log noise Treat None as expected "disabled" state rather than invalid config, eliminating unnecessary warning logs when retrigger is not configured. 
--- webhook_server/libs/handlers/runner_handler.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index eecffe3e..83dafb9f 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -843,6 +843,10 @@ async def run_retests_from_config(self, pull_request: PullRequest) -> bool: retrigger_config = self.github_webhook.retrigger_checks_on_base_push + # None is the expected "disabled" state - return silently + if retrigger_config is None: + return False + if retrigger_config == "all": checks_to_run = available_checks elif isinstance(retrigger_config, list): From d625470cef0eb723ce35d0576b67af7f0f069dc0 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 11:07:40 +0200 Subject: [PATCH 29/34] revert: remove tox-uv plugin that breaks dependency installation The tox-uv plugin was not correctly installing project dependencies, causing "ModuleNotFoundError: No module named 'kubernetes'" failures. 
--- webhook_server/libs/handlers/runner_handler.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 83dafb9f..db968f01 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -204,10 +204,7 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): # Build tox command with worktree path - cmd = ( - f"uvx --with tox-uv {python_ver} tox " - f"--workdir {worktree_path} --root {worktree_path} -c {worktree_path}" - ) + cmd = f"uvx {python_ver} tox --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" if _tox_tests and _tox_tests != "all": tests = _tox_tests.replace(" ", "") cmd += f" -e {tests}" From 16048f8516356f4de666de99a4bbf62d23c87326 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 11:13:11 +0200 Subject: [PATCH 30/34] fix: limit concurrent tox runs to prevent disk exhaustion Add class-level semaphore limiting tox runs to 2 concurrent executions. Each tox creates 500MB-2GB of virtual environments; without limits, parallel PR checks can exhaust disk space during execution. --- .../libs/handlers/runner_handler.py | 96 +++++++++++-------- 1 file changed, 54 insertions(+), 42 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index db968f01..16d1e6e3 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -37,6 +37,10 @@ async def __call__(self, pull_request: PullRequest) -> None: ... 
class RunnerHandler: + # Class-level semaphore to limit concurrent tox runs (prevents disk exhaustion) + # Each tox run creates ~500MB-2GB of virtual environments + _tox_semaphore: asyncio.Semaphore = asyncio.Semaphore(2) + def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler | None = None): self.github_webhook = github_webhook self.owners_file_handler = owners_file_handler or OwnersFileHandler(github_webhook=self.github_webhook) @@ -198,54 +202,62 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) await self.check_run_handler.set_run_tox_check_in_progress(pull_request=pull_request) - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository checkout for tox execution", - ) - async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): - # Build tox command with worktree path - cmd = f"uvx {python_ver} tox --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" - if _tox_tests and _tox_tests != "all": - tests = _tox_tests.replace(" ", "") - cmd += f" -e {tests}" - self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") - - output: dict[str, Any] = { - "title": "Tox", - "summary": "", - "text": None, - } - if not success: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " - f"Repository preparation failed for tox", - ) - self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") - output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_run_tox_check_failure(output=output, pull_request=pull_request) - + # Acquire semaphore to limit concurrent tox runs (prevents disk exhaustion) + async with self._tox_semaphore: self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} 
{format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" - ) - rc, out, err = await run_command( - command=cmd, - log_prefix=self.log_prefix, - mask_sensitive=self.github_webhook.mask_sensitive, + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository checkout for tox execution", ) + async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): + # Build tox command with worktree path + cmd = f"uvx {python_ver} tox --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" + if _tox_tests and _tox_tests != "all": + tests = _tox_tests.replace(" ", "") + cmd += f" -e {tests}" + self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") + + output: dict[str, Any] = { + "title": "Tox", + "summary": "", + "text": None, + } + if not success: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for tox", + ) + self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) + return await self.check_run_handler.set_run_tox_check_failure( + output=output, pull_request=pull_request + ) - output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) - - if rc: self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " - f"Tox tests completed successfully", + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" ) - return await self.check_run_handler.set_run_tox_check_success(output=output, pull_request=pull_request) - else: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" + rc, out, err = await run_command( + command=cmd, + 
log_prefix=self.log_prefix, + mask_sensitive=self.github_webhook.mask_sensitive, ) - return await self.check_run_handler.set_run_tox_check_failure(output=output, pull_request=pull_request) + + output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) + + if rc: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Tox tests completed successfully", + ) + return await self.check_run_handler.set_run_tox_check_success( + output=output, pull_request=pull_request + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" + ) + return await self.check_run_handler.set_run_tox_check_failure( + output=output, pull_request=pull_request + ) async def run_pre_commit(self, pull_request: PullRequest) -> None: if not self.github_webhook.pre_commit: From 3ae35c55da1cff4ca722501efb8a5ebe0b6a8c44 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 11:32:26 +0200 Subject: [PATCH 31/34] feat: make tox concurrency limit configurable Add tox-max-concurrent config option (default: 5) to control how many tox runs execute in parallel. Prevents disk exhaustion while allowing operators to tune based on available resources. --- webhook_server/libs/handlers/runner_handler.py | 11 +++++++---- webhook_server/tests/test_runner_handler.py | 1 + 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 16d1e6e3..25de2721 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -37,10 +37,6 @@ async def __call__(self, pull_request: PullRequest) -> None: ... 
class RunnerHandler: - # Class-level semaphore to limit concurrent tox runs (prevents disk exhaustion) - # Each tox run creates ~500MB-2GB of virtual environments - _tox_semaphore: asyncio.Semaphore = asyncio.Semaphore(2) - def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler | None = None): self.github_webhook = github_webhook self.owners_file_handler = owners_file_handler or OwnersFileHandler(github_webhook=self.github_webhook) @@ -53,6 +49,13 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler ) + # Instance-level semaphore to limit concurrent tox runs (prevents disk exhaustion) + # Each tox run creates ~500MB-2GB of virtual environments + # Configurable via 'tox-max-concurrent' in config (default: 5) + tox_limit = getattr(self.github_webhook, "tox_max_concurrent", 5) + self._tox_semaphore: asyncio.Semaphore = asyncio.Semaphore(tox_limit) + self.logger.debug(f"{self.log_prefix} Tox max concurrent runs: {tox_limit}") + @contextlib.asynccontextmanager async def _checkout_worktree( self, diff --git a/webhook_server/tests/test_runner_handler.py b/webhook_server/tests/test_runner_handler.py index 6385e167..2225f025 100644 --- a/webhook_server/tests/test_runner_handler.py +++ b/webhook_server/tests/test_runner_handler.py @@ -24,6 +24,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.clone_repo_dir = "/tmp/test-repo" mock_webhook.tox = {"main": "all"} mock_webhook.tox_python_version = "3.12" + mock_webhook.tox_max_concurrent = 5 mock_webhook.pre_commit = True mock_webhook.build_and_push_container = True mock_webhook.pypi = {"token": "dummy"} From 7fd52a2029fa63e4c7b2cafa6f9a90ed8e5bf2c6 Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 11:38:38 +0200 Subject: [PATCH 32/34] test: add tox_max_concurrent to mock fixtures Add missing tox_max_concurrent attribute to mock_github_webhook fixtures in 
test_issue_comment_handler.py and test_push_handler.py to prevent asyncio.Semaphore initialization errors. --- webhook_server/tests/test_issue_comment_handler.py | 1 + webhook_server/tests/test_push_handler.py | 1 + 2 files changed, 2 insertions(+) diff --git a/webhook_server/tests/test_issue_comment_handler.py b/webhook_server/tests/test_issue_comment_handler.py index 183cdd1f..8ff7ba48 100644 --- a/webhook_server/tests/test_issue_comment_handler.py +++ b/webhook_server/tests/test_issue_comment_handler.py @@ -40,6 +40,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.issue_url_for_welcome_msg = "welcome-message-url" mock_webhook.build_and_push_container = True mock_webhook.current_pull_request_supported_retest = [TOX_STR, "pre-commit"] + mock_webhook.tox_max_concurrent = 5 return mock_webhook @pytest.fixture diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index b12d233c..eebce620 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -44,6 +44,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.container_repository_username = "test-user" # Always a string mock_webhook.container_repository_password = "test-password" # Always a string # pragma: allowlist secret mock_webhook.token = "test-token" # Always a string + mock_webhook.tox_max_concurrent = 5 return mock_webhook @pytest.fixture From cf9823f616a732594c9d49a92ee300155d69afae Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 12:08:43 +0200 Subject: [PATCH 33/34] fix: set tox check to queued before acquiring semaphore Users now see "queued" status immediately when tox is waiting for a semaphore slot, rather than seeing no status update. Status changes to "in-progress" once semaphore is acquired and tox actually starts. 
--- webhook_server/libs/handlers/runner_handler.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index 25de2721..d67e0044 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -199,14 +199,21 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) _tox_tests = self.github_webhook.tox.get(pull_request.base.ref, "") + # Set check to queued immediately so users see status while waiting for semaphore self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Setting tox check status to in-progress", + f"Setting tox check status to queued", ) - await self.check_run_handler.set_run_tox_check_in_progress(pull_request=pull_request) + await self.check_run_handler.set_run_tox_check_queued(pull_request=pull_request) # Acquire semaphore to limit concurrent tox runs (prevents disk exhaustion) async with self._tox_semaphore: + # Update status to in-progress once we acquire semaphore and actually start running + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting tox check status to in-progress", + ) + await self.check_run_handler.set_run_tox_check_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Preparing repository checkout for tox execution", From 7e602ab155f651ebbeda80924d5add4892fe946a Mon Sep 17 00:00:00 2001 From: rnetser Date: Thu, 1 Jan 2026 12:38:13 +0200 Subject: [PATCH 34/34] fix: set checks to queued only during retrigger flow Move queued status logic from run_tox to run_retests_from_config. 
This ensures original PR flow is unchanged while retrigger flow shows all pending checks as "queued" before running batches. --- .../libs/handlers/runner_handler.py | 23 ++++++++++++------- webhook_server/tests/test_push_handler.py | 9 +++++++- 2 files changed, 23 insertions(+), 9 deletions(-) diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index d67e0044..709207af 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -199,16 +199,8 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) _tox_tests = self.github_webhook.tox.get(pull_request.base.ref, "") - # Set check to queued immediately so users see status while waiting for semaphore - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Setting tox check status to queued", - ) - await self.check_run_handler.set_run_tox_check_queued(pull_request=pull_request) - # Acquire semaphore to limit concurrent tox runs (prevents disk exhaustion) async with self._tox_semaphore: - # Update status to in-progress once we acquire semaphore and actually start running self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting tox check status to in-progress", @@ -884,5 +876,20 @@ async def run_retests_from_config(self, pull_request: PullRequest) -> bool: pr_number = pull_request.number self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") + # Set all checks to queued before starting the batch (re-trigger flow only) + self.logger.debug(f"{self.log_prefix} Setting {len(checks_to_run)} checks to queued status") + for check in checks_to_run: + if check == TOX_STR: + await self.check_run_handler.set_run_tox_check_queued(pull_request=pull_request) + elif check == PRE_COMMIT_STR: + await 
self.check_run_handler.set_run_pre_commit_check_queued(pull_request=pull_request) + elif check == BUILD_CONTAINER_STR: + await self.check_run_handler.set_container_build_queued(pull_request=pull_request) + elif check == PYTHON_MODULE_INSTALL_STR: + await self.check_run_handler.set_python_module_install_queued(pull_request=pull_request) + elif check == CONVENTIONAL_TITLE_STR: + await self.check_run_handler.set_conventional_title_queued(pull_request=pull_request) + + # Now run the actual checks (they will update to in-progress when they start) await self.run_retests(supported_retests=checks_to_run, pull_request=pull_request) return True diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index eebce620..58f4aea8 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -50,7 +50,14 @@ def mock_github_webhook(self) -> Mock: @pytest.fixture def push_handler(self, mock_github_webhook: Mock) -> PushHandler: """Create a PushHandler instance with mocked dependencies.""" - return PushHandler(mock_github_webhook) + handler = PushHandler(mock_github_webhook) + # Mock check_run_handler methods used by run_retests_from_config + handler.runner_handler.check_run_handler.set_run_tox_check_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_run_pre_commit_check_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_container_build_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_python_module_install_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_conventional_title_queued = AsyncMock() + return handler @pytest.mark.asyncio async def test_process_push_webhook_data_with_tag_and_pypi(self, push_handler: PushHandler) -> None: