diff --git a/webhook_server/config/schema.yaml b/webhook_server/config/schema.yaml index c63156aa..9de7d7e9 100644 --- a/webhook_server/config/schema.yaml +++ b/webhook_server/config/schema.yaml @@ -83,6 +83,16 @@ properties: type: boolean description: Create a tracking issue for new pull requests (global default) default: true + retrigger-checks-on-base-push: + oneOf: + - type: string + enum: ["all"] + description: Re-trigger all available CI checks for out-of-date PRs + - type: array + items: + type: string + description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) + description: Re-trigger CI checks for out-of-date PRs when their base branch is updated (triggered by both merged PRs and direct non-tag branch pushes). Value can be "all" (string) to re-trigger all checks, or an array of specific check names. If not configured, defaults to disabled. pr-size-thresholds: type: object @@ -299,6 +309,16 @@ properties: type: boolean description: Create a tracking issue for new pull requests default: true + retrigger-checks-on-base-push: + oneOf: + - type: string + enum: ["all"] + description: Re-trigger all available CI checks for out-of-date PRs + - type: array + items: + type: string + description: Re-trigger specific CI checks (e.g., ["tox", "pre-commit"]) + description: Re-trigger CI checks for out-of-date PRs when their base branch is updated (triggered by both merged PRs and direct non-tag branch pushes). Value can be "all" (string) to re-trigger all checks, or an array of specific check names. If not configured, defaults to disabled. pr-size-thresholds: type: object description: Custom PR size thresholds with label names and colors (repository-specific override) diff --git a/webhook_server/libs/github_api.py b/webhook_server/libs/github_api.py index 41c6493e..d12fd796 100644 --- a/webhook_server/libs/github_api.py +++ b/webhook_server/libs/github_api.py @@ -709,6 +709,9 @@ def _repo_data_from_config(self, repository_config: dict[str, Any]) -> None: ) self.mask_sensitive = self.config.get_value("mask-sensitive-data", return_on_none=True) + self.retrigger_checks_on_base_push: list[str] | str | None = self.config.get_value( + value="retrigger-checks-on-base-push", return_on_none=None, extra_dict=repository_config + ) async def get_pull_request(self, number: int | None = None) -> PullRequest | None: if number: diff --git a/webhook_server/libs/handlers/check_run_handler.py b/webhook_server/libs/handlers/check_run_handler.py index ab189c62..ebabe113 100644 --- a/webhook_server/libs/handlers/check_run_handler.py +++ b/webhook_server/libs/handlers/check_run_handler.py @@ -2,6 +2,7 @@ from typing import TYPE_CHECKING, Any from github.CheckRun import CheckRun +from github.Commit import Commit from github.CommitStatus import CommitStatus from github.PullRequest import PullRequest from github.Repository import Repository @@ -138,37 +139,51 @@ async def set_verify_check_queued(self) -> None: async def set_verify_check_success(self) -> None: return await self.set_check_run_status(check_run=VERIFIED_LABEL_STR, conclusion=SUCCESS_STR) - async def set_run_tox_check_queued(self) -> None: + async def set_run_tox_check_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.tox: self.logger.debug(f"{self.log_prefix} tox is not configured, skipping.") return - return await self.set_check_run_status(check_run=TOX_STR, status=QUEUED_STR) + return await self.set_check_run_status(check_run=TOX_STR, status=QUEUED_STR, pull_request=pull_request) - async def 
set_run_tox_check_in_progress(self) -> None: - return await self.set_check_run_status(check_run=TOX_STR, status=IN_PROGRESS_STR) + async def set_run_tox_check_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status(check_run=TOX_STR, status=IN_PROGRESS_STR, pull_request=pull_request) - async def set_run_tox_check_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=TOX_STR, conclusion=FAILURE_STR, output=output) + async def set_run_tox_check_failure(self, output: dict[str, Any], pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=TOX_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def set_run_tox_check_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=TOX_STR, conclusion=SUCCESS_STR, output=output) + async def set_run_tox_check_success(self, output: dict[str, Any], pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=TOX_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_run_pre_commit_check_queued(self) -> None: + async def set_run_pre_commit_check_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.pre_commit: self.logger.debug(f"{self.log_prefix} pre-commit is not configured, skipping.") return - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=QUEUED_STR) + return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=QUEUED_STR, pull_request=pull_request) - async def set_run_pre_commit_check_in_progress(self) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR) + async def set_run_pre_commit_check_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_run_pre_commit_check_failure(self, output: dict[str, Any] | None = None) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output) + async def set_run_pre_commit_check_failure( + self, output: dict[str, Any] | None = None, pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def set_run_pre_commit_check_success(self, output: dict[str, Any] | None = None) -> None: - return await self.set_check_run_status(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output) + async def set_run_pre_commit_check_success( + self, output: dict[str, Any] | None = None, pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) async def set_merge_check_queued(self, output: dict[str, Any] | None = None) -> None: return await self.set_check_run_status(check_run=CAN_BE_MERGED_STR, status=QUEUED_STR, output=output) @@ -182,53 +197,85 @@ async def set_merge_check_success(self) -> None: async def set_merge_check_failure(self, output: dict[str, Any]) -> None: return await self.set_check_run_status(check_run=CAN_BE_MERGED_STR, conclusion=FAILURE_STR, output=output) - async def set_container_build_queued(self) -> None: + async def 
set_container_build_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.build_and_push_container: self.logger.debug(f"{self.log_prefix} build_and_push_container is not configured, skipping.") return - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=QUEUED_STR) + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_container_build_in_progress(self) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR) + async def set_container_build_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_container_build_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output) + async def set_container_build_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_container_build_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output) + async def set_container_build_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) - async def set_python_module_install_queued(self) -> None: + async def set_python_module_install_queued(self, pull_request: PullRequest | None = None) -> None: if not self.github_webhook.pypi: self.logger.debug(f"{self.log_prefix} pypi is not configured, skipping.") return - return await self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR) + return await self.set_check_run_status( + check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_python_module_install_in_progress(self) -> None: - return await self.set_check_run_status(check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR) + async def set_python_module_install_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_python_module_install_success(self, output: dict[str, Any]) -> None: + async def set_python_module_install_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: return await self.set_check_run_status( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request ) - async def set_python_module_install_failure(self, output: dict[str, Any]) -> None: + async def set_python_module_install_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: return await self.set_check_run_status( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request ) - async 
def set_conventional_title_queued(self) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR) + async def set_conventional_title_queued(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR, pull_request=pull_request + ) - async def set_conventional_title_in_progress(self) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR) + async def set_conventional_title_in_progress(self, pull_request: PullRequest | None = None) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR, pull_request=pull_request + ) - async def set_conventional_title_success(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output) + async def set_conventional_title_success( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output, pull_request=pull_request + ) - async def set_conventional_title_failure(self, output: dict[str, Any]) -> None: - return await self.set_check_run_status(check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output) + async def set_conventional_title_failure( + self, output: dict[str, Any], pull_request: PullRequest | None = None + ) -> None: + return await self.set_check_run_status( + check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output, pull_request=pull_request + ) async def set_cherry_pick_in_progress(self) -> None: return await self.set_check_run_status(check_run=CHERRY_PICKED_LABEL_PREFIX, status=IN_PROGRESS_STR) @@ -249,8 +296,28 @@ async def set_check_run_status( status: str = "", conclusion: str = "", output: dict[str, str] | None = None, + pull_request: PullRequest | None = None, ) -> None: - kwargs: dict[str, Any] = {"name": check_run, "head_sha": self.github_webhook.last_commit.sha} + # Get head_sha from pull_request or fall back to github_webhook.last_commit + if pull_request: + # Use single-pass iteration to find last commit + def get_last_commit_sha() -> str: + last_commit = None + for commit in pull_request.get_commits(): + last_commit = commit + if last_commit is None: + raise ValueError("Pull request has no commits") + return last_commit.sha + + head_sha = await asyncio.to_thread(get_last_commit_sha) + else: + # Fall back to github_webhook.last_commit for backward compatibility + if not hasattr(self.github_webhook, "last_commit") or self.github_webhook.last_commit is None: + self.logger.warning(f"{self.log_prefix} Cannot set check run status: no last_commit available") + return + head_sha = self.github_webhook.last_commit.sha + + kwargs: dict[str, Any] = {"name": check_run, "head_sha": head_sha} if status: kwargs["status"] = status @@ -353,12 +420,43 @@ def get_check_run_text(self, err: str, out: str) -> str: return _output - async def is_check_run_in_progress(self, check_run: str) -> bool: - if self.github_webhook.last_commit: - for run in await asyncio.to_thread(self.github_webhook.last_commit.get_check_runs): - if run.name == check_run and run.status == IN_PROGRESS_STR: - self.logger.debug(f"{self.log_prefix} Check run {check_run} is in progress.") - return True + async def is_check_run_in_progress(self, check_run: str, pull_request: PullRequest | None = 
None) -> bool: + """Check if a specific check run is in progress. + + Args: + check_run: Name of the check run to check + pull_request: Optional pull request to get last commit from. If provided, + gets last commit from PR. Otherwise, falls back to github_webhook.last_commit + + Returns: + True if check run is in progress, False otherwise + """ + last_commit = None + if pull_request: + # Use single-pass iteration to find last commit - O(1) memory instead of O(N) + def get_last_commit_from_pr() -> Commit | None: + last = None + for commit in pull_request.get_commits(): + last = commit + return last + + last_commit = await asyncio.to_thread(get_last_commit_from_pr) + else: + # last_commit may not exist on github_webhook for push events (optional attribute) + last_commit = self.github_webhook.last_commit if hasattr(self.github_webhook, "last_commit") else None + + if last_commit: + # Optimize PaginatedList iteration with early exit + def find_check_run_in_progress() -> bool: + for run in last_commit.get_check_runs(): + if run.name == check_run and run.status == IN_PROGRESS_STR: + return True + return False + + is_in_progress = await asyncio.to_thread(find_check_run_in_progress) + if is_in_progress: + self.logger.debug(f"{self.log_prefix} Check run {check_run} is in progress.") + return True return False async def required_check_failed_or_no_status( diff --git a/webhook_server/libs/handlers/issue_comment_handler.py b/webhook_server/libs/handlers/issue_comment_handler.py index e0b77dea..afec27cc 100644 --- a/webhook_server/libs/handlers/issue_comment_handler.py +++ b/webhook_server/libs/handlers/issue_comment_handler.py @@ -2,7 +2,7 @@ import asyncio from asyncio import Task -from collections.abc import Callable, Coroutine +from collections.abc import Coroutine from typing import TYPE_CHECKING, Any from github.PullRequest import PullRequest @@ -16,7 +16,6 @@ from webhook_server.utils.constants import ( AUTOMERGE_LABEL_STR, BUILD_AND_PUSH_CONTAINER_STR, - BUILD_CONTAINER_STR, CHERRY_PICK_LABEL_PREFIX, COMMAND_ADD_ALLOWED_USER_STR, COMMAND_ASSIGN_REVIEWER_STR, @@ -25,12 +24,8 @@ COMMAND_CHERRY_PICK_STR, COMMAND_REPROCESS_STR, COMMAND_RETEST_STR, - CONVENTIONAL_TITLE_STR, HOLD_LABEL_STR, - PRE_COMMIT_STR, - PYTHON_MODULE_INSTALL_STR, REACTIONS, - TOX_STR, USER_LABELS_DICT, VERIFIED_LABEL_STR, WIP_STR, @@ -415,14 +410,6 @@ async def process_retest_command( self.logger.debug(f"{self.log_prefix} Target tests for re-test: {_target_tests}") _not_supported_retests: list[str] = [] _supported_retests: list[str] = [] - _retests_to_func_map: dict[str, Callable] = { - TOX_STR: self.runner_handler.run_tox, - PRE_COMMIT_STR: self.runner_handler.run_pre_commit, - BUILD_CONTAINER_STR: self.runner_handler.run_build_container, - PYTHON_MODULE_INSTALL_STR: self.runner_handler.run_install_python_module, - CONVENTIONAL_TITLE_STR: self.runner_handler.run_conventional_title_check, - } - self.logger.debug(f"{self.log_prefix} Retest map is {_retests_to_func_map}") if not _target_tests: msg = "No test defined to retest" @@ -459,17 +446,8 @@ async def process_retest_command( self.logger.debug(error_msg) await asyncio.to_thread(pull_request.create_issue_comment, msg) - if _supported_retests: - tasks: list[Coroutine[Any, Any, Any] | Task[Any]] = [] - for _test in _supported_retests: - self.logger.debug(f"{self.log_prefix} running retest {_test}") - task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) - tasks.append(task) - - results = await asyncio.gather(*tasks, return_exceptions=True) - for 
result in results: - if isinstance(result, Exception): - self.logger.error(f"{self.log_prefix} Async task failed: {result}") + # Run all supported retests using the shared runner handler method + await self.runner_handler.run_retests(supported_retests=_supported_retests, pull_request=pull_request) if automerge: await self.labels_handler._add_label(pull_request=pull_request, label=AUTOMERGE_LABEL_STR) diff --git a/webhook_server/libs/handlers/pull_request_handler.py b/webhook_server/libs/handlers/pull_request_handler.py index 126b4671..c5200d9a 100644 --- a/webhook_server/libs/handlers/pull_request_handler.py +++ b/webhook_server/libs/handlers/pull_request_handler.py @@ -164,7 +164,7 @@ async def process_pull_request_webhook_data(self, pull_request: PullRequest) -> pull_request=pull_request, ) - await self.label_all_opened_pull_requests_merge_state_after_merged() + await self.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() # Log completion - task_status reflects the result of our action self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('pr_handler', 'pr_management', 'completed')} " @@ -407,21 +407,50 @@ def _prepare_retest_welcome_comment(self) -> str: return " * No retest actions are configured for this repository" if not retest_msg else retest_msg - async def label_all_opened_pull_requests_merge_state_after_merged(self) -> None: - """ - Labels pull requests based on their mergeable state. + async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged(self) -> None: + """Labels pull requests based on their mergeable state and optionally re-triggers checks. + + Primary action: + Adds/removes labels based on merge state: + - 'needs rebase' label when merge_state is 'behind' + - 'has conflicts' label when merge_state is 'dirty' + + Secondary action (if retrigger-checks-on-base-push is enabled): + Re-triggers CI check suites for out-of-date PRs (merge_state in 'behind' or 'blocked'). + This ensures CI checks run against the updated base branch. - If the mergeable state is 'behind', the 'needs rebase' label is added. - If the mergeable state is 'dirty', the 'has conflicts' label is added. + Note: + Waits 30 seconds before processing to allow GitHub's merge state calculation to complete. 
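+
+        Example:
+            A sketch of the option in config.yaml; the key name and both value forms come from
+            schema.yaml (the key can be set globally or per repository), while the surrounding
+            file layout is assumed:
+
+                retrigger-checks-on-base-push: "all"
+
+            or, to re-trigger only selected checks:
+
+                retrigger-checks-on-base-push:
+                  - tox
+                  - pre-commit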
""" time_sleep = 30 self.logger.info(f"{self.log_prefix} Sleep for {time_sleep} seconds before getting all opened PRs") await asyncio.sleep(time_sleep) pulls = await asyncio.to_thread(lambda: list(self.repository.get_pulls(state="open"))) - for pull_request in pulls: - self.logger.info(f"{self.log_prefix} check label pull request after merge") - await self.label_pull_request_by_merge_state(pull_request=pull_request) + + # Move config lookup outside the loop (static value) + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + # Process all PRs in parallel with error isolation + async def process_single_pr(pull_request: PullRequest) -> None: + """Process a single PR with error handling.""" + try: + self.logger.info(f"{self.log_prefix} check label pull request after merge") + merge_state = await self.label_pull_request_by_merge_state(pull_request=pull_request) + + # If retrigger is enabled and PR is behind or blocked, retrigger checks + if retrigger_config and merge_state in ("behind", "blocked"): + await self._retrigger_check_suites_for_pr(pull_request=pull_request) + except Exception: + self.logger.exception( + f"{self.log_prefix} Failed to process PR #{pull_request.number} during label/retrigger operation" + ) + + # Process all PRs concurrently + await asyncio.gather( + *[process_single_pr(pr) for pr in pulls], + return_exceptions=True, + ) async def delete_remote_tag_for_merged_or_closed_pr(self, pull_request: PullRequest) -> None: self.logger.step( # type: ignore[attr-defined] @@ -864,11 +893,16 @@ async def remove_labels_when_pull_request_sync(self, pull_request: PullRequest) if isinstance(result, Exception): self.logger.error(f"{self.log_prefix} Async task failed: {result}") - async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> None: + async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> str: + """Label PR based on merge state (needs-rebase, has-conflicts). + + Returns: + The mergeable_state value for further processing. 
+ """ merge_state = await asyncio.to_thread(lambda: pull_request.mergeable_state) self.logger.debug(f"{self.log_prefix} Mergeable state is {merge_state}") if merge_state == "unknown": - return + return merge_state if merge_state == "behind": await self.labels_handler._add_label(pull_request=pull_request, label=NEEDS_REBASE_LABEL_STR) @@ -880,6 +914,23 @@ async def label_pull_request_by_merge_state(self, pull_request: PullRequest) -> else: await self.labels_handler._remove_label(pull_request=pull_request, label=HAS_CONFLICTS_LABEL_STR) + return merge_state + + async def _retrigger_check_suites_for_pr(self, pull_request: PullRequest) -> None: + """Re-trigger configured checks for a PR when base branch is updated.""" + pr_number = pull_request.number + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " + f"Re-triggering checks for out-of-date PR #{pr_number}", + ) + + try: + # Run configured checks using the shared runner handler method + await self.runner_handler.run_retests_from_config(pull_request=pull_request) + except Exception: + self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") + async def _process_verified_for_update_or_new_pull_request(self, pull_request: PullRequest) -> None: if not self.github_webhook.verified_job: return diff --git a/webhook_server/libs/handlers/push_handler.py b/webhook_server/libs/handlers/push_handler.py index 8ff09873..48c2f491 100644 --- a/webhook_server/libs/handlers/push_handler.py +++ b/webhook_server/libs/handlers/push_handler.py @@ -1,7 +1,9 @@ import asyncio import re +from datetime import UTC, datetime from typing import TYPE_CHECKING +from github.PullRequest import PullRequest from github.Repository import Repository from webhook_server.libs.handlers.check_run_handler import CheckRunHandler @@ -69,11 +71,129 @@ async def process_push_webhook_data(self) -> None: ) self.logger.exception(f"{self.log_prefix} Container build and push failed: {ex}") else: + # Non-tag push - check if this is a push to a branch that could be a base for PRs self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'processing')} " - f"Non-tag push detected, skipping processing", + f"Processing branch push event", ) + # Check if retrigger is enabled (not None or empty list) + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + if not retrigger_config: + self.logger.debug(f"{self.log_prefix} retrigger-checks-on-base-push not configured, skipping") + else: + # Extract branch name from ref (refs/heads/main -> main) + branch_match = re.search(r"^refs/heads/(.+)$", self.hook_data["ref"]) + if branch_match: + branch_name = branch_match.group(1) + self.logger.info(f"{self.log_prefix} Branch push detected: {branch_name}") + await self._retrigger_checks_for_prs_targeting_branch(branch_name=branch_name) + else: + self.logger.debug( + f"{self.log_prefix} Could not extract branch name from ref: {self.hook_data['ref']}" + ) + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('push_processing', 'webhook_event', 'completed')} " + f"Branch push processing completed", + ) + + async def _retrigger_checks_for_prs_targeting_branch(self, branch_name: str) -> None: + """Re-trigger CI checks for PRs targeting the updated branch that are behind or blocked. 
+ + Args: + branch_name: The branch that was pushed to (e.g., 'main') + """ + time_sleep = 30 + self.logger.info(f"{self.log_prefix} Waiting {time_sleep}s for GitHub to update merge states") + await asyncio.sleep(time_sleep) + + # Get all open PRs targeting this branch + def get_pulls() -> list[PullRequest]: + return list(self.repository.get_pulls(state="open", base=branch_name)) + + pulls = await asyncio.to_thread(get_pulls) + + if not pulls: + self.logger.info(f"{self.log_prefix} No open PRs targeting branch {branch_name}") + return + + self.logger.info(f"{self.log_prefix} Found {len(pulls)} open PRs targeting {branch_name}") + + for pull_request in pulls: + # pr.number is in-memory data from get_pulls() result - no wrapping needed + pr_number = pull_request.number + # mergeable_state triggers API call - must wrap to avoid blocking + + # Use default parameter to capture current iteration's pull_request (closure pattern) + # This ensures each lambda captures the correct PR object, not the loop variable + def get_merge_state(pr: PullRequest = pull_request) -> str | None: + return pr.mergeable_state + + # Handle None/unknown merge states with retry + max_retries = 5 + retry_delay = 10 # seconds + merge_state: str | None = None + + for attempt in range(1, max_retries + 1): + merge_state = await asyncio.to_thread(get_merge_state) + self.logger.debug(f"{self.log_prefix} PR #{pr_number} merge state: {merge_state}") + + if merge_state not in (None, "unknown"): + break + + if attempt < max_retries: + self.logger.info( + f"{self.log_prefix} PR #{pr_number} merge state is '{merge_state}' - " + f"waiting {retry_delay}s for GitHub to calculate (attempt {attempt}/{max_retries})" + ) + await asyncio.sleep(retry_delay) + else: + # Loop completed without break - merge_state is still None or "unknown" + self.logger.warning( + f"{self.log_prefix} PR #{pr_number} merge state still '{merge_state}' after " + f"{max_retries} attempts, skipping" + ) + continue + + # Only re-trigger for PRs that are behind or blocked + if merge_state in ("behind", "blocked"): + # Skip if PR was updated very recently (likely already has fresh checks) + def get_updated_at(pr: PullRequest = pull_request) -> datetime: + return pr.updated_at + + pr_updated_at = await asyncio.to_thread(get_updated_at) + time_since_update = (datetime.now(UTC) - pr_updated_at).total_seconds() + if time_since_update < 60: # Skip if updated within last minute + self.logger.debug( + f"{self.log_prefix} PR #{pr_number} was updated {time_since_update:.0f}s ago, " + "skipping retrigger to avoid duplicate work" + ) + continue + + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('retrigger_checks', 'push_processing', 'processing')} " + f"Re-triggering checks for out-of-date PR #{pr_number} (state: {merge_state})", + ) + + try: + checks_triggered = await self.runner_handler.run_retests_from_config(pull_request=pull_request) + if checks_triggered: + self.logger.info(f"{self.log_prefix} Successfully re-triggered checks for PR #{pr_number}") + else: + self.logger.debug(f"{self.log_prefix} No checks triggered for PR #{pr_number}") + except Exception as e: + # Re-raise CancelledError immediately to allow cooperative cancellation + if isinstance(e, asyncio.CancelledError): + raise + # Log all other exceptions and continue processing other PRs + self.logger.exception(f"{self.log_prefix} Failed to re-trigger checks for PR #{pr_number}") + # Continue processing other PRs + else: + self.logger.debug( + f"{self.log_prefix} PR 
#{pr_number} merge state is '{merge_state}', not re-triggering" + ) + async def upload_to_pypi(self, tag_name: str) -> None: async def _issue_on_error(_error: str) -> None: # Sanitize title: replace newlines, remove backticks, strip whitespace, truncate diff --git a/webhook_server/libs/handlers/runner_handler.py b/webhook_server/libs/handlers/runner_handler.py index d81c2d80..709207af 100644 --- a/webhook_server/libs/handlers/runner_handler.py +++ b/webhook_server/libs/handlers/runner_handler.py @@ -2,8 +2,9 @@ import contextlib import re import shutil +from asyncio import Task from collections.abc import AsyncGenerator -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Protocol import shortuuid from github.Branch import Branch @@ -29,6 +30,12 @@ from webhook_server.libs.github_api import GithubWebhook +class RetestFunction(Protocol): + """Protocol for retest runner functions.""" + + async def __call__(self, pull_request: PullRequest) -> None: ... + + class RunnerHandler: def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersFileHandler | None = None): self.github_webhook = github_webhook @@ -42,6 +49,13 @@ def __init__(self, github_webhook: "GithubWebhook", owners_file_handler: OwnersF github_webhook=self.github_webhook, owners_file_handler=self.owners_file_handler ) + # Instance-level semaphore to limit concurrent tox runs (prevents disk exhaustion) + # Each tox run creates ~500MB-2GB of virtual environments + # Configurable via 'tox-max-concurrent' in config (default: 5) + tox_limit = getattr(self.github_webhook, "tox_max_concurrent", 5) + self._tox_semaphore: asyncio.Semaphore = asyncio.Semaphore(tox_limit) + self.logger.debug(f"{self.log_prefix} Tox max concurrent runs: {tox_limit}") + @contextlib.asynccontextmanager async def _checkout_worktree( self, @@ -177,7 +191,7 @@ async def run_tox(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'started')} Starting tox tests execution" ) - if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR): + if await self.check_run_handler.is_check_run_in_progress(check_run=TOX_STR, pull_request=pull_request): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {TOX_STR}.") python_ver = ( @@ -185,60 +199,67 @@ async def run_tox(self, pull_request: PullRequest) -> None: ) _tox_tests = self.github_webhook.tox.get(pull_request.base.ref, "") - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Setting tox check status to in-progress", - ) - await self.check_run_handler.set_run_tox_check_in_progress() - - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " - f"Preparing repository checkout for tox execution", - ) - async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): - # Build tox command with worktree path - cmd = f"uvx {python_ver} {TOX_STR} --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" - if _tox_tests and _tox_tests != "all": - tests = _tox_tests.replace(" ", "") - cmd += f" -e {tests}" - self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") - - output: dict[str, Any] = { - "title": "Tox", - "summary": "", - "text": None, - } - if not success: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " - 
f"Repository preparation failed for tox", - ) - self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") - output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_run_tox_check_failure(output=output) - + # Acquire semaphore to limit concurrent tox runs (prevents disk exhaustion) + async with self._tox_semaphore: self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Setting tox check status to in-progress", ) - rc, out, err = await run_command( - command=cmd, - log_prefix=self.log_prefix, - mask_sensitive=self.github_webhook.mask_sensitive, + await self.check_run_handler.set_run_tox_check_in_progress(pull_request=pull_request) + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " + f"Preparing repository checkout for tox execution", ) + async with self._checkout_worktree(pull_request=pull_request) as (success, worktree_path, out, err): + # Build tox command with worktree path + cmd = f"uvx {python_ver} tox --workdir {worktree_path} --root {worktree_path} -c {worktree_path}" + if _tox_tests and _tox_tests != "all": + tests = _tox_tests.replace(" ", "") + cmd += f" -e {tests}" + self.logger.debug(f"{self.log_prefix} Tox command to run: {cmd}") + + output: dict[str, Any] = { + "title": "Tox", + "summary": "", + "text": None, + } + if not success: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} " + f"Repository preparation failed for tox", + ) + self.logger.error(f"{self.log_prefix} Repository preparation failed for tox") + output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) + return await self.check_run_handler.set_run_tox_check_failure( + output=output, pull_request=pull_request + ) - output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) - - if rc: self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " - f"Tox tests completed successfully", + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} Executing tox command" ) - return await self.check_run_handler.set_run_tox_check_success(output=output) - else: - self.logger.step( # type: ignore[attr-defined] - f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" + rc, out, err = await run_command( + command=cmd, + log_prefix=self.log_prefix, + mask_sensitive=self.github_webhook.mask_sensitive, ) - return await self.check_run_handler.set_run_tox_check_failure(output=output) + + output["text"] = self.check_run_handler.get_check_run_text(err=err, out=out) + + if rc: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " + f"Tox tests completed successfully", + ) + return await self.check_run_handler.set_run_tox_check_success( + output=output, pull_request=pull_request + ) + else: + self.logger.step( # type: ignore[attr-defined] + f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Tox tests failed" + ) + return await self.check_run_handler.set_run_tox_check_failure( + output=output, pull_request=pull_request + ) async def run_pre_commit(self, pull_request: PullRequest) -> 
None: if not self.github_webhook.pre_commit: @@ -250,14 +271,14 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"Starting pre-commit checks execution", ) - if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR): + if await self.check_run_handler.is_check_run_in_progress(check_run=PRE_COMMIT_STR, pull_request=pull_request): self.logger.debug(f"{self.log_prefix} Check run is in progress, re-running {PRE_COMMIT_STR}.") self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting pre-commit check status to in-progress", ) - await self.check_run_handler.set_run_pre_commit_check_in_progress() + await self.check_run_handler.set_run_pre_commit_check_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -278,7 +299,9 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: ) self.logger.error(f"{self.log_prefix} Repository preparation failed for pre-commit") output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) + return await self.check_run_handler.set_run_pre_commit_check_failure( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -297,12 +320,16 @@ async def run_pre_commit(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Pre-commit checks completed successfully", ) - return await self.check_run_handler.set_run_pre_commit_check_success(output=output) + return await self.check_run_handler.set_run_pre_commit_check_success( + output=output, pull_request=pull_request + ) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Pre-commit checks failed" ) - return await self.check_run_handler.set_run_pre_commit_check_failure(output=output) + return await self.check_run_handler.set_run_pre_commit_check_failure( + output=output, pull_request=pull_request + ) async def run_build_container( self, @@ -332,7 +359,12 @@ async def run_build_container( return if pull_request and set_check: - if await self.check_run_handler.is_check_run_in_progress(check_run=BUILD_CONTAINER_STR) and not is_merged: + if ( + await self.check_run_handler.is_check_run_in_progress( + check_run=BUILD_CONTAINER_STR, pull_request=pull_request + ) + and not is_merged + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {BUILD_CONTAINER_STR}.") self.logger.step( # type: ignore[attr-defined] @@ -340,7 +372,7 @@ async def run_build_container( f"Setting container build check status to in-progress", ) if set_check: - await self.check_run_handler.set_container_build_in_progress() + await self.check_run_handler.set_container_build_in_progress(pull_request=pull_request) _container_repository_and_tag = self.github_webhook.container_repository_and_tag( pull_request=pull_request, is_merged=is_merged, tag=tag @@ -388,7 +420,7 @@ async def run_build_container( ) output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) if pull_request and set_check: - await self.check_run_handler.set_container_build_failure(output=output) + await 
self.check_run_handler.set_container_build_failure(output=output, pull_request=pull_request) return self.logger.step( # type: ignore[attr-defined] @@ -407,14 +439,18 @@ async def run_build_container( ) self.logger.info(f"{self.log_prefix} Done building {_container_repository_and_tag}") if pull_request and set_check: - return await self.check_run_handler.set_container_build_success(output=output) + return await self.check_run_handler.set_container_build_success( + output=output, pull_request=pull_request + ) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'failed')} Container build failed" ) self.logger.error(f"{self.log_prefix} Failed to build {_container_repository_and_tag}") if pull_request and set_check: - return await self.check_run_handler.set_container_build_failure(output=output) + return await self.check_run_handler.set_container_build_failure( + output=output, pull_request=pull_request + ) if push and build_rc: self.logger.step( # type: ignore[attr-defined] @@ -488,7 +524,9 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"Starting Python module installation" ) - if await self.check_run_handler.is_check_run_in_progress(check_run=PYTHON_MODULE_INSTALL_STR): + if await self.check_run_handler.is_check_run_in_progress( + check_run=PYTHON_MODULE_INSTALL_STR, pull_request=pull_request + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {PYTHON_MODULE_INSTALL_STR}.") self.logger.info(f"{self.log_prefix} Installing python module") @@ -496,7 +534,7 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting Python module install check status to in-progress", ) - await self.check_run_handler.set_python_module_install_in_progress() + await self.check_run_handler.set_python_module_install_in_progress(pull_request=pull_request) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Preparing repository checkout for Python module installation", @@ -513,7 +551,9 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"Repository preparation failed for Python module installation", ) output["text"] = self.check_run_handler.get_check_run_text(out=out, err=err) - return await self.check_run_handler.set_python_module_install_failure(output=output) + return await self.check_run_handler.set_python_module_install_failure( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " @@ -532,14 +572,18 @@ async def run_install_python_module(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Python module installation completed successfully", ) - return await self.check_run_handler.set_python_module_install_success(output=output) + return await self.check_run_handler.set_python_module_install_success( + output=output, pull_request=pull_request + ) self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} " f"{format_task_fields('runner', 'ci_check', 'failed')} " f"Python module installation failed" ) - return await self.check_run_handler.set_python_module_install_failure(output=output) + return await self.check_run_handler.set_python_module_install_failure( + output=output, 
pull_request=pull_request + ) async def run_conventional_title_check(self, pull_request: PullRequest) -> None: if not self.github_webhook.conventional_title: @@ -559,14 +603,16 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None: ), } - if await self.check_run_handler.is_check_run_in_progress(check_run=CONVENTIONAL_TITLE_STR): + if await self.check_run_handler.is_check_run_in_progress( + check_run=CONVENTIONAL_TITLE_STR, pull_request=pull_request + ): self.logger.info(f"{self.log_prefix} Check run is in progress, re-running {CONVENTIONAL_TITLE_STR}.") self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'processing')} " f"Setting conventional title check status to in-progress", ) - await self.check_run_handler.set_conventional_title_in_progress() + await self.check_run_handler.set_conventional_title_in_progress(pull_request=pull_request) allowed_names = [name.strip() for name in self.github_webhook.conventional_title.split(",") if name.strip()] title = pull_request.title @@ -576,7 +622,7 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None: f"{self.log_prefix} {format_task_fields('runner', 'ci_check', 'completed')} " f"Conventional title check completed successfully", ) - await self.check_run_handler.set_conventional_title_success(output=output) + await self.check_run_handler.set_conventional_title_success(output=output, pull_request=pull_request) else: self.logger.step( # type: ignore[attr-defined] f"{self.log_prefix} " @@ -618,7 +664,7 @@ async def run_conventional_title_check(self, pull_request: PullRequest) -> None: **Resources:** - [Conventional Commits v1.0.0 Specification](https://www.conventionalcommits.org/en/v1.0.0/) """ - await self.check_run_handler.set_conventional_title_failure(output=output) + await self.check_run_handler.set_conventional_title_failure(output=output, pull_request=pull_request) async def is_branch_exists(self, branch: str) -> Branch: return await asyncio.to_thread(self.repository.get_branch, branch) @@ -742,3 +788,108 @@ async def cherry_pick(self, pull_request: PullRequest, target_branch: str, revie await asyncio.to_thread( pull_request.create_issue_comment, f"Cherry-picked PR {pull_request.title} into {target_branch}" ) + + async def run_retests(self, supported_retests: list[str], pull_request: PullRequest) -> None: + """Run the specified retests for a pull request. + + Args: + supported_retests: List of test names to run (e.g., ['tox', 'pre-commit']) + pull_request: The PullRequest object to run tests for + + Note: + Uses asyncio.gather with return_exceptions=True to continue processing + even if some tasks fail. Failed tasks are logged for debugging. 
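+
+        Example:
+            Illustrative call only (mirroring the retest flow in issue_comment_handler);
+            the check names match the constants mapped below:
+
+                await self.runner_handler.run_retests(
+                    supported_retests=["tox", "pre-commit"],
+                    pull_request=pull_request,
+                )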
+ """ + if not supported_retests: + self.logger.debug(f"{self.log_prefix} No retests to run") + return + + # Map check names to runner functions + _retests_to_func_map: dict[str, RetestFunction] = { + TOX_STR: self.run_tox, + PRE_COMMIT_STR: self.run_pre_commit, + BUILD_CONTAINER_STR: self.run_build_container, + PYTHON_MODULE_INSTALL_STR: self.run_install_python_module, + CONVENTIONAL_TITLE_STR: self.run_conventional_title_check, + } + + tasks: list[Task[None]] = [] + task_names: list[str] = [] # Track names parallel to tasks + for _test in supported_retests: + if _test not in _retests_to_func_map: + self.logger.error(f"{self.log_prefix} Unknown retest type: {_test}") + continue + self.logger.debug(f"{self.log_prefix} running retest {_test}") + task = asyncio.create_task(_retests_to_func_map[_test](pull_request=pull_request)) + tasks.append(task) + task_names.append(_test) # Track name at same index as task + + results = await asyncio.gather(*tasks, return_exceptions=True) + # Log any task failures for debugging + for i, result in enumerate(results): + if isinstance(result, asyncio.CancelledError): + # Re-raise CancelledError to propagate cancellation + raise result + if isinstance(result, Exception): + test_name = task_names[i] + self.logger.error(f"{self.log_prefix} Retest task '{test_name}' failed: {result}") + + async def run_retests_from_config(self, pull_request: PullRequest) -> bool: + """Run retests based on retrigger-checks-on-base-push configuration. + + Determines which checks to run based on the configuration and available checks, + then calls run_retests() to execute them. + + Args: + pull_request: The pull request to run checks on + + Returns: + True if checks were triggered, False if skipped (no config, no available checks, etc.) + """ + available_checks = self.github_webhook.current_pull_request_supported_retest + + if not available_checks: + self.logger.debug(f"{self.log_prefix} No checks configured for this repository") + return False + + retrigger_config = self.github_webhook.retrigger_checks_on_base_push + + # None is the expected "disabled" state - return silently + if retrigger_config is None: + return False + + if retrigger_config == "all": + checks_to_run = available_checks + elif isinstance(retrigger_config, list): + # Filter to only configured checks that are available + checks_to_run = [check for check in retrigger_config if check in available_checks] + if not checks_to_run: + self.logger.warning( + f"{self.log_prefix} None of the configured retrigger checks {retrigger_config} " + f"are available. 
Available: {available_checks}" + ) + return False + else: + self.logger.warning(f"{self.log_prefix} Invalid retrigger config: {retrigger_config}") + return False + + pr_number = pull_request.number + self.logger.info(f"{self.log_prefix} Re-triggering checks for PR #{pr_number}: {checks_to_run}") + + # Set all checks to queued before starting the batch (re-trigger flow only) + self.logger.debug(f"{self.log_prefix} Setting {len(checks_to_run)} checks to queued status") + for check in checks_to_run: + if check == TOX_STR: + await self.check_run_handler.set_run_tox_check_queued(pull_request=pull_request) + elif check == PRE_COMMIT_STR: + await self.check_run_handler.set_run_pre_commit_check_queued(pull_request=pull_request) + elif check == BUILD_CONTAINER_STR: + await self.check_run_handler.set_container_build_queued(pull_request=pull_request) + elif check == PYTHON_MODULE_INSTALL_STR: + await self.check_run_handler.set_python_module_install_queued(pull_request=pull_request) + elif check == CONVENTIONAL_TITLE_STR: + await self.check_run_handler.set_conventional_title_queued(pull_request=pull_request) + + # Now run the actual checks (they will update to in-progress when they start) + await self.run_retests(supported_retests=checks_to_run, pull_request=pull_request) + return True diff --git a/webhook_server/tests/test_check_run_handler.py b/webhook_server/tests/test_check_run_handler.py index 07d33288..aa0bb353 100644 --- a/webhook_server/tests/test_check_run_handler.py +++ b/webhook_server/tests/test_check_run_handler.py @@ -138,7 +138,7 @@ async def test_set_run_tox_check_queued_enabled(self, check_run_handler: CheckRu with patch.object(check_run_handler.github_webhook, "tox", True): with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_queued() - mock_set_status.assert_called_once_with(check_run=TOX_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with(check_run=TOX_STR, status=QUEUED_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_tox_check_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -153,7 +153,7 @@ async def test_set_run_tox_check_in_progress(self, check_run_handler: CheckRunHa """Test setting tox check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_in_progress() - mock_set_status.assert_called_once_with(check_run=TOX_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with(check_run=TOX_STR, status=IN_PROGRESS_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_tox_check_failure(self, check_run_handler: CheckRunHandler) -> None: @@ -161,7 +161,9 @@ async def test_set_run_tox_check_failure(self, check_run_handler: CheckRunHandle output = {"title": "Test failed", "summary": "Test summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_failure(output) - mock_set_status.assert_called_once_with(check_run=TOX_STR, conclusion=FAILURE_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=TOX_STR, conclusion=FAILURE_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_tox_check_success(self, check_run_handler: CheckRunHandler) -> None: @@ -169,7 +171,9 @@ async def test_set_run_tox_check_success(self, check_run_handler: CheckRunHandle output = {"title": "Test passed", "summary": "Test summary"} with 
patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_tox_check_success(output) - mock_set_status.assert_called_once_with(check_run=TOX_STR, conclusion=SUCCESS_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=TOX_STR, conclusion=SUCCESS_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_queued_enabled(self, check_run_handler: CheckRunHandler) -> None: @@ -177,7 +181,7 @@ async def test_set_run_pre_commit_check_queued_enabled(self, check_run_handler: check_run_handler.github_webhook.pre_commit = True with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_queued() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=QUEUED_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_pre_commit_check_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -192,7 +196,7 @@ async def test_set_run_pre_commit_check_in_progress(self, check_run_handler: Che """Test setting pre-commit check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_in_progress() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, status=IN_PROGRESS_STR, pull_request=None) @pytest.mark.asyncio async def test_set_run_pre_commit_check_failure(self, check_run_handler: CheckRunHandler) -> None: @@ -200,14 +204,18 @@ async def test_set_run_pre_commit_check_failure(self, check_run_handler: CheckRu output = {"title": "Pre-commit failed", "summary": "Pre-commit summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_failure(output) - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=output, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_failure_no_output(self, check_run_handler: CheckRunHandler) -> None: """Test setting pre-commit check to failure status without output.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_failure() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=None) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=FAILURE_STR, output=None, pull_request=None + ) @pytest.mark.asyncio async def test_set_run_pre_commit_check_success(self, check_run_handler: CheckRunHandler) -> None: @@ -215,14 +223,18 @@ async def test_set_run_pre_commit_check_success(self, check_run_handler: CheckRu output = {"title": "Pre-commit passed", "summary": "Pre-commit summary"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_success(output) - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=output, pull_request=None + ) 
@pytest.mark.asyncio async def test_set_run_pre_commit_check_success_no_output(self, check_run_handler: CheckRunHandler) -> None: """Test setting pre-commit check to success status without output.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_run_pre_commit_check_success() - mock_set_status.assert_called_once_with(check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=None) + mock_set_status.assert_called_once_with( + check_run=PRE_COMMIT_STR, conclusion=SUCCESS_STR, output=None, pull_request=None + ) @pytest.mark.asyncio async def test_set_merge_check_queued(self, check_run_handler: CheckRunHandler) -> None: @@ -267,7 +279,9 @@ async def test_set_container_build_queued_enabled(self, check_run_handler: Check with patch.object(check_run_handler.github_webhook, "build_and_push_container", True): with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_queued() - mock_set_status.assert_called_once_with(check_run=BUILD_CONTAINER_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=BUILD_CONTAINER_STR, status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_container_build_queued_disabled(self, check_run_handler: CheckRunHandler) -> None: @@ -282,7 +296,9 @@ async def test_set_container_build_in_progress(self, check_run_handler: CheckRun """Test setting container build check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_in_progress() - mock_set_status.assert_called_once_with(check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=BUILD_CONTAINER_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_container_build_success(self, check_run_handler: CheckRunHandler) -> None: @@ -291,7 +307,7 @@ async def test_set_container_build_success(self, check_run_handler: CheckRunHand with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_success(output) mock_set_status.assert_called_once_with( - check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output + check_run=BUILD_CONTAINER_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -301,7 +317,7 @@ async def test_set_container_build_failure(self, check_run_handler: CheckRunHand with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_container_build_failure(output) mock_set_status.assert_called_once_with( - check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output + check_run=BUILD_CONTAINER_STR, conclusion=FAILURE_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -310,7 +326,9 @@ async def test_set_python_module_install_queued_enabled(self, check_run_handler: check_run_handler.github_webhook.pypi = {"token": "test"} with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_queued() - mock_set_status.assert_called_once_with(check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=PYTHON_MODULE_INSTALL_STR, status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_python_module_install_queued_disabled(self, check_run_handler: CheckRunHandler) -> 
None: @@ -325,7 +343,9 @@ async def test_set_python_module_install_in_progress(self, check_run_handler: Ch """Test setting python module install check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_in_progress() - mock_set_status.assert_called_once_with(check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=PYTHON_MODULE_INSTALL_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_python_module_install_success(self, check_run_handler: CheckRunHandler) -> None: @@ -334,7 +354,7 @@ async def test_set_python_module_install_success(self, check_run_handler: CheckR with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_success(output) mock_set_status.assert_called_once_with( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -344,7 +364,7 @@ async def test_set_python_module_install_failure(self, check_run_handler: CheckR with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_python_module_install_failure(output) mock_set_status.assert_called_once_with( - check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output + check_run=PYTHON_MODULE_INSTALL_STR, conclusion=FAILURE_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -352,14 +372,18 @@ async def test_set_conventional_title_queued(self, check_run_handler: CheckRunHa """Test setting conventional title check to queued status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_queued() - mock_set_status.assert_called_once_with(check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR) + mock_set_status.assert_called_once_with( + check_run=CONVENTIONAL_TITLE_STR, status=QUEUED_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_conventional_title_in_progress(self, check_run_handler: CheckRunHandler) -> None: """Test setting conventional title check to in progress status.""" with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_in_progress() - mock_set_status.assert_called_once_with(check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR) + mock_set_status.assert_called_once_with( + check_run=CONVENTIONAL_TITLE_STR, status=IN_PROGRESS_STR, pull_request=None + ) @pytest.mark.asyncio async def test_set_conventional_title_success(self, check_run_handler: CheckRunHandler) -> None: @@ -368,7 +392,7 @@ async def test_set_conventional_title_success(self, check_run_handler: CheckRunH with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await check_run_handler.set_conventional_title_success(output) mock_set_status.assert_called_once_with( - check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output + check_run=CONVENTIONAL_TITLE_STR, conclusion=SUCCESS_STR, output=output, pull_request=None ) @pytest.mark.asyncio @@ -378,7 +402,7 @@ async def test_set_conventional_title_failure(self, check_run_handler: CheckRunH with patch.object(check_run_handler, "set_check_run_status") as mock_set_status: await 
check_run_handler.set_conventional_title_failure(output) mock_set_status.assert_called_once_with( - check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output + check_run=CONVENTIONAL_TITLE_STR, conclusion=FAILURE_STR, output=output, pull_request=None ) @pytest.mark.asyncio diff --git a/webhook_server/tests/test_issue_comment_handler.py b/webhook_server/tests/test_issue_comment_handler.py index 183cdd1f..8ff7ba48 100644 --- a/webhook_server/tests/test_issue_comment_handler.py +++ b/webhook_server/tests/test_issue_comment_handler.py @@ -40,6 +40,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.issue_url_for_welcome_msg = "welcome-message-url" mock_webhook.build_and_push_container = True mock_webhook.current_pull_request_supported_retest = [TOX_STR, "pre-commit"] + mock_webhook.tox_max_concurrent = 5 return mock_webhook @pytest.fixture diff --git a/webhook_server/tests/test_pull_request_handler.py b/webhook_server/tests/test_pull_request_handler.py index 93e1a3bd..55e8bfe0 100644 --- a/webhook_server/tests/test_pull_request_handler.py +++ b/webhook_server/tests/test_pull_request_handler.py @@ -1,4 +1,6 @@ import asyncio +from collections.abc import Callable +from typing import Any, TypeVar from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -18,11 +20,21 @@ HAS_CONFLICTS_LABEL_STR, LGTM_BY_LABEL_PREFIX, NEEDS_REBASE_LABEL_STR, + PRE_COMMIT_STR, TOX_STR, VERIFIED_LABEL_STR, WIP_STR, ) +T = TypeVar("T") + + +# Async shim for mocking asyncio.to_thread in tests +# This allows us to run sync functions in tests while preserving async/await semantics +async def _sync_to_thread(func: Callable[..., T], *args: Any, **kwargs: Any) -> T: # noqa: UP047 + """Mock implementation of asyncio.to_thread that runs synchronously but returns awaitable.""" + return func(*args, **kwargs) + class _AwaitableValue: def __init__(self, return_value: dict | None = None) -> None: @@ -280,7 +292,8 @@ async def test_process_pull_request_webhook_data_closed_action_merged( pull_request_handler.runner_handler, "run_build_container", new_callable=AsyncMock ) as mock_build: with patch.object( - pull_request_handler, "label_all_opened_pull_requests_merge_state_after_merged" + pull_request_handler, + "label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged", ) as mock_label_all: await pull_request_handler.process_pull_request_webhook_data(mock_pull_request) mock_close_issue.assert_called_once_with( @@ -401,21 +414,108 @@ def test_prepare_retest_welcome_comment(self, pull_request_handler: PullRequestH assert "pre-commit" in result @pytest.mark.asyncio - async def test_label_all_opened_pull_requests_merge_state_after_merged( + async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged( self, pull_request_handler: PullRequestHandler ) -> None: - """Test labeling all opened pull requests merge state after merged.""" + """Test labeling all opened pull requests merge state after merged with retrigger not configured.""" mock_pr1 = Mock() mock_pr2 = Mock() mock_pr1.number = 1 mock_pr2.number = 2 + mock_pr1.mergeable_state = "clean" + mock_pr2.mergeable_state = "clean" + + pull_request_handler.github_webhook.retrigger_checks_on_base_push = None with patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]): with patch.object(pull_request_handler, "label_pull_request_by_merge_state", new=AsyncMock()) as mock_label: with patch("asyncio.sleep", new=AsyncMock()): - await 
pull_request_handler.label_all_opened_pull_requests_merge_state_after_merged() + await ( + pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + ) assert mock_label.await_count == 2 + @pytest.mark.asyncio + async def test_label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged_with_retrigger( + self, pull_request_handler: PullRequestHandler + ) -> None: + """Test labeling all opened pull requests with retrigger enabled.""" + mock_pr1 = Mock() + mock_pr2 = Mock() + mock_pr1.number = 1 + mock_pr2.number = 2 + mock_pr1.mergeable_state = "behind" + mock_pr2.mergeable_state = "clean" + + pull_request_handler.github_webhook.retrigger_checks_on_base_push = "all" + + async def mock_label_side_effect(pull_request: Mock | PullRequest) -> str: + return pull_request.mergeable_state + + with ( + patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr1, mock_pr2]), + patch.object( + pull_request_handler, + "label_pull_request_by_merge_state", + new=AsyncMock(side_effect=mock_label_side_effect), + ) as mock_label, + patch.object(pull_request_handler, "_retrigger_check_suites_for_pr", new=AsyncMock()) as mock_retrigger, + patch("asyncio.sleep", new=AsyncMock()), + patch("asyncio.to_thread", new=_sync_to_thread), + ): + await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + # Verify labeling called for both PRs + assert mock_label.await_count == 2 + # Verify retrigger only called for PR with merge_state="behind" + mock_retrigger.assert_called_once_with(pull_request=mock_pr1) + + @pytest.mark.asyncio + @pytest.mark.parametrize( + "merge_state,should_retrigger", + [ + ("behind", True), # Out-of-date - should retrigger + ("blocked", True), # Blocked by reviews/checks - currently triggers (per implementation) + ("clean", False), # Up-to-date - no retrigger needed + ("dirty", False), # Has conflicts - retrigger won't help + ("unstable", False), # Failing checks - no retrigger + ("unknown", False), # Unknown state - skip processing + ], + ) + async def test_label_all_opened_prs_retrigger_for_different_merge_states( + self, + pull_request_handler: PullRequestHandler, + mock_github_webhook: Mock, + merge_state: str, + should_retrigger: bool, + ) -> None: + """Test retrigger behavior for different merge states.""" + mock_github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr = Mock() + mock_pr.number = 1 + mock_pr.mergeable_state = merge_state + + with ( + patch("asyncio.sleep", new_callable=AsyncMock), + patch("asyncio.to_thread", new=_sync_to_thread), + patch.object(pull_request_handler.repository, "get_pulls", return_value=[mock_pr]), + patch.object( + pull_request_handler, + "label_pull_request_by_merge_state", + new_callable=AsyncMock, + return_value=merge_state, + ), + patch.object( + pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock + ) as mock_retrigger, + ): + await pull_request_handler.label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged() + + if should_retrigger: + mock_retrigger.assert_called_once_with(pull_request=mock_pr) + else: + mock_retrigger.assert_not_called() + @pytest.mark.asyncio async def test_delete_remote_tag_for_merged_or_closed_pr_with_tag( self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock @@ -1547,7 +1647,7 @@ async def test_process_labeled_wip( mock_pull_request.labels = [mock_label] with patch.object(pull_request_handler, "check_if_can_be_merged", new=AsyncMock()) as 
mock_check_merge: - with patch("asyncio.to_thread", side_effect=lambda f, *args: f(*args) if callable(f) else None): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.process_pull_request_webhook_data(mock_pull_request) mock_check_merge.assert_awaited_once() @@ -1593,9 +1693,7 @@ async def test_delete_ghcr_tag_exceptions( side_effect=GithubException(404, "Not Found") ) - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler._delete_ghcr_tag_via_github_api(mock_pull_request, "ghcr.io/org/pkg:123", "123") pull_request_handler.logger.warning.assert_called_with("[TEST] Package pkg not found for owner org on GHCR") @@ -1612,9 +1710,7 @@ async def test_add_assignee_exception( pull_request_handler.owners_file_handler.root_approvers = ["approver1"] - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.add_pull_request_owner_as_assingee(mock_pull_request) pull_request_handler.logger.debug.assert_any_call("[TEST] Exception while adding PR owner as assignee: Failed") @@ -1725,9 +1821,7 @@ async def test_set_pull_request_automerge_exception( pull_request_handler.github_webhook.set_auto_merge_prs = ["main"] mock_pull_request.base.ref = "main" - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.set_pull_request_automerge(mock_pull_request) pull_request_handler.logger.error.assert_called_with( @@ -1741,9 +1835,7 @@ async def test_label_pull_request_by_merge_state_unknown( """Test label_pull_request_by_merge_state when unknown.""" mock_pull_request.mergeable_state = "unknown" - with patch( - "asyncio.to_thread", side_effect=lambda f, *args, **kwargs: f(*args, **kwargs) if callable(f) else None - ): + with patch("asyncio.to_thread", new=_sync_to_thread): await pull_request_handler.label_pull_request_by_merge_state(mock_pull_request) # Should return early @@ -1782,3 +1874,139 @@ async def test_delete_registry_tag_via_regctl_failure( pull_request_handler.logger.error.assert_called_with( "[TEST] Failed to delete tag: tag. OUT:Delete failed. 
ERR:Error" ) + + @pytest.mark.asyncio + async def test_label_pull_request_by_merge_state_only_labels( + self, pull_request_handler: PullRequestHandler, mock_pull_request: Mock + ) -> None: + """Test label_pull_request_by_merge_state only handles labeling, not check triggering.""" + mock_pull_request.mergeable_state = "behind" + + with ( + patch("asyncio.to_thread", new=_sync_to_thread), + patch.object(pull_request_handler.labels_handler, "_add_label", new_callable=AsyncMock) as mock_add_label, + patch.object( + pull_request_handler, "_retrigger_check_suites_for_pr", new_callable=AsyncMock + ) as mock_retrigger, + ): + await pull_request_handler.label_pull_request_by_merge_state(pull_request=mock_pull_request) + + # Verify labeling happened + mock_add_label.assert_called_once_with(pull_request=mock_pull_request, label=NEEDS_REBASE_LABEL_STR) + # Verify check triggering did NOT happen (separation of concerns) + mock_retrigger.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_success( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr successfully runs configured checks.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = "all" + + # Mock the run_retests_from_config method + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests_from_config was called with the correct arguments + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_no_check_suites( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr when repository has no configured checks.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [] + + # Mock the run_retests_from_config method (returns False when no checks) + mock_run_retests_from_config = AsyncMock(return_value=False) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_exception( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr catches and logs exceptions from runners.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR] + mock_github_webhook.retrigger_checks_on_base_push = "all" + + # Mock run_retests_from_config to raise exception + mock_run_retests_from_config = AsyncMock(side_effect=Exception("Runner failed")) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + # The 
exception should be caught and logged, not propagated + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + # Verify exception was logged + pull_request_handler.logger.exception.assert_called_once_with( + "[TEST] Failed to re-trigger checks for PR #123" + ) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_specific_checks_list( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr with specific checks list.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR, "build-container"] + mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, PRE_COMMIT_STR] + + # Mock the run_retests_from_config method + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_nonexistent_checks( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr when configured checks don't exist.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = ["nonexistent-check"] + + # Mock the run_retests_from_config method (returns False when no matching checks) + mock_run_retests_from_config = AsyncMock(return_value=False) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) + + @pytest.mark.asyncio + async def test_retrigger_check_suites_for_pr_with_partial_match( + self, pull_request_handler: PullRequestHandler, mock_github_webhook: Mock, mock_pull_request: Mock + ) -> None: + """Test _retrigger_check_suites_for_pr with partial match.""" + mock_pull_request.number = 123 + mock_github_webhook.current_pull_request_supported_retest = [TOX_STR, PRE_COMMIT_STR] + mock_github_webhook.retrigger_checks_on_base_push = [TOX_STR, "nonexistent-check"] + + # Mock the run_retests_from_config method (returns True when checks match) + mock_run_retests_from_config = AsyncMock(return_value=True) + pull_request_handler.runner_handler.run_retests_from_config = mock_run_retests_from_config + + with patch("asyncio.to_thread", new=_sync_to_thread): + await pull_request_handler._retrigger_check_suites_for_pr(mock_pull_request) + + # Verify run_retests_from_config was called + mock_run_retests_from_config.assert_called_once_with(pull_request=mock_pull_request) diff --git a/webhook_server/tests/test_push_handler.py b/webhook_server/tests/test_push_handler.py index 0fac3555..58f4aea8 100644 --- a/webhook_server/tests/test_push_handler.py +++ b/webhook_server/tests/test_push_handler.py @@ -1,6 +1,7 @@ """Tests for webhook_server.libs.handlers.push_handler module.""" 
from contextlib import asynccontextmanager +from datetime import UTC, datetime, timedelta from unittest.mock import AsyncMock, Mock, patch import pytest @@ -43,12 +44,20 @@ def mock_github_webhook(self) -> Mock: mock_webhook.container_repository_username = "test-user" # Always a string mock_webhook.container_repository_password = "test-password" # Always a string # pragma: allowlist secret mock_webhook.token = "test-token" # Always a string + mock_webhook.tox_max_concurrent = 5 return mock_webhook @pytest.fixture def push_handler(self, mock_github_webhook: Mock) -> PushHandler: """Create a PushHandler instance with mocked dependencies.""" - return PushHandler(mock_github_webhook) + handler = PushHandler(mock_github_webhook) + # Mock check_run_handler methods used by run_retests_from_config + handler.runner_handler.check_run_handler.set_run_tox_check_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_run_pre_commit_check_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_container_build_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_python_module_install_queued = AsyncMock() + handler.runner_handler.check_run_handler.set_conventional_title_queued = AsyncMock() + return handler @pytest.mark.asyncio async def test_process_push_webhook_data_with_tag_and_pypi(self, push_handler: PushHandler) -> None: @@ -100,6 +109,7 @@ async def test_process_push_webhook_data_with_tag_no_container_release(self, pus async def test_process_push_webhook_data_no_tag(self, push_handler: PushHandler) -> None: """Test processing push webhook data without tag.""" push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = None with patch.object(push_handler, "upload_to_pypi", new_callable=AsyncMock) as mock_upload: with patch.object(push_handler.runner_handler, "run_build_container", new_callable=AsyncMock) as mock_build: @@ -410,3 +420,379 @@ async def test_upload_to_pypi_slack_message_format(self, push_handler: PushHandl assert "published to PYPI" in call_args[1]["message"] assert call_args[1]["logger"] == push_handler.logger assert call_args[1]["log_prefix"] == push_handler.log_prefix + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_retrigger_enabled(self, push_handler: PushHandler) -> None: + """Test processing branch push with retrigger enabled.""" + push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_called_once_with(branch_name="main") + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_retrigger_disabled(self, push_handler: PushHandler) -> None: + """Test processing branch push with retrigger not configured.""" + push_handler.hook_data["ref"] = "refs/heads/main" + push_handler.github_webhook.retrigger_checks_on_base_push = None + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_not_called() + + @pytest.mark.asyncio + async def test_process_push_webhook_data_branch_push_feature_branch(self, push_handler: PushHandler) -> None: + """Test processing push to feature branch.""" + push_handler.hook_data["ref"] = 
"refs/heads/feature/my-feature" + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + with patch.object( + push_handler, "_retrigger_checks_for_prs_targeting_branch", new_callable=AsyncMock + ) as mock_retrigger: + await push_handler.process_push_webhook_data() + + mock_retrigger.assert_called_once_with(branch_name="feature/my-feature") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_no_prs(self, push_handler: PushHandler) -> None: + """Test retrigger when no PRs target the branch.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [] + + with patch("asyncio.sleep", new_callable=AsyncMock): + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_get_pulls.assert_called_once_with(state="open", base="main") + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_behind(self, push_handler: PushHandler) -> None: + """Test retrigger for PR with merge state 'behind'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_called_once_with(supported_retests=["tox", "pre-commit"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_blocked(self, push_handler: PushHandler) -> None: + """Test retrigger for PR with merge state 'blocked'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr = Mock() + mock_pr.number = 456 + mock_pr.mergeable_state = "blocked" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_called_once_with(supported_retests=["tox"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_pr_clean(self, push_handler: PushHandler) -> None: + """Test that retrigger skips PR with merge state 'clean'.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 789 + mock_pr.mergeable_state = "clean" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, 
"run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_multiple_prs(self, push_handler: PushHandler) -> None: + """Test retrigger with multiple PRs in different states.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr1 = Mock() + mock_pr1.number = 100 + mock_pr1.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr1.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + mock_pr2 = Mock() + mock_pr2.number = 200 + mock_pr2.mergeable_state = "clean" + + mock_pr3 = Mock() + mock_pr3.number = 300 + mock_pr3.mergeable_state = "blocked" + # Set updated_at to more than 60 seconds ago + mock_pr3.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr1, mock_pr2, mock_pr3] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should be called twice: for PR 100 (behind) and PR 300 (blocked) + assert mock_retests.call_count == 2 + calls = mock_retests.call_args_list + assert calls[0].kwargs["pull_request"] == mock_pr1 + assert calls[1].kwargs["pull_request"] == mock_pr3 + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_targeting_branch_no_checks_configured( + self, push_handler: PushHandler + ) -> None: + """Test retrigger when no checks are configured.""" + push_handler.github_webhook.current_pull_request_supported_retest = [] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_waits_for_github(self, push_handler: PushHandler) -> None: + """Test that retrigger waits 30 seconds for GitHub to update merge states.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [] + + with patch("asyncio.sleep", new_callable=AsyncMock) as mock_sleep: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + mock_sleep.assert_called_once_with(30) + + @pytest.mark.asyncio + async def test_retrigger_checks_with_specific_checks_list(self, push_handler: PushHandler) -> None: + """Test retrigger with specific checks list.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit", "build-container"] + push_handler.github_webhook.retrigger_checks_on_base_push = ["tox", "pre-commit"] + + mock_pr = Mock() + mock_pr.number = 123 + 
mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should only run configured checks, not all available + mock_retests.assert_called_once_with(supported_retests=["tox", "pre-commit"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_with_nonexistent_checks(self, push_handler: PushHandler) -> None: + """Test retrigger when configured checks don't exist.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = ["nonexistent-check"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should not run any checks since none match + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_with_partial_match(self, push_handler: PushHandler) -> None: + """Test retrigger with some configured checks matching available checks.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = ["tox", "nonexistent-check"] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should only run checks that match + mock_retests.assert_called_once_with(supported_retests=["tox"], pull_request=mock_pr) + + @pytest.mark.asyncio + async def test_retrigger_checks_skips_recently_updated_pr(self, push_handler: PushHandler) -> None: + """Test that retrigger skips PR that was updated within the last minute.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to less than 60 seconds ago (30 seconds) + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=30) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, 
"run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Should not trigger since PR was recently updated + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_with_empty_list(self, push_handler: PushHandler) -> None: + """Test retrigger disabled with empty list.""" + push_handler.github_webhook.current_pull_request_supported_retest = ["tox", "pre-commit"] + push_handler.github_webhook.retrigger_checks_on_base_push = [] + + mock_pr = Mock() + mock_pr.number = 123 + mock_pr.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object(push_handler.runner_handler, "run_retests", new_callable=AsyncMock) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + + # Empty list is treated as disabled - should not trigger + # But the current implementation will process the PR since the check happens at webhook level + # This test validates current behavior + mock_retests.assert_not_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_with_unknown_merge_state(self, push_handler: PushHandler) -> None: + """Test that PRs with unknown merge state are skipped with warning. + + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. + """ + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 999 + mock_pr.mergeable_state = "unknown" + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + mock_retests.assert_not_called() + push_handler.logger.warning.assert_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_for_prs_with_none_merge_state(self, push_handler: PushHandler) -> None: + """Test that PRs with None merge state are skipped with warning. + + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. + """ + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + + mock_pr = Mock() + mock_pr.number = 888 + mock_pr.mergeable_state = None + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + mock_retests.assert_not_called() + push_handler.logger.warning.assert_called() + + @pytest.mark.asyncio + async def test_retrigger_checks_continues_on_exception(self, push_handler: PushHandler) -> None: + """Test that exception in one PR doesn't stop processing others. 
+ + Note: These tests mock run_retests_from_config to exercise higher-level behavior + and exception propagation, while run_retests is tested directly in other tests. + """ + push_handler.github_webhook.current_pull_request_supported_retest = ["tox"] + push_handler.github_webhook.retrigger_checks_on_base_push = "all" + + mock_pr1 = Mock() + mock_pr1.number = 100 + mock_pr1.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr1.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + mock_pr2 = Mock() + mock_pr2.number = 200 + mock_pr2.mergeable_state = "behind" + # Set updated_at to more than 60 seconds ago + mock_pr2.updated_at = datetime.now(UTC) - timedelta(seconds=120) + + with patch.object(push_handler.repository, "get_pulls") as mock_get_pulls: + mock_get_pulls.return_value = [mock_pr1, mock_pr2] + with patch("asyncio.sleep", new_callable=AsyncMock): + with patch.object( + push_handler.runner_handler, "run_retests_from_config", new_callable=AsyncMock + ) as mock_retests: + # First call raises exception, second succeeds + mock_retests.side_effect = [Exception("Test error"), True] + await push_handler._retrigger_checks_for_prs_targeting_branch(branch_name="main") + # Both PRs should be attempted + assert mock_retests.call_count == 2 + # Exception should be logged + push_handler.logger.exception.assert_called() diff --git a/webhook_server/tests/test_runner_handler.py b/webhook_server/tests/test_runner_handler.py index 7aa4b66e..2225f025 100644 --- a/webhook_server/tests/test_runner_handler.py +++ b/webhook_server/tests/test_runner_handler.py @@ -24,6 +24,7 @@ def mock_github_webhook(self) -> Mock: mock_webhook.clone_repo_dir = "/tmp/test-repo" mock_webhook.tox = {"main": "all"} mock_webhook.tox_python_version = "3.12" + mock_webhook.tox_max_concurrent = 5 mock_webhook.pre_commit = True mock_webhook.build_and_push_container = True mock_webhook.pypi = {"token": "dummy"} @@ -60,6 +61,12 @@ def mock_pull_request(self) -> Mock: mock_pr.merge_commit_sha = "abc123" mock_pr.html_url = "https://github.com/test/repo/pull/123" mock_pr.create_issue_comment = Mock() + + # Mock get_commits() to return an iterable with a commit + mock_commit = Mock() + mock_commit.sha = "test-commit-sha" + mock_pr.get_commits = Mock(return_value=[mock_commit]) + return mock_pr @pytest.fixture(autouse=True)
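
The new `test_pull_request_handler.py` cases above specify the merged-PR side of the feature only through mocks. As a reading aid, here is a minimal, hypothetical Python sketch of the behaviour those assertions encode for `label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged` and `_retrigger_check_suites_for_pr`. The attribute and method names mirror the mocks used in the tests; everything else (control flow, log text) is inferred and may differ from the production handlers.

```python
async def label_and_rerun_checks_all_opened_pull_requests_merge_state_after_merged(handler) -> None:
    """Sketch: after a PR merges, re-label every open PR and re-trigger checks for stale ones."""
    # None/empty means the feature is disabled; "all" or a list of check names enables it.
    retrigger = handler.github_webhook.retrigger_checks_on_base_push
    for pull_request in handler.repository.get_pulls():
        merge_state = await handler.label_pull_request_by_merge_state(pull_request)
        # The parametrized test expects re-triggering only for "behind" and "blocked" states.
        if retrigger and merge_state in ("behind", "blocked"):
            await handler._retrigger_check_suites_for_pr(pull_request=pull_request)


async def _retrigger_check_suites_for_pr(handler, pull_request) -> None:
    """Sketch: delegate to the runner; failures are logged, never propagated."""
    try:
        await handler.runner_handler.run_retests_from_config(pull_request=pull_request)
    except Exception:
        handler.logger.exception(f"Failed to re-trigger checks for PR #{pull_request.number}")
```

Keeping the re-trigger decision out of `label_pull_request_by_merge_state` matches `test_label_pull_request_by_merge_state_only_labels`, which asserts that labeling alone never reruns checks.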
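
Similarly, the `test_push_handler.py` additions pin down how a direct branch push should fan out to open PRs without showing the handler itself. The sketch below is a hypothetical reconstruction of `_retrigger_checks_for_prs_targeting_branch` based only on those assertions: the 30-second wait, the 60-second freshness guard, the merge-state filter, and the intersection of the configured checks with the supported retests. Note that most tests patch `runner_handler.run_retests`, while the warning and exception tests patch `run_retests_from_config`, so the real call chain is likely layered; the single runner call shown here is a simplification.

```python
import asyncio
from datetime import UTC, datetime, timedelta

RETRIGGER_STATES = {"behind", "blocked"}  # merge states the tests expect to re-trigger


async def _retrigger_checks_for_prs_targeting_branch(handler, branch_name: str) -> None:
    """Sketch: re-run configured checks on out-of-date PRs whose base branch was pushed to."""
    webhook = handler.github_webhook
    configured = webhook.retrigger_checks_on_base_push  # "all", a list of check names, [] or None
    supported = webhook.current_pull_request_supported_retest

    # test_retrigger_checks_waits_for_github asserts a single 30-second sleep so GitHub can
    # recompute mergeable_state before the PRs are inspected.
    await asyncio.sleep(30)

    # "all" expands to every supported retest; a list is intersected with the supported ones.
    if configured == "all":
        retests = list(supported)
    else:
        retests = [check for check in (configured or []) if check in supported]

    for pull_request in handler.repository.get_pulls(state="open", base=branch_name):
        state = pull_request.mergeable_state
        if state in (None, "unknown"):
            handler.logger.warning(f"PR #{pull_request.number}: merge state unavailable, skipping")
            continue
        if state not in RETRIGGER_STATES:
            continue  # clean, dirty and unstable PRs are left alone
        # PRs updated less than a minute ago are assumed to already be running fresh checks.
        if datetime.now(UTC) - pull_request.updated_at < timedelta(seconds=60):
            continue
        if not retests:
            continue  # nothing configured, or no overlap with the supported checks
        try:
            await handler.runner_handler.run_retests(supported_retests=retests, pull_request=pull_request)
        except Exception:
            # test_retrigger_checks_continues_on_exception: one failing PR must not stop the rest.
            handler.logger.exception(f"Failed to re-trigger checks for PR #{pull_request.number}")
```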