Skip to content

Commit ad49f97

Browse files
Share default terminal-status predicate for polling jobs
Co-authored-by: Shri Sukhani <shrisukhani@users.noreply.github.com>
1 parent 8adf537 commit ad49f97

File tree

16 files changed

+76
-12
lines changed

16 files changed

+76
-12
lines changed

CONTRIBUTING.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -86,6 +86,7 @@ This runs lint, format checks, compile checks, tests, and package build.
8686
- `tests/test_contributing_architecture_guard_listing.py` (`CONTRIBUTING.md` architecture-guard inventory completeness enforcement),
8787
- `tests/test_core_type_helper_usage.py` (core transport/config/header/file/polling/session/error/parsing manager+tool module enforcement of shared plain-type helper usage),
8888
- `tests/test_default_serialization_helper_usage.py` (default optional-query serialization helper usage enforcement),
89+
- `tests/test_default_terminal_status_helper_usage.py` (default terminal-status helper usage enforcement for non-agent managers),
8990
- `tests/test_display_helper_usage.py` (display/key-format helper usage),
9091
- `tests/test_docs_python3_commands.py` (`README`/`CONTRIBUTING`/examples python3 command consistency enforcement),
9192
- `tests/test_example_run_instructions.py` (example run-instruction consistency enforcement),

hyperbrowser/client/managers/async_manager/crawl.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
build_job_paginated_page_merge_callback,
1313
initialize_job_paginated_response,
1414
)
15+
from ..job_status_utils import is_default_terminal_job_status
1516
from ..serialization_utils import (
1617
serialize_model_dump_or_default,
1718
serialize_model_dump_to_dict,
@@ -92,7 +93,7 @@ async def start_and_wait(
9293
job_status = await poll_until_terminal_status_async(
9394
operation_name=operation_name,
9495
get_status=lambda: self.get_status(job_id).status,
95-
is_terminal_status=lambda status: status in {"completed", "failed"},
96+
is_terminal_status=is_default_terminal_job_status,
9697
poll_interval_seconds=poll_interval_seconds,
9798
max_wait_seconds=max_wait_seconds,
9899
max_status_failures=max_status_failures,

hyperbrowser/client/managers/async_manager/extract.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
StartExtractJobResponse,
99
)
1010
from ..extract_payload_utils import build_extract_start_payload
11+
from ..job_status_utils import is_default_terminal_job_status
1112
from ..start_job_utils import build_started_job_context
1213
from ...polling import wait_for_job_result_async
1314
from ..response_utils import parse_response_model
@@ -67,7 +68,7 @@ async def start_and_wait(
6768
return await wait_for_job_result_async(
6869
operation_name=operation_name,
6970
get_status=lambda: self.get_status(job_id).status,
70-
is_terminal_status=lambda status: status in {"completed", "failed"},
71+
is_terminal_status=is_default_terminal_job_status,
7172
fetch_result=lambda: self.get(job_id),
7273
poll_interval_seconds=poll_interval_seconds,
7374
max_wait_seconds=max_wait_seconds,

hyperbrowser/client/managers/async_manager/scrape.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
build_job_paginated_page_merge_callback,
1414
initialize_job_paginated_response,
1515
)
16+
from ..job_status_utils import is_default_terminal_job_status
1617
from ..serialization_utils import (
1718
serialize_model_dump_or_default,
1819
serialize_model_dump_to_dict,
@@ -99,7 +100,7 @@ async def start_and_wait(
99100
job_status = await poll_until_terminal_status_async(
100101
operation_name=operation_name,
101102
get_status=lambda: self.get_status(job_id).status,
102-
is_terminal_status=lambda status: status in {"completed", "failed"},
103+
is_terminal_status=is_default_terminal_job_status,
103104
poll_interval_seconds=poll_interval_seconds,
104105
max_wait_seconds=max_wait_seconds,
105106
max_status_failures=max_status_failures,
@@ -204,7 +205,7 @@ async def start_and_wait(
204205
return await wait_for_job_result_async(
205206
operation_name=operation_name,
206207
get_status=lambda: self.get_status(job_id).status,
207-
is_terminal_status=lambda status: status in {"completed", "failed"},
208+
is_terminal_status=is_default_terminal_job_status,
208209
fetch_result=lambda: self.get(job_id),
209210
poll_interval_seconds=poll_interval_seconds,
210211
max_wait_seconds=max_wait_seconds,

hyperbrowser/client/managers/async_manager/web/batch_fetch.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
POLLING_ATTEMPTS,
1010
)
1111
from ...page_params_utils import build_page_batch_params
12+
from ...job_status_utils import is_default_terminal_job_status
1213
from ...web_payload_utils import build_batch_fetch_start_payload
1314
from ...web_payload_utils import build_batch_fetch_get_params
1415
from ...web_pagination_utils import (
@@ -86,7 +87,7 @@ async def start_and_wait(
8687
job_status = await poll_until_terminal_status_async(
8788
operation_name=operation_name,
8889
get_status=lambda: self.get_status(job_id).status,
89-
is_terminal_status=lambda status: status in {"completed", "failed"},
90+
is_terminal_status=is_default_terminal_job_status,
9091
poll_interval_seconds=poll_interval_seconds,
9192
max_wait_seconds=max_wait_seconds,
9293
max_status_failures=max_status_failures,

hyperbrowser/client/managers/async_manager/web/crawl.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
POLLING_ATTEMPTS,
1010
)
1111
from ...page_params_utils import build_page_batch_params
12+
from ...job_status_utils import is_default_terminal_job_status
1213
from ...web_payload_utils import build_web_crawl_start_payload
1314
from ...web_payload_utils import build_web_crawl_get_params
1415
from ...web_pagination_utils import (
@@ -84,7 +85,7 @@ async def start_and_wait(
8485
job_status = await poll_until_terminal_status_async(
8586
operation_name=operation_name,
8687
get_status=lambda: self.get_status(job_id).status,
87-
is_terminal_status=lambda status: status in {"completed", "failed"},
88+
is_terminal_status=is_default_terminal_job_status,
8889
poll_interval_seconds=poll_interval_seconds,
8990
max_wait_seconds=max_wait_seconds,
9091
max_status_failures=max_status_failures,
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
from typing import FrozenSet

# The statuses that, by default, mean a polling job has reached a terminal
# state and polling should stop. Shared by the non-agent sync/async managers.
DEFAULT_TERMINAL_JOB_STATUSES: FrozenSet[str] = frozenset(("completed", "failed"))


def is_default_terminal_job_status(status: str) -> bool:
    """Return ``True`` when *status* is one of the default terminal job statuses."""
    return status in DEFAULT_TERMINAL_JOB_STATUSES

hyperbrowser/client/managers/sync_manager/crawl.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
build_job_paginated_page_merge_callback,
1313
initialize_job_paginated_response,
1414
)
15+
from ..job_status_utils import is_default_terminal_job_status
1516
from ..serialization_utils import (
1617
serialize_model_dump_or_default,
1718
serialize_model_dump_to_dict,
@@ -92,7 +93,7 @@ def start_and_wait(
9293
job_status = poll_until_terminal_status(
9394
operation_name=operation_name,
9495
get_status=lambda: self.get_status(job_id).status,
95-
is_terminal_status=lambda status: status in {"completed", "failed"},
96+
is_terminal_status=is_default_terminal_job_status,
9697
poll_interval_seconds=poll_interval_seconds,
9798
max_wait_seconds=max_wait_seconds,
9899
max_status_failures=max_status_failures,

hyperbrowser/client/managers/sync_manager/extract.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
StartExtractJobResponse,
99
)
1010
from ..extract_payload_utils import build_extract_start_payload
11+
from ..job_status_utils import is_default_terminal_job_status
1112
from ..start_job_utils import build_started_job_context
1213
from ...polling import wait_for_job_result
1314
from ..response_utils import parse_response_model
@@ -67,7 +68,7 @@ def start_and_wait(
6768
return wait_for_job_result(
6869
operation_name=operation_name,
6970
get_status=lambda: self.get_status(job_id).status,
70-
is_terminal_status=lambda status: status in {"completed", "failed"},
71+
is_terminal_status=is_default_terminal_job_status,
7172
fetch_result=lambda: self.get(job_id),
7273
poll_interval_seconds=poll_interval_seconds,
7374
max_wait_seconds=max_wait_seconds,

hyperbrowser/client/managers/sync_manager/scrape.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
build_job_paginated_page_merge_callback,
1414
initialize_job_paginated_response,
1515
)
16+
from ..job_status_utils import is_default_terminal_job_status
1617
from ..serialization_utils import (
1718
serialize_model_dump_or_default,
1819
serialize_model_dump_to_dict,
@@ -97,7 +98,7 @@ def start_and_wait(
9798
job_status = poll_until_terminal_status(
9899
operation_name=operation_name,
99100
get_status=lambda: self.get_status(job_id).status,
100-
is_terminal_status=lambda status: status in {"completed", "failed"},
101+
is_terminal_status=is_default_terminal_job_status,
101102
poll_interval_seconds=poll_interval_seconds,
102103
max_wait_seconds=max_wait_seconds,
103104
max_status_failures=max_status_failures,
@@ -202,7 +203,7 @@ def start_and_wait(
202203
return wait_for_job_result(
203204
operation_name=operation_name,
204205
get_status=lambda: self.get_status(job_id).status,
205-
is_terminal_status=lambda status: status in {"completed", "failed"},
206+
is_terminal_status=is_default_terminal_job_status,
206207
fetch_result=lambda: self.get(job_id),
207208
poll_interval_seconds=poll_interval_seconds,
208209
max_wait_seconds=max_wait_seconds,

0 commit comments

Comments
 (0)