diff --git a/.gitignore b/.gitignore index a1480ec..4bb67d4 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +# Ignore the root .env file. +/.env + # Ignore all data files. data/ diff --git a/CLAUDE.md b/CLAUDE.md index 5b8c06a..c911bbb 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -46,21 +46,31 @@ cd website && npm install && npm run dev # Dev server at localhost:4321 ## Architecture +### Library (`src/babel_validation/`) + +Shared library code used by the tests and potentially other consumers. + +- `core/testrow.py` — `TestRow` dataclass (models a single Google Sheet test row), `TestStatus` enum, `TestResult` dataclass +- `services/nodenorm.py` — `CachedNodeNorm`: wraps the NodeNorm `get_normalized_nodes` API with per-instance caching +- `services/nameres.py` — `CachedNameRes`: wraps the NameRes `lookup`/`bulk-lookup` APIs with per-instance caching +- `sources/google_sheets/google_sheet_test_cases.py` — `GoogleSheetTestCases`: downloads and parses the shared Google Sheet into `TestRow` instances and pytest `ParameterSet` lists +- `sources/github/github_issues_test_cases.py` — `GitHubIssueTest` and `GitHubIssuesTestCases`: pull test cases embedded in GitHub issues (wiki or YAML syntax) and evaluate them against NodeNorm/NameRes + ### Test Framework (`tests/`) The core of this project. Tests validate NodeNorm and NameRes services across multiple deployment environments. **Target system:** `tests/targets.ini` defines endpoints for each environment (dev, prod, test, ci, exp, localhost). Tests use `target_info` fixture to get URLs. The `conftest.py` parametrizes tests across targets via `--target` CLI option; default is `dev`. -**Google Sheet integration:** ~2000+ test cases are pulled from a [shared Google Sheet](https://docs.google.com/spreadsheets/d/11zebx8Qs1Tc3ShQR9nh4HRW8QSoo8k65w_xIaftN0no/). `tests/common/google_sheet_test_cases.py` fetches and parses these into `TestRow` dataclasses. Rows marked as not expected to pass are wrapped with `pytest.mark.xfail(strict=True)`. Tests are parametrized by row, with IDs like `gsheet:row=42`. +**Google Sheet integration:** ~2000+ test cases are pulled from a [shared Google Sheet](https://docs.google.com/spreadsheets/d/11zebx8Qs1Tc3ShQR9nh4HRW8QSoo8k65w_xIaftN0no/). `src/babel_validation/sources/google_sheets/google_sheet_test_cases.py` fetches and parses these into `TestRow` dataclasses. Rows marked as not expected to pass are wrapped with `pytest.mark.xfail(strict=True)`. Tests are parametrized by row, with IDs like `gsheet:row=42`. **Category filtering:** Google Sheet rows have a Category column. The `test_category` fixture (from conftest.py) returns a callable that tests use to `pytest.skip()` rows not matching `--category`/`--category-exclude` filters. 
**Test modules:** - `tests/nodenorm/` — NodeNorm tests (normalization accuracy, preferred IDs/labels, Biolink types, conflation, descriptions, OpenAPI spec, setid endpoint) - `tests/nameres/` — NameRes tests (label lookup, autocomplete, Biolink type filtering, blocklist, taxon_specific flag) -- `tests/nodenorm/by_issue/` — Tests tied to specific GitHub issues -- `tests/common/` — Shared utilities (`GoogleSheetTestCases`, `TestRow`) +- `tests/nodenorm/by_issue/` — Per-issue regression tests for NodeNorm (hand-written) +- `tests/github_issues/` — Dynamically-generated tests pulled from GitHub issue bodies via `GitHubIssuesTestCases` ### Web Applications @@ -79,4 +89,6 @@ When writing new tests: - Use the `target_info` fixture to get NodeNorm/NameRes URLs from targets.ini - For Google Sheet-based tests, parametrize with `gsheet.test_rows()` and use the `test_category` fixture for category filtering - Use `pytest.mark.xfail(strict=True)` for known failures (strict=True means unexpected passes also fail) -- Issue-specific tests go in `tests/nodenorm/by_issue/` or `tests/github_issues/` +- Hand-written per-issue regression tests go in `tests/nodenorm/by_issue/` +- GitHub-issue-driven tests are picked up automatically by `tests/github_issues/test_github_issues.py` via `GitHubIssuesTestCases` +- Import shared classes from `src.babel_validation.*` (e.g. `from src.babel_validation.services.nodenorm import CachedNodeNorm`) diff --git a/pyproject.toml b/pyproject.toml index b728bf0..7ccecf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,9 +8,14 @@ requires-python = ">=3.11" dependencies = [ "black>=25.9.0", "deepdiff>=8.6.1", + "dotenv>=0.9.9", "openapi-spec-validator>=0.7.2", - "pytest>=8.4.2", + "pygithub>=2.8.1", + "pytest>=9.0", + "pytest-xdist[psutil]", + "filelock", "requests>=2.32.5", + "tqdm>=4.67.1", ] [project.urls] diff --git a/tests/common/__init__.py b/src/__init__.py similarity index 100% rename from tests/common/__init__.py rename to src/__init__.py diff --git a/src/babel_validation/__init__.py b/src/babel_validation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/assertions/README.md b/src/babel_validation/assertions/README.md new file mode 100644 index 0000000..7dba611 --- /dev/null +++ b/src/babel_validation/assertions/README.md @@ -0,0 +1,186 @@ + + +# BabelTest Assertion Types + +This package defines the assertion types that can be embedded in GitHub issue bodies and evaluated against the NodeNorm and NameRes services. + +## Embedding Tests in Issues + +Two syntaxes are supported: + +**Wiki syntax** (one assertion per line): +``` +{{BabelTest|AssertionType|param1|param2|...}} +``` + +**YAML syntax** (multiple assertions, multiple param sets): +```` +```yaml +babel_tests: + AssertionType: + - param1 + - [param1, param2] +``` +```` + +Assertion names are case-insensitive. + +--- + +## NodeNorm Assertions + +These assertions test the [NodeNorm](https://nodenorm.transltr.io/docs) service. + +### Resolves + +**Applies to:** NodeNorm + +Each CURIE in each param_set must resolve to a non-null result in NodeNorm. + +**Parameters:** One or more CURIEs per param_set. + +**Wiki syntax:** +``` +{{BabelTest|Resolves|CHEBI:15365}} +{{BabelTest|Resolves|MONDO:0005015|DOID:9351}} +``` + +**YAML syntax:** +```yaml +babel_tests: + Resolves: + - CHEBI:15365 + - [MONDO:0005015, DOID:9351] +``` + +--- + +### DoesNotResolve + +**Applies to:** NodeNorm + +Each CURIE in each param_set must fail to resolve (return null) in NodeNorm. 
Use this to confirm that an identifier is intentionally not normalizable. + +**Parameters:** One or more CURIEs per param_set. + +**Wiki syntax:** +``` +{{BabelTest|DoesNotResolve|FAKENS:99999}} +``` + +**YAML syntax:** +```yaml +babel_tests: + DoesNotResolve: + - FAKENS:99999 +``` + +--- + +### ResolvesWith + +**Applies to:** NodeNorm + +All CURIEs within each param_set must resolve to the identical normalized result. Use this to assert that two identifiers are equivalent. + +**Parameters:** Two or more CURIEs per param_set. All must resolve to the same result. + +**Wiki syntax:** +``` +{{BabelTest|ResolvesWith|CHEBI:15365|PUBCHEM.COMPOUND:1}} +``` + +**YAML syntax:** +```yaml +babel_tests: + ResolvesWith: + - [CHEBI:15365, PUBCHEM.COMPOUND:1] + - [MONDO:0005015, DOID:9351] +``` + +--- + +### ResolvesWithType + +**Applies to:** NodeNorm + +Each param_set must have at least two elements: the first is the expected Biolink type (e.g. 'biolink:Gene'), and the remainder are CURIEs that must resolve with that type. + +**Parameters:** Each param_set: first element is the expected Biolink type (e.g. `biolink:Gene`), remaining elements are CURIEs. + +**Wiki syntax:** +``` +{{BabelTest|ResolvesWithType|biolink:Gene|NCBIGene:1}} +``` + +**YAML syntax:** +```yaml +babel_tests: + ResolvesWithType: + - [biolink:Gene, NCBIGene:1, HGNC:5] +``` + +--- + +## NameRes Assertions + +These assertions test the [NameRes](https://name-lookup.transltr.io/docs) service. + +### SearchByName + +**Applies to:** NameRes + +Each param_set must have at least two elements: a search query string and an expected CURIE. The test passes if the CURIE's normalized identifier appears within the top N results (default N=5) when NameRes looks up the search query. + +**Parameters:** Each param_set: the **search query string** and the **expected CURIE**. The CURIE is normalized via NodeNorm (drug/chemical conflation enabled) before matching. + +**Wiki syntax:** +``` +{{BabelTest|SearchByName|water|CHEBI:15377}} +``` + +**YAML syntax:** +```yaml +babel_tests: + SearchByName: + - [water, CHEBI:15377] + - [diabetes, MONDO:0005015] +``` + +--- + +## Special Assertions + +### Needed + +**Applies to:** NodeNorm and NameRes + +Marks an issue as needing a test — always fails as a reminder to add real assertions. + +**Wiki syntax:** +``` +{{BabelTest|Needed}} +``` + +**YAML syntax:** +```yaml +babel_tests: + Needed: + - placeholder +``` + +--- + +## Adding a New Assertion Type + +1. Choose the right module: + - `nodenorm.py` — for NodeNorm-only assertions (subclass `NodeNormTest`, override `test_param_set`) + - `nameres.py` — for NameRes-only assertions (subclass `NameResTest`, override `test_param_set`) + - `common.py` — for assertions that apply to both services (subclass `AssertionHandler`, override `test_with_nodenorm` and/or `test_with_nameres`) + +2. Define the class with `NAME`, `DESCRIPTION`, `PARAMETERS`, `WIKI_EXAMPLES`, `YAML_PARAMS`, and `test_param_set()` (or both `test_with_*` methods for `AssertionHandler` subclasses). + +3. Import it in `__init__.py` and add an instance to `ASSERTION_HANDLERS`. + +4. Run `uv run python -m src.babel_validation.assertions.gen_docs` to regenerate `README.md`. 
diff --git a/src/babel_validation/assertions/__init__.py b/src/babel_validation/assertions/__init__.py new file mode 100644 index 0000000..5a89db3 --- /dev/null +++ b/src/babel_validation/assertions/__init__.py @@ -0,0 +1,132 @@ +""" +babel_validation.assertions +=========================== + +This package defines the assertion types that can be embedded in GitHub issue bodies +and evaluated against the NodeNorm and NameRes services. + +Supported assertion types are registered in ASSERTION_HANDLERS. To see everything +that is currently supported, scan that dict or read assertions/README.md (auto-generated). + +Adding a new assertion type +--------------------------- +1. Create a subclass of NodeNormTest or NameResTest (or AssertionHandler for both) + in the appropriate module (nodenorm.py, nameres.py, or common.py). +2. Set NAME and DESCRIPTION class attributes. +3. Set PARAMETERS, WIKI_EXAMPLES, and YAML_PARAMS class attributes for documentation. +4. Override test_param_set(). +5. Import it here and add an instance to ASSERTION_HANDLERS. +6. Run `uv run python -m src.babel_validation.assertions.gen_docs` to regenerate README.md. +""" + +from typing import Iterator + +from src.babel_validation.core.testrow import TestResult, TestStatus + + +class AssertionHandler: + """Base class for all BabelTest assertion handlers.""" + NAME: str # lowercase assertion name as used in issue bodies + DESCRIPTION: str # one-line human-readable description + + def passed(self, message: str) -> TestResult: + return TestResult(status=TestStatus.Passed, message=message) + + def failed(self, message: str) -> TestResult: + return TestResult(status=TestStatus.Failed, message=message) + + def test_with_nodenorm(self, param_sets: list[list[str]], nodenorm, + label: str = "") -> Iterator[TestResult]: + """Evaluate this assertion against NodeNorm. Returns [] if not applicable.""" + return [] + + def test_with_nameres(self, param_sets: list[list[str]], nodenorm, nameres, + pass_if_found_in_top: int = 5, + label: str = "") -> Iterator[TestResult]: + """Evaluate this assertion against NameRes. Returns [] if not applicable.""" + return [] + + +class NodeNormTest(AssertionHandler): + """Base class for assertions that test NodeNorm. + + Subclasses implement test_param_set() instead of test_with_nodenorm(). + """ + + def test_with_nodenorm(self, param_sets: list[list[str]], nodenorm, + label: str = "") -> Iterator[TestResult]: + if not param_sets: + yield self.failed(f"No parameters provided in {label}") + return + # warm the cache for all CURIEs up front + nodenorm.normalize_curies([p for params in param_sets for p in params]) + found = False + for index, params in enumerate(param_sets): + if not params: + yield self.failed(f"No parameters in param_set {index} in {label}") + found = True + continue + for result in self.test_param_set(params, nodenorm, label): + found = True + yield result + if not found: + yield self.failed(f"No test results returned in {label}") + + def test_param_set(self, params: list[str], nodenorm, label: str = "") -> Iterator[TestResult]: + """Override this to implement the assertion. 
Called once per param_set.""" + raise NotImplementedError + + def resolved_message(self, curie: str, result: dict, nodenorm) -> str: + """Standard pass-message when a CURIE resolves.""" + return (f"Resolved {curie} to {result['id']['identifier']} " + f"({result['type'][0]}, \"{result['id']['label']}\") " + f"with NodeNormalization service {nodenorm}") + + +class NameResTest(AssertionHandler): + """Base class for assertions that test NameRes. + + Subclasses implement test_param_set() instead of test_with_nameres(). + """ + + def test_with_nameres(self, param_sets: list[list[str]], nodenorm, nameres, + pass_if_found_in_top: int = 5, + label: str = "") -> Iterator[TestResult]: + if not param_sets: + yield self.failed(f"No parameters provided in {label}") + return + found = False + for index, params in enumerate(param_sets): + if not params: + yield self.failed(f"No parameters in param_set {index} in {label}") + found = True + continue + for result in self.test_param_set(params, nodenorm, nameres, pass_if_found_in_top, label): + found = True + yield result + if not found: + yield self.failed(f"No test results returned in {label}") + + def test_param_set(self, params: list[str], nodenorm, nameres, + pass_if_found_in_top: int, label: str = "") -> Iterator[TestResult]: + """Override this to implement the assertion. Called once per param_set.""" + raise NotImplementedError + + +# Registry — import submodules after base classes are defined to avoid circular imports. +from src.babel_validation.assertions.nodenorm import ( # noqa: E402 + ResolvesHandler, DoesNotResolveHandler, ResolvesWithHandler, ResolvesWithTypeHandler, +) +from src.babel_validation.assertions.nameres import SearchByNameHandler # noqa: E402 +from src.babel_validation.assertions.common import NeededHandler # noqa: E402 + +ASSERTION_HANDLERS: dict[str, AssertionHandler] = { + h.NAME: h for h in [ + ResolvesHandler(), + DoesNotResolveHandler(), + ResolvesWithHandler(), + ResolvesWithTypeHandler(), + SearchByNameHandler(), + NeededHandler(), + ] +} diff --git a/src/babel_validation/assertions/common.py b/src/babel_validation/assertions/common.py new file mode 100644 index 0000000..ac12dac --- /dev/null +++ b/src/babel_validation/assertions/common.py @@ -0,0 +1,16 @@ +from src.babel_validation.assertions import AssertionHandler + + +class NeededHandler(AssertionHandler): + """Placeholder assertion indicating that a test still needs to be written for this issue.""" + NAME = "needed" + DESCRIPTION = "Marks an issue as needing a test — always fails as a reminder to add real assertions." + PARAMETERS = "" + WIKI_EXAMPLES = ["{{BabelTest|Needed}}"] + YAML_PARAMS = " - placeholder" + + def test_with_nodenorm(self, param_sets, nodenorm, label=""): + yield self.failed("Test needed for issue") + + def test_with_nameres(self, param_sets, nodenorm, nameres, pass_if_found_in_top=5, label=""): + yield self.failed("Test needed for issue") diff --git a/src/babel_validation/assertions/gen_docs.py b/src/babel_validation/assertions/gen_docs.py new file mode 100644 index 0000000..c2a5ea2 --- /dev/null +++ b/src/babel_validation/assertions/gen_docs.py @@ -0,0 +1,133 @@ +"""Generate assertions/README.md from handler class attributes. 
+ +Run: + uv run python -m src.babel_validation.assertions.gen_docs +""" + +from pathlib import Path + +from src.babel_validation.assertions import ( + ASSERTION_HANDLERS, AssertionHandler, NodeNormTest, NameResTest, +) + +README_PATH = Path(__file__).parent / "README.md" + +INTRO = """\ + + +# BabelTest Assertion Types + +This package defines the assertion types that can be embedded in GitHub issue bodies and evaluated against the NodeNorm and NameRes services. + +## Embedding Tests in Issues + +Two syntaxes are supported: + +**Wiki syntax** (one assertion per line): +``` +{{BabelTest|AssertionType|param1|param2|...}} +``` + +**YAML syntax** (multiple assertions, multiple param sets): +```` +```yaml +babel_tests: + AssertionType: + - param1 + - [param1, param2] +``` +```` + +Assertion names are case-insensitive. + +--- +""" + +ADDING_NEW = """\ +## Adding a New Assertion Type + +1. Choose the right module: + - `nodenorm.py` — for NodeNorm-only assertions (subclass `NodeNormTest`, override `test_param_set`) + - `nameres.py` — for NameRes-only assertions (subclass `NameResTest`, override `test_param_set`) + - `common.py` — for assertions that apply to both services (subclass `AssertionHandler`, override `test_with_nodenorm` and/or `test_with_nameres`) + +2. Define the class with `NAME`, `DESCRIPTION`, `PARAMETERS`, `WIKI_EXAMPLES`, `YAML_PARAMS`, and `test_param_set()` (or both `test_with_*` methods for `AssertionHandler` subclasses). + +3. Import it in `__init__.py` and add an instance to `ASSERTION_HANDLERS`. + +4. Run `uv run python -m src.babel_validation.assertions.gen_docs` to regenerate `README.md`. +""" + +_GROUP_HEADERS: dict[str, str] = { + "NodeNorm": ( + "## NodeNorm Assertions\n\n" + "These assertions test the [NodeNorm](https://nodenorm.transltr.io/docs) service." + ), + "NameRes": ( + "## NameRes Assertions\n\n" + "These assertions test the [NameRes](https://name-lookup.transltr.io/docs) service." 
+ ), + "NodeNorm and NameRes": "## Special Assertions", +} + + +def _display_name(h: AssertionHandler) -> str: + return type(h).__name__.removesuffix("Handler") + + +def _applies_to(h: AssertionHandler) -> str: + if isinstance(h, NodeNormTest): + return "NodeNorm" + if isinstance(h, NameResTest): + return "NameRes" + return "NodeNorm and NameRes" + + +def _render_handler(h: AssertionHandler) -> str: + name = _display_name(h) + service = _applies_to(h) + description = getattr(h, "DESCRIPTION", "") + parameters = getattr(h, "PARAMETERS", "") + wiki_examples = getattr(h, "WIKI_EXAMPLES", []) + yaml_params = getattr(h, "YAML_PARAMS", "") + + parts = [] + parts.append(f"### {name}\n") + parts.append(f"**Applies to:** {service}\n") + parts.append(f"{description}\n") + + if parameters: + parts.append(f"**Parameters:** {parameters}\n") + + wiki_block = "\n".join(wiki_examples) + parts.append(f"**Wiki syntax:**\n```\n{wiki_block}\n```\n") + + parts.append( + f"**YAML syntax:**\n```yaml\nbabel_tests:\n {name}:\n{yaml_params}\n```\n" + ) + + parts.append("---\n") + + return "\n".join(parts) + + +def generate_readme() -> str: + sections = [INTRO] + seen_groups: set[str] = set() + + for h in ASSERTION_HANDLERS.values(): + service = _applies_to(h) + if service not in seen_groups: + seen_groups.add(service) + sections.append(_GROUP_HEADERS[service] + "\n") + sections.append(_render_handler(h)) + + sections.append(ADDING_NEW) + return "\n".join(sections) + + +if __name__ == "__main__": + content = generate_readme() + README_PATH.write_text(content, encoding="utf-8") + print(f"Written to {README_PATH}") diff --git a/src/babel_validation/assertions/nameres.py b/src/babel_validation/assertions/nameres.py new file mode 100644 index 0000000..ce1b25d --- /dev/null +++ b/src/babel_validation/assertions/nameres.py @@ -0,0 +1,59 @@ +import json +from typing import Iterator + +from src.babel_validation.assertions import NameResTest +from src.babel_validation.core.testrow import TestResult +from src.babel_validation.services.nameres import CachedNameRes +from src.babel_validation.services.nodenorm import CachedNodeNorm + + +class SearchByNameHandler(NameResTest): + """Test that a name search returns an expected CURIE in the top-N results in NameRes.""" + NAME = "searchbyname" + DESCRIPTION = ( + "Each param_set must have at least two elements: a search query string and an expected CURIE. " + "The test passes if the CURIE's normalized identifier appears within the top N results " + "(default N=5) when NameRes looks up the search query." + ) + PARAMETERS = ( + "Each param_set: the **search query string** and the **expected CURIE**. " + "The CURIE is normalized via NodeNorm (drug/chemical conflation enabled) before matching." 
+ ) + WIKI_EXAMPLES = ["{{BabelTest|SearchByName|water|CHEBI:15377}}"] + YAML_PARAMS = " - [water, CHEBI:15377]\n - [diabetes, MONDO:0005015]" + + def test_param_set(self, params: list[str], nodenorm: CachedNodeNorm, + nameres: CachedNameRes, pass_if_found_in_top: int = 5, + label: str = "") -> Iterator[TestResult]: + if len(params) < 2: + yield self.failed(f"Two parameters expected for SearchByName in {label}, but params = {params}") + return + + [search_query, expected_curie_from_test, *args] = params + expected_curie_result = nodenorm.normalize_curie(expected_curie_from_test, drug_chemical_conflate='true') + if not expected_curie_result: + yield self.failed(f"Unable to normalize CURIE {expected_curie_from_test} in {label}") + return + + expected_curie = expected_curie_result['id']['identifier'] + expected_curie_label = expected_curie_result['id']['label'] + expected_curie_string = f"Expected CURIE {expected_curie_from_test}, normalized to {expected_curie} '{expected_curie_label}'" + + # Search for the expected CURIE in the first {pass_if_found_in_top} results. + results = nameres.lookup(search_query, autocomplete='false', limit=(2 * pass_if_found_in_top)) + if not results: + yield self.failed(f"No results found for '{search_query}' on NameRes {nameres} ({expected_curie_string}") + return + + curies = [result['curie'] for result in results] + try: + found_index = curies.index(expected_curie) + except ValueError: + print(f"{expected_curie_string} not found when searching for '{search_query}' in NameRes {nameres}: {json.dumps(results, indent=2, sort_keys=True)}") + yield self.failed(f"{expected_curie_string} not found when searching for '{search_query}' in NameRes {nameres}") + return + + if found_index <= pass_if_found_in_top: + yield self.passed(f"{expected_curie_string} found at index {found_index + 1} on NameRes {nameres}") + else: + yield self.failed(f"{expected_curie_string} found at index {found_index + 1} which is greater than {pass_if_found_in_top} on NameRes {nameres}") diff --git a/src/babel_validation/assertions/nodenorm.py b/src/babel_validation/assertions/nodenorm.py new file mode 100644 index 0000000..870f68d --- /dev/null +++ b/src/babel_validation/assertions/nodenorm.py @@ -0,0 +1,116 @@ +import json +from typing import Iterator + +from src.babel_validation.assertions import NodeNormTest +from src.babel_validation.core.testrow import TestResult +from src.babel_validation.services.nodenorm import CachedNodeNorm + + +class ResolvesHandler(NodeNormTest): + """Test that every CURIE in every param_set resolves in NodeNorm.""" + NAME = "resolves" + DESCRIPTION = "Each CURIE in each param_set must resolve to a non-null result in NodeNorm." + PARAMETERS = "One or more CURIEs per param_set." + WIKI_EXAMPLES = [ + "{{BabelTest|Resolves|CHEBI:15365}}", + "{{BabelTest|Resolves|MONDO:0005015|DOID:9351}}", + ] + YAML_PARAMS = " - CHEBI:15365\n - [MONDO:0005015, DOID:9351]" + + def test_param_set(self, params: list[str], nodenorm: CachedNodeNorm, + label: str = "") -> Iterator[TestResult]: + for curie in params: + result = nodenorm.normalize_curie(curie) + if not result: + yield self.failed(f"Could not resolve {curie} with NodeNormalization service {nodenorm}") + else: + yield self.passed(self.resolved_message(curie, result, nodenorm)) + + +class DoesNotResolveHandler(NodeNormTest): + """Test that every CURIE in every param_set does NOT resolve in NodeNorm.""" + NAME = "doesnotresolve" + DESCRIPTION = ( + "Each CURIE in each param_set must fail to resolve (return null) in NodeNorm. 
" + "Use this to confirm that an identifier is intentionally not normalizable." + ) + PARAMETERS = "One or more CURIEs per param_set." + WIKI_EXAMPLES = ["{{BabelTest|DoesNotResolve|FAKENS:99999}}"] + YAML_PARAMS = " - FAKENS:99999" + + def test_param_set(self, params: list[str], nodenorm: CachedNodeNorm, + label: str = "") -> Iterator[TestResult]: + for curie in params: + result = nodenorm.normalize_curie(curie) + if not result: + yield self.passed(f"Could not resolve {curie} with NodeNormalization service {nodenorm} as expected") + else: + yield self.failed(f"Resolved {curie} to {result['id']['identifier']} ({result['type'][0]}, \"{result['id']['label']}\") with NodeNormalization service {nodenorm}, but expected not to resolve") + + +class ResolvesWithHandler(NodeNormTest): + """Test that all CURIEs in a param_set resolve to the same normalized result in NodeNorm.""" + NAME = "resolveswith" + DESCRIPTION = ( + "All CURIEs within each param_set must resolve to the identical normalized result. " + "Use this to assert that two identifiers are equivalent." + ) + PARAMETERS = "Two or more CURIEs per param_set. All must resolve to the same result." + WIKI_EXAMPLES = ["{{BabelTest|ResolvesWith|CHEBI:15365|PUBCHEM.COMPOUND:1}}"] + YAML_PARAMS = " - [CHEBI:15365, PUBCHEM.COMPOUND:1]\n - [MONDO:0005015, DOID:9351]" + + def test_param_set(self, params: list[str], nodenorm: CachedNodeNorm, + label: str = "") -> Iterator[TestResult]: + results = nodenorm.normalize_curies(params) + + # Find the first good result. + first_good_result = None + for curie, result in results.items(): + if result is not None and first_good_result is None: + first_good_result = result + break + + if first_good_result is None: + yield self.failed(f"None of the CURIEs {params} could be resolved on {nodenorm}") + return + + # Check all the results. + for curie, result in results.items(): + if result is None: + yield self.failed(f"CURIE {curie} could not be resolved, and so is not equal to the expected result {json.dumps(first_good_result, indent=2, sort_keys=True)} on {nodenorm}") + elif json.dumps(first_good_result, sort_keys=True) == json.dumps(result, sort_keys=True): + yield self.passed(f"Resolved {curie} to the expected result {json.dumps(first_good_result, indent=2, sort_keys=True)}") + else: + yield self.failed(f"Resolved {curie} to {result['id']['identifier']} ({result['type'][0]}, \"{result['id']['label']}\"), but expected {first_good_result['id']['identifier']} ({first_good_result['type'][0]}, \"{first_good_result['id']['label']}\") on {nodenorm}") + + +class ResolvesWithTypeHandler(NodeNormTest): + """Test that CURIEs resolve with a specific Biolink type in NodeNorm.""" + NAME = "resolveswithtype" + DESCRIPTION = ( + "Each param_set must have at least two elements: the first is the expected Biolink type " + "(e.g. 'biolink:Gene'), and the remainder are CURIEs that must resolve with that type." + ) + PARAMETERS = ( + "Each param_set: first element is the expected Biolink type (e.g. `biolink:Gene`), " + "remaining elements are CURIEs." 
+ ) + WIKI_EXAMPLES = ["{{BabelTest|ResolvesWithType|biolink:Gene|NCBIGene:1}}"] + YAML_PARAMS = " - [biolink:Gene, NCBIGene:1, HGNC:5]" + + def test_param_set(self, params: list[str], nodenorm: CachedNodeNorm, + label: str = "") -> Iterator[TestResult]: + if len(params) < 2: + yield self.failed(f"Too few parameters provided in param_set in {label}: {params}") + return + + expected_biolink_type = params[0] + curies = params[1:] + + results = nodenorm.normalize_curies(curies) + for curie in curies: + biolink_types = results[curie]['type'] + if expected_biolink_type in biolink_types: + yield self.passed(f"Biolink types {biolink_types} for CURIE {curie} includes expected Biolink type {expected_biolink_type}") + else: + yield self.failed(f"Biolink types {biolink_types} for CURIE {curie} does not include expected Biolink type {expected_biolink_type}") diff --git a/src/babel_validation/core/__init__.py b/src/babel_validation/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/core/testrow.py b/src/babel_validation/core/testrow.py new file mode 100644 index 0000000..35cd1ca --- /dev/null +++ b/src/babel_validation/core/testrow.py @@ -0,0 +1,73 @@ +from dataclasses import dataclass +from enum import Enum + + +@dataclass(frozen=True) +class TestRow: + """ + A TestRow models a single row from a GoogleSheet. + """ + Category: str + ExpectPassInNodeNorm: bool + ExpectPassInNameRes: bool + Flags: set[str] + QueryLabel: str + PreferredLabel: str + AdditionalLabels: list[str] + QueryID: str + PreferredID: str + AdditionalIDs: list[str] + Conflations: set[str] + BiolinkClasses: set[str] + Prefixes: set[str] + Source: str + SourceURL: str + Notes: str + + # Mark as not a test despite starting with Test*. + __test__ = False + + # A string representation of this test row. + def __str__(self): + return f"TestRow of category {self.Category} for preferred {self.PreferredID} ({self.PreferredLabel}) with " + \ + f"query {self.QueryID} ({self.QueryLabel}) from source {self.Source} ({self.SourceURL})" + + + @staticmethod + def from_data_row(row): + return TestRow( + Category=row.get('Category', ''), + ExpectPassInNodeNorm=row.get('Passes in NodeNorm', '').strip().lower() == 'y', + ExpectPassInNameRes=row.get('Passes in NameRes', '').strip().lower() == 'y', + Flags=set(row.get('Flags', '').split('|')), + QueryLabel=row.get('Query Label', ''), + QueryID=row.get('Query ID', ''), + PreferredID=row.get('Preferred ID', ''), + AdditionalIDs=row.get('Additional IDs', '').split('|'), + PreferredLabel=row.get('Preferred Label', ''), + AdditionalLabels=row.get('Additional Labels', '').split('|'), + Conflations=set(row.get('Conflations', '').split('|')), + BiolinkClasses=set(row.get('Biolink Classes', '').split('|')), + Prefixes=set(row.get('Prefixes', '').split('|')), + Source=row.get('Source', ''), + SourceURL=row.get('Source URL', ''), + Notes=row.get('Notes', '') + ) + +class TestStatus(Enum): + Passed = "pass" + Failed = "fail" + Skipped = "skip" + + # Mark as not a test despite starting with Test*. + __test__ = False + +@dataclass +class TestResult: + status: TestStatus + message: str = "" + github_issue_test: 'GitHubIssueTest' = None + + # Mark as not a test despite starting with Test*. 
+ __test__ = False + diff --git a/src/babel_validation/services/__init__.py b/src/babel_validation/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/services/nameres.py b/src/babel_validation/services/nameres.py new file mode 100644 index 0000000..5dcee5c --- /dev/null +++ b/src/babel_validation/services/nameres.py @@ -0,0 +1,72 @@ +import logging +import time + +import requests + +cached_nameres_by_url = {} + +class CachedNameRes: + # TODO: actually cache once we've implemented a param-based cache. + def __init__(self, nameres_url: str): + self.nameres_url = nameres_url + self.logger = logging.getLogger(str(self)) + self.cache = {} + + def __str__(self): + return f"CachedNameRes({self.nameres_url})" + + @staticmethod + def from_url(nameres_url: str) -> 'CachedNameRes': + if nameres_url not in cached_nameres_by_url: + cached_nameres_by_url[nameres_url] = CachedNameRes(nameres_url) + return cached_nameres_by_url[nameres_url] + + def bulk_lookup(self, queries: list[str], **params) -> dict[str, dict]: + if not queries: + raise ValueError(f"queries must not be empty when calling bulk_lookup({queries}, {params}) on {self}") + if not isinstance(queries, list): + raise ValueError(f"queries must be a list when calling normalize_curies({queries}, {params}) on {self}") + + time_started = time.time_ns() + queries_set = set(queries) + cached_queries = queries_set & self.cache.keys() + queries_to_be_queried = queries_set - cached_queries + + # Make query. + result = {} + if queries_to_be_queried: + params['strings'] = list(queries_to_be_queried) + + self.logger.debug(f"Called NameRes {self} with params {params}") + response = requests.post(self.nameres_url + "bulk-lookup", json=params) + response.raise_for_status() + result = response.json() + + for query in queries_to_be_queried: + self.cache[query] = result.get(query, None) + + for query in cached_queries: + result[query] = self.cache[query] + + time_taken_sec = (time.time_ns() - time_started) / 1E9 + self.logger.info(f"Looked up {len(queries_to_be_queried)} queries {queries_to_be_queried} (with {len(cached_queries)} queries cached) with params {params} on {self} in {time_taken_sec:.3f}s") + + return result + + def lookup(self, query, **params): + if query in self.cache: + return self.cache[query] + + params['string'] = query + self.logger.debug(f"Querying NameRes with params {params}") + + response = requests.post(self.nameres_url + "lookup", params=params) + response.raise_for_status() + result = response.json() + + self.cache[query] = result + return result + + def delete_query(self, query): + if query in self.cache: + del self.cache[query] diff --git a/src/babel_validation/services/nodenorm.py b/src/babel_validation/services/nodenorm.py new file mode 100644 index 0000000..88a3353 --- /dev/null +++ b/src/babel_validation/services/nodenorm.py @@ -0,0 +1,64 @@ +import logging +import time + +import requests + +cached_node_norms_by_url = {} + +class CachedNodeNorm: + def __init__(self, nodenorm_url: str): + self.nodenorm_url = nodenorm_url + self.logger = logging.getLogger(str(self)) + self.cache = {} + + def __str__(self): + return f"CachedNodeNorm({self.nodenorm_url})" + + @staticmethod + def from_url(nodenorm_url: str) -> 'CachedNodeNorm': + if nodenorm_url not in cached_node_norms_by_url: + cached_node_norms_by_url[nodenorm_url] = CachedNodeNorm(nodenorm_url) + return cached_node_norms_by_url[nodenorm_url] + + def normalize_curies(self, curies: list[str], **params) -> dict[str, dict]: + # TODO: 
eventually we'll need some way to cache the parameters along with the curie. + if not curies: + raise ValueError(f"curies must not be empty when calling normalize_curies({curies}, {params}) on {self}") + if not isinstance(curies, list): + raise ValueError(f"curies must be a list when calling normalize_curies({curies}, {params}) on {self}") + + time_started = time.time_ns() + curies_set = set(curies) + cached_curies = curies_set & self.cache.keys() + curies_to_be_queried = curies_set - cached_curies + + # Make query. + result = {} + if curies_to_be_queried: + params['curies'] = list(curies_to_be_queried) + + self.logger.debug(f"Called NodeNorm {self} with params {params}") + response = requests.post(self.nodenorm_url + "get_normalized_nodes", json=params) + response.raise_for_status() + result = response.json() + + for curie in curies_to_be_queried: + self.cache[curie] = result.get(curie, None) + + for curie in cached_curies: + result[curie] = self.cache[curie] + + time_taken_sec = (time.time_ns() - time_started) / 1E9 + self.logger.info(f"Normalizing {len(curies_to_be_queried)} CURIEs {curies_to_be_queried} (with {len(cached_curies)} CURIEs cached) with params {params} on {self} in {time_taken_sec:.3f}s") + + return result + + def normalize_curie(self, curie, **params): + if curie in self.cache: + return self.cache[curie] + return self.normalize_curies([curie], **params)[curie] + + def clear_curie(self, curie): + # This will be needed if you need to call a CURIE with different parameters. + if curie in self.cache: + del self.cache[curie] diff --git a/src/babel_validation/sources/__init__.py b/src/babel_validation/sources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/sources/github/__init__.py b/src/babel_validation/sources/github/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/sources/github/github_issues_test_cases.py b/src/babel_validation/sources/github/github_issues_test_cases.py new file mode 100644 index 0000000..bec4d50 --- /dev/null +++ b/src/babel_validation/sources/github/github_issues_test_cases.py @@ -0,0 +1,220 @@ +import json +import logging +import re +from typing import Iterator + +import yaml + +from github import Github, Auth, Issue +from tqdm import tqdm + +from src.babel_validation.assertions import ASSERTION_HANDLERS +from src.babel_validation.core.testrow import TestResult +from src.babel_validation.services.nameres import CachedNameRes +from src.babel_validation.services.nodenorm import CachedNodeNorm + + +class GitHubIssueTest: + def __init__(self, github_issue_id: str, github_issue: Issue.Issue, assertion: str, param_sets: list[list[str]] = None): + self.github_issue = github_issue + self.assertion = assertion + if param_sets is None: + param_sets = [] + self.param_sets = param_sets + if not isinstance(self.param_sets, list): + raise ValueError(f"param_sets must be a list when creating a GitHubIssueTest({self.github_issue}, {self.assertion}, {self.param_sets})") + + self.github_issue_id = github_issue_id + + self.logger = logging.getLogger(str(self)) + self.logger.info(f"Creating GitHubIssueTest for {github_issue.html_url} {assertion}({param_sets})") + + def __str__(self): + return f"{self.github_issue_id}: {self.assertion}({len(self.param_sets)} param sets: {json.dumps(self.param_sets)})" + + def test_with_nodenorm(self, nodenorm: CachedNodeNorm) -> Iterator[TestResult]: + handler = ASSERTION_HANDLERS.get(self.assertion.lower()) + if handler is None: + raise ValueError(f"Unknown 
assertion type for {self}: {self.assertion}") + return handler.test_with_nodenorm(self.param_sets, nodenorm, label=str(self)) + + def test_with_nameres(self, nodenorm: CachedNodeNorm, nameres: CachedNameRes, pass_if_found_in_top=5) -> Iterator[TestResult]: + handler = ASSERTION_HANDLERS.get(self.assertion.lower()) + if handler is None: + raise ValueError(f"Unknown assertion type: {self.assertion}") + return handler.test_with_nameres(self.param_sets, nodenorm, nameres, pass_if_found_in_top, label=str(self)) + + +class GitHubIssuesTestCases: + """ + The idea here is to allow test cases to be efficiently embedded within GitHub issues, to test them + regularly, and to provide a list of cases where either: + - An open issue has test cases that are now passing (and so should be updated or maybe even closed). + - A closed issue has test cases that are now failing (and so should be reopened). + """ + + def __init__(self, github_token: str, github_repositories=None): + """ + Create a GitHubIssuesTestCase object. + + Requires a GitHub authentication token. You can generate a personal authentication token + at https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#about-personal-access-tokens, + or you can read the GITHUB_TOKEN during a GitHub Action (https://docs.github.com/en/actions/tutorials/authenticate-with-github_token). + + :param github_token: A GitHub authentication to use for making these queries. + :param github_repositories: A list of GitHub repositories to pull issues from, specified as 'organization/repo'. + """ + self.github_token = github_token + if not self.github_token or self.github_token.strip() == '': + raise ValueError("No GitHub authentication token provided.") + + self.github = Github(auth=Auth.Token(self.github_token)) + self.logger = logging.getLogger(self.__class__.__name__) + self.logger.info("Set up GitHub object ({self.github})") + + if github_repositories is None: + github_repositories = [ + 'NCATSTranslator/Babel', # https://github.com/NCATSTranslator/Babel + 'NCATSTranslator/NodeNormalization', # https://github.com/NCATSTranslator/NodeNormalization + 'NCATSTranslator/NameResolution', # https://github.com/NCATSTranslator/NameResolution + 'TranslatorSRI/babel-validation', # https://github.com/TranslatorSRI/babel-validation + ] + self.github_repositories = github_repositories + + # Prepare regular expressions. + self.babeltest_pattern = re.compile(r'{{BabelTest\|.*?}}') + self.babeltest_yaml_pattern = re.compile(r'```yaml\s+babel_tests:\s+.*?\s+```', re.DOTALL) + + def get_test_issues_from_issue(self, github_issue: Issue.Issue) -> list[GitHubIssueTest]: + """ + Extract test rows from a single GitHub issue. + + Two syntaxes are supported: + - Wiki syntax: {{BabelTest|AssertionType|param1|param2|...}} + - YAML syntax: + + ```yaml + babel_tests: + assertion: + - param1 + - ['param1', 'param2'] + ``` + + For the full list of supported assertion types and their parameters, see + src/babel_validation/assertions/README.md or inspect ASSERTION_HANDLERS.keys(). + + :param github_issue: A single GitHub issue to extract test cases from. + :return: A list of GitHubIssueTest objects found in the issue body. + """ + + github_issue_id = f"{github_issue.repository.full_name}#{github_issue.number}" + self.logger.debug(f"Looking for tests in issue {github_issue_id}: {github_issue.title} ({str(github_issue.state)}, {github_issue.html_url})") + + # Is there an issue body at all? 
+ if not github_issue.body or github_issue.body.strip() == '': + return [] + + # Look for BabelTest syntax. + testrows = [] + + babeltest_matches = re.findall(self.babeltest_pattern, github_issue.body) + if babeltest_matches: + for match in babeltest_matches: + self.logger.info(f"Found BabelTest in issue {github_issue_id}: {match}") + + # Figure out parameters. + test_string = match + if test_string.startswith("{{BabelTest|"): + test_string = test_string[12:] + if test_string.endswith("}}"): + test_string = test_string[:-2] + params = test_string.split("|") + if len(params) < 2: + raise ValueError(f"Too few parameters found in BabelTest in issue {github_issue_id}: {match}") + else: + testrows.append(GitHubIssueTest(github_issue_id, github_issue, params[0], [params[1:]])) + + babeltest_yaml_matches = re.findall(self.babeltest_yaml_pattern, github_issue.body) + if babeltest_yaml_matches: + for match in babeltest_yaml_matches: + self.logger.info(f"Found BabelTest YAML in issue {github_issue_id}: {match}") + + # Parse string as YAML. + if match.startswith("```yaml"): + match = match[7:] + if match.endswith("```"): + match = match[:-3] + yaml_dict = yaml.safe_load(match) + + for assertion, original_param_sets in yaml_dict['babel_tests'].items(): + param_sets = [] + for param_set in original_param_sets: + if isinstance(param_set, str): + param_sets.append([param_set]) + elif isinstance(param_set, list): + param_sets.append(param_set) + else: + raise RuntimeError(f"Unknown parameter set type {param_set} in issue {github_issue_id}") + testrows.append(GitHubIssueTest(github_issue_id, github_issue, assertion, param_sets)) + + return testrows + + def issue_has_tests(self, issue: Issue.Issue) -> bool: + """Quick regex check to see if an issue body contains any BabelTest syntax.""" + if not issue.body or issue.body.strip() == '': + return False + return bool(self.babeltest_pattern.search(issue.body) or + self.babeltest_yaml_pattern.search(issue.body)) + + def get_issues_by_ids(self, issue_ids: list[str]) -> list[Issue.Issue]: + """ + Fetch specific GitHub issues by their ID strings, supporting three formats: + - 'org/repo#N' → direct fetch from that repo + - 'repo#N' → search self.github_repositories for matching repo name + - 'N' → fetch #N from all configured repositories + """ + from github import GithubException + issues = [] + for issue_id in issue_ids: + if m := re.match(r'^([^/]+)/([^#]+)#(\d+)$', issue_id): + # org/repo#N + issue = self.github.get_repo(f"{m.group(1)}/{m.group(2)}").get_issue(int(m.group(3))) + issues.append(issue) + elif m := re.match(r'^([^/#]+)#(\d+)$', issue_id): + # repo#N — find repo in configured list + repo_name, num = m.group(1), int(m.group(2)) + for full_repo in self.github_repositories: + if full_repo.split('/')[1] == repo_name: + issues.append(self.github.get_repo(full_repo).get_issue(num)) + break + elif m := re.match(r'^(\d+)$', issue_id): + # N — try all configured repos + num = int(m.group(1)) + for full_repo in self.github_repositories: + try: + issues.append(self.github.get_repo(full_repo).get_issue(num)) + except GithubException: + pass + return issues + + def get_all_issues(self, github_repositories = None) -> Iterator[Issue.Issue]: + """ + Get a list of test rows from one or more repositories. + + :param github_repositories: A list of GitHub repositories to search for test cases. If none is provided, + we default to the list specified when creating this GitHubIssuesTestCases class. + :return: A list of TestRows to process. 
+ """ + if github_repositories is None: + github_repositories = self.github_repositories + + for repo_id in github_repositories: + self.logger.info(f"Looking up issues in GitHub repository {repo_id}") + repo = self.github.get_repo(repo_id, lazy=True) + + issue_count = 0 + for issue in tqdm(repo.get_issues(state='all', sort='updated'), desc=f"Processing issues in {repo_id}"): + issue_count += 1 + yield issue + + self.logger.info(f"Found {issue_count} issues in GitHub repository {repo_id}") diff --git a/src/babel_validation/sources/google_sheets/__init__.py b/src/babel_validation/sources/google_sheets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/babel_validation/sources/google_sheets/blocklist.py b/src/babel_validation/sources/google_sheets/blocklist.py new file mode 100644 index 0000000..6e349d4 --- /dev/null +++ b/src/babel_validation/sources/google_sheets/blocklist.py @@ -0,0 +1,66 @@ +import csv +import io +from dataclasses import dataclass +from typing import Optional + +import requests + + +@dataclass(frozen=True) +class BlocklistEntry: + """ + A single Blocklist entry. + """ + Query: Optional[str] = None + CURIE: Optional[str] = None + Blocked: str = None + Status: str = None + Issue: str = None + TreatsOnly: str = None + Submitter: str = None + Comment: str = None + + def is_blocked(self): + """ Is this term supposed to be blocked? """ + if self.Blocked is not None and self.Blocked == 'y': + return True + return False + + @staticmethod + def from_gsheet_dict(row): + """ + Given a dictionary from a row in Google Sheets, fill in the necessary fields. + + :return: A BlocklistEntry with the filled in fields. + """ + return BlocklistEntry( + Query=row.get('String (optional)', None), + CURIE=row.get('CURIE (optional)', None), + Blocked=row['Blocked?'], + Status=row['Status (Feb 21, 2024)'], + Issue=row['Blocklist issue'], + TreatsOnly=row['Block for "treats" only?'], + Submitter=row['Submitter'], + Comment=row['Comment (optional)'], + ) + + +def load_blocklist_from_gsheet(): + """ + Load the Blocklist from a Google Sheet. + + :return: A list of BlocklistEntry. + """ + google_sheet_id = '1UR2eplHBvFRwaSIVOhlB44wpfNPY1z7AVzUkqzDqIWA' + csv_url = f"https://docs.google.com/spreadsheets/d/{google_sheet_id}/gviz/tq?tqx=out:csv&sheet=Tests" + + response = requests.get(csv_url) + csv_content = response.text + + rows = [] + with io.StringIO(csv_content) as f: + reader = csv.DictReader(f) + for row in reader: + rows.append(BlocklistEntry.from_gsheet_dict(row)) + + return rows diff --git a/tests/common/google_sheet_test_cases.py b/src/babel_validation/sources/google_sheets/google_sheet_test_cases.py similarity index 57% rename from tests/common/google_sheet_test_cases.py rename to src/babel_validation/sources/google_sheets/google_sheet_test_cases.py index aecdda9..af076a7 100644 --- a/tests/common/google_sheet_test_cases.py +++ b/src/babel_validation/sources/google_sheets/google_sheet_test_cases.py @@ -3,66 +3,18 @@ # # This library contains classes and methods for accessing those test cases. import csv +import hashlib import io -from dataclasses import dataclass +import tempfile from collections import Counter +from pathlib import Path import pytest import requests from _pytest.mark import ParameterSet +from filelock import FileLock - -@dataclass(frozen=True) -class TestRow: - """ - A TestRow models a single row from a GoogleSheet. 
- """ - Category: str - ExpectPassInNodeNorm: bool - ExpectPassInNameRes: bool - Flags: set[str] - QueryLabel: str - PreferredLabel: str - AdditionalLabels: list[str] - QueryID: str - PreferredID: str - AdditionalIDs: list[str] - Conflations: set[str] - BiolinkClasses: set[str] - Prefixes: set[str] - Source: str - SourceURL: str - Notes: str - - # Mark as not a test despite starting with TestRow. - __test__ = False - - # A string representation of this test row. - def __str__(self): - return f"TestRow of category {self.Category} for preferred {self.PreferredID} ({self.PreferredLabel}) with " + \ - f"query {self.QueryID} ({self.QueryLabel}) from source {self.Source} ({self.SourceURL})" - - - @staticmethod - def from_data_row(row): - return TestRow( - Category=row.get('Category', ''), - ExpectPassInNodeNorm=row.get('Passes in NodeNorm', '') == 'y', - ExpectPassInNameRes=row.get('Passes in NameRes', '') == 'y', - Flags=set(row.get('Flags', '').split('|')), - QueryLabel=row.get('Query Label', ''), - QueryID=row.get('Query ID', ''), - PreferredID=row.get('Preferred ID', ''), - AdditionalIDs=row.get('Additional IDs', '').split('|'), - PreferredLabel=row.get('Preferred Label', ''), - AdditionalLabels=row.get('Additional Labels', '').split('|'), - Conflations=set(row.get('Conflations', '').split('|')), - BiolinkClasses=set(row.get('Biolink Classes', '').split('|')), - Prefixes=set(row.get('Prefixes', '').split('|')), - Source=row.get('Source', ''), - SourceURL=row.get('Source URL', ''), - Notes=row.get('Notes', '') - ) +from src.babel_validation.core.testrow import TestRow class GoogleSheetTestCases: @@ -80,9 +32,19 @@ def __init__(self, google_sheet_id="11zebx8Qs1Tc3ShQR9nh4HRW8QSoo8k65w_xIaftN0no """ self.google_sheet_id = google_sheet_id - csv_url = f"https://docs.google.com/spreadsheets/d/{google_sheet_id}/gviz/tq?tqx=out:csv&sheet=Tests" - response = requests.get(csv_url) - self.csv_content = response.text + + sheet_hash = hashlib.md5(google_sheet_id.encode()).hexdigest()[:8] + cache_file = Path(tempfile.gettempdir()) / f"babel_validation_gsheet_{sheet_hash}.csv" + lock_file = cache_file.with_suffix(".lock") + + with FileLock(lock_file): + if cache_file.exists(): + self.csv_content = cache_file.read_text(encoding="utf-8") + else: + csv_url = f"https://docs.google.com/spreadsheets/d/{google_sheet_id}/gviz/tq?tqx=out:csv&sheet=Tests" + response = requests.get(csv_url) + self.csv_content = response.text + cache_file.write_text(self.csv_content, encoding="utf-8") self.rows = [] with io.StringIO(self.csv_content) as f: @@ -106,7 +68,8 @@ def has_nonempty_value(d: dict): for count, row in enumerate(self.rows): # Note that count is off by two: presumably one for the header row and one because we count from zero # but Google Sheets counts from one. 
- row_id = f"{test_id_prefix}:row={count + 2}" + row_count = count + 2 + row_id = f"{test_id_prefix}:row={row_count}" if has_nonempty_value(row): tr = TestRow.from_data_row(row) @@ -118,7 +81,7 @@ def has_nonempty_value(d: dict): trows.append(pytest.param( tr, marks=pytest.mark.xfail( - reason=f"Test row {count + 2} is marked as not expected to pass NodeNorm in the " + reason=f"Test row {row_count} is marked as not expected to pass NodeNorm in the " f"Google Sheet: {tr}", strict=True), id=row_id @@ -131,7 +94,7 @@ def has_nonempty_value(d: dict): trows.append(pytest.param( tr, marks=pytest.mark.xfail( - reason=f"Test row {count + 2} is marked as not expected to pass NameRes in the " + reason=f"Test row {row_count} is marked as not expected to pass NameRes in the " f"Google Sheet: {tr}", strict=True), id=row_id @@ -141,4 +104,4 @@ def has_nonempty_value(d: dict): def categories(self): """ Return a dict of all the categories of tests available with their counts. """ - return Counter(map(lambda t: t.get('Category', ''), self.rows)) \ No newline at end of file + return Counter(map(lambda t: t.get('Category', ''), self.rows)) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py index 2515e22..7de6e13 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,10 @@ # # conftest.py - pytest configuration settings # +import glob +import os import os.path +import tempfile import pytest import configparser @@ -25,6 +28,22 @@ def get_targets_ini_path(config): return config_path +def pytest_configure(config): + # Delete the Google Sheet CSV cache at the start of each run so tests always + # use a fresh download. Only the controller does this — xdist workers skip it + # so they can share the cache file written by the controller. + if not os.environ.get('PYTEST_XDIST_WORKER'): + for f in glob.glob(os.path.join(tempfile.gettempdir(), 'babel_validation_gsheet_*.csv')): + try: + os.unlink(f) + except FileNotFoundError: + pass + try: + os.unlink(os.path.join(tempfile.gettempdir(), 'babel_validation_issues_cache.json')) + except FileNotFoundError: + pass + + def pytest_addoption(parser): # The target environment(s) to target. parser.addoption( @@ -47,6 +66,14 @@ def pytest_addoption(parser): help="The categories of tests to exclude." ) + # The issue option is only used by + parser.addoption( + '--issue', + default=[], + action='append', + help="One or more GitHub issues to test. 
Should be specified as either 'organization/repo#110', 'repo#110' or '110'" + ) + def read_targets(config_path): cp = configparser.ConfigParser() @@ -119,4 +146,9 @@ def category_test(cat): return False return True - return category_test \ No newline at end of file + return category_test + +# Issue is only used by the GitHub issue tests (tests/github_issues/*) +@pytest.fixture +def selected_github_issues(pytestconfig): + return pytestconfig.getoption('issue') diff --git a/tests/github_issues/conftest.py b/tests/github_issues/conftest.py new file mode 100644 index 0000000..6e9f74d --- /dev/null +++ b/tests/github_issues/conftest.py @@ -0,0 +1,65 @@ +import json +import os +import tempfile +from pathlib import Path + +import dotenv +import pytest +from filelock import FileLock +from github import Issue + +from src.babel_validation.sources.github.github_issues_test_cases import GitHubIssuesTestCases + +dotenv.load_dotenv() +_github_token = os.getenv('GITHUB_TOKEN') +_repos = [ + 'NCATSTranslator/Babel', + 'NCATSTranslator/NodeNormalization', + 'NCATSTranslator/NameResolution', + 'TranslatorSRI/babel-validation', +] +github_issues_test_cases = GitHubIssuesTestCases(_github_token, _repos) + +_CACHE_FILE = Path(tempfile.gettempdir()) / "babel_validation_issues_cache.json" +_LOCK_FILE = _CACHE_FILE.with_suffix(".lock") + + +def _issue_id(issue: Issue.Issue) -> str: + """Derive a test ID from an issue without making extra API calls.""" + parts = issue.html_url.split('/') + return f"{parts[3]}/{parts[4]}#{issue.number}" + + +def _get_all_test_issue_ids() -> list[str]: + """Return IDs of all issues that contain tests, using a file-based cache.""" + with FileLock(_LOCK_FILE): + if _CACHE_FILE.exists(): + return json.loads(_CACHE_FILE.read_text()) + issues = [i for i in github_issues_test_cases.get_all_issues() + if github_issues_test_cases.issue_has_tests(i)] + ids = [_issue_id(i) for i in issues] + _CACHE_FILE.write_text(json.dumps(ids)) + return ids + + +def pytest_generate_tests(metafunc): + if "github_issue_id" not in metafunc.fixturenames: + return + issue_id_filter = metafunc.config.getoption("issue", default=[]) + if issue_id_filter: + issues = github_issues_test_cases.get_issues_by_ids(issue_id_filter) + ids = [_issue_id(i) for i in issues] + else: + ids = _get_all_test_issue_ids() + metafunc.parametrize("github_issue_id", ids, ids=ids) + + +@pytest.fixture +def github_issue(github_issue_id): + """Hydrate a GitHub Issue object from its string ID.""" + return github_issues_test_cases.get_issues_by_ids([github_issue_id])[0] + + +@pytest.fixture(scope="session") +def github_issues_test_cases_fixture(): + return github_issues_test_cases diff --git a/tests/github_issues/test_github_issues.py b/tests/github_issues/test_github_issues.py new file mode 100644 index 0000000..80c97dc --- /dev/null +++ b/tests/github_issues/test_github_issues.py @@ -0,0 +1,46 @@ +import itertools + +import pytest + +from src.babel_validation.services.nameres import CachedNameRes +from src.babel_validation.services.nodenorm import CachedNodeNorm +from src.babel_validation.core.testrow import TestResult, TestStatus + + +def test_github_issue(request, target_info, github_issue, github_issues_test_cases_fixture, subtests): + nodenorm = CachedNodeNorm.from_url(target_info['NodeNormURL']) + nameres = CachedNameRes.from_url(target_info['NameResURL']) + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(github_issue) + if not tests: + pytest.skip(f"No tests found in issue {github_issue}") + return + + parts = 
github_issue.html_url.split('/') + issue_id = f"{parts[3]}/{parts[4]}#{github_issue.number}" + + # Open issues are expected to have failing tests. Mark as xfail(strict=False) so + # that failures show as XFAIL (x) and unexpected full passes show as XPASS (X). + if github_issue.state == "open": + request.node.add_marker(pytest.mark.xfail( + reason=f"Issue {issue_id} is still open", + strict=False, + )) + + for test_issue in tests: + results_nodenorm = test_issue.test_with_nodenorm(nodenorm) + results_nameres = test_issue.test_with_nameres(nodenorm, nameres) + + for result in itertools.chain(results_nodenorm, results_nameres): + with subtests.test(msg=issue_id): + match result: + case TestResult(status=TestStatus.Passed, message=message): + assert True, f"{issue_id} ({github_issue.state}): {message}" + + case TestResult(status=TestStatus.Failed, message=message): + assert False, f"{issue_id} ({github_issue.state}): {message}" + + case TestResult(status=TestStatus.Skipped, message=message): + pytest.skip(f"{issue_id} ({github_issue.state}): {message}") + + case _: + assert False, f"Unknown result from {issue_id}: {result}" diff --git a/tests/github_issues/test_system.py b/tests/github_issues/test_system.py new file mode 100644 index 0000000..ae1fe4f --- /dev/null +++ b/tests/github_issues/test_system.py @@ -0,0 +1,61 @@ +"""System tests for BabelTest trigger detection in GitHub issue bodies.""" + +from unittest.mock import MagicMock +import pytest + +INVALID_NAME = "NotARealAssertion" + + +def _mock_issue(body: str, number: int = 999) -> MagicMock: + """Minimal mock GitHub Issue for get_test_issues_from_issue().""" + issue = MagicMock() + issue.body = body + issue.number = number + issue.repository.full_name = "test-org/test-repo" + return issue + + +class TestInvalidAssertionNameDetection: + """Invalid assertion names are parsed but raise ValueError at execution time.""" + + def _wiki_issue(self): + return _mock_issue(f"{{{{BabelTest|{INVALID_NAME}|CHEBI:90926}}}}") + + def _yaml_issue(self): + return _mock_issue( + f"```yaml\nbabel_tests:\n {INVALID_NAME}:\n - CHEBI:90926\n```" + ) + + # --- parsing: invalid names are extracted, not rejected --- + + def test_wiki_syntax_parses_invalid_name(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._wiki_issue()) + assert len(tests) == 1 + assert tests[0].assertion == INVALID_NAME + + def test_yaml_syntax_parses_invalid_name(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._yaml_issue()) + assert len(tests) == 1 + assert tests[0].assertion == INVALID_NAME + + # --- execution: invalid names raise ValueError before any service call --- + + def test_wiki_invalid_name_raises_on_nodenorm(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._wiki_issue()) + with pytest.raises(ValueError, match="Unknown assertion type"): + list(tests[0].test_with_nodenorm(None)) + + def test_wiki_invalid_name_raises_on_nameres(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._wiki_issue()) + with pytest.raises(ValueError, match="Unknown assertion type"): + list(tests[0].test_with_nameres(None, None)) + + def test_yaml_invalid_name_raises_on_nodenorm(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._yaml_issue()) + with pytest.raises(ValueError, 
match="Unknown assertion type"): + list(tests[0].test_with_nodenorm(None)) + + def test_yaml_invalid_name_raises_on_nameres(self, github_issues_test_cases_fixture): + tests = github_issues_test_cases_fixture.get_test_issues_from_issue(self._yaml_issue()) + with pytest.raises(ValueError, match="Unknown assertion type"): + list(tests[0].test_with_nameres(None, None)) diff --git a/tests/nameres/test_blocklist.py b/tests/nameres/test_blocklist.py index 3e7c2c4..3f3eb2e 100644 --- a/tests/nameres/test_blocklist.py +++ b/tests/nameres/test_blocklist.py @@ -1,79 +1,9 @@ -import csv -import io import logging -import urllib.parse -from dataclasses import dataclass -from typing import Optional import requests import pytest - -# The Translator Blocklist is stored in a private GitHub repository; however, -# we are currently using a spreadsheet to manage "Red Team" exercises where -# multiple Translator members try out different offensive terms and log them -# into a single spreadsheet. Eventually this test will support both, but since -# my immediate need is to check the spreadsheet, I'll start with that. -@dataclass(frozen=True) -class BlocklistEntry: - """ - A single Blocklist entry. - """ - Query: Optional[str] = None - CURIE: Optional[str] = None - Blocked: str = None - Status: str = None - Issue: str = None - TreatsOnly: str = None - Submitter: str = None - Comment: str = None - - def is_blocked(self): - """ Is this term supposed to be blocked? """ - if self.Blocked is not None and self.Blocked == 'y': - return True - return False - - @staticmethod - def from_gsheet_dict(row): - """ - Given a dictionary from a row in Google Sheets, fill in the necessary fields. - - :return: A BlocklistEntry with the filled in fields. - """ - - return BlocklistEntry( - Query=row.get('String (optional)', None), - CURIE=row.get('CURIE (optional)', None), - Blocked=row['Blocked?'], - Status=row['Status (Feb 21, 2024)'], - Issue=row['Blocklist issue'], - TreatsOnly=row['Block for "treats" only?'], - Submitter=row['Submitter'], - Comment=row['Comment (optional)'], - ) - - -def load_blocklist_from_gsheet(): - """ - Load the Blocklist from a Google Sheet. - - :param google_sheet_id: The Google Sheet ID containing the blocklist. - :return: A list of BlocklistEntry. - """ - google_sheet_id = '1UR2eplHBvFRwaSIVOhlB44wpfNPY1z7AVzUkqzDqIWA' - csv_url = f"https://docs.google.com/spreadsheets/d/{google_sheet_id}/gviz/tq?tqx=out:csv&sheet=Tests" - - response = requests.get(csv_url) - csv_content = response.text - - rows = [] - with io.StringIO(csv_content) as f: - reader = csv.DictReader(f) - for row in reader: - rows.append(BlocklistEntry.from_gsheet_dict(row)) - - return rows +from src.babel_validation.sources.google_sheets.blocklist import BlocklistEntry, load_blocklist_from_gsheet # Parameterize blocklist entries. blocklist_entries = load_blocklist_from_gsheet() diff --git a/tests/nameres/test_nameres_from_gsheet.py b/tests/nameres/test_nameres_from_gsheet.py index 199e074..5c5993d 100644 --- a/tests/nameres/test_nameres_from_gsheet.py +++ b/tests/nameres/test_nameres_from_gsheet.py @@ -1,7 +1,7 @@ import urllib.parse import requests import pytest -from common.google_sheet_test_cases import GoogleSheetTestCases, TestRow +from src.babel_validation.sources.google_sheets.google_sheet_test_cases import GoogleSheetTestCases # Configuration options NAMERES_TIMEOUT = 10 # If we don't get a response in 10 seconds, that's a fail. 
diff --git a/tests/nodenorm/test_nodenorm_descriptions.py b/tests/nodenorm/test_nodenorm_descriptions.py index 7583194..1b82c6f 100644 --- a/tests/nodenorm/test_nodenorm_descriptions.py +++ b/tests/nodenorm/test_nodenorm_descriptions.py @@ -6,15 +6,15 @@ import pytest import requests -IDENTIFIERS_WITH_DESCRIPTIONS = { +IDENTIFIERS_WITH_DESCRIPTIONS = [ 'MESH:D014867', 'NCIT:C34373', 'NCBIGene:1756', -} +] -IDENTIFIERS_WITHOUT_DESCRIPTIONS = { +IDENTIFIERS_WITHOUT_DESCRIPTIONS = [ 'UMLS:C0665297', # natalizumab -} +] @pytest.mark.parametrize('curie', IDENTIFIERS_WITH_DESCRIPTIONS) def test_descriptions(target_info, curie): diff --git a/tests/nodenorm/test_nodenorm_from_gsheet.py b/tests/nodenorm/test_nodenorm_from_gsheet.py index 5dfe43f..ebf4dfe 100644 --- a/tests/nodenorm/test_nodenorm_from_gsheet.py +++ b/tests/nodenorm/test_nodenorm_from_gsheet.py @@ -1,8 +1,7 @@ -import itertools import urllib.parse import requests import pytest -from common.google_sheet_test_cases import GoogleSheetTestCases, TestRow +from src.babel_validation.sources.google_sheets.google_sheet_test_cases import GoogleSheetTestCases # We generate a set of tests from the GoogleSheetTestCases. gsheet = GoogleSheetTestCases() @@ -89,4 +88,4 @@ def test_normalization(target_info, test_row, test_category): f"found in types: {biolink_types}") else: assert biolink_type in set(biolink_types), (f"{test_summary} biolink type {biolink_type} not found in " - f"types: {biolink_types}") \ No newline at end of file + f"types: {biolink_types}") diff --git a/tests/test_assertions_docs.py b/tests/test_assertions_docs.py new file mode 100644 index 0000000..d98130f --- /dev/null +++ b/tests/test_assertions_docs.py @@ -0,0 +1,11 @@ +from src.babel_validation.assertions.gen_docs import generate_readme, README_PATH + + +def test_assertions_readme_is_up_to_date(): + expected = generate_readme() + actual = README_PATH.read_text(encoding="utf-8") + assert actual == expected, ( + "assertions/README.md is out of date.\n" + "Regenerate it with:\n" + " uv run python -m src.babel_validation.assertions.gen_docs" + ) diff --git a/tests/test_env.py b/tests/test_environment/test_env.py similarity index 79% rename from tests/test_env.py rename to tests/test_environment/test_env.py index 87a4f6a..5302198 100644 --- a/tests/test_env.py +++ b/tests/test_environment/test_env.py @@ -1,7 +1,7 @@ # Test whether the test environment is functional. import json -from common.google_sheet_test_cases import GoogleSheetTestCases +from src.babel_validation.sources.google_sheets.google_sheet_test_cases import GoogleSheetTestCases def test_google_sheet_has_test_cases(): @@ -13,4 +13,4 @@ def test_google_sheet_has_test_cases(): print(f"Found {len(gsheet.rows)} test cases in {gsheet}: {json.dumps(gsheet.rows[:10], indent=2)}") categories = gsheet.categories() - assert 'Unit Tests' in categories \ No newline at end of file + assert 'Unit Tests' in categories diff --git a/uv.lock b/uv.lock index 3369043..b6d3c19 100644 --- a/uv.lock +++ b/uv.lock @@ -18,18 +18,28 @@ source = { virtual = "." 
} dependencies = [ { name = "black" }, { name = "deepdiff" }, + { name = "dotenv" }, + { name = "filelock" }, { name = "openapi-spec-validator" }, + { name = "pygithub" }, { name = "pytest" }, + { name = "pytest-xdist", extra = ["psutil"] }, { name = "requests" }, + { name = "tqdm" }, ] [package.metadata] requires-dist = [ { name = "black", specifier = ">=25.9.0" }, { name = "deepdiff", specifier = ">=8.6.1" }, + { name = "dotenv", specifier = ">=0.9.9" }, + { name = "filelock" }, { name = "openapi-spec-validator", specifier = ">=0.7.2" }, - { name = "pytest", specifier = ">=8.4.2" }, + { name = "pygithub", specifier = ">=2.8.1" }, + { name = "pytest", specifier = ">=9.0" }, + { name = "pytest-xdist", extras = ["psutil"] }, { name = "requests", specifier = ">=2.32.5" }, + { name = "tqdm", specifier = ">=4.67.1" }, ] [[package]] @@ -70,6 +80,76 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = 
"sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, 
upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.4" @@ -164,6 +244,68 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { 
url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, + { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, + { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, + { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, + { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, + { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, + { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, + { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, + { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, + { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + [[package]] name = "deepdiff" version = "8.6.1" @@ -176,6 +318,35 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/e6/efe534ef0952b531b630780e19cabd416e2032697019d5295defc6ef9bd9/deepdiff-8.6.1-py3-none-any.whl", hash = "sha256:ee8708a7f7d37fb273a541fa24ad010ed484192cd0c4ffc0fa0ed5e2d4b9e78b", size = 91378, upload-time = "2025-09-03T19:40:39.679Z" }, ] +[[package]] +name = "dotenv" +version = "0.9.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dotenv" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/b7/545d2c10c1fc15e48653c91efde329a790f2eecfbbf2bd16003b5db2bab0/dotenv-0.9.9-py2.py3-none-any.whl", hash = "sha256:29cf74a087b31dafdb5a446b6d7e11cbce8ed2741540e2339c69fbef92c94ce9", size = 1892, upload-time = "2025-02-19T22:15:01.647Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/bf/89/780e11f9588d9e7128a3f87788354c7946a9cbb1401ad38a48c4db9a4f07/execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd", size = 166622, upload-time = "2025-11-12T09:56:37.75Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/84/02fc1827e8cdded4aa65baef11296a9bbe595c474f0d6d758af082d849fd/execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec", size = 40708, upload-time = "2025-11-12T09:56:36.333Z" }, +] + +[[package]] +name = "filelock" +version = "3.24.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" }, +] + [[package]] name = "idna" version = "3.11" @@ -367,6 +538,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "psutil" +version = "7.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/c6/d1ddf4abb55e93cebc4f2ed8b5d6dbad109ecb8d63748dd2b20ab5e57ebe/psutil-7.2.2.tar.gz", hash = "sha256:0746f5f8d406af344fd547f1c8daa5f5c33dbc293bb8d6a16d80b4bb88f59372", size = 493740, upload-time = "2026-01-28T18:14:54.428Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/08/510cbdb69c25a96f4ae523f733cdc963ae654904e8db864c07585ef99875/psutil-7.2.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2edccc433cbfa046b980b0df0171cd25bcaeb3a68fe9022db0979e7aa74a826b", size = 130595, upload-time = "2026-01-28T18:14:57.293Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f5/97baea3fe7a5a9af7436301f85490905379b1c6f2dd51fe3ecf24b4c5fbf/psutil-7.2.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e78c8603dcd9a04c7364f1a3e670cea95d51ee865e4efb3556a3a63adef958ea", size = 131082, upload-time = "2026-01-28T18:14:59.732Z" }, + { url = "https://files.pythonhosted.org/packages/37/d6/246513fbf9fa174af531f28412297dd05241d97a75911ac8febefa1a53c6/psutil-7.2.2-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a571f2330c966c62aeda00dd24620425d4b0cc86881c89861fbc04549e5dc63", size = 181476, upload-time = "2026-01-28T18:15:01.884Z" }, + { url = "https://files.pythonhosted.org/packages/b8/b5/9182c9af3836cca61696dabe4fd1304e17bc56cb62f17439e1154f225dd3/psutil-7.2.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:917e891983ca3c1887b4ef36447b1e0873e70c933afc831c6b6da078ba474312", size = 184062, upload-time = "2026-01-28T18:15:04.436Z" }, + { url = "https://files.pythonhosted.org/packages/16/ba/0756dca669f5a9300d0cbcbfae9a4c30e446dfc7440ffe43ded5724bfd93/psutil-7.2.2-cp313-cp313t-win_amd64.whl", hash = 
"sha256:ab486563df44c17f5173621c7b198955bd6b613fb87c71c161f827d3fb149a9b", size = 139893, upload-time = "2026-01-28T18:15:06.378Z" }, + { url = "https://files.pythonhosted.org/packages/1c/61/8fa0e26f33623b49949346de05ec1ddaad02ed8ba64af45f40a147dbfa97/psutil-7.2.2-cp313-cp313t-win_arm64.whl", hash = "sha256:ae0aefdd8796a7737eccea863f80f81e468a1e4cf14d926bd9b6f5f2d5f90ca9", size = 135589, upload-time = "2026-01-28T18:15:08.03Z" }, + { url = "https://files.pythonhosted.org/packages/81/69/ef179ab5ca24f32acc1dac0c247fd6a13b501fd5534dbae0e05a1c48b66d/psutil-7.2.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:eed63d3b4d62449571547b60578c5b2c4bcccc5387148db46e0c2313dad0ee00", size = 130664, upload-time = "2026-01-28T18:15:09.469Z" }, + { url = "https://files.pythonhosted.org/packages/7b/64/665248b557a236d3fa9efc378d60d95ef56dd0a490c2cd37dafc7660d4a9/psutil-7.2.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7b6d09433a10592ce39b13d7be5a54fbac1d1228ed29abc880fb23df7cb694c9", size = 131087, upload-time = "2026-01-28T18:15:11.724Z" }, + { url = "https://files.pythonhosted.org/packages/d5/2e/e6782744700d6759ebce3043dcfa661fb61e2fb752b91cdeae9af12c2178/psutil-7.2.2-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fa4ecf83bcdf6e6c8f4449aff98eefb5d0604bf88cb883d7da3d8d2d909546a", size = 182383, upload-time = "2026-01-28T18:15:13.445Z" }, + { url = "https://files.pythonhosted.org/packages/57/49/0a41cefd10cb7505cdc04dab3eacf24c0c2cb158a998b8c7b1d27ee2c1f5/psutil-7.2.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e452c464a02e7dc7822a05d25db4cde564444a67e58539a00f929c51eddda0cf", size = 185210, upload-time = "2026-01-28T18:15:16.002Z" }, + { url = "https://files.pythonhosted.org/packages/dd/2c/ff9bfb544f283ba5f83ba725a3c5fec6d6b10b8f27ac1dc641c473dc390d/psutil-7.2.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c7663d4e37f13e884d13994247449e9f8f574bc4655d509c3b95e9ec9e2b9dc1", size = 141228, upload-time = "2026-01-28T18:15:18.385Z" }, + { url = "https://files.pythonhosted.org/packages/f2/fc/f8d9c31db14fcec13748d373e668bc3bed94d9077dbc17fb0eebc073233c/psutil-7.2.2-cp314-cp314t-win_arm64.whl", hash = "sha256:11fe5a4f613759764e79c65cf11ebdf26e33d6dd34336f8a337aa2996d71c841", size = 136284, upload-time = "2026-01-28T18:15:19.912Z" }, + { url = "https://files.pythonhosted.org/packages/e7/36/5ee6e05c9bd427237b11b3937ad82bb8ad2752d72c6969314590dd0c2f6e/psutil-7.2.2-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ed0cace939114f62738d808fdcecd4c869222507e266e574799e9c0faa17d486", size = 129090, upload-time = "2026-01-28T18:15:22.168Z" }, + { url = "https://files.pythonhosted.org/packages/80/c4/f5af4c1ca8c1eeb2e92ccca14ce8effdeec651d5ab6053c589b074eda6e1/psutil-7.2.2-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a7b04c10f32cc88ab39cbf606e117fd74721c831c98a27dc04578deb0c16979", size = 129859, upload-time = "2026-01-28T18:15:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/b5/70/5d8df3b09e25bce090399cf48e452d25c935ab72dad19406c77f4e828045/psutil-7.2.2-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:076a2d2f923fd4821644f5ba89f059523da90dc9014e85f8e45a5774ca5bc6f9", size = 155560, upload-time = "2026-01-28T18:15:25.976Z" }, + { url = "https://files.pythonhosted.org/packages/63/65/37648c0c158dc222aba51c089eb3bdfa238e621674dc42d48706e639204f/psutil-7.2.2-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", 
hash = "sha256:b0726cecd84f9474419d67252add4ac0cd9811b04d61123054b9fb6f57df6e9e", size = 156997, upload-time = "2026-01-28T18:15:27.794Z" }, + { url = "https://files.pythonhosted.org/packages/8e/13/125093eadae863ce03c6ffdbae9929430d116a246ef69866dad94da3bfbc/psutil-7.2.2-cp36-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:fd04ef36b4a6d599bbdb225dd1d3f51e00105f6d48a28f006da7f9822f2606d8", size = 148972, upload-time = "2026-01-28T18:15:29.342Z" }, + { url = "https://files.pythonhosted.org/packages/04/78/0acd37ca84ce3ddffaa92ef0f571e073faa6d8ff1f0559ab1272188ea2be/psutil-7.2.2-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b58fabe35e80b264a4e3bb23e6b96f9e45a3df7fb7eed419ac0e5947c61e47cc", size = 148266, upload-time = "2026-01-28T18:15:31.597Z" }, + { url = "https://files.pythonhosted.org/packages/b4/90/e2159492b5426be0c1fef7acba807a03511f97c5f86b3caeda6ad92351a7/psutil-7.2.2-cp37-abi3-win_amd64.whl", hash = "sha256:eb7e81434c8d223ec4a219b5fc1c47d0417b12be7ea866e24fb5ad6e84b3d988", size = 137737, upload-time = "2026-01-28T18:15:33.849Z" }, + { url = "https://files.pythonhosted.org/packages/8c/c7/7bb2e321574b10df20cbde462a94e2b71d05f9bbda251ef27d104668306a/psutil-7.2.2-cp37-abi3-win_arm64.whl", hash = "sha256:8c233660f575a5a89e6d4cb65d9f938126312bca76d8fe087b947b3a1aaac9ee", size = 134617, upload-time = "2026-01-28T18:15:36.514Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pygithub" +version = "2.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyjwt", extra = ["crypto"] }, + { name = "pynacl" }, + { name = "requests" }, + { name = "typing-extensions" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c1/74/e560bdeffea72ecb26cff27f0fad548bbff5ecc51d6a155311ea7f9e4c4c/pygithub-2.8.1.tar.gz", hash = "sha256:341b7c78521cb07324ff670afd1baa2bf5c286f8d9fd302c1798ba594a5400c9", size = 2246994, upload-time = "2025-09-02T17:41:54.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/ba/7049ce39f653f6140aac4beb53a5aaf08b4407b6a3019aae394c1c5244ff/pygithub-2.8.1-py3-none-any.whl", hash = "sha256:23a0a5bca93baef082e03411bf0ce27204c32be8bfa7abc92fe4a3e132936df0", size = 432709, upload-time = "2025-09-02T17:41:52.947Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -376,9 +600,58 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", 
hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pynacl" +version = "1.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d9/9a/4019b524b03a13438637b11538c82781a5eda427394380381af8f04f467a/pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c", size = 3511692, upload-time = "2026-01-01T17:48:10.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/79/0e3c34dc3c4671f67d251c07aa8eb100916f250ee470df230b0ab89551b4/pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594", size = 390064, upload-time = "2026-01-01T17:31:57.264Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/23a26e931736e13b16483795c8a6b2f641bf6a3d5238c22b070a5112722c/pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0", size = 809370, upload-time = "2026-01-01T17:31:59.198Z" }, + { url = "https://files.pythonhosted.org/packages/87/74/8d4b718f8a22aea9e8dcc8b95deb76d4aae380e2f5b570cc70b5fd0a852d/pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9", size = 1408304, upload-time = "2026-01-01T17:32:01.162Z" }, + { url = "https://files.pythonhosted.org/packages/fd/73/be4fdd3a6a87fe8a4553380c2b47fbd1f7f58292eb820902f5c8ac7de7b0/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574", size = 844871, upload-time = "2026-01-01T17:32:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/55/ad/6efc57ab75ee4422e96b5f2697d51bbcf6cdcc091e66310df91fbdc144a8/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634", size = 1446356, upload-time = "2026-01-01T17:32:04.452Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/928ee9c4779caa0a915844311ab9fb5f99585621c5d6e4574538a17dca07/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88", size = 826814, upload-time = "2026-01-01T17:32:06.078Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a9/1bdba746a2be20f8809fee75c10e3159d75864ef69c6b0dd168fc60e485d/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14", size = 1411742, upload-time = "2026-01-01T17:32:07.651Z" }, + { url = "https://files.pythonhosted.org/packages/f3/2f/5e7ea8d85f9f3ea5b6b87db1d8388daa3587eed181bdeb0306816fdbbe79/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444", size = 801714, upload-time = "2026-01-01T17:32:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/06/ea/43fe2f7eab5f200e40fb10d305bf6f87ea31b3bbc83443eac37cd34a9e1e/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b", size = 1372257, upload-time = "2026-01-01T17:32:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/4d/54/c9ea116412788629b1347e415f72195c25eb2f3809b2d3e7b25f5c79f13a/pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145", size = 231319, upload-time = "2026-01-01T17:32:12.46Z" }, + { url = "https://files.pythonhosted.org/packages/ce/04/64e9d76646abac2dccf904fccba352a86e7d172647557f35b9fe2a5ee4a1/pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590", size = 244044, upload-time = "2026-01-01T17:32:13.781Z" }, + { url = "https://files.pythonhosted.org/packages/33/33/7873dc161c6a06f43cda13dec67b6fe152cb2f982581151956fa5e5cdb47/pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2", size = 188740, upload-time = "2026-01-01T17:32:15.083Z" }, + { url = "https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465", size = 388458, upload-time = "2026-01-01T17:32:16.829Z" }, + { url = "https://files.pythonhosted.org/packages/1e/b4/e927e0653ba63b02a4ca5b4d852a8d1d678afbf69b3dbf9c4d0785ac905c/pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0", size = 800020, upload-time = "2026-01-01T17:32:18.34Z" }, + { url = "https://files.pythonhosted.org/packages/7f/81/d60984052df5c97b1d24365bc1e30024379b42c4edcd79d2436b1b9806f2/pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4", size = 1399174, upload-time = "2026-01-01T17:32:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/f7/322f2f9915c4ef27d140101dd0ed26b479f7e6f5f183590fd32dfc48c4d3/pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87", size = 835085, upload-time = "2026-01-01T17:32:22.24Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c", size = 1437614, upload-time = "2026-01-01T17:32:23.766Z" }, + { url = "https://files.pythonhosted.org/packages/c4/58/fc6e649762b029315325ace1a8c6be66125e42f67416d3dbd47b69563d61/pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130", size = 818251, upload-time = "2026-01-01T17:32:25.69Z" }, + { url = "https://files.pythonhosted.org/packages/c9/a8/b917096b1accc9acd878819a49d3d84875731a41eb665f6ebc826b1af99e/pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6", size = 
1402859, upload-time = "2026-01-01T17:32:27.215Z" }, + { url = "https://files.pythonhosted.org/packages/85/42/fe60b5f4473e12c72f977548e4028156f4d340b884c635ec6b063fe7e9a5/pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e", size = 791926, upload-time = "2026-01-01T17:32:29.314Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f9/e40e318c604259301cc091a2a63f237d9e7b424c4851cafaea4ea7c4834e/pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577", size = 1363101, upload-time = "2026-01-01T17:32:31.263Z" }, + { url = "https://files.pythonhosted.org/packages/48/47/e761c254f410c023a469284a9bc210933e18588ca87706ae93002c05114c/pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa", size = 227421, upload-time = "2026-01-01T17:32:33.076Z" }, + { url = "https://files.pythonhosted.org/packages/41/ad/334600e8cacc7d86587fe5f565480fde569dfb487389c8e1be56ac21d8ac/pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0", size = 239754, upload-time = "2026-01-01T17:32:34.557Z" }, + { url = "https://files.pythonhosted.org/packages/29/7d/5945b5af29534641820d3bd7b00962abbbdfee84ec7e19f0d5b3175f9a31/pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = "sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c", size = 184801, upload-time = "2026-01-01T17:32:36.309Z" }, +] + [[package]] name = "pytest" -version = "8.4.2" +version = "9.0.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -387,9 +660,36 @@ dependencies = [ { name = "pluggy" }, { name = "pygments" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[package.optional-dependencies] +psutil = [ + 
{ name = "psutil" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, ] [[package]] @@ -614,6 +914,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, ] +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + [[package]] name = "typing-extensions" version = "4.15.0"