diff --git a/.claude/rules/architecture.md b/.claude/rules/architecture.md index cc53dac0f..535e08d79 100644 --- a/.claude/rules/architecture.md +++ b/.claude/rules/architecture.md @@ -26,3 +26,17 @@ codeflash/ ├── result/ # Result types and handling └── version.py # Version information ``` + +## Key Entry Points + +| Task | Start here | +|------|------------| +| CLI arguments & commands | `cli_cmds/cli.py` | +| Optimization orchestration | `optimization/optimizer.py` → `run()` | +| Per-function optimization | `optimization/function_optimizer.py` | +| Function discovery | `discovery/functions_to_optimize.py` | +| Context extraction | `context/code_context_extractor.py` | +| Test execution | `verification/test_runner.py`, `verification/pytest_plugin.py` | +| Performance ranking | `benchmarking/function_ranker.py` | +| Domain types | `models/models.py`, `models/function_types.py` | +| Result handling | `either.py` (`Result`, `Success`, `Failure`, `is_successful`) | diff --git a/.claude/rules/code-style.md b/.claude/rules/code-style.md index fcad0f253..bcb8fd30b 100644 --- a/.claude/rules/code-style.md +++ b/.claude/rules/code-style.md @@ -2,6 +2,7 @@ - **Line length**: 120 characters - **Python**: 3.9+ syntax +- **Package management**: Always use `uv`, never `pip` - **Tooling**: Ruff for linting/formatting, mypy strict mode, prek for pre-commit checks - **Comments**: Minimal - only explain "why", not "what" - **Docstrings**: Do not add unless explicitly requested diff --git a/.claude/rules/git.md b/.claude/rules/git.md index 058e8ca80..d1be68114 100644 --- a/.claude/rules/git.md +++ b/.claude/rules/git.md @@ -1,5 +1,6 @@ # Git Commits & Pull Requests +- **Always create a new branch from `main` before starting any new work** — never commit directly to `main` or reuse an existing feature branch for unrelated changes - Use conventional commit format: `fix:`, `feat:`, `refactor:`, `docs:`, `test:`, `chore:` - Keep commits atomic - one logical change per commit - Commit 
message body should be concise (1-2 sentences max) diff --git a/.claude/rules/language-patterns.md b/.claude/rules/language-patterns.md new file mode 100644 index 000000000..8616eb478 --- /dev/null +++ b/.claude/rules/language-patterns.md @@ -0,0 +1,12 @@ +--- +paths: + - "codeflash/languages/**/*.py" +--- + +# Language Support Patterns + +- Current language is a module-level singleton in `languages/current.py` — use `set_current_language()` / `current_language()`, never pass language as a parameter through call chains +- Use `get_language_support(identifier)` from `languages/registry.py` to get a `LanguageSupport` instance — never import language classes directly +- New language support classes must use the `@register_language` decorator to register with the extension and language registries +- `languages/__init__.py` uses `__getattr__` for lazy imports to avoid circular dependencies — follow this pattern when adding new exports +- `is_javascript()` returns `True` for both JavaScript and TypeScript diff --git a/.claude/rules/optimization-patterns.md b/.claude/rules/optimization-patterns.md new file mode 100644 index 000000000..f677d48de --- /dev/null +++ b/.claude/rules/optimization-patterns.md @@ -0,0 +1,17 @@ +--- +paths: + - "codeflash/optimization/**/*.py" + - "codeflash/verification/**/*.py" + - "codeflash/benchmarking/**/*.py" + - "codeflash/context/**/*.py" +--- + +# Optimization Pipeline Patterns + +- All major operations return `Result[SuccessType, ErrorType]` — construct with `Success(value)` / `Failure(error)`, check with `is_successful()` before calling `unwrap()` +- Code context has token limits (`OPTIMIZATION_CONTEXT_TOKEN_LIMIT`, `TESTGEN_CONTEXT_TOKEN_LIMIT` in `config_consts.py`) — exceeding them rejects the function +- `read_writable_code` can span multiple files; `read_only_context_code` is reference-only +- Code is serialized as markdown code blocks: ` ```language:filepath\ncode\n``` ` (see `CodeStringsMarkdown`) +- Candidates form a forest 
(DAG): refinements/repairs reference `parent_id` on previous candidates +- Test generation and optimization run concurrently — coordinate through `CandidateEvaluationContext` +- Generated tests are instrumented with `codeflash_capture.py` to record return values and traces diff --git a/.claude/rules/source-code.md b/.claude/rules/source-code.md index 27c939642..297daa6ae 100644 --- a/.claude/rules/source-code.md +++ b/.claude/rules/source-code.md @@ -6,6 +6,3 @@ paths: # Source Code Rules - Use `libcst` for code modification/transformation to preserve formatting. `ast` is acceptable for read-only analysis and parsing. -- NEVER use leading underscores for function names (e.g., `_helper`). Python has no true private functions. Always use public names. -- Any new feature or bug fix that can be tested automatically must have test cases. -- If changes affect existing test expectations, update the tests accordingly. Tests must always pass after changes. diff --git a/.claude/rules/testing.md b/.claude/rules/testing.md index 809a4ea91..d604e56e6 100644 --- a/.claude/rules/testing.md +++ b/.claude/rules/testing.md @@ -13,3 +13,5 @@ paths: - Use `.as_posix()` when converting resolved paths to strings (normalizes to forward slashes). - Any new feature or bug fix that can be tested automatically must have test cases. - If changes affect existing test expectations, update the tests accordingly. Tests must always pass after changes. +- The pytest plugin patches `time`, `random`, `uuid`, and `datetime` for deterministic test execution — never assume real randomness or real time in verification tests. +- `conftest.py` uses an autouse fixture that calls `reset_current_language()` — tests always start with Python as the default language. 
diff --git a/.claude/skills/fix-mypy.md b/.claude/skills/fix-mypy.md new file mode 100644 index 000000000..1a9432bf3 --- /dev/null +++ b/.claude/skills/fix-mypy.md @@ -0,0 +1,12 @@ +# Fix mypy errors + +When modifying code, fix any mypy type errors in the files you changed: + +```bash +uv run mypy --install-types --non-interactive --config-file pyproject.toml +``` + +- Fix type annotation issues: missing return types, incorrect types, Optional/None unions, import errors for type hints +- Do NOT add `# type: ignore` comments — always fix the root cause +- Do NOT fix type errors that require logic changes, complex generic type rework, or anything that could change runtime behavior +- Files in `mypy_allowlist.txt` are checked in CI — ensure they remain error-free diff --git a/.claude/skills/fix-prek.md b/.claude/skills/fix-prek.md new file mode 100644 index 000000000..f681512ec --- /dev/null +++ b/.claude/skills/fix-prek.md @@ -0,0 +1,9 @@ +# Fix prek failures + +When prek (pre-commit) checks fail: + +1. Run `uv run prek run` to see failures (local, checks staged files) +2. In CI, the equivalent is `uv run prek run --from-ref origin/main` +3. prek runs ruff format, ruff check, and mypy on changed files +4. Fix issues in order: formatting → lint → type errors +5. 
Re-run `uv run prek run` to verify all checks pass diff --git a/.codex/skills/.gitignore b/.codex/skills/.gitignore new file mode 100644 index 000000000..b1cda282a --- /dev/null +++ b/.codex/skills/.gitignore @@ -0,0 +1,2 @@ +# Managed by Tessl +tessl:* diff --git a/.gemini/skills/.gitignore b/.gemini/skills/.gitignore new file mode 100644 index 000000000..b1cda282a --- /dev/null +++ b/.gemini/skills/.gitignore @@ -0,0 +1,2 @@ +# Managed by Tessl +tessl:* diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 5c89e6ea7..d691072aa 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -48,7 +48,7 @@ jobs: with: use_foundry: "true" use_sticky_comment: true - allowed_bots: "claude[bot]" + allowed_bots: "claude[bot],codeflash-ai[bot]" prompt: | REPO: ${{ github.repository }} PR NUMBER: ${{ github.event.pull_request.number }} diff --git a/.github/workflows/duplicate-code-detector.yml b/.github/workflows/duplicate-code-detector.yml new file mode 100644 index 000000000..ea36bf54d --- /dev/null +++ b/.github/workflows/duplicate-code-detector.yml @@ -0,0 +1,114 @@ +name: Duplicate Code Detector + +on: + workflow_dispatch: + pull_request: + types: [opened, synchronize] + +jobs: + detect-duplicates: + if: github.event.pull_request.head.repo.full_name == github.repository || github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + permissions: + contents: read + pull-requests: write + issues: write + id-token: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + ref: ${{ github.event.pull_request.head.ref || github.ref }} + + - name: Start Serena MCP server + run: | + docker pull ghcr.io/github/serena-mcp-server:latest + docker run -d --name serena \ + --network host \ + -v "${{ github.workspace }}:${{ github.workspace }}:rw" \ + ghcr.io/github/serena-mcp-server:latest \ + serena start-mcp-server --context codex --project "${{ github.workspace }}" + + mkdir -p 
/tmp/mcp-config + cat > /tmp/mcp-config/mcp-servers.json << 'EOF' + { + "mcpServers": { + "serena": { + "command": "docker", + "args": ["exec", "-i", "serena", "serena", "start-mcp-server", "--context", "codex", "--project", "${{ github.workspace }}"] + } + } + } + EOF + + - name: Run Claude Code + uses: anthropics/claude-code-action@v1 + with: + use_foundry: "true" + use_sticky_comment: true + allowed_bots: "claude[bot],codeflash-ai[bot]" + claude_args: '--mcp-config /tmp/mcp-config/mcp-servers.json --allowedTools "Read,Glob,Grep,Bash(git diff:*),Bash(git log:*),Bash(git show:*),Bash(wc *),Bash(find *),mcp__serena__*"' + prompt: | + You are a duplicate code detector with access to Serena semantic code analysis. + + ## Setup + + First activate the project in Serena: + - Use `mcp__serena__activate_project` with the workspace path `${{ github.workspace }}` + + ## Steps + + 1. Get the list of changed .py files (excluding tests): + `git diff --name-only origin/main...HEAD -- '*.py' | grep -v -E '(test_|_test\.py|/tests/|/test/)'` + + 2. Use Serena's semantic analysis on changed files: + - `mcp__serena__get_symbols_overview` to understand file structure + - `mcp__serena__find_symbol` to search for similarly named symbols across the codebase + - `mcp__serena__find_referencing_symbols` to understand usage patterns + - `mcp__serena__search_for_pattern` to find similar code patterns + + 3. For each changed file, look for: + - **Exact Duplication**: Identical code blocks (>10 lines) in multiple locations + - **Structural Duplication**: Same logic with minor variations (different variable names) + - **Functional Duplication**: Different implementations of the same functionality + - **Copy-Paste Programming**: Similar blocks that could be extracted into shared utilities + + 4. 
Cross-reference against the rest of the codebase using Serena: + - Search for similar function signatures and logic patterns + - Check if new code duplicates existing utilities or helpers + - Look for repeated patterns across modules + + ## What to Report + + - Identical or nearly identical functions in different files + - Repeated code blocks that could be extracted to utilities + - Similar classes or modules with overlapping functionality + - Copy-pasted code with minor modifications + - Duplicated business logic across components + + ## What to Skip + + - Standard boilerplate (imports, __init__, etc.) + - Test setup/teardown code + - Configuration with similar structure + - Language-specific patterns (constructors, getters/setters) + - Small snippets (<5 lines) unless highly repetitive + - Workflow files under .github/ + + ## Output + + Post a single PR comment with your findings. For each pattern found: + - Severity (High/Medium/Low) + - File locations with line numbers + - Code samples showing the duplication + - Concrete refactoring suggestion + + If no significant duplication is found, say so briefly. Do not create issues — just comment on the PR. + env: + ANTHROPIC_FOUNDRY_API_KEY: ${{ secrets.AZURE_ANTHROPIC_API_KEY }} + ANTHROPIC_FOUNDRY_BASE_URL: ${{ secrets.AZURE_ANTHROPIC_ENDPOINT }} + + - name: Stop Serena + if: always() + run: docker stop serena && docker rm serena || true diff --git a/.mcp.json b/.mcp.json new file mode 100644 index 000000000..ebfccaac7 --- /dev/null +++ b/.mcp.json @@ -0,0 +1,12 @@ +{ + "mcpServers": { + "tessl": { + "type": "stdio", + "command": "tessl", + "args": [ + "mcp", + "start" + ] + } + } +} diff --git a/CLAUDE.md b/CLAUDE.md index fdc1b943b..622351db4 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,37 +1,32 @@ # CLAUDE.md -This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. 
- ## Project Overview CodeFlash is an AI-powered Python code optimizer that automatically improves code performance while maintaining correctness. It uses LLMs to generate optimization candidates, verifies correctness through test execution, and benchmarks performance improvements. -## Common Commands - -```bash -# Package management (NEVER use pip) -uv sync # Install dependencies -uv sync --group dev # Install dev dependencies -uv add # Add a package - -# Running tests -uv run pytest tests/ # Run all tests -uv run pytest tests/test_foo.py # Run specific test file -uv run pytest tests/test_foo.py::test_bar -v # Run single test - -# Type checking and linting -uv run mypy codeflash/ # Type check -uv run ruff check codeflash/ # Lint -uv run ruff format codeflash/ # Format +## Optimization Pipeline -# Linting (run before committing) -uv run prek run --from-ref origin/main - -# Running the CLI -uv run codeflash --help -uv run codeflash init # Initialize in a project -uv run codeflash --all # Optimize entire codebase ``` +Discovery → Ranking → Context Extraction → Test Gen + Optimization → Baseline → Candidate Evaluation → PR +``` + +1. **Discovery** (`discovery/`): Find optimizable functions across the codebase +2. **Ranking** (`benchmarking/function_ranker.py`): Rank functions by addressable time using trace data +3. **Context** (`context/`): Extract code dependencies (read-writable code + read-only imports) +4. **Optimization** (`optimization/`, `api/`): Generate candidates via AI service, run in parallel with test generation +5. **Verification** (`verification/`): Run candidates against tests, compare outputs via custom pytest plugin +6. **Benchmarking** (`benchmarking/`): Measure performance, select best candidate by speedup +7. 
**Result** (`result/`, `github/`): Create PR with winning optimization + +## Domain Glossary + +- **Optimization candidate**: A generated code variant that might be faster (`OptimizedCandidate`) +- **Function context**: All code needed for optimization — split into read-writable (modifiable) and read-only (reference) +- **Addressable time**: Time a function spends that could be optimized (own time + callee time / call count) +- **Candidate forest**: DAG of candidates where refinements/repairs build on previous candidates +- **Replay test**: Test generated from recorded benchmark data to reproduce real workloads +- **Tracer**: Profiling system that records function call trees and timings (`tracing/`, `tracer.py`) +- **Worktree mode**: Git worktree-based parallel optimization (`--worktree` flag) diff --git a/code_to_optimize/js/code_to_optimize_js/bubble_sort.js b/code_to_optimize/js/code_to_optimize_js/bubble_sort.js index 8f3c9ffca..fe63d82dc 100644 --- a/code_to_optimize/js/code_to_optimize_js/bubble_sort.js +++ b/code_to_optimize/js/code_to_optimize_js/bubble_sort.js @@ -11,14 +11,21 @@ function bubbleSort(arr) { const result = arr.slice(); const n = result.length; - for (let i = 0; i < n; i++) { - for (let j = 0; j < n - 1; j++) { - if (result[j] > result[j + 1]) { - const temp = result[j]; - result[j] = result[j + 1]; - result[j + 1] = temp; + if (n <= 1) return result; + + for (let i = 0; i < n - 1; i++) { + let swapped = false; + const limit = n - i - 1; + for (let j = 0; j < limit; j++) { + const a = result[j]; + const b = result[j + 1]; + if (a > b) { + result[j] = b; + result[j + 1] = a; + swapped = true; } } + if (!swapped) break; } return result; diff --git a/code_to_optimize/js/code_to_optimize_vitest/package-lock.json b/code_to_optimize/js/code_to_optimize_vitest/package-lock.json index ac3d39afd..ef24dc459 100644 --- a/code_to_optimize/js/code_to_optimize_vitest/package-lock.json +++ b/code_to_optimize/js/code_to_optimize_vitest/package-lock.json @@ 
-15,7 +15,7 @@ } }, "../../../packages/codeflash": { - "version": "0.7.0", + "version": "0.8.0", "dev": true, "hasInstallScript": true, "license": "MIT", diff --git a/codeflash/benchmarking/trace_benchmarks.py b/codeflash/benchmarking/trace_benchmarks.py index 8217ac37e..98b8e0540 100644 --- a/codeflash/benchmarking/trace_benchmarks.py +++ b/codeflash/benchmarking/trace_benchmarks.py @@ -1,23 +1,18 @@ from __future__ import annotations -import os import re import subprocess from pathlib import Path from codeflash.cli_cmds.console import logger from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE -from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args +from codeflash.code_utils.shell_utils import get_cross_platform_subprocess_run_args, make_env_with_project_root def trace_benchmarks_pytest( benchmarks_root: Path, tests_root: Path, project_root: Path, trace_file: Path, timeout: int = 300 ) -> None: - benchmark_env = os.environ.copy() - if "PYTHONPATH" not in benchmark_env: - benchmark_env["PYTHONPATH"] = str(project_root) - else: - benchmark_env["PYTHONPATH"] += os.pathsep + str(project_root) + benchmark_env = make_env_with_project_root(project_root) run_args = get_cross_platform_subprocess_run_args( cwd=project_root, env=benchmark_env, timeout=timeout, check=False, text=True, capture_output=True ) diff --git a/codeflash/code_utils/code_utils.py b/codeflash/code_utils/code_utils.py index 95fc5d506..7a9afc96f 100644 --- a/codeflash/code_utils/code_utils.py +++ b/codeflash/code_utils/code_utils.py @@ -37,21 +37,6 @@ def is_glob_pattern(path_str: str) -> bool: def normalize_ignore_paths(paths: list[str], base_path: Path | None = None) -> list[Path]: - """Normalize ignore paths, expanding glob patterns and resolving paths. 
- - Accepts a list of path strings that can be either: - - Literal paths (relative or absolute): e.g., "node_modules", "/absolute/path" - - Glob patterns: e.g., "**/*.test.js", "dist/*", "*.log" - - Args: - paths: List of path strings (literal paths or glob patterns). - base_path: Base path for resolving relative paths and patterns. - If None, uses current working directory. - - Returns: - List of resolved Path objects, deduplicated. - - """ if base_path is None: base_path = Path.cwd() @@ -59,22 +44,25 @@ def normalize_ignore_paths(paths: list[str], base_path: Path | None = None) -> l normalized: set[Path] = set() for path_str in paths: + if not path_str: + continue + + path_str = str(path_str) + if is_glob_pattern(path_str): - # It's a glob pattern - expand it - # Use base_path as the root for glob expansion - pattern_path = base_path / path_str - # glob returns an iterator of matching paths + # pathlib requires relative glob patterns + path_str = path_str.removeprefix("./") + if path_str.startswith("/"): + path_str = path_str.lstrip("/") + for matched_path in base_path.glob(path_str): - if matched_path.exists(): - normalized.add(matched_path.resolve()) + normalized.add(matched_path.resolve()) else: - # It's a literal path path_obj = Path(path_str) if not path_obj.is_absolute(): path_obj = base_path / path_obj if path_obj.exists(): normalized.add(path_obj.resolve()) - # Silently skip non-existent literal paths (e.g., .next, dist before build) return list(normalized) diff --git a/codeflash/code_utils/concolic_utils.py b/codeflash/code_utils/concolic_utils.py index aab9a431f..d674be370 100644 --- a/codeflash/code_utils/concolic_utils.py +++ b/codeflash/code_utils/concolic_utils.py @@ -9,6 +9,7 @@ import sentry_sdk from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE, codeflash_temp_dir +from codeflash.code_utils.shell_utils import make_env_with_project_root # Known CrossHair limitations that produce invalid Python syntax in generated tests: # - "" - 
higher-order functions returning nested functions @@ -37,6 +38,7 @@ def is_valid_concolic_test(test_code: str, project_root: Optional[str] = None) - text=True, cwd=project_root, timeout=10, + env=make_env_with_project_root(project_root) if project_root else None, ) except (subprocess.TimeoutExpired, Exception): return False @@ -105,12 +107,12 @@ def clean_concolic_tests(test_suite_code: str) -> str: can_parse = False tree = None - if not can_parse: + if not can_parse or tree is None: return AssertCleanup().transform_asserts(test_suite_code) for node in ast.walk(tree): if isinstance(node, ast.FunctionDef) and node.name.startswith("test_"): - new_body = [] + new_body: list[ast.stmt] = [] for stmt in node.body: if isinstance(stmt, ast.Assert): if isinstance(stmt.test, ast.Compare) and isinstance(stmt.test.left, ast.Call): diff --git a/codeflash/code_utils/config_parser.py b/codeflash/code_utils/config_parser.py index b62368390..378171f41 100644 --- a/codeflash/code_utils/config_parser.py +++ b/codeflash/code_utils/config_parser.py @@ -94,9 +94,24 @@ def find_conftest_files(test_paths: list[Path]) -> list[Path]: def parse_config_file( config_file_path: Path | None = None, override_formatter_check: bool = False ) -> tuple[dict[str, Any], Path]: - # First try package.json for JS/TS projects package_json_path = find_package_json(config_file_path) + pyproject_toml_path = find_closest_config_file("pyproject.toml") if config_file_path is None else None + + # When both config files exist, prefer the one closer to CWD. + # This prevents a parent-directory package.json (e.g., monorepo root) + # from overriding a closer pyproject.toml with [tool.codeflash]. 
+ use_package_json = False if package_json_path: + if pyproject_toml_path is None: + use_package_json = True + else: + # Compare depth: more path parts = closer to CWD = more specific + package_json_depth = len(package_json_path.parent.parts) + pyproject_toml_depth = len(pyproject_toml_path.parent.parts) + use_package_json = package_json_depth >= pyproject_toml_depth + + if use_package_json: + assert package_json_path is not None result = parse_package_json_config(package_json_path) if result is not None: config, path = result diff --git a/codeflash/code_utils/coverage_utils.py b/codeflash/code_utils/coverage_utils.py index ed3d277a4..b5d7ab8d8 100644 --- a/codeflash/code_utils/coverage_utils.py +++ b/codeflash/code_utils/coverage_utils.py @@ -13,14 +13,28 @@ def extract_dependent_function(main_function: str, code_context: CodeOptimizationContext) -> str | Literal[False]: """Extract the single dependent function from the code context excluding the main function.""" dependent_functions = set() + + # Compare using bare name since AST extracts bare function names + bare_main = main_function.rsplit(".", 1)[-1] if "." in main_function else main_function + for code_string in code_context.testgen_context.code_strings: - ast_tree = ast.parse(code_string.code) - dependent_functions.update( - {node.name for node in ast_tree.body if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))} - ) + # Quick heuristic: skip parsing entirely if there is no 'def' token, + # since no function definitions can be present without it. + if "def" not in code_string.code: + continue - if main_function in dependent_functions: - dependent_functions.discard(main_function) + ast_tree = ast.parse(code_string.code) + # Add function names directly, skipping the bare main name. 
+ for node in ast_tree.body: + if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)): + name = node.name + if name == bare_main: + continue + dependent_functions.add(name) + # If more than one dependent function (other than the main) is found, + # we can return False early since the final result cannot be a single name. + if len(dependent_functions) > 1: + return False if not dependent_functions: return False @@ -32,6 +46,9 @@ def extract_dependent_function(main_function: str, code_context: CodeOptimizatio def build_fully_qualified_name(function_name: str, code_context: CodeOptimizationContext) -> str: + # If the name is already qualified (contains a dot), return as-is + if "." in function_name: + return function_name full_name = function_name for obj_name, parents in code_context.preexisting_objects: if obj_name == function_name: diff --git a/codeflash/code_utils/shell_utils.py b/codeflash/code_utils/shell_utils.py index df2cff2d6..2052f3e96 100644 --- a/codeflash/code_utils/shell_utils.py +++ b/codeflash/code_utils/shell_utils.py @@ -238,6 +238,18 @@ def save_api_key_to_rc(api_key: str) -> Result[str, str]: ) +def make_env_with_project_root(project_root: Path | str) -> dict[str, str]: + """Return a copy of os.environ with project_root prepended to PYTHONPATH.""" + env = os.environ.copy() + project_root_str = str(project_root) + pythonpath = env.get("PYTHONPATH", "") + if pythonpath: + env["PYTHONPATH"] = f"{project_root_str}{os.pathsep}{pythonpath}" + else: + env["PYTHONPATH"] = project_root_str + return env + + def get_cross_platform_subprocess_run_args( cwd: Path | str | None = None, env: Mapping[str, str] | None = None, diff --git a/codeflash/code_utils/time_utils.py b/codeflash/code_utils/time_utils.py index e44c279d3..ff04b5037 100644 --- a/codeflash/code_utils/time_utils.py +++ b/codeflash/code_utils/time_utils.py @@ -1,10 +1,5 @@ from __future__ import annotations -import datetime as dt -import re - -import humanize - def 
humanize_runtime(time_in_ns: int) -> str: runtime_human: str = str(time_in_ns) @@ -14,22 +9,32 @@ def humanize_runtime(time_in_ns: int) -> str: if time_in_ns / 1000 >= 1: time_micro = float(time_in_ns) / 1000 - runtime_human = humanize.precisedelta(dt.timedelta(microseconds=time_micro), minimum_unit="microseconds") - units = re.split(r",|\s", runtime_human)[1] - - if units in {"microseconds", "microsecond"}: + # Direct unit determination and formatting without external library + if time_micro < 1000: runtime_human = f"{time_micro:.3g}" - elif units in {"milliseconds", "millisecond"}: - runtime_human = "%.3g" % (time_micro / 1000) - elif units in {"seconds", "second"}: - runtime_human = "%.3g" % (time_micro / (1000**2)) - elif units in {"minutes", "minute"}: - runtime_human = "%.3g" % (time_micro / (60 * 1000**2)) - elif units in {"hour", "hours"}: # hours - runtime_human = "%.3g" % (time_micro / (3600 * 1000**2)) + units = "microseconds" if time_micro >= 2 else "microsecond" + elif time_micro < 1000000: + time_milli = time_micro / 1000 + runtime_human = f"{time_milli:.3g}" + units = "milliseconds" if time_milli >= 2 else "millisecond" + elif time_micro < 60000000: + time_sec = time_micro / 1000000 + runtime_human = f"{time_sec:.3g}" + units = "seconds" if time_sec >= 2 else "second" + elif time_micro < 3600000000: + time_min = time_micro / 60000000 + runtime_human = f"{time_min:.3g}" + units = "minutes" if time_min >= 2 else "minute" + elif time_micro < 86400000000: + time_hour = time_micro / 3600000000 + runtime_human = f"{time_hour:.3g}" + units = "hours" if time_hour >= 2 else "hour" else: # days - runtime_human = "%.3g" % (time_micro / (24 * 3600 * 1000**2)) + time_day = time_micro / 86400000000 + runtime_human = f"{time_day:.3g}" + units = "days" if time_day >= 2 else "day" + runtime_human_parts = str(runtime_human).split(".") if len(runtime_human_parts[0]) == 1: if runtime_human_parts[0] == "1" and len(runtime_human_parts) > 1: diff --git 
a/codeflash/context/code_context_extractor.py b/codeflash/context/code_context_extractor.py index 6bd36c7e1..7e0f1fa0c 100644 --- a/codeflash/context/code_context_extractor.py +++ b/codeflash/context/code_context_extractor.py @@ -70,6 +70,12 @@ def build_testgen_context( code_strings=testgen_context.code_strings + external_base_inits.code_strings ) + external_class_inits = get_external_class_inits(testgen_context, project_root_path) + if external_class_inits.code_strings: + testgen_context = CodeStringsMarkdown( + code_strings=testgen_context.code_strings + external_class_inits.code_strings + ) + return testgen_context @@ -319,14 +325,10 @@ def get_code_optimization_context_for_language( if code_context.imported_type_skeletons: testgen_code_strings.append( CodeString( - code=code_context.imported_type_skeletons, - file_path=None, - language=function_to_optimize.language, + code=code_context.imported_type_skeletons, file_path=None, language=function_to_optimize.language ) ) - testgen_context = CodeStringsMarkdown( - code_strings=testgen_code_strings, language=function_to_optimize.language - ) + testgen_context = CodeStringsMarkdown(code_strings=testgen_code_strings, language=function_to_optimize.language) # Check token limits read_writable_tokens = encoded_tokens_len(read_writable_code.markdown) @@ -830,6 +832,210 @@ def get_external_base_class_inits(code_context: CodeStringsMarkdown, project_roo return CodeStringsMarkdown(code_strings=code_strings) +MAX_TRANSITIVE_DEPTH = 2 + + +def extract_classes_from_type_hint(hint: object) -> list[type]: + """Recursively extract concrete class objects from a type annotation. + + Unwraps Optional, Union, List, Dict, Callable, Annotated, etc. + Filters out builtins and typing module types. 
+ """ + import typing + + classes: list[type] = [] + origin = getattr(hint, "__origin__", None) + args = getattr(hint, "__args__", None) + + if origin is not None and args: + for arg in args: + classes.extend(extract_classes_from_type_hint(arg)) + elif isinstance(hint, type): + module = getattr(hint, "__module__", "") + if module not in ("builtins", "typing", "typing_extensions", "types"): + classes.append(hint) + # Handle typing.Annotated on older Pythons where __origin__ may not be set + if hasattr(typing, "get_args") and origin is None and args is None: + try: + inner_args = typing.get_args(hint) + if inner_args: + for arg in inner_args: + classes.extend(extract_classes_from_type_hint(arg)) + except Exception: + pass + + return classes + + +def resolve_transitive_type_deps(cls: type) -> list[type]: + """Find external classes referenced in cls.__init__ type annotations. + + Returns classes from site-packages that have a custom __init__. + """ + import inspect + import typing + + try: + init_method = getattr(cls, "__init__") + hints = typing.get_type_hints(init_method) + except Exception: + return [] + + deps: list[type] = [] + for param_name, hint in hints.items(): + if param_name == "return": + continue + for dep_cls in extract_classes_from_type_hint(hint): + if dep_cls is cls: + continue + init_method = getattr(dep_cls, "__init__", None) + if init_method is None or init_method is object.__init__: + continue + try: + class_file = Path(inspect.getfile(dep_cls)) + except (OSError, TypeError): + continue + if not path_belongs_to_site_packages(class_file): + continue + deps.append(dep_cls) + + return deps + + +def extract_init_stub_for_class(cls: type, class_name: str) -> CodeString | None: + """Extract a stub containing the class definition with only its __init__ method.""" + import inspect + import textwrap + + init_method = getattr(cls, "__init__", None) + if init_method is None or init_method is object.__init__: + return None + + try: + class_file = 
Path(inspect.getfile(cls)) + except (OSError, TypeError): + return None + + if not path_belongs_to_site_packages(class_file): + return None + + try: + init_source = inspect.getsource(init_method) + init_source = textwrap.dedent(init_source) + except (OSError, TypeError): + return None + + parts = class_file.parts + if "site-packages" in parts: + idx = parts.index("site-packages") + class_file = Path(*parts[idx + 1 :]) + + class_source = f"class {class_name}:\n" + textwrap.indent(init_source, " ") + return CodeString(code=class_source, file_path=class_file) + + +def get_external_class_inits(code_context: CodeStringsMarkdown, project_root_path: Path) -> CodeStringsMarkdown: + """Extract __init__ methods from directly imported external library classes. + + Scans the code context for classes imported from external packages (site-packages) and extracts + their __init__ methods, including transitive type dependencies found in __init__ annotations. + This helps the LLM understand constructor signatures for instantiation in generated tests. 
+ """ + import importlib + import inspect + + all_code = "\n".join(cs.code for cs in code_context.code_strings) + + try: + tree = ast.parse(all_code) + except SyntaxError: + return CodeStringsMarkdown(code_strings=[]) + + # Collect all from X import Y statements + imported_names: dict[str, str] = {} + is_project_cache: dict[str, bool] = {} + + # Track classes already defined in the context to avoid duplicates + existing_classes: set[str] = set() + + for node in ast.walk(tree): + if isinstance(node, ast.ImportFrom) and node.module: + for alias in node.names: + if alias.name != "*": + imported_name = alias.asname if alias.asname else alias.name + imported_names[imported_name] = node.module + elif isinstance(node, ast.ClassDef): + existing_classes.add(node.name) + + if not imported_names: + return CodeStringsMarkdown(code_strings=[]) + + # Filter to external-only imports + external_imports: set[tuple[str, str]] = set() + for name, module_name in imported_names.items(): + if name in existing_classes: + continue + cached = is_project_cache.get(module_name) + if cached is None: + is_project = _is_project_module(module_name, project_root_path) + is_project_cache[module_name] = is_project + else: + is_project = cached + if not is_project: + external_imports.add((name, module_name)) + + if not external_imports: + return CodeStringsMarkdown(code_strings=[]) + + code_strings: list[CodeString] = [] + imported_module_cache: dict[str, object] = {} + processed_classes: set[type] = set() + emitted_names: set[str] = set() + + # BFS worklist: (class_object, class_name, depth) + worklist: list[tuple[type, str, int]] = [] + + # Seed the worklist with directly imported classes + for class_name, module_name in external_imports: + try: + module = imported_module_cache.get(module_name) + if module is None: + module = importlib.import_module(module_name) + imported_module_cache[module_name] = module + + cls = getattr(module, class_name, None) + if cls is None or not inspect.isclass(cls): + 
continue + + worklist.append((cls, class_name, 0)) + except (ImportError, ModuleNotFoundError, AttributeError): + logger.debug(f"Failed to import {module_name}.{class_name}") + continue + + while worklist: + cls, class_name, depth = worklist.pop(0) + + if cls in processed_classes: + continue + processed_classes.add(cls) + + stub = extract_init_stub_for_class(cls, class_name) + if stub is None: + continue + + if class_name not in emitted_names: + code_strings.append(stub) + emitted_names.add(class_name) + + # Resolve transitive type dependencies up to MAX_TRANSITIVE_DEPTH + if depth < MAX_TRANSITIVE_DEPTH: + for dep_cls in resolve_transitive_type_deps(cls): + if dep_cls not in processed_classes: + worklist.append((dep_cls, dep_cls.__name__, depth + 1)) + + return CodeStringsMarkdown(code_strings=code_strings) + + def _is_project_module(module_name: str, project_root_path: Path) -> bool: """Check if a module is part of the project (not external/stdlib).""" import importlib.util diff --git a/codeflash/discovery/functions_to_optimize.py b/codeflash/discovery/functions_to_optimize.py index 29bea8761..86d574af1 100644 --- a/codeflash/discovery/functions_to_optimize.py +++ b/codeflash/discovery/functions_to_optimize.py @@ -78,10 +78,23 @@ def __init__(self, file_path: str) -> None: self.file_path: str = file_path self.functions: list[FunctionToOptimize] = [] + @staticmethod + def is_pytest_fixture(node: cst.FunctionDef) -> bool: + for decorator in node.decorators: + dec = decorator.decorator + if isinstance(dec, cst.Call): + dec = dec.func + if isinstance(dec, cst.Attribute) and dec.attr.value == "fixture": + if isinstance(dec.value, cst.Name) and dec.value.value == "pytest": + return True + if isinstance(dec, cst.Name) and dec.value == "fixture": + return True + return False + def visit_FunctionDef(self, node: cst.FunctionDef) -> None: return_visitor: ReturnStatementVisitor = ReturnStatementVisitor() node.visit(return_visitor) - if return_visitor.has_return_statement: + 
if return_visitor.has_return_statement and not self.is_pytest_fixture(node): pos: CodeRange = self.get_metadata(cst.metadata.PositionProvider, node) parents: CSTNode | None = self.get_metadata(cst.metadata.ParentNodeProvider, node) ast_parents: list[FunctionParent] = [] @@ -108,14 +121,12 @@ def __init__(self, file_path: Path) -> None: self.file_path: Path = file_path def visit_FunctionDef(self, node: FunctionDef) -> None: - # Check if the function has a return statement and add it to the list if function_has_return_statement(node) and not function_is_a_property(node): self.functions.append( FunctionToOptimize(function_name=node.name, file_path=self.file_path, parents=self.ast_path[:]) ) def visit_AsyncFunctionDef(self, node: AsyncFunctionDef) -> None: - # Check if the async function has a return statement and add it to the list if function_has_return_statement(node) and not function_is_a_property(node): self.functions.append( FunctionToOptimize( @@ -831,22 +842,17 @@ def filter_functions( test_dir_patterns = (os.sep + "test" + os.sep, os.sep + "tests" + os.sep, os.sep + "__tests__" + os.sep) def is_test_file(file_path_normalized: str) -> bool: - """Check if a file is a test file based on patterns.""" if tests_root_overlaps_source: - # Use file pattern matching when tests_root overlaps with source file_lower = file_path_normalized.lower() - # Check filename patterns (e.g., .test.ts, .spec.ts) + basename = Path(file_lower).name + if basename.startswith("test_") or basename == "conftest.py": + return True if any(pattern in file_lower for pattern in test_file_name_patterns): return True - # Check directory patterns, but only within the project root - # to avoid false positives from parent directories (e.g., project at /home/user/tests/myproject) if project_root_str and file_lower.startswith(project_root_str.lower()): relative_path = file_lower[len(project_root_str) :] return any(pattern in relative_path for pattern in test_dir_patterns) - # If we can't compute 
relative path from project root, don't check directory patterns - # This avoids false positives when project is inside a folder named "tests" return False - # Use directory-based filtering when tests are in a separate directory return file_path_normalized.startswith(tests_root_str + os.sep) # We desperately need Python 3.10+ only support to make this code readable with structural pattern matching diff --git a/codeflash/github/PrComment.py b/codeflash/github/PrComment.py index 3444c5477..3dc84deb4 100644 --- a/codeflash/github/PrComment.py +++ b/codeflash/github/PrComment.py @@ -26,10 +26,10 @@ class PrComment: def to_json(self) -> dict[str, Union[str, int, dict[str, dict[str, int]], list[BenchmarkDetail], None]]: report_table: dict[str, dict[str, int]] = {} - for test_type, report in self.winning_behavior_test_results.get_test_pass_fail_report_by_type().items(): + for test_type, counts in self.winning_behavior_test_results.get_test_pass_fail_report_by_type().items(): name = test_type.to_name() if name: - report_table[name] = report + report_table[name] = counts json_result: dict[str, Union[str, int, dict[str, dict[str, int]], list[BenchmarkDetail], None]] = { "optimization_explanation": self.optimization_explanation, diff --git a/codeflash/languages/javascript/instrument.py b/codeflash/languages/javascript/instrument.py index 551d99f0f..8c0136723 100644 --- a/codeflash/languages/javascript/instrument.py +++ b/codeflash/languages/javascript/instrument.py @@ -56,6 +56,46 @@ class StandaloneCallMatch: ) +def is_inside_string(code: str, pos: int) -> bool: + """Check if a position in code is inside a string literal. + + Handles single quotes, double quotes, and template literals (backticks). + Properly handles escaped quotes. + + Args: + code: The source code. + pos: The position to check. + + Returns: + True if the position is inside a string literal. 
+ + """ + in_string = False + string_char = None + i = 0 + + while i < pos: + char = code[i] + + if in_string: + # Check for escape sequence + if char == "\\" and i + 1 < len(code): + i += 2 # Skip escaped character + continue + # Check for end of string + if char == string_char: + in_string = False + string_char = None + # Check for start of string + elif char in "\"'`": + in_string = True + string_char = char + + i += 1 + + return in_string + + class StandaloneCallTransformer: """Transforms standalone func(...) calls in JavaScript test code. @@ -82,6 +122,11 @@ def __init__(self, function_to_optimize: FunctionToOptimize, capture_func: str) # Captures: (whitespace)(await )?(object.)*func_name( # We'll filter out expect() and codeflash. cases in the transform loop self._call_pattern = re.compile(rf"(\s*)(await\s+)?((?:\w+\.)*){re.escape(self.func_name)}\s*\(") + # Pattern to match bracket notation: obj['func_name']( or obj["func_name"]( + # Captures: (whitespace)(await )?(obj)['|"]func_name['|"]( + self._bracket_call_pattern = re.compile( + rf"(\s*)(await\s+)?(\w+)\[['\"]({re.escape(self.func_name)})['\"]]\s*\(" + ) def transform(self, code: str) -> str: """Transform all standalone calls in the code.""" @@ -89,7 +134,25 @@ def transform(self, code: str) -> str: pos = 0 while pos < len(code): - match = self._call_pattern.search(code, pos) + # Try both dot notation and bracket notation patterns + dot_match = self._call_pattern.search(code, pos) + bracket_match = self._bracket_call_pattern.search(code, pos) + + # Choose the first match (by position) + match = None + is_bracket_notation = False + if dot_match and bracket_match: + if dot_match.start() <= bracket_match.start(): + match = dot_match + else: + match = bracket_match + is_bracket_notation = True + elif dot_match: + match = dot_match + elif bracket_match: + match = bracket_match + is_bracket_notation = True + if not match: result.append(code[pos:]) break @@ -106,7 +169,11 @@ def transform(self, code: str) -> 
str: result.append(code[pos:match_start]) # Try to parse the full standalone call - standalone_match = self._parse_standalone_call(code, match) + if is_bracket_notation: + standalone_match = self._parse_bracket_standalone_call(code, match) + else: + standalone_match = self._parse_standalone_call(code, match) + if standalone_match is None: # Couldn't parse, skip this match result.append(code[match_start : match.end()]) @@ -115,7 +182,7 @@ def transform(self, code: str) -> str: # Generate the transformed code self.invocation_counter += 1 - transformed = self._generate_transformed_call(standalone_match) + transformed = self._generate_transformed_call(standalone_match, is_bracket_notation) result.append(transformed) pos = standalone_match.end_pos @@ -123,6 +190,10 @@ def transform(self, code: str) -> str: def _should_skip_match(self, code: str, start: int, match: re.Match) -> bool: """Check if the match should be skipped (inside expect, already transformed, etc.).""" + # Skip if inside a string literal (e.g., test description) + if is_inside_string(code, start): + return True + # Look backwards to check context lookback_start = max(0, start - 200) lookback = code[lookback_start:start] @@ -252,17 +323,24 @@ def _find_balanced_parens(self, code: str, open_paren_pos: int) -> tuple[str | N in_string = False string_char = None - while pos < len(code) and depth > 0: - char = code[pos] + s = code # local alias for speed + s_len = len(s) + quotes = "\"'`" + + while pos < s_len and depth > 0: + char = s[pos] # Handle string literals - if char in "\"'`" and (pos == 0 or code[pos - 1] != "\\"): - if not in_string: - in_string = True - string_char = char - elif char == string_char: - in_string = False - string_char = None + # Note: preserve original escaping semantics (only checks immediate preceding char) + if char in quotes: + prev_char = s[pos - 1] if pos > 0 else None + if prev_char != "\\": + if not in_string: + in_string = True + string_char = char + elif char == 
string_char: + in_string = False + string_char = None elif not in_string: if char == "(": depth += 1 @@ -274,19 +352,64 @@ def _find_balanced_parens(self, code: str, open_paren_pos: int) -> tuple[str | N if depth != 0: return None, -1 - return code[open_paren_pos + 1 : pos - 1], pos + # slice once + return s[open_paren_pos + 1 : pos - 1], pos + + def _parse_bracket_standalone_call(self, code: str, match: re.Match) -> StandaloneCallMatch | None: + """Parse a complete standalone obj['func'](...) call with bracket notation.""" + leading_ws = match.group(1) + prefix = match.group(2) or "" # "await " or "" + obj_name = match.group(3) # The object name before bracket + # match.group(4) is the function name inside brackets + + # Find the opening paren position + match_text = match.group(0) + paren_offset = match_text.rfind("(") + open_paren_pos = match.start() + paren_offset + + # Find the arguments (content inside parens) + func_args, close_pos = self._find_balanced_parens(code, open_paren_pos) + if func_args is None: + return None + + # Check for trailing semicolon + end_pos = close_pos + # Skip whitespace + s = code + s_len = len(s) + while end_pos < s_len and s[end_pos] in " \t": + end_pos += 1 + + has_trailing_semicolon = end_pos < s_len and s[end_pos] == ";" + if has_trailing_semicolon: + end_pos += 1 + + return StandaloneCallMatch( + start_pos=match.start(), + end_pos=end_pos, + leading_whitespace=leading_ws, + func_args=func_args, + prefix=prefix, + object_prefix=f"{obj_name}.", # Use dot notation format for consistency + has_trailing_semicolon=has_trailing_semicolon, + ) - def _generate_transformed_call(self, match: StandaloneCallMatch) -> str: + def _generate_transformed_call(self, match: StandaloneCallMatch, is_bracket_notation: bool = False) -> str: """Generate the transformed code for a standalone call.""" line_id = str(self.invocation_counter) args_str = match.func_args.strip() semicolon = ";" if match.has_trailing_semicolon else "" - # Handle method calls 
on objects (e.g., calc.fibonacci, this.method) + # Handle method calls on objects (e.g., calc.fibonacci, this.method, instance['method']) if match.object_prefix: # Remove trailing dot from object prefix for the bind call obj = match.object_prefix.rstrip(".") - full_method = f"{obj}.{self.func_name}" + + # For bracket notation, use bracket access syntax for the bind + if is_bracket_notation: + full_method = f"{obj}['{self.func_name}']" + else: + full_method = f"{obj}.{self.func_name}" if args_str: return ( @@ -370,6 +493,12 @@ def transform(self, code: str) -> str: result.append(code[pos:]) break + # Skip if inside a string literal (e.g., test description) + if is_inside_string(code, match.start()): + result.append(code[pos : match.end()]) + pos = match.end() + continue + # Add everything before the match result.append(code[pos : match.start()]) @@ -1067,3 +1196,171 @@ def instrument_generated_js_test( mode=mode, remove_assertions=True, ) + + +def fix_imports_inside_test_blocks(test_code: str) -> str: + """Fix import statements that appear inside test/it blocks. + + JavaScript/TypeScript `import` statements must be at the top level of a module. + The AI sometimes generates imports inside test functions, which is invalid syntax. + + This function detects such patterns and converts them to dynamic require() calls + which are valid inside functions. + + Args: + test_code: The generated test code. + + Returns: + Fixed test code with imports converted to require() inside functions. + + """ + if not test_code or not test_code.strip(): + return test_code + + # Pattern to match import statements inside functions + # This captures imports that appear after function/test block openings + # We look for lines that: + # 1. Start with whitespace (indicating they're inside a block) + # 2. 
Have an import statement + + lines = test_code.split("\n") + result_lines = [] + brace_depth = 0 + in_test_block = False + + for line in lines: + stripped = line.strip() + + # Track brace depth to know if we're inside a block + # Count braces, but ignore braces in strings (simplified check) + for char in stripped: + if char == "{": + brace_depth += 1 + elif char == "}": + brace_depth -= 1 + + # Check if we're entering a test/it/describe block + if re.match(r"^(test|it|describe|beforeEach|afterEach|beforeAll|afterAll)\s*\(", stripped): + in_test_block = True + + # Check for import statement inside a block (brace_depth > 0 means we're inside a function/block) + if brace_depth > 0 and stripped.startswith("import "): + # Convert ESM import to require + # Pattern: import { name } from 'module' -> const { name } = require('module') + # Pattern: import name from 'module' -> const name = require('module') + + named_import = re.match(r"import\s+\{([^}]+)\}\s+from\s+['\"]([^'\"]+)['\"]", stripped) + default_import = re.match(r"import\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]", stripped) + namespace_import = re.match(r"import\s+\*\s+as\s+(\w+)\s+from\s+['\"]([^'\"]+)['\"]", stripped) + + leading_whitespace = line[: len(line) - len(line.lstrip())] + + if named_import: + names = named_import.group(1) + module = named_import.group(2) + new_line = f"{leading_whitespace}const {{{names}}} = require('{module}');" + result_lines.append(new_line) + logger.debug(f"Fixed import inside block: {stripped} -> {new_line.strip()}") + continue + if default_import: + name = default_import.group(1) + module = default_import.group(2) + new_line = f"{leading_whitespace}const {name} = require('{module}');" + result_lines.append(new_line) + logger.debug(f"Fixed import inside block: {stripped} -> {new_line.strip()}") + continue + if namespace_import: + name = namespace_import.group(1) + module = namespace_import.group(2) + new_line = f"{leading_whitespace}const {name} = require('{module}');" + 
result_lines.append(new_line) + logger.debug(f"Fixed import inside block: {stripped} -> {new_line.strip()}") + continue + + result_lines.append(line) + + return "\n".join(result_lines) + + +def fix_jest_mock_paths(test_code: str, test_file_path: Path, source_file_path: Path, tests_root: Path) -> str: + """Fix relative paths in jest.mock() calls to be correct from the test file's location. + + The AI sometimes generates jest.mock() calls with paths relative to the source file + instead of the test file. For example: + - Source at `src/queue/queue.ts` imports `../environment` (-> src/environment) + - Test at `tests/test.test.ts` generates `jest.mock('../environment')` (-> ./environment, wrong!) + - Should generate `jest.mock('../src/environment')` + + This function detects relative mock paths and adjusts them based on the test file's + location relative to the source file's directory. + + Args: + test_code: The generated test code. + test_file_path: Path to the test file being generated. + source_file_path: Path to the source file being tested. + tests_root: Root directory of the tests. + + Returns: + Fixed test code with corrected mock paths. 
+ + """ + if not test_code or not test_code.strip(): + return test_code + + import os + + # Get the directory containing the source file and the test file + source_dir = source_file_path.resolve().parent + test_dir = test_file_path.resolve().parent + project_root = tests_root.resolve().parent if tests_root.name == "tests" else tests_root.resolve() + + # Pattern to match jest.mock() or jest.doMock() with relative paths + mock_pattern = re.compile(r"(jest\.(?:mock|doMock)\s*\(\s*['\"])(\.\./[^'\"]+|\.\/[^'\"]+)(['\"])") + + def fix_mock_path(match: re.Match[str]) -> str: + original = match.group(0) + prefix = match.group(1) + rel_path = match.group(2) + suffix = match.group(3) + + # Resolve the path as if it were relative to the source file's directory + # (which is how the AI often generates it) + source_relative_resolved = (source_dir / rel_path).resolve() + + # Check if this resolved path exists or if adjusting it would make more sense + # Calculate what the correct relative path from the test file should be + try: + # First, try to find if the path makes sense from the test directory + test_relative_resolved = (test_dir / rel_path).resolve() + + # If the path exists relative to test dir, keep it + if test_relative_resolved.exists() or ( + test_relative_resolved.with_suffix(".ts").exists() + or test_relative_resolved.with_suffix(".js").exists() + or test_relative_resolved.with_suffix(".tsx").exists() + or test_relative_resolved.with_suffix(".jsx").exists() + ): + return original # Keep original, it's valid + + # If path exists relative to source dir, recalculate from test dir + if source_relative_resolved.exists() or ( + source_relative_resolved.with_suffix(".ts").exists() + or source_relative_resolved.with_suffix(".js").exists() + or source_relative_resolved.with_suffix(".tsx").exists() + or source_relative_resolved.with_suffix(".jsx").exists() + ): + # Calculate the correct relative path from test_dir to source_relative_resolved + new_rel_path = 
Path(os.path.relpath(source_relative_resolved, test_dir)).as_posix() + # Ensure it starts with ./ or ../ + if not new_rel_path.startswith("../") and not new_rel_path.startswith("./"): + new_rel_path = f"./{new_rel_path}" + + logger.debug(f"Fixed jest.mock path: {rel_path} -> {new_rel_path}") + return f"{prefix}{new_rel_path}{suffix}" + + except (ValueError, OSError): + pass # Path resolution failed, keep original + + return original # Keep original if we can't fix it + + return mock_pattern.sub(fix_mock_path, test_code) diff --git a/codeflash/languages/javascript/module_system.py b/codeflash/languages/javascript/module_system.py index 66e6fe7e3..89d723c02 100644 --- a/codeflash/languages/javascript/module_system.py +++ b/codeflash/languages/javascript/module_system.py @@ -100,23 +100,40 @@ def detect_module_system(project_root: Path, file_path: Path | None = None) -> s try: content = file_path.read_text() - # Look for ES module syntax + # Look for ES module syntax - these are explicit ESM markers has_import = "import " in content and "from " in content - has_export = "export " in content or "export default" in content or "export {" in content + # Check for export function/class/const/default which are unambiguous ESM syntax + has_esm_export = ( + "export function " in content + or "export class " in content + or "export const " in content + or "export let " in content + or "export default " in content + or "export async function " in content + ) + has_export_block = "export {" in content # Look for CommonJS syntax has_require = "require(" in content has_module_exports = "module.exports" in content or "exports." 
in content - # Determine based on what we found - if (has_import or has_export) and not (has_require or has_module_exports): - logger.debug("Detected ES Module from import/export statements") + # Prioritize ESM when explicit ESM export syntax is found + # This handles hybrid files that have both `export function` and `module.exports` + # The ESM syntax is more explicit and should take precedence + if has_esm_export or has_import: + logger.debug("Detected ES Module from explicit export/import statements") return ModuleSystem.ES_MODULE - if (has_require or has_module_exports) and not (has_import or has_export): + # Pure CommonJS + if (has_require or has_module_exports) and not has_export_block: logger.debug("Detected CommonJS from require/module.exports") return ModuleSystem.COMMONJS + # Export block without other ESM markers - still ESM + if has_export_block: + logger.debug("Detected ES Module from export block") + return ModuleSystem.ES_MODULE + except Exception as e: logger.warning("Failed to analyze file %s: %s", file_path, e) diff --git a/codeflash/languages/javascript/parse.py b/codeflash/languages/javascript/parse.py index 1bfda8bca..a5e7ae8c6 100644 --- a/codeflash/languages/javascript/parse.py +++ b/codeflash/languages/javascript/parse.py @@ -198,14 +198,20 @@ def parse_jest_test_xml( # Extract console output from suite-level system-out (Jest specific) suite_stdout = _extract_jest_console_output(suite._elem) # noqa: SLF001 - # Fallback: use subprocess stdout if XML system-out is empty - if not suite_stdout and global_stdout: - suite_stdout = global_stdout + # Combine suite stdout with global stdout to ensure we capture all timing markers + # Jest-junit may not capture all console.log output in the XML, so we also need + # to check the subprocess stdout directly for timing markers + combined_stdout = suite_stdout + if global_stdout: + if combined_stdout: + combined_stdout = combined_stdout + "\n" + global_stdout + else: + combined_stdout = global_stdout - # 
Parse timing markers from the suite's console output - start_matches = list(jest_start_pattern.finditer(suite_stdout)) + # Parse timing markers from the combined console output + start_matches = list(jest_start_pattern.finditer(combined_stdout)) end_matches_dict = {} - for match in jest_end_pattern.finditer(suite_stdout): + for match in jest_end_pattern.finditer(combined_stdout): # Key: (testName, testName2, funcName, loopIndex, lineId) key = match.groups()[:5] end_matches_dict[key] = match @@ -318,7 +324,7 @@ def parse_jest_test_xml( # Infer test type from filename pattern filename = test_file_path.name if "__perf_test_" in filename or "_perf_test_" in filename: - test_type = TestType.GENERATED_PERFORMANCE + test_type = TestType.GENERATED_REGRESSION # Performance tests are still generated regression tests elif "__unit_test_" in filename or "_unit_test_" in filename: test_type = TestType.GENERATED_REGRESSION else: diff --git a/codeflash/languages/javascript/support.py b/codeflash/languages/javascript/support.py index 6a67821a8..5d6967442 100644 --- a/codeflash/languages/javascript/support.py +++ b/codeflash/languages/javascript/support.py @@ -104,6 +104,12 @@ def discover_functions( if not criteria.include_async and func.is_async: continue + # Skip non-exported functions (can't be imported in tests) + # Exception: nested functions and methods are allowed if their parent is exported + if not func.is_exported and not func.parent_function: + logger.debug(f"Skipping non-exported function: {func.name}") # noqa: G004 + continue + # Build parents list parents: list[FunctionParent] = [] if func.class_name: @@ -326,8 +332,14 @@ def extract_code_context(self, function: FunctionToOptimize, project_root: Path, else: target_code = "" + imports = analyzer.find_imports(source) + + # Find helper functions called by target (needed before class wrapping to find same-class helpers) + helpers = self._find_helper_functions(function, source, analyzer, imports, module_root) + # For class 
methods, wrap the method in its class definition # This is necessary because method definition syntax is only valid inside a class body + same_class_helper_names: set[str] = set() if function.is_method and function.parents: class_name = None for parent in function.parents: @@ -336,17 +348,26 @@ def extract_code_context(self, function: FunctionToOptimize, project_root: Path, break if class_name: + # Find same-class helper methods that need to be included inside the class wrapper + same_class_helpers = self._find_same_class_helpers( + class_name, function.function_name, helpers, tree_functions, lines + ) + same_class_helper_names = {h[0] for h in same_class_helpers} # method names + # Find the class definition in the source to get proper indentation, JSDoc, constructor, and fields class_info = self._find_class_definition(source, class_name, analyzer, function.function_name) if class_info: class_jsdoc, class_indent, constructor_code, fields_code = class_info - # Build the class body with fields, constructor, and target method + # Build the class body with fields, constructor, target method, and same-class helpers class_body_parts = [] if fields_code: class_body_parts.append(fields_code) if constructor_code: class_body_parts.append(constructor_code) class_body_parts.append(target_code) + # Add same-class helper methods inside the class body + for _helper_name, helper_source in same_class_helpers: + class_body_parts.append(helper_source) class_body = "\n".join(class_body_parts) # Wrap the method in a class definition with context @@ -357,13 +378,16 @@ def extract_code_context(self, function: FunctionToOptimize, project_root: Path, else: target_code = f"{class_indent}class {class_name} {{\n{class_body}{class_indent}}}\n" else: - # Fallback: wrap with no indentation - target_code = f"class {class_name} {{\n{target_code}}}\n" - - imports = analyzer.find_imports(source) + # Fallback: wrap with no indentation, including same-class helpers + helper_code = "\n".join(h[1] for h 
in same_class_helpers) + if helper_code: + target_code = f"class {class_name} {{\n{target_code}\n{helper_code}}}\n" + else: + target_code = f"class {class_name} {{\n{target_code}}}\n" - # Find helper functions called by target - helpers = self._find_helper_functions(function, source, analyzer, imports, module_root) + # Filter out same-class helpers from the helpers list (they're already inside the class wrapper) + if same_class_helper_names: + helpers = [h for h in helpers if h.name not in same_class_helper_names] # Extract import statements as strings import_lines = [] @@ -546,6 +570,49 @@ def _extract_class_context( return (constructor_code, fields_code) + def _find_same_class_helpers( + self, + class_name: str, + target_method_name: str, + helpers: list[HelperFunction], + tree_functions: list, + lines: list[str], + ) -> list[tuple[str, str]]: + """Find helper methods that belong to the same class as the target method. + + These helpers need to be included inside the class wrapper rather than + appended outside, because they may use class-specific syntax like 'private'. + + Args: + class_name: Name of the class containing the target method. + target_method_name: Name of the target method (to exclude). + helpers: List of all helper functions found. + tree_functions: List of FunctionNode from tree-sitter analysis. + lines: Source code split into lines. + + Returns: + List of (method_name, source_code) tuples for same-class helpers. 
+ + """ + same_class_helpers: list[tuple[str, str]] = [] + + # Build a set of helper names for quick lookup + helper_names = {h.name for h in helpers} + + # Names to exclude from same-class helpers (target method and constructor) + exclude_names = {target_method_name, "constructor"} + + # Find methods in tree_functions that belong to the same class and are helpers + for func in tree_functions: + if func.class_name == class_name and func.name in helper_names and func.name not in exclude_names: + # Extract source including JSDoc if present + effective_start = func.doc_start_line or func.start_line + helper_lines = lines[effective_start - 1 : func.end_line] + helper_source = "".join(helper_lines) + same_class_helpers.append((func.name, helper_source)) + + return same_class_helpers + def _find_helper_functions( self, function: FunctionToOptimize, diff --git a/codeflash/languages/javascript/test_runner.py b/codeflash/languages/javascript/test_runner.py index c65adfa7b..1d79ad382 100644 --- a/codeflash/languages/javascript/test_runner.py +++ b/codeflash/languages/javascript/test_runner.py @@ -7,6 +7,7 @@ from __future__ import annotations import json +import os import subprocess import time from pathlib import Path @@ -21,6 +22,25 @@ if TYPE_CHECKING: from codeflash.models.models import TestFiles +# Track created config files (jest configs and tsconfigs) for cleanup +_created_config_files: set[Path] = set() + + +def get_created_config_files() -> list[Path]: + """Get list of config files created by codeflash for cleanup. + + Returns: + List of paths to created config files (jest.codeflash.config.js, tsconfig.codeflash.json) + that should be cleaned up after optimization. 
+ + """ + return list(_created_config_files) + + +def clear_created_config_files() -> None: + """Clear the set of tracked config files after cleanup.""" + _created_config_files.clear() + def _detect_bundler_module_resolution(project_root: Path) -> bool: """Detect if the project uses moduleResolution: 'bundler' in tsconfig. @@ -163,6 +183,7 @@ def _create_codeflash_tsconfig(project_root: Path) -> Path: try: codeflash_tsconfig_path.write_text(json.dumps(codeflash_tsconfig, indent=2)) + _created_config_files.add(codeflash_tsconfig_path) logger.debug(f"Created {codeflash_tsconfig_path} with Node moduleResolution") except Exception as e: logger.warning(f"Failed to create codeflash tsconfig: {e}") @@ -170,70 +191,142 @@ def _create_codeflash_tsconfig(project_root: Path) -> Path: return codeflash_tsconfig_path -def _create_codeflash_jest_config(project_root: Path, original_jest_config: Path | None) -> Path | None: - """Create a Jest config that uses the codeflash tsconfig for ts-jest. +def _has_ts_jest_dependency(project_root: Path) -> bool: + """Check if the project has ts-jest as a dependency. + + Args: + project_root: Root of the project. + + Returns: + True if ts-jest is found in dependencies or devDependencies. + + """ + package_json = project_root / "package.json" + if not package_json.exists(): + return False + + try: + content = json.loads(package_json.read_text()) + deps = {**content.get("dependencies", {}), **content.get("devDependencies", {})} + return "ts-jest" in deps + except (json.JSONDecodeError, OSError): + return False + + +def _create_codeflash_jest_config( + project_root: Path, original_jest_config: Path | None, *, for_esm: bool = False +) -> Path | None: + """Create a Jest config that handles ESM packages and TypeScript properly. Args: project_root: Root of the project. original_jest_config: Path to the original Jest config, or None. + for_esm: If True, configure for ESM package transformation. 
Returns: Path to the codeflash Jest config, or None if creation failed. """ - codeflash_jest_config_path = project_root / "jest.codeflash.config.js" + # For ESM projects (type: module), use .cjs extension since config uses CommonJS require/module.exports + # This prevents "ReferenceError: module is not defined" errors + is_esm = _is_esm_project(project_root) + config_ext = ".cjs" if is_esm else ".js" - # If it already exists, use it + # Create codeflash config in the same directory as the original config + # This ensures relative paths work correctly + if original_jest_config: + codeflash_jest_config_path = original_jest_config.parent / f"jest.codeflash.config{config_ext}" + else: + codeflash_jest_config_path = project_root / f"jest.codeflash.config{config_ext}" + + # If it already exists, use it (check both extensions) if codeflash_jest_config_path.exists(): logger.debug(f"Using existing {codeflash_jest_config_path}") return codeflash_jest_config_path - # Create a wrapper Jest config that uses tsconfig.codeflash.json + # Also check if the alternate extension exists + alt_ext = ".js" if is_esm else ".cjs" + alt_path = codeflash_jest_config_path.with_suffix(alt_ext) + if alt_path.exists(): + logger.debug(f"Using existing {alt_path}") + return alt_path + + # Common ESM-only packages that need to be transformed + # These packages ship only ESM and will cause "Cannot use import statement" errors + esm_packages = [ + "p-queue", + "p-limit", + "p-timeout", + "yocto-queue", + "eventemitter3", + "chalk", + "ora", + "strip-ansi", + "ansi-regex", + "string-width", + "wrap-ansi", + "is-unicode-supported", + "is-interactive", + "log-symbols", + "figures", + ] + esm_pattern = "|".join(esm_packages) + + # Check if ts-jest is available in the project + has_ts_jest = _has_ts_jest_dependency(project_root) + + # Build transform config only if ts-jest is available + if has_ts_jest: + transform_config = """ + // Ensure TypeScript files are transformed using ts-jest + transform: { + 
'^.+\\\\.(ts|tsx)$': ['ts-jest', { isolatedModules: true }], + // Use ts-jest for JS files in ESM packages too + '^.+\\\\.js$': ['ts-jest', { isolatedModules: true }], + },""" + else: + transform_config = "" + logger.debug("ts-jest not found in project dependencies, skipping transform config") + + # Create a wrapper Jest config if original_jest_config: - # Extend the original config - jest_config_content = f"""// Auto-generated by codeflash for bundler moduleResolution compatibility -const originalConfig = require('./{original_jest_config.name}'); + # Since codeflash config is in the same directory as original, use simple relative path + config_require_path = f"./{original_jest_config.name}" -const tsJestOptions = {{ - isolatedModules: true, - tsconfig: 'tsconfig.codeflash.json', -}}; + # Extend the original config + jest_config_content = f"""// Auto-generated by codeflash for ESM compatibility +const originalConfig = require('{config_require_path}'); module.exports = {{ ...originalConfig, - transform: {{ - ...originalConfig.transform, - '^.+\\\\.tsx?$': ['ts-jest', tsJestOptions], - }}, - globals: {{ - ...originalConfig.globals, - 'ts-jest': tsJestOptions, - }}, + // Transform ESM packages that don't work with Jest's default config + // Pattern handles both npm/yarn (node_modules/pkg) and pnpm (node_modules/.pnpm/pkg@version/node_modules/pkg) + transformIgnorePatterns: [ + 'node_modules/(?!(\\\\.pnpm/)?({esm_pattern}))', + ],{transform_config} }}; """ else: - # Create a minimal Jest config for TypeScript - jest_config_content = """// Auto-generated by codeflash for bundler moduleResolution compatibility -const tsJestOptions = { - isolatedModules: true, - tsconfig: 'tsconfig.codeflash.json', -}; - -module.exports = { + # Create a minimal Jest config for TypeScript with ESM support + jest_config_content = f"""// Auto-generated by codeflash for ESM compatibility +module.exports = {{ verbose: true, testEnvironment: 'node', testRegex: 
'\\\\.(test|spec)\\\\.(js|ts|tsx)$', - testPathIgnorePatterns: ['/dist/', '/node_modules/'], - transform: { - '^.+\\\\.tsx?$': ['ts-jest', tsJestOptions], - }, + testPathIgnorePatterns: ['/dist/'], + // Transform ESM packages that don't work with Jest's default config + // Pattern handles both npm/yarn and pnpm directory structures + transformIgnorePatterns: [ + 'node_modules/(?!(\\\\.pnpm/)?({esm_pattern}))', + ],{transform_config} moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], -}; +}}; """ try: codeflash_jest_config_path.write_text(jest_config_content) - logger.debug(f"Created {codeflash_jest_config_path} with codeflash tsconfig") + _created_config_files.add(codeflash_jest_config_path) + logger.debug(f"Created {codeflash_jest_config_path} with ESM package support") return codeflash_jest_config_path except Exception as e: logger.warning(f"Failed to create codeflash Jest config: {e}") @@ -323,6 +416,55 @@ def _find_monorepo_root(start_path: Path) -> Path | None: return None +def _get_jest_major_version(project_root: Path) -> int | None: + """Detect the major version of Jest installed in the project. + + Args: + project_root: Root of the project to check. + + Returns: + Major version number (e.g., 29, 30), or None if not detected. 
+ + """ + # First try to check package.json for explicit version + package_json = project_root / "package.json" + if package_json.exists(): + try: + content = json.loads(package_json.read_text()) + deps = {**content.get("devDependencies", {}), **content.get("dependencies", {})} + jest_version = deps.get("jest", "") + # Parse version like "30.0.5", "^30.0.5", "~30.0.5" + if jest_version: + # Strip leading version prefixes (^, ~, =, v) + version_str = jest_version.lstrip("^~=v") + if version_str and version_str[0].isdigit(): + major = version_str.split(".")[0] + if major.isdigit(): + return int(major) + except (json.JSONDecodeError, OSError): + pass + + # Also check monorepo root + monorepo_root = _find_monorepo_root(project_root) + if monorepo_root and monorepo_root != project_root: + monorepo_package = monorepo_root / "package.json" + if monorepo_package.exists(): + try: + content = json.loads(monorepo_package.read_text()) + deps = {**content.get("devDependencies", {}), **content.get("dependencies", {})} + jest_version = deps.get("jest", "") + if jest_version: + version_str = jest_version.lstrip("^~=v") + if version_str and version_str[0].isdigit(): + major = version_str.split(".")[0] + if major.isdigit(): + return int(major) + except (json.JSONDecodeError, OSError): + pass + + return None + + def _find_jest_config(project_root: Path) -> Path | None: """Find Jest configuration file in the project. 
@@ -535,7 +677,6 @@ def run_jest_behavioral_tests( # Get test files to run test_files = [str(file.instrumented_behavior_file_path) for file in test_paths.test_files] - # Use provided project_root, or detect it as fallback if project_root is None and test_files: first_test_file = Path(test_files[0]) @@ -610,13 +751,25 @@ def run_jest_behavioral_tests( # Configure ESM support if project uses ES Modules _configure_esm_environment(jest_env, effective_cwd) + # Increase Node.js heap size for large TypeScript projects + # Default heap is often not enough for monorepos with many dependencies + existing_node_options = jest_env.get("NODE_OPTIONS", "") + if "--max-old-space-size" not in existing_node_options: + jest_env["NODE_OPTIONS"] = f"{existing_node_options} --max-old-space-size=4096".strip() + logger.debug(f"Running Jest tests with command: {' '.join(jest_cmd)}") + # Calculate subprocess timeout: needs to be much larger than per-test timeout + # to account for Jest startup, TypeScript compilation, module loading, etc. 
+ # Use at least 120 seconds, or 10x the per-test timeout, whichever is larger + subprocess_timeout = max(120, (timeout or 15) * 10, 600) if timeout else 600 + start_time_ns = time.perf_counter_ns() try: run_args = get_cross_platform_subprocess_run_args( - cwd=effective_cwd, env=jest_env, timeout=timeout or 600, check=False, text=True, capture_output=True + cwd=effective_cwd, env=jest_env, timeout=subprocess_timeout, check=False, text=True, capture_output=True ) + logger.debug(f"Jest subprocess timeout: {subprocess_timeout}s (per-test timeout: {timeout}s)") result = subprocess.run(jest_cmd, **run_args) # noqa: PLW1510 # Jest sends console.log output to stderr by default - move it to stdout # so our timing markers (printed via console.log) are in the expected place @@ -634,12 +787,12 @@ def run_jest_behavioral_tests( # This helps debug issues like import errors that cause Jest to fail early if result.returncode != 0 and not result_file_path.exists(): logger.warning( - f"Jest failed with returncode={result.returncode} and no XML output created.\n" + f"Jest failed with returncode={result.returncode}.\n" f"Jest stdout: {result.stdout[:2000] if result.stdout else '(empty)'}\n" f"Jest stderr: {result.stderr[:500] if result.stderr else '(empty)'}" ) except subprocess.TimeoutExpired: - logger.warning(f"Jest tests timed out after {timeout}s") + logger.warning(f"Jest tests timed out after {subprocess_timeout}s") result = subprocess.CompletedProcess(args=jest_cmd, returncode=-1, stdout="", stderr="Test execution timed out") except FileNotFoundError: logger.error("Jest not found. 
Make sure Jest is installed (npm install jest)") @@ -650,6 +803,8 @@ def run_jest_behavioral_tests( wall_clock_ns = time.perf_counter_ns() - start_time_ns logger.debug(f"Jest behavioral tests completed in {wall_clock_ns / 1e9:.2f}s") + print(result.stdout) + return result_file_path, result, coverage_json_path, None @@ -774,25 +929,28 @@ def run_jest_benchmarking_tests( # Get performance test files test_files = [str(file.benchmarking_file_path) for file in test_paths.test_files if file.benchmarking_file_path] - # Use provided project_root, or detect it as fallback if project_root is None and test_files: first_test_file = Path(test_files[0]) project_root = _find_node_project_root(first_test_file) effective_cwd = project_root if project_root else cwd - logger.debug(f"Jest benchmarking working directory: {effective_cwd}") # Ensure the codeflash npm package is installed _ensure_runtime_files(effective_cwd) - # Build Jest command for performance tests with custom loop runner + # Detect Jest version for logging + jest_major_version = _get_jest_major_version(effective_cwd) + if jest_major_version: + logger.debug(f"Jest {jest_major_version} detected - using loop-runner for batched looping") + + # Build Jest command for performance tests jest_cmd = [ "npx", "jest", "--reporters=default", "--reporters=jest-junit", - "--runInBand", # Ensure serial execution even though runner enforces it + "--runInBand", # Ensure serial execution "--forceExit", "--runner=codeflash/loop-runner", # Use custom loop runner for in-process looping ] @@ -844,9 +1002,25 @@ def run_jest_benchmarking_tests( jest_env["CODEFLASH_PERF_STABILITY_CHECK"] = "true" if stability_check else "false" jest_env["CODEFLASH_LOOP_INDEX"] = "1" # Initial value for compatibility + # Enable console output for timing markers + # Some projects mock console.log in test setup (e.g., based on LOG_LEVEL or DEBUG) + # We need console.log to work for capturePerf timing markers + jest_env["LOG_LEVEL"] = "info" # Disable 
console.log mocking in projects that check LOG_LEVEL + jest_env["DEBUG"] = "1" # Disable console.log mocking in projects that check DEBUG + + # Debug logging for loop behavior verification (set CODEFLASH_DEBUG_LOOPS=true to enable) + if os.environ.get("CODEFLASH_DEBUG_LOOPS") == "true": + jest_env["CODEFLASH_DEBUG_LOOPS"] = "true" + logger.info("Loop debug logging enabled - will show capturePerf loop details") + # Configure ESM support if project uses ES Modules _configure_esm_environment(jest_env, effective_cwd) + # Increase Node.js heap size for large TypeScript projects + existing_node_options = jest_env.get("NODE_OPTIONS", "") + if "--max-old-space-size" not in existing_node_options: + jest_env["NODE_OPTIONS"] = f"{existing_node_options} --max-old-space-size=4096".strip() + # Total timeout for the entire benchmark run (longer than single-loop timeout) # Account for startup overhead + target duration + buffer total_timeout = max(120, (target_duration_ms // 1000) + 60, timeout or 120) @@ -882,7 +1056,6 @@ def run_jest_benchmarking_tests( wall_clock_seconds = time.time() - total_start_time logger.debug(f"Jest benchmarking completed in {wall_clock_seconds:.2f}s") - return result_file_path, result @@ -985,6 +1158,11 @@ def run_jest_line_profile_tests( # Configure ESM support if project uses ES Modules _configure_esm_environment(jest_env, effective_cwd) + # Increase Node.js heap size for large TypeScript projects + existing_node_options = jest_env.get("NODE_OPTIONS", "") + if "--max-old-space-size" not in existing_node_options: + jest_env["NODE_OPTIONS"] = f"{existing_node_options} --max-old-space-size=4096".strip() + subprocess_timeout = timeout or 600 logger.debug(f"Running Jest line profile tests: {' '.join(jest_cmd)}") diff --git a/codeflash/languages/javascript/treesitter.py b/codeflash/languages/javascript/treesitter.py index 650d899a5..32d2431ac 100644 --- a/codeflash/languages/javascript/treesitter.py +++ b/codeflash/languages/javascript/treesitter.py @@ 
-69,6 +69,7 @@ class FunctionNode: parent_function: str | None source_text: str doc_start_line: int | None = None # Line where JSDoc comment starts (or None if no JSDoc) + is_exported: bool = False # Whether the function is exported @dataclass @@ -295,6 +296,7 @@ def _extract_function_info( is_generator = False is_method = False is_arrow = node.type == "arrow_function" + is_exported = False # Check for async modifier for child in node.children: @@ -306,6 +308,12 @@ def _extract_function_info( if "generator" in node.type: is_generator = True + # Check if function is exported + # For function_declaration: check if parent is export_statement + # For arrow functions: check if parent variable_declarator's grandparent is export_statement + # For CommonJS: check module.exports = { name } or exports.name = ... + is_exported = self._is_node_exported(node, source_bytes) + # Get function name based on node type if node.type in ("function_declaration", "generator_function_declaration"): name_node = node.child_by_field_name("name") @@ -355,8 +363,157 @@ def _extract_function_info( parent_function=current_function, source_text=source_text, doc_start_line=doc_start_line, + is_exported=is_exported, ) + def _is_node_exported(self, node: Node, source_bytes: bytes | None = None) -> bool: + """Check if a function node is exported. + + Handles various export patterns: + - export function foo() {} + - export const foo = () => {} + - export default function foo() {} + - Class methods in exported classes + - module.exports = { foo } (CommonJS) + - exports.foo = ... (CommonJS) + + Args: + node: The function node to check. + source_bytes: Source code bytes (needed for CommonJS export detection). + + Returns: + True if the function is exported, False otherwise. 
+ + """ + # Check direct parent for export_statement + if node.parent and node.parent.type == "export_statement": + return True + + # For arrow functions and function expressions assigned to variables + # e.g., export const foo = () => {} + if node.type in ("arrow_function", "function_expression", "generator_function"): + parent = node.parent + if parent and parent.type == "variable_declarator": + grandparent = parent.parent + if grandparent and grandparent.type in ("lexical_declaration", "variable_declaration"): + great_grandparent = grandparent.parent + if great_grandparent and great_grandparent.type == "export_statement": + return True + + # For methods in exported classes + if node.type == "method_definition": + # Walk up to find class_declaration + current = node.parent + while current: + if current.type in ("class_declaration", "class"): + # Check if this class is exported via ES module export + if current.parent and current.parent.type == "export_statement": + return True + # Check if class is exported via CommonJS + if source_bytes: + class_name_node = current.child_by_field_name("name") + if class_name_node: + class_name = self.get_node_text(class_name_node, source_bytes) + if self._is_name_in_commonjs_exports(node, class_name, source_bytes): + return True + break + current = current.parent + + # Check CommonJS exports: module.exports = { foo } or exports.foo = ... 
+ if source_bytes: + func_name = self._get_function_name_for_export_check(node, source_bytes) + if func_name and self._is_name_in_commonjs_exports(node, func_name, source_bytes): + return True + + return False + + def _get_function_name_for_export_check(self, node: Node, source_bytes: bytes) -> str | None: + """Get the function name for export checking.""" + if node.type in ("function_declaration", "generator_function_declaration"): + name_node = node.child_by_field_name("name") + if name_node: + return self.get_node_text(name_node, source_bytes) + elif node.type in ("arrow_function", "function_expression", "generator_function"): + # Get name from variable assignment + parent = node.parent + if parent and parent.type == "variable_declarator": + name_node = parent.child_by_field_name("name") + if name_node and name_node.type == "identifier": + return self.get_node_text(name_node, source_bytes) + return None + + def _is_name_in_commonjs_exports(self, node: Node, name: str, source_bytes: bytes) -> bool: + """Check if a name is exported via CommonJS module.exports or exports. + + Handles patterns like: + - module.exports = { foo, bar } + - module.exports = { foo: someFunc } + - exports.foo = ... + - module.exports.foo = ... + + Args: + node: Any node in the tree (used to find the program root). + name: The name to check for in exports. + source_bytes: Source code bytes. + + Returns: + True if the name is in CommonJS exports. 
+ + """ + # Walk up to find program root + root = node + while root.parent: + root = root.parent + + # Search for CommonJS export patterns in program children + for child in root.children: + if child.type == "expression_statement": + # Look for assignment expressions + for expr in child.children: + if expr.type == "assignment_expression": + if self._check_commonjs_assignment_exports(expr, name, source_bytes): + return True + + return False + + def _check_commonjs_assignment_exports(self, node: Node, name: str, source_bytes: bytes) -> bool: + """Check if a CommonJS assignment exports the given name.""" + left_node = node.child_by_field_name("left") + right_node = node.child_by_field_name("right") + + if not left_node or not right_node: + return False + + left_text = self.get_node_text(left_node, source_bytes) + + # Check module.exports = { name, ... } or module.exports = { key: name, ... } + if left_text == "module.exports" and right_node.type == "object": + for child in right_node.children: + if child.type == "shorthand_property_identifier": + # { foo } - shorthand export + if self.get_node_text(child, source_bytes) == name: + return True + elif child.type == "pair": + # { key: value } - check both key and value + key_node = child.child_by_field_name("key") + value_node = child.child_by_field_name("value") + if key_node and self.get_node_text(key_node, source_bytes) == name: + return True + if value_node and value_node.type == "identifier": + if self.get_node_text(value_node, source_bytes) == name: + return True + + # Check module.exports = name (single export) + if left_text == "module.exports" and right_node.type == "identifier": + if self.get_node_text(right_node, source_bytes) == name: + return True + + # Check module.exports.name = ... or exports.name = ... 
+ if left_text in {f"module.exports.{name}", f"exports.{name}"}: + return True + + return False + def _find_preceding_jsdoc(self, node: Node, source_bytes: bytes) -> int | None: """Find JSDoc comment immediately preceding a function node. diff --git a/codeflash/models/models.py b/codeflash/models/models.py index 252c55541..92d4f354c 100644 --- a/codeflash/models/models.py +++ b/codeflash/models/models.py @@ -963,6 +963,28 @@ def total_passed_runtime(self) -> int: [min(usable_runtime_data) for _, usable_runtime_data in self.usable_runtime_data_by_test_case().items()] ) + def effective_loop_count(self) -> int: + """Calculate the effective number of complete loops. + + For consistent behavior across Python and JavaScript tests, this returns + the maximum loop_index seen across all test results. This represents + the number of timing iterations that were performed. + + Note: For JavaScript tests without the loop-runner, each test case may have + different iteration counts due to internal looping in capturePerf. We use + max() to report the highest iteration count achieved. + + :return: The effective loop count, or 0 if no test results. 
+ """ + if not self.test_results: + return 0 + # Get all loop indices from results that have timing data + loop_indices = {result.loop_index for result in self.test_results if result.runtime is not None} + if not loop_indices: + # Fallback: use all loop indices even without runtime + loop_indices = {result.loop_index for result in self.test_results} + return max(loop_indices) if loop_indices else 0 + def file_to_no_of_tests(self, test_functions_to_remove: list[str]) -> Counter[Path]: map_gen_test_file_to_no_of_tests = Counter() for gen_test_result in self.test_results: diff --git a/codeflash/models/test_type.py b/codeflash/models/test_type.py index e3f196756..154e3f7f2 100644 --- a/codeflash/models/test_type.py +++ b/codeflash/models/test_type.py @@ -10,9 +10,7 @@ class TestType(Enum): INIT_STATE_TEST = 6 def to_name(self) -> str: - if self is TestType.INIT_STATE_TEST: - return "" - return _TO_NAME_MAP[self] + return _TO_NAME_MAP.get(self, "") _TO_NAME_MAP: dict[TestType, str] = { diff --git a/codeflash/optimization/function_optimizer.py b/codeflash/optimization/function_optimizer.py index 9f7169740..927e6ed9f 100644 --- a/codeflash/optimization/function_optimizer.py +++ b/codeflash/optimization/function_optimizer.py @@ -12,7 +12,6 @@ from typing import TYPE_CHECKING, Callable import libcst as cst -import sentry_sdk from rich.console import Group from rich.panel import Panel from rich.syntax import Syntax @@ -69,6 +68,7 @@ from codeflash.code_utils.git_utils import git_root_dir from codeflash.code_utils.instrument_existing_tests import inject_profiling_into_existing_test from codeflash.code_utils.line_profile_utils import add_decorator_imports, contains_jit_decorator +from codeflash.code_utils.shell_utils import make_env_with_project_root from codeflash.code_utils.static_analysis import get_first_top_level_function_or_method_ast from codeflash.code_utils.time_utils import humanize_runtime from codeflash.context import code_context_extractor @@ -79,6 +79,7 @@ from 
codeflash.languages.base import Language from codeflash.languages.current import current_language_support, is_typescript from codeflash.languages.javascript.module_system import detect_module_system +from codeflash.languages.javascript.test_runner import clear_created_config_files, get_created_config_files from codeflash.lsp.helpers import is_LSP_enabled, report_to_markdown_table, tree_to_markdown from codeflash.lsp.lsp_message import LspCodeMessage, LspMarkdownMessage, LSPMessageId from codeflash.models.ExperimentMetadata import ExperimentMetadata @@ -976,30 +977,6 @@ def optimize_function(self) -> Result[BestOptimization, str]: ): console.rule() new_code_context = code_context - if ( - self.is_numerical_code and not self.args.no_jit_opts - ): # if the code is numerical in nature (uses numpy/tensorflow/math/pytorch/jax) - jit_compiled_opt_candidate = self.aiservice_client.get_jit_rewritten_code( - code_context.read_writable_code.markdown, self.function_trace_id - ) - if jit_compiled_opt_candidate: # jit rewrite was successful - # write files - # Try to replace function with optimized code - self.replace_function_and_helpers_with_optimized_code( - code_context=code_context, - optimized_code=jit_compiled_opt_candidate[0].source_code, - original_helper_code=original_helper_code, - ) - # get code context - try: - new_code_context = self.get_code_optimization_context().unwrap() - except Exception as e: - sentry_sdk.capture_exception(e) - logger.debug("!lsp|Getting new code context failed, revert to original one") - # unwrite files - self.write_code_and_helpers( - self.function_to_optimize_source_code, original_helper_code, self.function_to_optimize.file_path - ) # Generate tests and optimizations in parallel future_tests = self.executor.submit(self.generate_and_instrument_tests, new_code_context) future_optimizations = self.executor.submit( @@ -2760,7 +2737,7 @@ def establish_original_code_baseline( if not success: return Failure("Failed to establish a baseline for the 
original code.") - loop_count = max([int(result.loop_index) for result in benchmarking_results.test_results]) + loop_count = benchmarking_results.effective_loop_count() logger.info( f"h3|⌚ Original code summed runtime measured over '{loop_count}' loop{'s' if loop_count > 1 else ''}: " f"'{humanize_runtime(total_timing)}' per full loop" @@ -3002,11 +2979,10 @@ def run_optimized_candidate( self.write_code_and_helpers( candidate_fto_code, candidate_helper_code, self.function_to_optimize.file_path ) - loop_count = ( - max(all_loop_indices) - if (all_loop_indices := {result.loop_index for result in candidate_benchmarking_results.test_results}) - else 0 - ) + # Use effective_loop_count which represents the minimum number of timing samples + # across all test cases. This is more accurate for JavaScript tests where + # capturePerf does internal looping with potentially different iteration counts per test. + loop_count = candidate_benchmarking_results.effective_loop_count() if (total_candidate_timing := candidate_benchmarking_results.total_passed_runtime()) == 0: logger.warning("The overall test runtime of the optimized function is 0, couldn't run tests.") @@ -3154,7 +3130,7 @@ def run_and_parse_tests( test_config=self.test_cfg, optimization_iteration=optimization_iteration, run_result=run_result, - function_name=self.function_to_optimize.function_name, + function_name=self.function_to_optimize.qualified_name, source_file=self.function_to_optimize.file_path, code_context=code_context, coverage_database_file=coverage_database_file, @@ -3208,19 +3184,22 @@ def cleanup_generated_files(self) -> None: paths_to_cleanup.append(test_file.instrumented_behavior_file_path) paths_to_cleanup.append(test_file.benchmarking_file_path) + # Clean up created config files (jest.codeflash.config.js, tsconfig.codeflash.json) + config_files = get_created_config_files() + if config_files: + paths_to_cleanup.extend(config_files) + logger.debug(f"Cleaning up {len(config_files)} codeflash config 
file(s)") + clear_created_config_files() + cleanup_paths(paths_to_cleanup) def get_test_env( self, codeflash_loop_index: int, codeflash_test_iteration: int, codeflash_tracer_disable: int = 1 ) -> dict: - test_env = os.environ.copy() + test_env = make_env_with_project_root(self.args.project_root) test_env["CODEFLASH_TEST_ITERATION"] = str(codeflash_test_iteration) test_env["CODEFLASH_TRACER_DISABLE"] = str(codeflash_tracer_disable) test_env["CODEFLASH_LOOP_INDEX"] = str(codeflash_loop_index) - if "PYTHONPATH" not in test_env: - test_env["PYTHONPATH"] = str(self.args.project_root) - else: - test_env["PYTHONPATH"] += os.pathsep + str(self.args.project_root) return test_env def line_profiler_step( diff --git a/codeflash/tracer.py b/codeflash/tracer.py index dd440f3d6..6f59c8b8a 100644 --- a/codeflash/tracer.py +++ b/codeflash/tracer.py @@ -13,7 +13,6 @@ import json import logging -import os import pickle import subprocess import sys @@ -27,6 +26,7 @@ from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE from codeflash.code_utils.config_consts import EffortLevel from codeflash.code_utils.config_parser import parse_config_file +from codeflash.code_utils.shell_utils import make_env_with_project_root from codeflash.tracing.pytest_parallelization import pytest_split if TYPE_CHECKING: @@ -167,13 +167,7 @@ def main(args: Namespace | None = None) -> ArgumentParser: else: updated_sys_argv.append(elem) args_dict["command"] = " ".join(updated_sys_argv) - env = os.environ.copy() - pythonpath = env.get("PYTHONPATH", "") - project_root_str = str(project_root) - if pythonpath: - env["PYTHONPATH"] = f"{project_root_str}{os.pathsep}{pythonpath}" - else: - env["PYTHONPATH"] = project_root_str + env = make_env_with_project_root(project_root) # Disable JIT compilation to ensure tracing captures all function calls env["NUMBA_DISABLE_JIT"] = str(1) env["TORCHDYNAMO_DISABLE"] = str(1) @@ -210,14 +204,7 @@ def main(args: Namespace | None = None) -> ArgumentParser: 
args_dict["result_pickle_file_path"] = str(result_pickle_file_path) args_dict["command"] = " ".join(sys.argv) - env = os.environ.copy() - # Add project root to PYTHONPATH so imports work correctly - pythonpath = env.get("PYTHONPATH", "") - project_root_str = str(project_root) - if pythonpath: - env["PYTHONPATH"] = f"{project_root_str}{os.pathsep}{pythonpath}" - else: - env["PYTHONPATH"] = project_root_str + env = make_env_with_project_root(project_root) # Disable JIT compilation to ensure tracing captures all function calls env["NUMBA_DISABLE_JIT"] = str(1) env["TORCHDYNAMO_DISABLE"] = str(1) diff --git a/codeflash/tracing/pytest_parallelization.py b/codeflash/tracing/pytest_parallelization.py index ca47bfba4..f09fac389 100644 --- a/codeflash/tracing/pytest_parallelization.py +++ b/codeflash/tracing/pytest_parallelization.py @@ -33,7 +33,7 @@ def pytest_split( except ImportError: return None, None - test_files = set() + test_files_set: set[str] = set() # Find all test_*.py files recursively in the directory for test_path in test_paths: @@ -42,12 +42,12 @@ def pytest_split( return None, None if _test_path.is_dir(): # Find all test files matching the pattern test_*.py - test_files.update(map(str, _test_path.rglob("test_*.py"))) - test_files.update(map(str, _test_path.rglob("*_test.py"))) + test_files_set.update(map(str, _test_path.rglob("test_*.py"))) + test_files_set.update(map(str, _test_path.rglob("*_test.py"))) elif _test_path.is_file(): - test_files.add(str(_test_path)) + test_files_set.add(str(_test_path)) - if not test_files: + if not test_files_set: return [[]], None # Determine number of splits @@ -55,7 +55,7 @@ def pytest_split( num_splits = os.cpu_count() or 4 # randomize to increase chances of all splits being balanced - test_files = list(test_files) + test_files = list(test_files_set) shuffle(test_files) # Apply limit if specified @@ -75,7 +75,7 @@ def pytest_split( chunk_size = ceil(total_files / num_splits) # Initialize result groups - result_groups = 
[[] for _ in range(num_splits)] + result_groups: list[list[str]] = [[] for _ in range(num_splits)] # Distribute files across groups for i, test_file in enumerate(test_files): diff --git a/codeflash/verification/concolic_testing.py b/codeflash/verification/concolic_testing.py index 1399c6205..1b3af2bff 100644 --- a/codeflash/verification/concolic_testing.py +++ b/codeflash/verification/concolic_testing.py @@ -10,6 +10,7 @@ from codeflash.cli_cmds.console import console, logger from codeflash.code_utils.compat import SAFE_SYS_EXECUTABLE from codeflash.code_utils.concolic_utils import clean_concolic_tests, is_valid_concolic_test +from codeflash.code_utils.shell_utils import make_env_with_project_root from codeflash.code_utils.static_analysis import has_typed_parameters from codeflash.discovery.discover_unit_tests import discover_unit_tests from codeflash.languages import is_python @@ -63,6 +64,7 @@ def generate_concolic_tests( logger.info("Generating concolic opcode coverage tests for the original code…") console.rule() try: + env = make_env_with_project_root(args.project_root) cover_result = subprocess.run( [ SAFE_SYS_EXECUTABLE, @@ -89,6 +91,7 @@ def generate_concolic_tests( # Override via CODEFLASH_CONCOLIC_TIMEOUT env var, # falling back to CODEFLASH_TEST_TIMEOUT, then default 600s. 
timeout=600, + env=env, ) except subprocess.TimeoutExpired: logger.debug("CrossHair Cover test generation timed out") diff --git a/codeflash/verification/coverage_utils.py b/codeflash/verification/coverage_utils.py index c77f5e7df..fabe6e5a8 100644 --- a/codeflash/verification/coverage_utils.py +++ b/codeflash/verification/coverage_utils.py @@ -59,7 +59,9 @@ def load_from_jest_json( source_path_str = str(source_code_path.resolve()) for file_path, file_data in coverage_data.items(): - if file_path == source_path_str or file_path.endswith(source_code_path.name): + # Match exact path or path ending with full relative path from src/ + # Avoid matching files with same name in different directories (e.g., db/utils.ts vs utils/utils.ts) + if file_path == source_path_str or file_path.endswith(str(source_code_path)): file_coverage = file_data break @@ -576,7 +578,9 @@ def grab_dependent_function_from_coverage_data( for file in files: functions = files[file]["functions"] for function in functions: - if dependent_function_name in function: + if function == dependent_function_name or ( + "." 
in dependent_function_name and function.endswith(f".{dependent_function_name}") + ): return FunctionCoverage( name=dependent_function_name, coverage=functions[function]["summary"]["percent_covered"], diff --git a/codeflash/verification/verifier.py b/codeflash/verification/verifier.py index 9a48edb0b..b00700607 100644 --- a/codeflash/verification/verifier.py +++ b/codeflash/verification/verifier.py @@ -42,7 +42,20 @@ def generate_tests( source_file = Path(function_to_optimize.file_path) project_module_system = detect_module_system(test_cfg.tests_project_rootdir, source_file) - logger.debug(f"Detected module system: {project_module_system}") + + # For JavaScript, calculate the correct import path from the actual test location + # (test_path) to the source file, not from tests_root + import os + + source_file_abs = source_file.resolve().with_suffix("") + test_dir_abs = test_path.resolve().parent + # Compute relative path from test directory to source file + rel_import_path = os.path.relpath(str(source_file_abs), str(test_dir_abs)) + # Ensure path starts with ./ or ../ for JavaScript/TypeScript imports + if not rel_import_path.startswith("../"): + rel_import_path = f"./{rel_import_path}" + # Keep as string since Path() normalizes away the ./ prefix + module_path = rel_import_path response = aiservice_client.generate_regression_tests( source_code_being_tested=source_code_being_tested, @@ -66,7 +79,8 @@ def generate_tests( if is_javascript(): from codeflash.languages.javascript.instrument import ( TestingMode, - fix_import_path_for_test_location, + fix_imports_inside_test_blocks, + fix_jest_mock_paths, instrument_generated_js_test, validate_and_fix_import_style, ) @@ -77,10 +91,12 @@ def generate_tests( source_file = Path(function_to_optimize.file_path) - # Fix import paths to be relative to test file location - # AI may generate imports like 'apps/web/app/file' instead of '../../app/file' - generated_test_source = fix_import_path_for_test_location( - 
generated_test_source, source_file, test_path, module_path + # Fix import statements that appear inside test blocks (invalid JS syntax) + generated_test_source = fix_imports_inside_test_blocks(generated_test_source) + + # Fix relative paths in jest.mock() calls + generated_test_source = fix_jest_mock_paths( + generated_test_source, test_path, source_file, test_cfg.tests_project_rootdir ) # Validate and fix import styles (default vs named exports) diff --git a/packages/codeflash/runtime/capture.js b/packages/codeflash/runtime/capture.js index 616e2907c..0fdcc5784 100644 --- a/packages/codeflash/runtime/capture.js +++ b/packages/codeflash/runtime/capture.js @@ -87,6 +87,8 @@ if (!process[PERF_STATE_KEY]) { shouldStop: false, // Flag to stop all further looping currentBatch: 0, // Current batch number (incremented by runner) invocationLoopCounts: {}, // Track loops per invocation: {invocationKey: loopCount} + invocationRuntimes: {}, // Track runtimes per invocation for stability: {invocationKey: [runtimes]} + stableInvocations: {}, // Invocations that have reached stability: {invocationKey: true} }; } const sharedPerfState = process[PERF_STATE_KEY]; @@ -98,10 +100,10 @@ const sharedPerfState = process[PERF_STATE_KEY]; function checkSharedTimeLimit() { if (sharedPerfState.shouldStop) return true; if (sharedPerfState.startTime === null) { - sharedPerfState.startTime = Date.now(); + sharedPerfState.startTime = _ORIGINAL_DATE_NOW(); return false; } - const elapsed = Date.now() - sharedPerfState.startTime; + const elapsed = _ORIGINAL_DATE_NOW() - sharedPerfState.startTime; if (elapsed >= getPerfTargetDurationMs() && sharedPerfState.totalLoopsCompleted >= getPerfMinLoops()) { sharedPerfState.shouldStop = true; return true; @@ -111,25 +113,33 @@ function checkSharedTimeLimit() { /** * Get the current loop index for a specific invocation. - * Each invocation tracks its own loop count independently within a batch. 
- * The actual loop index is computed as: (batch - 1) * BATCH_SIZE + localIndex - * This ensures continuous loop indices even when Jest resets module state. + * The loop index represents how many times ALL test files have been run through. + * This is the batch count from the loop-runner. * @param {string} invocationKey - Unique key for this test invocation - * @returns {number} The next global loop index for this invocation + * @returns {number} The current batch number (loop index) */ function getInvocationLoopIndex(invocationKey) { - // Track local loop count within this batch (starts at 0) + // Track local loop count for stopping logic (increments on each call) if (!sharedPerfState.invocationLoopCounts[invocationKey]) { sharedPerfState.invocationLoopCounts[invocationKey] = 0; } - const localIndex = ++sharedPerfState.invocationLoopCounts[invocationKey]; + ++sharedPerfState.invocationLoopCounts[invocationKey]; - // Calculate global loop index using batch number from environment - // PERF_CURRENT_BATCH is 1-based (set by loop-runner before each batch) - const currentBatch = parseInt(process.env.CODEFLASH_PERF_CURRENT_BATCH || '1', 10); - const globalIndex = (currentBatch - 1) * getPerfBatchSize() + localIndex; + // Return the batch number as the loop index for timing markers + // This represents how many times all test files have been run through + return parseInt(process.env.CODEFLASH_PERF_CURRENT_BATCH || '1', 10); +} - return globalIndex; +/** + * Get the total number of iterations for a specific invocation. + * Used for stopping logic to check against max loop count. 
+ * @param {string} invocationKey - Unique key for this test invocation + * @returns {number} Total iterations across all batches for this invocation + */ +function getTotalIterations(invocationKey) { + const localCount = sharedPerfState.invocationLoopCounts[invocationKey] || 0; + const currentBatch = parseInt(process.env.CODEFLASH_PERF_CURRENT_BATCH || '1', 10); + return (currentBatch - 1) * getPerfBatchSize() + localCount; } /** @@ -164,6 +174,8 @@ function createSeededRandom(seed) { return ((t ^ t >>> 14) >>> 0) / 4294967296; }; } +let _ORIGINAL_DATE = Date +let _ORIGINAL_DATE_NOW = Date.now // Override non-deterministic APIs with seeded versions if seed is provided // NOTE: We do NOT seed performance.now() or process.hrtime() as those are used @@ -176,8 +188,8 @@ if (RANDOM_SEED !== 0) { // Seed Date.now() and new Date() - use fixed base timestamp that increments const SEEDED_BASE_TIME = 1700000000000; // Nov 14, 2023 - fixed reference point let dateOffset = 0; - const OriginalDate = Date; - const originalDateNow = Date.now; + _ORIGINAL_DATE = Date; + _ORIGINAL_DATE_NOW = Date.now; Date.now = function() { return SEEDED_BASE_TIME + (dateOffset++); @@ -187,15 +199,15 @@ if (RANDOM_SEED !== 0) { function SeededDate(...args) { if (args.length === 0) { // No arguments: use seeded current time - return new OriginalDate(SEEDED_BASE_TIME + (dateOffset++)); + return new _ORIGINAL_DATE(SEEDED_BASE_TIME + (dateOffset++)); } // With arguments: use original behavior - return new OriginalDate(...args); + return new _ORIGINAL_DATE(...args); } - SeededDate.prototype = OriginalDate.prototype; + SeededDate.prototype = _ORIGINAL_DATE.prototype; SeededDate.now = Date.now; - SeededDate.parse = OriginalDate.parse; - SeededDate.UTC = OriginalDate.UTC; + SeededDate.parse = _ORIGINAL_DATE.parse; + SeededDate.UTC = _ORIGINAL_DATE.UTC; global.Date = SeededDate; // Seed crypto.randomUUID() and crypto.getRandomValues() @@ -265,26 +277,40 @@ const results = []; let db = null; /** - * Check 
if performance has stabilized (for internal looping). - * Matches Python's pytest_plugin.should_stop() logic. + * Check if performance has stabilized, allowing early stopping of benchmarks. + * Matches Python's pytest_plugin.should_stop() logic for consistency. + * + * Performance is considered stable when BOTH conditions are met: + * 1. CENTER: All recent measurements are within ±10% of the median + * 2. SPREAD: The range (max-min) is within 10% of the minimum + * + * @param {Array} runtimes - Array of runtime measurements in microseconds + * @param {number} window - Number of recent measurements to check + * @param {number} minWindowSize - Minimum samples required before checking + * @returns {boolean} True if performance has stabilized */ function shouldStopStability(runtimes, window, minWindowSize) { if (runtimes.length < window || runtimes.length < minWindowSize) { return false; } + const recent = runtimes.slice(-window); const recentSorted = [...recent].sort((a, b) => a - b); const mid = Math.floor(window / 2); const median = window % 2 ? recentSorted[mid] : (recentSorted[mid - 1] + recentSorted[mid]) / 2; + // Check CENTER: all recent points must be close to median for (const r of recent) { if (Math.abs(r - median) / median > STABILITY_CENTER_TOLERANCE) { return false; } } + + // Check SPREAD: range must be small relative to minimum const rMin = recentSorted[0]; const rMax = recentSorted[recentSorted.length - 1]; if (rMin === 0) return false; + return (rMax - rMin) / rMin <= STABILITY_SPREAD_TOLERANCE; } @@ -673,17 +699,32 @@ function capturePerf(funcName, lineId, fn, ...args) { ? (hasExternalLoopRunner ? 
getPerfBatchSize() : getPerfLoopCount()) : 1; + // Initialize runtime tracking for this invocation if needed + if (!sharedPerfState.invocationRuntimes[invocationKey]) { + sharedPerfState.invocationRuntimes[invocationKey] = []; + } + const runtimes = sharedPerfState.invocationRuntimes[invocationKey]; + + // Calculate stability window size based on collected runtimes + const getStabilityWindow = () => Math.max(getPerfMinLoops(), Math.ceil(runtimes.length * STABILITY_WINDOW_SIZE)); + for (let batchIndex = 0; batchIndex < batchSize; batchIndex++) { // Check shared time limit BEFORE each iteration if (shouldLoop && checkSharedTimeLimit()) { break; } - // Get the global loop index for this invocation (increments across batches) + // Check if this invocation has already reached stability + if (getPerfStabilityCheck() && sharedPerfState.stableInvocations[invocationKey]) { + break; + } + + // Get the loop index (batch number) for timing markers const loopIndex = getInvocationLoopIndex(invocationKey); // Check if we've exceeded max loops for this invocation - if (loopIndex > getPerfLoopCount()) { + const totalIterations = getTotalIterations(invocationKey); + if (totalIterations > getPerfLoopCount()) { break; } @@ -703,23 +744,17 @@ function capturePerf(funcName, lineId, fn, ...args) { const endTime = getTimeNs(); durationNs = getDurationNs(startTime, endTime); - // Handle promises - for async functions, run once and return + // Handle promises - for async functions, we need to handle looping differently + // Since we can't use await in the sync loop, delegate to async helper if (lastReturnValue instanceof Promise) { - return lastReturnValue.then( - (resolved) => { - const asyncEndTime = getTimeNs(); - const asyncDurationNs = getDurationNs(startTime, asyncEndTime); - console.log(`!######${testStdoutTag}:${asyncDurationNs}######!`); - sharedPerfState.totalLoopsCompleted++; - return resolved; - }, - (err) => { - const asyncEndTime = getTimeNs(); - const asyncDurationNs = 
getDurationNs(startTime, asyncEndTime); - console.log(`!######${testStdoutTag}:${asyncDurationNs}######!`); - sharedPerfState.totalLoopsCompleted++; - throw err; - } + // For async functions, delegate to the async looping helper + // Pass along all the context needed for continued looping + return _capturePerfAsync( + funcName, lineId, fn, args, + lastReturnValue, startTime, testStdoutTag, + safeModulePath, testClassName, safeTestFunctionName, + invocationKey, runtimes, batchSize, batchIndex, + shouldLoop, getStabilityWindow ); } @@ -735,6 +770,20 @@ function capturePerf(funcName, lineId, fn, ...args) { // Update shared loop counter sharedPerfState.totalLoopsCompleted++; + // Track runtime for stability check (convert to microseconds) + if (durationNs > 0) { + runtimes.push(durationNs / 1000); + } + + // Check stability after accumulating enough samples + if (getPerfStabilityCheck() && runtimes.length >= getPerfMinLoops()) { + const window = getStabilityWindow(); + if (shouldStopStability(runtimes, window, getPerfMinLoops())) { + sharedPerfState.stableInvocations[invocationKey] = true; + break; + } + } + // If we had an error, stop looping if (lastError) { break; @@ -751,6 +800,123 @@ function capturePerf(funcName, lineId, fn, ...args) { return lastReturnValue; } +/** + * Helper to record async timing and update state. + * @private + */ +function _recordAsyncTiming(startTime, testStdoutTag, durationNs, runtimes) { + console.log(`!######${testStdoutTag}:${durationNs}######!`); + sharedPerfState.totalLoopsCompleted++; + if (durationNs > 0) { + runtimes.push(durationNs / 1000); + } +} + +/** + * Async helper for capturePerf to handle async function looping. + * This function awaits promises and continues the benchmark loop properly. 
+ * + * @private + * @param {string} funcName - Name of the function being benchmarked + * @param {string} lineId - Line identifier for this capture point + * @param {Function} fn - The async function to benchmark + * @param {Array} args - Arguments to pass to fn + * @param {Promise} firstPromise - The first promise that was already started + * @param {number} firstStartTime - Start time of the first execution + * @param {string} firstTestStdoutTag - Timing marker tag for the first execution + * @param {string} safeModulePath - Sanitized module path + * @param {string|null} testClassName - Test class name (if any) + * @param {string} safeTestFunctionName - Sanitized test function name + * @param {string} invocationKey - Unique key for this invocation + * @param {Array} runtimes - Array to collect runtimes for stability checking + * @param {number} batchSize - Number of iterations per batch + * @param {number} startBatchIndex - Index where async looping started + * @param {boolean} shouldLoop - Whether to continue looping + * @param {Function} getStabilityWindow - Function to get stability window size + * @returns {Promise} The last return value from fn + */ +async function _capturePerfAsync( + funcName, lineId, fn, args, + firstPromise, firstStartTime, firstTestStdoutTag, + safeModulePath, testClassName, safeTestFunctionName, + invocationKey, runtimes, batchSize, startBatchIndex, + shouldLoop, getStabilityWindow +) { + let lastReturnValue; + let lastError = null; + + // Handle the first promise that was already started + try { + lastReturnValue = await firstPromise; + const asyncEndTime = getTimeNs(); + const asyncDurationNs = getDurationNs(firstStartTime, asyncEndTime); + _recordAsyncTiming(firstStartTime, firstTestStdoutTag, asyncDurationNs, runtimes); + } catch (err) { + const asyncEndTime = getTimeNs(); + const asyncDurationNs = getDurationNs(firstStartTime, asyncEndTime); + _recordAsyncTiming(firstStartTime, firstTestStdoutTag, asyncDurationNs, runtimes); + 
lastError = err; + // Don't throw yet - we want to record the timing first + } + + // If first iteration failed, stop and throw + if (lastError) { + throw lastError; + } + + // Continue looping for remaining iterations + for (let batchIndex = startBatchIndex + 1; batchIndex < batchSize; batchIndex++) { + // Check exit conditions before starting next iteration + if (shouldLoop && checkSharedTimeLimit()) { + break; + } + + if (getPerfStabilityCheck() && sharedPerfState.stableInvocations[invocationKey]) { + break; + } + + // Get the loop index (batch number) for timing markers + const loopIndex = getInvocationLoopIndex(invocationKey); + + // Check if we've exceeded max loops for this invocation + const totalIterations = getTotalIterations(invocationKey); + if (totalIterations > getPerfLoopCount()) { + break; + } + + // Generate timing marker identifiers + const testId = `${safeModulePath}:${testClassName}:${safeTestFunctionName}:${lineId}:${loopIndex}`; + const invocationIndex = getInvocationIndex(testId); + const invocationId = `${lineId}_${invocationIndex}`; + const testStdoutTag = `${safeModulePath}:${testClassName ? testClassName + '.' : ''}${safeTestFunctionName}:${funcName}:${loopIndex}:${invocationId}`; + + // Execute and time the function + try { + const startTime = getTimeNs(); + lastReturnValue = await fn(...args); + const endTime = getTimeNs(); + const durationNs = getDurationNs(startTime, endTime); + + _recordAsyncTiming(startTime, testStdoutTag, durationNs, runtimes); + + // Check if we've reached performance stability + if (getPerfStabilityCheck() && runtimes.length >= getPerfMinLoops()) { + const window = getStabilityWindow(); + if (shouldStopStability(runtimes, window, getPerfMinLoops())) { + sharedPerfState.stableInvocations[invocationKey] = true; + break; + } + } + } catch (e) { + lastError = e; + break; + } + } + + if (lastError) throw lastError; + return lastReturnValue; +} + /** * Capture multiple invocations for benchmarking. 
* @@ -789,7 +955,7 @@ function writeResults() { const output = { version: '1.0.0', loopIndex: LOOP_INDEX, - timestamp: Date.now(), + timestamp: _ORIGINAL_DATE_NOW(), results }; fs.writeFileSync(jsonPath, JSON.stringify(output, null, 2)); @@ -806,6 +972,8 @@ function resetPerfState() { sharedPerfState.startTime = null; sharedPerfState.totalLoopsCompleted = 0; sharedPerfState.shouldStop = false; + sharedPerfState.invocationRuntimes = {}; + sharedPerfState.stableInvocations = {}; } /** diff --git a/packages/codeflash/runtime/loop-runner.js b/packages/codeflash/runtime/loop-runner.js index 6bfde0c4c..c6d476f1f 100644 --- a/packages/codeflash/runtime/loop-runner.js +++ b/packages/codeflash/runtime/loop-runner.js @@ -24,6 +24,8 @@ * NOTE: This runner requires jest-runner to be installed in your project. * It is a Jest-specific feature and does not work with Vitest. * For Vitest projects, capturePerf() does all loops internally in a single call. + * + * Compatibility: Works with Jest 29.x and Jest 30.x */ 'use strict'; @@ -32,10 +34,26 @@ const { createRequire } = require('module'); const path = require('path'); const fs = require('fs'); +/** + * Validates that a jest-runner path is valid by checking for package.json. + * @param {string} jestRunnerPath - Path to check + * @returns {boolean} True if valid jest-runner package + */ +function isValidJestRunnerPath(jestRunnerPath) { + if (!fs.existsSync(jestRunnerPath)) { + return false; + } + const packageJsonPath = path.join(jestRunnerPath, 'package.json'); + return fs.existsSync(packageJsonPath); +} + /** * Resolve jest-runner with monorepo support. * Uses CODEFLASH_MONOREPO_ROOT environment variable if available, * otherwise walks up the directory tree looking for node_modules/jest-runner. 
+ * + * @returns {string} Path to jest-runner package + * @throws {Error} If jest-runner cannot be found */ function resolveJestRunner() { // Try standard resolution first (works in simple projects) @@ -49,11 +67,8 @@ function resolveJestRunner() { const monorepoRoot = process.env.CODEFLASH_MONOREPO_ROOT; if (monorepoRoot) { const jestRunnerPath = path.join(monorepoRoot, 'node_modules', 'jest-runner'); - if (fs.existsSync(jestRunnerPath)) { - const packageJsonPath = path.join(jestRunnerPath, 'package.json'); - if (fs.existsSync(packageJsonPath)) { - return jestRunnerPath; - } + if (isValidJestRunnerPath(jestRunnerPath)) { + return jestRunnerPath; } } @@ -69,11 +84,8 @@ function resolveJestRunner() { // Try node_modules/jest-runner at this level const jestRunnerPath = path.join(currentDir, 'node_modules', 'jest-runner'); - if (fs.existsSync(jestRunnerPath)) { - const packageJsonPath = path.join(jestRunnerPath, 'package.json'); - if (fs.existsSync(packageJsonPath)) { - return jestRunnerPath; - } + if (isValidJestRunnerPath(jestRunnerPath)) { + return jestRunnerPath; } // Check if this is a workspace root (has monorepo markers) @@ -89,18 +101,53 @@ function resolveJestRunner() { currentDir = path.dirname(currentDir); } - throw new Error('jest-runner not found'); + throw new Error( + 'jest-runner not found. Please install jest-runner in your project: npm install --save-dev jest-runner' + ); } -// Try to load jest-runner - it's a peer dependency that must be installed by the user +/** + * Jest runner components - loaded dynamically from project's node_modules. + * This ensures we use the same version that the project uses. + * + * Jest 30+ uses TestRunner class with event-based architecture. + * Jest 29 uses runTest function for direct test execution. 
+ */ +let TestRunner; let runTest; let jestRunnerAvailable = false; +let jestVersion = 0; try { const jestRunnerPath = resolveJestRunner(); const internalRequire = createRequire(jestRunnerPath); - runTest = internalRequire('./runTest').default; - jestRunnerAvailable = true; + + // Try to get the TestRunner class (Jest 30+) + const jestRunner = internalRequire(jestRunnerPath); + TestRunner = jestRunner.default || jestRunner.TestRunner; + + if (TestRunner && TestRunner.prototype && typeof TestRunner.prototype.runTests === 'function') { + // Jest 30+ - use TestRunner class with event emitter pattern + jestVersion = 30; + jestRunnerAvailable = true; + } else { + // Try Jest 29 style import + try { + runTest = internalRequire('./runTest').default; + if (typeof runTest === 'function') { + // Jest 29 - use direct runTest function + jestVersion = 29; + jestRunnerAvailable = true; + } + } catch (e29) { + // Neither Jest 29 nor 30 style import worked + const errorMsg = `Found jest-runner at ${jestRunnerPath} but could not load it. ` + + `This may indicate an unsupported Jest version. ` + + `Supported versions: Jest 29.x and Jest 30.x`; + console.error(errorMsg); + jestRunnerAvailable = false; + } + } } catch (e) { // jest-runner not installed - this is expected for Vitest projects // The runner will throw a helpful error if someone tries to use it without jest-runner @@ -167,6 +214,9 @@ function deepCopy(obj, seen = new WeakMap()) { /** * Codeflash Loop Runner with Batched Looping + * + * For Jest 30+, extends the TestRunner class directly. + * For Jest 29, uses the runTest function import. */ class CodeflashLoopRunner { constructor(globalConfig, context) { @@ -175,12 +225,24 @@ class CodeflashLoopRunner { 'codeflash/loop-runner requires jest-runner to be installed.\n' + 'Please install it: npm install --save-dev jest-runner\n\n' + 'If you are using Vitest, the loop-runner is not needed - ' + - 'Vitest projects use external looping handled by the Python runner.' 
+ 'Vitest projects use internal looping handled by capturePerf().' ); } + this._globalConfig = globalConfig; this._context = context || {}; this._eventEmitter = new SimpleEventEmitter(); + + // For Jest 30+, create an instance of the base TestRunner for delegation + if (jestVersion >= 30) { + if (!TestRunner) { + throw new Error( + `Jest ${jestVersion} detected but TestRunner class not available. ` + + `This indicates an internal error in loop-runner initialization.` + ); + } + this._baseRunner = new TestRunner(globalConfig, context); + } } get supportsEventEmitters() { @@ -196,7 +258,17 @@ class CodeflashLoopRunner { } /** - * Run tests with batched looping for fair distribution. + * Run tests with batched looping for fair distribution across all test invocations. + * + * This implements the batched looping strategy: + * Batch 1: Test1(N loops) → Test2(N loops) → Test3(N loops) + * Batch 2: Test1(N loops) → Test2(N loops) → Test3(N loops) + * ...until time budget exhausted or max batches reached + * + * @param {Array} tests - Jest test objects to run + * @param {Object} watcher - Jest watcher for interrupt handling + * @param {Object} options - Jest runner options + * @returns {Promise} */ async runTests(tests, watcher, options) { const startTime = Date.now(); @@ -204,59 +276,51 @@ class CodeflashLoopRunner { let hasFailure = false; let allConsoleOutput = ''; - // Import shared state functions from capture module - // We need to do this dynamically since the module may be reloaded - let checkSharedTimeLimit; - let incrementBatch; - try { - const capture = require('codeflash'); - checkSharedTimeLimit = capture.checkSharedTimeLimit; - incrementBatch = capture.incrementBatch; - } catch (e) { - // Fallback if codeflash module not available - checkSharedTimeLimit = () => { - const elapsed = Date.now() - startTime; - return elapsed >= TARGET_DURATION_MS && batchCount >= MIN_BATCHES; - }; - incrementBatch = () => {}; - } + // Time limit check - must use local time 
tracking because Jest runs tests + // in isolated worker processes where shared state from capture.js isn't accessible + const checkTimeLimit = () => { + const elapsed = Date.now() - startTime; + return elapsed >= TARGET_DURATION_MS && batchCount >= MIN_BATCHES; + }; // Batched looping: run all test files multiple times while (batchCount < MAX_BATCHES) { batchCount++; // Check time limit BEFORE each batch - if (batchCount > MIN_BATCHES && checkSharedTimeLimit()) { + if (batchCount > MIN_BATCHES && checkTimeLimit()) { + console.log(`[codeflash] Time limit reached after ${batchCount - 1} batches (${Date.now() - startTime}ms elapsed)`); break; } // Check if interrupted if (watcher.isInterrupted()) { + console.log(`[codeflash] Watcher is interrupted`) break; } - // Increment batch counter in shared state and set env var - // The env var persists across Jest module resets, ensuring continuous loop indices - incrementBatch(); + // Set env var for batch number - persists across Jest module resets process.env.CODEFLASH_PERF_CURRENT_BATCH = String(batchCount); // Run all test files in this batch - const batchResult = await this._runAllTestsOnce(tests, watcher); + const batchResult = await this._runAllTestsOnce(tests, watcher, options); allConsoleOutput += batchResult.consoleOutput; - if (batchResult.hasFailure) { - hasFailure = true; - break; - } + // if (batchResult.hasFailure) { + // hasFailure = true; + // break; + // } // Check time limit AFTER each batch - if (checkSharedTimeLimit()) { + if (checkTimeLimit()) { + console.log(`[codeflash] Time limit reached after ${batchCount} batches (${Date.now() - startTime}ms elapsed)`); break; } } const totalTimeMs = Date.now() - startTime; + console.log(`[codeflash] now: ${Date.now()}`) // Output all collected console logs - this is critical for timing marker extraction // The console output contains the !######...######! 
timing markers from capturePerf if (allConsoleOutput) { @@ -268,8 +332,74 @@ class CodeflashLoopRunner { /** * Run all test files once (one batch). + * Uses different approaches for Jest 29 vs Jest 30. + */ + async _runAllTestsOnce(tests, watcher, options) { + if (jestVersion >= 30) { + return this._runAllTestsOnceJest30(tests, watcher, options); + } else { + return this._runAllTestsOnceJest29(tests, watcher); + } + } + + /** + * Jest 30+ implementation - delegates to base TestRunner and collects results. + */ + async _runAllTestsOnceJest30(tests, watcher, options) { + let hasFailure = false; + let allConsoleOutput = ''; + + // For Jest 30, we need to collect results through event listeners + const resultsCollector = []; + + // Subscribe to events from the base runner + const unsubscribeSuccess = this._baseRunner.on('test-file-success', (testData) => { + const [test, result] = testData; + resultsCollector.push({ test, result, success: true }); + + if (result && result.console && Array.isArray(result.console)) { + allConsoleOutput += result.console.map(e => e.message || '').join('\n') + '\n'; + } + + if (result && result.numFailingTests > 0) { + hasFailure = true; + } + + // Forward to our event emitter + this._eventEmitter.emit('test-file-success', testData); + }); + + const unsubscribeFailure = this._baseRunner.on('test-file-failure', (testData) => { + const [test, error] = testData; + resultsCollector.push({ test, error, success: false }); + hasFailure = true; + + // Forward to our event emitter + this._eventEmitter.emit('test-file-failure', testData); + }); + + const unsubscribeStart = this._baseRunner.on('test-file-start', (testData) => { + // Forward to our event emitter + this._eventEmitter.emit('test-file-start', testData); + }); + + try { + // Run tests using the base runner (always serial for benchmarking) + await this._baseRunner.runTests(tests, watcher, { ...options, serial: true }); + } finally { + // Cleanup subscriptions + if (typeof 
unsubscribeSuccess === 'function') unsubscribeSuccess(); + if (typeof unsubscribeFailure === 'function') unsubscribeFailure(); + if (typeof unsubscribeStart === 'function') unsubscribeStart(); + } + + return { consoleOutput: allConsoleOutput, hasFailure }; + } + + /** + * Jest 29 implementation - uses direct runTest import. */ - async _runAllTestsOnce(tests, watcher) { + async _runAllTestsOnceJest29(tests, watcher) { let hasFailure = false; let allConsoleOutput = ''; diff --git a/pyproject.toml b/pyproject.toml index f02995db3..ea5f2140a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -290,6 +290,7 @@ ignore = [ "SIM108", # Ternary operator suggestion "F841", # Unused variable (often intentional) "ANN202", # Missing return type for private functions + "B009", # getattr-with-constant - needed to avoid mypy [misc] on dunder access ] [tool.ruff.lint.flake8-type-checking] diff --git a/tessl.json b/tessl.json new file mode 100644 index 000000000..d766df3ba --- /dev/null +++ b/tessl.json @@ -0,0 +1,80 @@ +{ + "name": "codeflash", + "dependencies": { + "tessl/pypi-pytest": { + "version": "8.4.0" + }, + "tessl/pypi-gitpython": { + "version": "3.1.0" + }, + "tessl/pypi-libcst": { + "version": "1.8.0" + }, + "tessl/pypi-jedi": { + "version": "0.19.0" + }, + "tessl/pypi-tree-sitter": { + "version": "0.25.0" + }, + "tessl/pypi-tomlkit": { + "version": "0.13.0" + }, + "tessl/pypi-pydantic": { + "version": "1.10.0" + }, + "tessl/pypi-humanize": { + "version": "4.13.0" + }, + "tessl/pypi-posthog": { + "version": "6.7.0" + }, + "tessl/pypi-click": { + "version": "8.2.0" + }, + "tessl/pypi-inquirer": { + "version": "3.4.0" + }, + "tessl/pypi-sentry-sdk": { + "version": "1.45.0" + }, + "tessl/pypi-parameterized": { + "version": "0.9.0" + }, + "tessl/pypi-dill": { + "version": "0.4.0" + }, + "tessl/pypi-rich": { + "version": "13.9.0" + }, + "tessl/pypi-lxml": { + "version": "5.4.0" + }, + "tessl/pypi-crosshair-tool": { + "version": "0.0.0" + }, + "tessl/pypi-coverage": { + 
"version": "7.10.0" + }, + "tessl/pypi-platformdirs": { + "version": "4.4.0" + }, + "tessl/pypi-pygls": { + "version": "1.3.0" + }, + "tessl/pypi-filelock": { + "version": "3.19.0" + }, + "codeflash/codeflash-rules": { + "version": "0.1.0" + }, + "codeflash/codeflash-docs": { + "version": "0.1.0" + }, + "codeflash/codeflash-skills": { + "version": "0.2.0" + }, + "tessl-labs/tessl-skill-eval-scenarios": { + "version": "0.0.5" + } + } +} diff --git a/tests/code_utils/test_coverage_utils.py b/tests/code_utils/test_coverage_utils.py new file mode 100644 index 000000000..d637bac5e --- /dev/null +++ b/tests/code_utils/test_coverage_utils.py @@ -0,0 +1,228 @@ +from __future__ import annotations + +from typing import Any + +from codeflash.code_utils.coverage_utils import build_fully_qualified_name, extract_dependent_function +from codeflash.models.function_types import FunctionParent +from codeflash.models.models import CodeOptimizationContext, CodeString, CodeStringsMarkdown +from codeflash.verification.coverage_utils import CoverageUtils + + +def _make_code_context( + preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]], + testgen_code_strings: list[CodeString] | None = None, +) -> CodeOptimizationContext: + """Helper to create a minimal CodeOptimizationContext for testing.""" + return CodeOptimizationContext( + testgen_context=CodeStringsMarkdown(code_strings=testgen_code_strings or []), + read_writable_code=CodeStringsMarkdown(), + helper_functions=[], + preexisting_objects=preexisting_objects, + ) + + +class TestBuildFullyQualifiedName: + def test_bare_name_with_class_parent(self) -> None: + ctx = _make_code_context({("__init__", (FunctionParent(name="HttpInterface", type="ClassDef"),))}) + assert build_fully_qualified_name("__init__", ctx) == "HttpInterface.__init__" + + def test_bare_name_no_parent(self) -> None: + ctx = _make_code_context({("helper_func", ())}) + assert build_fully_qualified_name("helper_func", ctx) == "helper_func" + + def 
test_already_qualified_name_returned_as_is(self) -> None: + """If name already contains a dot, skip preexisting_objects lookup.""" + ctx = _make_code_context({("__init__", (FunctionParent(name="WrongClass", type="ClassDef"),))}) + result = build_fully_qualified_name("HttpInterface.__init__", ctx) + assert result == "HttpInterface.__init__" + + def test_bare_name_picks_first_match_from_set(self) -> None: + """With multiple __init__ entries, bare name picks an arbitrary one.""" + ctx = _make_code_context( + { + ("__init__", (FunctionParent(name="ClassA", type="ClassDef"),)), + ("__init__", (FunctionParent(name="ClassB", type="ClassDef"),)), + } + ) + result = build_fully_qualified_name("__init__", ctx) + assert result in {"ClassA.__init__", "ClassB.__init__"} + + def test_qualified_name_avoids_ambiguity(self) -> None: + """Qualified name bypasses preexisting_objects entirely, avoiding ambiguity.""" + ctx = _make_code_context( + { + ("__init__", (FunctionParent(name="ClassA", type="ClassDef"),)), + ("__init__", (FunctionParent(name="ClassB", type="ClassDef"),)), + } + ) + assert build_fully_qualified_name("ClassB.__init__", ctx) == "ClassB.__init__" + + def test_bare_name_not_in_preexisting_objects(self) -> None: + ctx = _make_code_context(set()) + assert build_fully_qualified_name("some_func", ctx) == "some_func" + + def test_nested_class_parent(self) -> None: + """Bare name under nested class parents gets fully qualified.""" + ctx = _make_code_context( + {("method", (FunctionParent(name="Outer", type="ClassDef"), FunctionParent(name="Inner", type="ClassDef")))} + ) + assert build_fully_qualified_name("method", ctx) == "Inner.Outer.method" + + def test_non_classdef_parent_ignored(self) -> None: + """Only ClassDef parents are prepended to the name.""" + ctx = _make_code_context({("helper", (FunctionParent(name="wrapper", type="FunctionDef"),))}) + assert build_fully_qualified_name("helper", ctx) == "helper" + + +class TestExtractDependentFunction: + def 
test_single_dependent_function(self) -> None: + ctx = _make_code_context( + preexisting_objects={("helper", ())}, + testgen_code_strings=[CodeString(code="def main_func(): pass\ndef helper(): pass")], + ) + result = extract_dependent_function("main_func", ctx) + assert result == "helper" + + def test_qualified_main_function_discards_bare_match(self) -> None: + """Qualified main_function should still discard the matching bare name.""" + ctx = _make_code_context( + preexisting_objects={("helper", ())}, + testgen_code_strings=[CodeString(code="def __init__(): pass\ndef helper(): pass")], + ) + result = extract_dependent_function("HttpInterface.__init__", ctx) + assert result == "helper" + + def test_bare_main_function_discards_match(self) -> None: + """Bare main_function should still work for discarding.""" + ctx = _make_code_context( + preexisting_objects={("helper", ())}, + testgen_code_strings=[CodeString(code="def main_func(): pass\ndef helper(): pass")], + ) + result = extract_dependent_function("main_func", ctx) + assert result == "helper" + + def test_no_dependent_functions(self) -> None: + ctx = _make_code_context(preexisting_objects=set(), testgen_code_strings=[CodeString(code="x = 1\n")]) + result = extract_dependent_function("main_func", ctx) + assert result is False + + def test_multiple_dependent_functions_returns_false(self) -> None: + ctx = _make_code_context( + preexisting_objects=set(), + testgen_code_strings=[CodeString(code="def helper_a(): pass\ndef helper_b(): pass")], + ) + result = extract_dependent_function("main_func", ctx) + assert result is False + + def test_dependent_function_gets_qualified(self) -> None: + """The dependent function returned should be qualified via build_fully_qualified_name.""" + ctx = _make_code_context( + preexisting_objects={("helper", (FunctionParent(name="MyClass", type="ClassDef"),))}, + testgen_code_strings=[CodeString(code="def main_func(): pass\ndef helper(): pass")], + ) + result = 
extract_dependent_function("main_func", ctx) + assert result == "MyClass.helper" + + def test_only_main_in_code_returns_false(self) -> None: + """When code only contains the main function, no dependent function exists.""" + ctx = _make_code_context( + preexisting_objects=set(), testgen_code_strings=[CodeString(code="def __init__(): pass")] + ) + result = extract_dependent_function("HttpInterface.__init__", ctx) + assert result is False + + def test_async_functions_extracted(self) -> None: + """Async function definitions are also extracted as dependent functions.""" + ctx = _make_code_context( + preexisting_objects={("async_helper", ())}, + testgen_code_strings=[CodeString(code="def main(): pass\nasync def async_helper(): pass")], + ) + result = extract_dependent_function("main", ctx) + assert result == "async_helper" + + +class TestGrabDependentFunctionFromCoverageData: + def _make_func_data(self, coverage_pct: float = 80.0) -> dict[str, Any]: + return { + "summary": {"percent_covered": coverage_pct}, + "executed_lines": [1, 2, 3], + "missing_lines": [4], + "executed_branches": [[1, 0]], + "missing_branches": [[2, 1]], + } + + def test_exact_match_in_coverage_data(self) -> None: + coverage_data = {"HttpInterface.__init__": self._make_func_data(90.0)} + result = CoverageUtils.grab_dependent_function_from_coverage_data("HttpInterface.__init__", coverage_data, {}) + assert result.name == "HttpInterface.__init__" + assert result.coverage == 90.0 + + def test_fallback_exact_match_in_original_data(self) -> None: + original_cov_data = { + "files": {"http_api.py": {"functions": {"HttpInterface.__init__": self._make_func_data(75.0)}}} + } + result = CoverageUtils.grab_dependent_function_from_coverage_data( + "HttpInterface.__init__", {}, original_cov_data + ) + assert result.name == "HttpInterface.__init__" + assert result.coverage == 75.0 + + def test_fallback_suffix_match_in_original_data(self) -> None: + """Qualified dependent name matches via suffix in original coverage 
data.""" + original_cov_data = { + "files": {"http_api.py": {"functions": {"module.HttpInterface.__init__": self._make_func_data(60.0)}}} + } + result = CoverageUtils.grab_dependent_function_from_coverage_data( + "HttpInterface.__init__", {}, original_cov_data + ) + assert result.name == "HttpInterface.__init__" + assert result.coverage == 60.0 + + def test_no_false_substring_match_bare_init(self) -> None: + """Bare __init__ should NOT match PathAwareCORSMiddleware.__init__ via substring.""" + original_cov_data = { + "files": {"cors.py": {"functions": {"PathAwareCORSMiddleware.__init__": self._make_func_data(50.0)}}} + } + result = CoverageUtils.grab_dependent_function_from_coverage_data("__init__", {}, original_cov_data) + assert result.coverage == 0 + + def test_no_false_substring_match_different_class(self) -> None: + """Qualified name for one class should not match another class's method.""" + original_cov_data = { + "files": { + "api.py": { + "functions": { + "PathAwareCORSMiddleware.__init__": self._make_func_data(50.0), + "HttpInterface.__init__": self._make_func_data(85.0), + } + } + } + } + result = CoverageUtils.grab_dependent_function_from_coverage_data( + "HttpInterface.__init__", {}, original_cov_data + ) + assert result.name == "HttpInterface.__init__" + assert result.coverage == 85.0 + + def test_no_match_returns_zero_coverage(self) -> None: + result = CoverageUtils.grab_dependent_function_from_coverage_data("nonexistent_func", {}, {"files": {}}) + assert result.coverage == 0 + assert result.executed_lines == [] + + def test_qualified_suffix_no_match_for_partial_name(self) -> None: + """Ensure suffix match requires a dot boundary, not just string suffix.""" + original_cov_data = { + "files": {"api.py": {"functions": {"XHttpInterface.__init__": self._make_func_data(40.0)}}} + } + # "HttpInterface.__init__" should NOT match "XHttpInterface.__init__" via suffix + result = CoverageUtils.grab_dependent_function_from_coverage_data( + 
"HttpInterface.__init__", {}, original_cov_data + ) + assert result.coverage == 0 + + def test_bare_name_exact_match_in_fallback(self) -> None: + """Bare function name should still work with exact match in fallback.""" + original_cov_data = {"files": {"utils.py": {"functions": {"helper_func": self._make_func_data(95.0)}}}} + result = CoverageUtils.grab_dependent_function_from_coverage_data("helper_func", {}, original_cov_data) + assert result.name == "helper_func" + assert result.coverage == 95.0 diff --git a/tests/languages/javascript/test_vitest_junit.py b/tests/languages/javascript/test_vitest_junit.py index ac52ffe3e..720c158b3 100644 --- a/tests/languages/javascript/test_vitest_junit.py +++ b/tests/languages/javascript/test_vitest_junit.py @@ -12,7 +12,7 @@ import pytest from junitparser import JUnitXml -from codeflash.verification.parse_test_output import jest_end_pattern, jest_start_pattern +from codeflash.languages.javascript.parse import jest_end_pattern, jest_start_pattern class TestVitestJunitXmlFormat: diff --git a/tests/test_code_context_extractor.py b/tests/test_code_context_extractor.py index c5009b898..7088e6f1f 100644 --- a/tests/test_code_context_extractor.py +++ b/tests/test_code_context_extractor.py @@ -12,10 +12,13 @@ from codeflash.code_utils.code_replacer import replace_functions_and_add_imports from codeflash.context.code_context_extractor import ( collect_names_from_annotation, + extract_classes_from_type_hint, extract_imports_for_class, get_code_optimization_context, get_external_base_class_inits, + get_external_class_inits, get_imported_class_definitions, + resolve_transitive_type_deps, ) from codeflash.discovery.functions_to_optimize import FunctionToOptimize from codeflash.models.models import CodeString, CodeStringsMarkdown, FunctionParent @@ -4620,3 +4623,283 @@ def target_method(self): # counter should be in context since __init__ uses it read_writable = code_ctx.read_writable_code.markdown assert "counter" in read_writable + + +def 
test_get_external_class_inits_extracts_click_option(tmp_path: Path) -> None: + """Extracts __init__ from click.Option when directly imported.""" + code = """from click import Option + +def my_func(opt: Option) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + assert len(result.code_strings) == 1 + code_string = result.code_strings[0] + assert "class Option:" in code_string.code + assert "def __init__" in code_string.code + assert code_string.file_path is not None and "click" in code_string.file_path.as_posix() + + +def test_get_external_class_inits_skips_project_classes(tmp_path: Path) -> None: + """Returns empty when imported class is from the project, not external.""" + # Create a project module with a class + (tmp_path / "mymodule.py").write_text("class ProjectClass:\n pass\n", encoding="utf-8") + + code = """from mymodule import ProjectClass + +def my_func(obj: ProjectClass) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + assert result.code_strings == [] + + +def test_get_external_class_inits_skips_non_classes(tmp_path: Path) -> None: + """Returns empty when imported name is a function, not a class.""" + code = """from collections import OrderedDict +from os.path import join + +def my_func() -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + # join is a function, not a class — should be skipped + # OrderedDict is a class and should be included + 
class_names = [cs.code.split("\n")[0] for cs in result.code_strings] + assert not any("join" in name for name in class_names) + + +def test_get_external_class_inits_skips_already_defined_classes(tmp_path: Path) -> None: + """Skips classes already defined in the context (e.g., added by get_imported_class_definitions).""" + code = """from collections import UserDict + +class UserDict: + def __init__(self): + pass + +def my_func(d: UserDict) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + # UserDict is already defined in the context, so it should be skipped + assert result.code_strings == [] + + +def test_get_external_class_inits_skips_builtins(tmp_path: Path) -> None: + """Returns empty for builtin classes like list/dict that have no inspectable source.""" + code = """x: list = [] +y: dict = {} + +def my_func() -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + assert result.code_strings == [] + + +def test_get_external_class_inits_skips_object_init(tmp_path: Path) -> None: + """Skips classes whose __init__ is just object.__init__ (trivial).""" + # enum.Enum has a metaclass-based __init__, but individual enum members + # effectively use object.__init__. Use a class we know has object.__init__. 
+ code = """from xml.etree.ElementTree import QName + +def my_func(q: QName) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + # QName has its own __init__, so it should be included if it's in site-packages. + # But since it's stdlib (not site-packages), it should be skipped. + assert result.code_strings == [] + + +def test_get_external_class_inits_empty_when_no_imports(tmp_path: Path) -> None: + """Returns empty when there are no from-imports.""" + code = """def my_func() -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + assert result.code_strings == [] + + +# --- Tests for extract_classes_from_type_hint --- + + +def test_extract_classes_from_type_hint_plain_class() -> None: + """Extracts a plain class directly.""" + from click import Option + + result = extract_classes_from_type_hint(Option) + assert Option in result + + +def test_extract_classes_from_type_hint_optional() -> None: + """Unwraps Optional[X] to find X.""" + from typing import Optional + + from click import Option + + result = extract_classes_from_type_hint(Optional[Option]) + assert Option in result + + +def test_extract_classes_from_type_hint_union() -> None: + """Unwraps Union[X, Y] to find both X and Y.""" + from typing import Union + + from click import Command, Option + + result = extract_classes_from_type_hint(Union[Option, Command]) + assert Option in result + assert Command in result + + +def test_extract_classes_from_type_hint_list() -> None: + """Unwraps List[X] to find X.""" + from typing import List + + from click import Option + + result = 
extract_classes_from_type_hint(List[Option]) + assert Option in result + + +def test_extract_classes_from_type_hint_filters_builtins() -> None: + """Filters out builtins like str, int, None.""" + from typing import Optional + + result = extract_classes_from_type_hint(Optional[str]) + assert len(result) == 0 + + +def test_extract_classes_from_type_hint_callable() -> None: + """Handles bare Callable without error.""" + from typing import Callable + + result = extract_classes_from_type_hint(Callable) + assert isinstance(result, list) + + +def test_extract_classes_from_type_hint_callable_with_args() -> None: + """Unwraps Callable[[X], Y] to find classes.""" + from typing import Callable + + from click import Context + + result = extract_classes_from_type_hint(Callable[[Context], None]) + assert Context in result + + +# --- Tests for resolve_transitive_type_deps --- + + +def test_resolve_transitive_type_deps_click_context() -> None: + """click.Context.__init__ references Command, which should be found.""" + from click import Command, Context + + deps = resolve_transitive_type_deps(Context) + dep_names = {cls.__name__ for cls in deps} + assert "Command" in dep_names or Command in deps + + +def test_resolve_transitive_type_deps_handles_failure_gracefully() -> None: + """Returns empty list for a class where get_type_hints fails.""" + + class BadClass: + def __init__(self, x: "NonexistentType") -> None: # type: ignore[name-defined] # noqa: F821 + pass + + result = resolve_transitive_type_deps(BadClass) + assert result == [] + + +# --- Integration tests for transitive resolution in get_external_class_inits --- + + +def test_get_external_class_inits_transitive_deps(tmp_path: Path) -> None: + """Extracts transitive type dependencies from __init__ annotations.""" + code = """from click import Context + +def my_func(ctx: Context) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = 
CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + class_names = {cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings} + assert "Context" in class_names + # Command is a transitive dep via Context.__init__ + assert "Command" in class_names + + +def test_get_external_class_inits_no_infinite_loops(tmp_path: Path) -> None: + """Handles classes with circular type references without infinite loops.""" + # click.Context references Command, and Command references Context back + # This should terminate without issues due to the processed_classes set + code = """from click import Context + +def my_func(ctx: Context) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + # Should complete without hanging; just verify we got results + assert len(result.code_strings) >= 1 + + +def test_get_external_class_inits_no_duplicate_stubs(tmp_path: Path) -> None: + """Does not emit duplicate stubs for the same class name.""" + code = """from click import Context + +def my_func(ctx: Context) -> None: + pass +""" + code_path = tmp_path / "myfunc.py" + code_path.write_text(code, encoding="utf-8") + + context = CodeStringsMarkdown(code_strings=[CodeString(code=code, file_path=code_path)]) + result = get_external_class_inits(context, tmp_path) + + class_names = [cs.code.split("\n")[0].replace("class ", "").rstrip(":") for cs in result.code_strings] + assert len(class_names) == len(set(class_names)), f"Duplicate class stubs found: {class_names}" diff --git a/tests/test_function_discovery.py b/tests/test_function_discovery.py index 79907fcf5..3232d8be2 100644 --- a/tests/test_function_discovery.py +++ b/tests/test_function_discovery.py @@ -1149,4 +1149,131 @@ def 
test_is_object_empty(): ) # Strict check: exactly 2 functions - assert count == 2, f"Expected exactly 2 functions, got {count}" \ No newline at end of file + assert count == 2, f"Expected exactly 2 functions, got {count}" + + +def test_filter_functions_python_test_prefix_convention(): + """Test that files following Python's test_*.py naming convention are filtered. + + Python's standard test file naming uses the test_ prefix (e.g., test_utils.py), + which was previously not caught by the pattern matching in overlapping mode. + """ + with tempfile.TemporaryDirectory() as temp_dir_str: + temp_dir = Path(temp_dir_str) + + # Source file that should NOT be filtered + source_file = temp_dir / "utils.py" + with source_file.open("w") as f: + f.write("def process(): return 1") + + # Python test file with test_ prefix - SHOULD be filtered + test_prefix_file = temp_dir / "test_utils.py" + with test_prefix_file.open("w") as f: + f.write("def test_process(): return 1") + + # conftest.py - SHOULD be filtered + conftest_file = temp_dir / "conftest.py" + with conftest_file.open("w") as f: + f.write(""" +import pytest + +@pytest.fixture +def sample_data(): + return [1, 2, 3] +""") + + # File in a test_ prefixed directory - should NOT be filtered by file patterns + # (directory patterns don't cover test_ prefix dirs, which is fine) + test_subdir = temp_dir / "test_integration" + test_subdir.mkdir() + file_in_test_dir = test_subdir / "helpers.py" + with file_in_test_dir.open("w") as f: + f.write("def helper(): return 1") + + # test_ prefix file inside a subdirectory - SHOULD be filtered + test_in_subdir = test_subdir / "test_helpers.py" + with test_in_subdir.open("w") as f: + f.write("def test_helper(): return 1") + + all_functions = {} + for file_path in [source_file, test_prefix_file, conftest_file, file_in_test_dir, test_in_subdir]: + discovered = find_all_functions_in_file(file_path) + all_functions.update(discovered) + + with unittest.mock.patch( + 
"codeflash.discovery.functions_to_optimize.get_blocklisted_functions", return_value={} + ): + filtered, count = filter_functions( + all_functions, + tests_root=temp_dir, # Overlapping case + ignore_paths=[], + project_root=temp_dir, + module_root=temp_dir, + ) + + # source_file and file_in_test_dir should remain + # test_prefix_file, conftest_file, and test_in_subdir should be filtered + expected_files = {source_file, file_in_test_dir} + assert set(filtered.keys()) == expected_files, ( + f"Expected {expected_files}, got {set(filtered.keys())}" + ) + assert count == 2, f"Expected exactly 2 functions, got {count}" + + +def test_pytest_fixture_not_discovered(): + """Test that @pytest.fixture decorated functions are not discovered via libcst path.""" + from codeflash.languages.python.support import PythonSupport + + with tempfile.TemporaryDirectory() as temp_dir_str: + temp_dir = Path(temp_dir_str) + + fixture_file = temp_dir / "conftest.py" + with fixture_file.open("w") as f: + f.write(""" +import pytest +from pytest import fixture + +def regular_function(): + return 42 + +@pytest.fixture +def sample_data(): + return [1, 2, 3] + +@pytest.fixture() +def sample_config(): + return {"key": "value"} + +@fixture +def direct_import_fixture(): + return "data" + +@fixture() +def direct_import_fixture_with_parens(): + return "data" + +@pytest.fixture(scope="session") +def session_fixture(): + return "session" + +class TestHelpers: + @pytest.fixture + def class_fixture(self): + return "class_data" + + def helper_method(self): + return "helper" +""") + + support = PythonSupport() + functions = support.discover_functions(fixture_file) + function_names = [fn.function_name for fn in functions] + + assert "regular_function" in function_names + assert "helper_method" in function_names + assert "sample_data" not in function_names + assert "sample_config" not in function_names + assert "direct_import_fixture" not in function_names + assert "direct_import_fixture_with_parens" not in 
function_names + assert "session_fixture" not in function_names + assert "class_fixture" not in function_names diff --git a/tests/test_javascript_function_discovery.py b/tests/test_javascript_function_discovery.py index 9a39086a8..cf76bee2d 100644 --- a/tests/test_javascript_function_discovery.py +++ b/tests/test_javascript_function_discovery.py @@ -23,7 +23,7 @@ def test_simple_function_discovery(self, tmp_path): """Test discovering a simple JavaScript function with return statement.""" js_file = tmp_path / "simple.js" js_file.write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } """) @@ -39,15 +39,15 @@ def test_multiple_functions_discovery(self, tmp_path): """Test discovering multiple JavaScript functions.""" js_file = tmp_path / "multiple.js" js_file.write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } -function divide(a, b) { +export function divide(a, b) { return a / b; } """) @@ -61,11 +61,11 @@ def test_function_without_return_excluded(self, tmp_path): """Test that functions without return statements are excluded.""" js_file = tmp_path / "no_return.js" js_file.write_text(""" -function withReturn() { +export function withReturn() { return 42; } -function withoutReturn() { +export function withoutReturn() { console.log("hello"); } """) @@ -78,11 +78,11 @@ def test_arrow_function_discovery(self, tmp_path): """Test discovering arrow functions with explicit return.""" js_file = tmp_path / "arrow.js" js_file.write_text(""" -const add = (a, b) => { +export const add = (a, b) => { return a + b; }; -const multiply = (a, b) => a * b; +export const multiply = (a, b) => a * b; """) functions = find_all_functions_in_file(js_file) @@ -95,7 +95,7 @@ def test_class_method_discovery(self, tmp_path): """Test discovering methods inside a JavaScript class.""" js_file = tmp_path / "class.js" js_file.write_text(""" -class Calculator { +export 
class Calculator { add(a, b) { return a + b; } @@ -120,11 +120,11 @@ def test_async_function_discovery(self, tmp_path): """Test discovering async JavaScript functions.""" js_file = tmp_path / "async.js" js_file.write_text(""" -async function fetchData(url) { +export async function fetchData(url) { return await fetch(url); } -function syncFunc() { +export function syncFunc() { return 42; } """) @@ -141,7 +141,7 @@ def test_nested_function_excluded(self, tmp_path): """Test that nested functions are handled correctly.""" js_file = tmp_path / "nested.js" js_file.write_text(""" -function outer() { +export function outer() { function inner() { return 1; } @@ -158,11 +158,11 @@ def test_jsx_file_discovery(self, tmp_path): """Test discovering functions in JSX files.""" jsx_file = tmp_path / "component.jsx" jsx_file.write_text(""" -function Button({ onClick }) { +export function Button({ onClick }) { return ; } -function formatText(text) { +export function formatText(text) { return text.toUpperCase(); } """) @@ -176,7 +176,7 @@ def test_invalid_javascript_returns_empty(self, tmp_path): """Test that invalid JavaScript code returns empty results.""" js_file = tmp_path / "invalid.js" js_file.write_text(""" -function broken( { +export function broken( { return 42; } """) @@ -189,11 +189,11 @@ def test_function_line_numbers(self, tmp_path): """Test that function line numbers are correctly detected.""" js_file = tmp_path / "lines.js" js_file.write_text(""" -function firstFunc() { +export function firstFunc() { return 1; } -function secondFunc() { +export function secondFunc() { return 2; } """) @@ -217,7 +217,7 @@ def test_filter_functions_includes_javascript(self, tmp_path): """Test that filter_functions correctly includes JavaScript files.""" js_file = tmp_path / "module.js" js_file.write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } """) @@ -240,7 +240,7 @@ def test_filter_excludes_test_directory(self, tmp_path): tests_dir.mkdir() test_file = 
tests_dir / "test_module.test.js" test_file.write_text(""" -function testHelper() { +export function testHelper() { return 42; } """) @@ -260,7 +260,7 @@ def test_filter_excludes_ignored_paths(self, tmp_path): ignored_dir.mkdir() js_file = ignored_dir / "ignored_module.js" js_file.write_text(""" -function ignoredFunc() { +export function ignoredFunc() { return 42; } """) @@ -282,7 +282,7 @@ def test_filter_includes_files_with_dashes(self, tmp_path): """Test that JavaScript files with dashes in name are included (unlike Python).""" js_file = tmp_path / "my-module.js" js_file.write_text(""" -function myFunc() { +export function myFunc() { return 42; } """) @@ -312,11 +312,11 @@ def test_get_functions_from_file(self, tmp_path): """Test getting functions to optimize from a JavaScript file.""" js_file = tmp_path / "string_utils.js" js_file.write_text(""" -function reverseString(str) { +export function reverseString(str) { return str.split('').reverse().join(''); } -function capitalize(str) { +export function capitalize(str) { return str.charAt(0).toUpperCase() + str.slice(1); } """) @@ -422,12 +422,12 @@ def test_discover_all_js_functions(self, tmp_path): """Test discovering all JavaScript functions in a directory.""" # Create multiple JS files (tmp_path / "math.js").write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } """) (tmp_path / "string.js").write_text(""" -function reverse(str) { +export function reverse(str) { return str.split('').reverse().join(''); } """) @@ -451,7 +451,7 @@ def py_func(): return 1 """) (tmp_path / "js_module.js").write_text(""" -function jsFunc() { +export function jsFunc() { return 1; } """) @@ -476,7 +476,7 @@ def test_qualified_name_no_parents(self, tmp_path): """Test qualified name for top-level function.""" js_file = tmp_path / "module.js" js_file.write_text(""" -function topLevel() { +export function topLevel() { return 42; } """) @@ -490,7 +490,7 @@ def test_qualified_name_with_class_parent(self, 
tmp_path): """Test qualified name for class method.""" js_file = tmp_path / "module.js" js_file.write_text(""" -class MyClass { +export class MyClass { myMethod() { return 42; } @@ -506,7 +506,7 @@ def test_language_attribute(self, tmp_path): """Test that JavaScript functions have correct language attribute.""" js_file = tmp_path / "module.js" js_file.write_text(""" -function myFunc() { +export function myFunc() { return 42; } """) diff --git a/tests/test_languages/fixtures/js_cjs/helpers/format.js b/tests/test_languages/fixtures/js_cjs/helpers/format.js index d2d50e4df..15dae5e1c 100644 --- a/tests/test_languages/fixtures/js_cjs/helpers/format.js +++ b/tests/test_languages/fixtures/js_cjs/helpers/format.js @@ -8,7 +8,7 @@ * @param decimals - Number of decimal places * @returns Formatted number */ -function formatNumber(num, decimals) { +export function formatNumber(num, decimals) { return Number(num.toFixed(decimals)); } @@ -18,7 +18,7 @@ function formatNumber(num, decimals) { * @param name - Parameter name for error message * @throws Error if value is not a valid number */ -function validateInput(value, name) { +export function validateInput(value, name) { if (typeof value !== 'number' || isNaN(value)) { throw new Error(`Invalid ${name}: must be a number`); } @@ -30,7 +30,7 @@ function validateInput(value, name) { * @param symbol - Currency symbol * @returns Formatted currency string */ -function formatCurrency(amount, symbol = '$') { +export function formatCurrency(amount, symbol = '$') { return `${symbol}${formatNumber(amount, 2)}`; } diff --git a/tests/test_languages/fixtures/js_cjs/math_utils.js b/tests/test_languages/fixtures/js_cjs/math_utils.js index 0b650ed0e..a09a4e880 100644 --- a/tests/test_languages/fixtures/js_cjs/math_utils.js +++ b/tests/test_languages/fixtures/js_cjs/math_utils.js @@ -8,7 +8,7 @@ * @param b - Second number * @returns Sum of a and b */ -function add(a, b) { +export function add(a, b) { return a + b; } @@ -18,7 +18,7 @@ function 
add(a, b) { * @param b - Second number * @returns Product of a and b */ -function multiply(a, b) { +export function multiply(a, b) { return a * b; } @@ -27,7 +27,7 @@ function multiply(a, b) { * @param n - Non-negative integer * @returns Factorial of n */ -function factorial(n) { +export function factorial(n) { // Intentionally inefficient recursive implementation if (n <= 1) return 1; return n * factorial(n - 1); @@ -39,7 +39,7 @@ function factorial(n) { * @param exp - Exponent * @returns base raised to exp */ -function power(base, exp) { +export function power(base, exp) { // Inefficient: linear time instead of log time let result = 1; for (let i = 0; i < exp; i++) { diff --git a/tests/test_languages/test_code_context_extraction.py b/tests/test_languages/test_code_context_extraction.py index 87c728b34..07946ddd3 100644 --- a/tests/test_languages/test_code_context_extraction.py +++ b/tests/test_languages/test_code_context_extraction.py @@ -56,7 +56,7 @@ class TestSimpleFunctionContext: def test_simple_function_no_dependencies(self, js_support, temp_project): """Test extracting context for a simple standalone function without any dependencies.""" code = """\ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -70,7 +70,7 @@ def test_simple_function_no_dependencies(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -84,7 +84,7 @@ def test_simple_function_no_dependencies(self, js_support, temp_project): def test_arrow_function_with_implicit_return(self, js_support, temp_project): """Test extracting an arrow function with implicit return.""" code = """\ -const multiply = (a, b) => a * b; +export const multiply = (a, b) => a * b; """ file_path = temp_project / "math.js" file_path.write_text(code, encoding="utf-8") @@ -97,7 +97,7 @@ def test_arrow_function_with_implicit_return(self, 
js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -const multiply = (a, b) => a * b; +export const multiply = (a, b) => a * b; """ assert context.target_code == expected_target_code assert context.helper_functions == [] @@ -116,7 +116,7 @@ def test_function_with_simple_jsdoc(self, js_support, temp_project): * @param {number} b - Second number * @returns {number} The sum */ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -129,13 +129,7 @@ def test_function_with_simple_jsdoc(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -/** - * Adds two numbers together. - * @param {number} a - First number - * @param {number} b - Second number - * @returns {number} The sum - */ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -163,7 +157,7 @@ def test_function_with_complex_jsdoc_types(self, js_support, temp_project): * const doubled = await processItems([1, 2, 3], x => x * 2); * // returns [2, 4, 6] */ -async function processItems(items, callback, options = {}) { +export async function processItems(items, callback, options = {}) { const { parallel = false, chunkSize = 100 } = options; if (!Array.isArray(items)) { @@ -187,25 +181,7 @@ def test_function_with_complex_jsdoc_types(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -/** - * Processes an array of items with a callback function. - * - * This function iterates over each item and applies the transformation. 
- * - * @template T - The type of items in the input array - * @template U - The type of items in the output array - * @param {Array} items - The input array to process - * @param {function(T, number): U} callback - Transformation function - * @param {Object} [options] - Optional configuration - * @param {boolean} [options.parallel=false] - Whether to process in parallel - * @param {number} [options.chunkSize=100] - Size of processing chunks - * @returns {Promise>} The transformed array - * @throws {TypeError} If items is not an array - * @example - * const doubled = await processItems([1, 2, 3], x => x * 2); - * // returns [2, 4, 6] - */ -async function processItems(items, callback, options = {}) { +export async function processItems(items, callback, options = {}) { const { parallel = false, chunkSize = 100 } = options; if (!Array.isArray(items)) { @@ -231,7 +207,7 @@ def test_class_with_jsdoc_on_class_and_methods(self, js_support, temp_project): * @class CacheManager * @description Provides in-memory caching with automatic expiration. */ -class CacheManager { +export class CacheManager { /** * Creates a new cache manager. * @param {number} defaultTTL - Default time-to-live in milliseconds @@ -275,12 +251,6 @@ class CacheManager { context = js_support.extract_code_context(get_or_compute, temp_project, temp_project) expected_target_code = """\ -/** - * A cache implementation with TTL support. - * - * @class CacheManager - * @description Provides in-memory caching with automatic expiration. - */ class CacheManager { /** * Creates a new cache manager. 
@@ -344,7 +314,7 @@ def test_jsdoc_with_typedef_and_callback(self, js_support, temp_project): * @param {ValidatorFunction[]} validators - Array of validator functions * @returns {ValidationResult} Combined validation result */ -function validateUserData(data, validators) { +export function validateUserData(data, validators) { const errors = []; const fieldErrors = {}; @@ -377,13 +347,7 @@ def test_jsdoc_with_typedef_and_callback(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -/** - * Validates user input data. - * @param {Object} data - The data to validate - * @param {ValidatorFunction[]} validators - Array of validator functions - * @returns {ValidationResult} Combined validation result - */ -function validateUserData(data, validators) { +export function validateUserData(data, validators) { const errors = []; const fieldErrors = {}; @@ -433,7 +397,7 @@ def test_function_with_multiple_complex_constants(self, js_support, temp_project }; const UNUSED_CONFIG = { debug: false }; -async function fetchWithRetry(endpoint, options = {}) { +export async function fetchWithRetry(endpoint, options = {}) { const url = API_BASE_URL + endpoint; let lastError; @@ -473,7 +437,7 @@ def test_function_with_multiple_complex_constants(self, js_support, temp_project context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -async function fetchWithRetry(endpoint, options = {}) { +export async function fetchWithRetry(endpoint, options = {}) { const url = API_BASE_URL + endpoint; let lastError; @@ -537,7 +501,7 @@ def test_function_with_regex_and_template_constants(self, js_support, temp_proje url: 'Please enter a valid URL' }; -function validateField(value, fieldType) { +export function validateField(value, fieldType) { const pattern = PATTERNS[fieldType]; if (!pattern) { return { valid: true, error: null }; @@ -559,7 +523,7 @@ def 
test_function_with_regex_and_template_constants(self, js_support, temp_proje context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function validateField(value, fieldType) { +export function validateField(value, fieldType) { const pattern = PATTERNS[fieldType]; if (!pattern) { return { valid: true, error: null }; @@ -595,16 +559,16 @@ class TestSameFileHelperFunctions: def test_function_with_chain_of_helpers(self, js_support, temp_project): """Test function calling helper that calls another helper (transitive dependencies).""" code = """\ -function sanitizeString(str) { +export function sanitizeString(str) { return str.trim().toLowerCase(); } -function normalizeInput(input) { +export function normalizeInput(input) { const sanitized = sanitizeString(input); return sanitized.replace(/\\s+/g, '-'); } -function processUserInput(rawInput) { +export function processUserInput(rawInput) { const normalized = normalizeInput(rawInput); return { original: rawInput, @@ -622,7 +586,7 @@ def test_function_with_chain_of_helpers(self, js_support, temp_project): context = js_support.extract_code_context(process_func, temp_project, temp_project) expected_target_code = """\ -function processUserInput(rawInput) { +export function processUserInput(rawInput) { const normalized = normalizeInput(rawInput); return { original: rawInput, @@ -640,23 +604,23 @@ def test_function_with_chain_of_helpers(self, js_support, temp_project): def test_function_with_multiple_unrelated_helpers(self, js_support, temp_project): """Test function calling multiple independent helper functions.""" code = """\ -function formatDate(date) { +export function formatDate(date) { return date.toISOString().split('T')[0]; } -function formatCurrency(amount) { +export function formatCurrency(amount) { return '$' + amount.toFixed(2); } -function formatPercentage(value) { +export function formatPercentage(value) { return (value * 100).toFixed(1) + '%'; } -function 
unusedFormatter() { +export function unusedFormatter() { return 'not used'; } -function generateReport(data) { +export function generateReport(data) { const date = formatDate(new Date(data.timestamp)); const revenue = formatCurrency(data.revenue); const growth = formatPercentage(data.growth); @@ -677,7 +641,7 @@ def test_function_with_multiple_unrelated_helpers(self, js_support, temp_project context = js_support.extract_code_context(report_func, temp_project, temp_project) expected_target_code = """\ -function generateReport(data) { +export function generateReport(data) { const date = formatDate(new Date(data.timestamp)); const revenue = formatCurrency(data.revenue); const growth = formatPercentage(data.growth); @@ -699,21 +663,21 @@ def test_function_with_multiple_unrelated_helpers(self, js_support, temp_project for helper in context.helper_functions: if helper.name == "formatDate": expected = """\ -function formatDate(date) { +export function formatDate(date) { return date.toISOString().split('T')[0]; } """ assert helper.source_code == expected elif helper.name == "formatCurrency": expected = """\ -function formatCurrency(amount) { +export function formatCurrency(amount) { return '$' + amount.toFixed(2); } """ assert helper.source_code == expected elif helper.name == "formatPercentage": expected = """\ -function formatPercentage(value) { +export function formatPercentage(value) { return (value * 100).toFixed(1) + '%'; } """ @@ -726,7 +690,7 @@ class TestClassMethodWithSiblingMethods: def test_graph_topological_sort(self, js_support, temp_project): """Test graph class with topological sort - similar to Python test_class_method_dependencies.""" code = """\ -class Graph { +export class Graph { constructor(vertices) { this.graph = new Map(); this.V = vertices; @@ -774,7 +738,7 @@ class Graph { context = js_support.extract_code_context(topo_sort, temp_project, temp_project) - # The extracted code should include class wrapper with constructor + # The extracted code 
should include class wrapper with constructor and sibling methods used expected_target_code = """\ class Graph { constructor(vertices) { @@ -794,6 +758,19 @@ class Graph { return stack; } + + topologicalSortUtil(v, visited, stack) { + visited[v] = true; + + const neighbors = this.graph.get(v) || []; + for (const i of neighbors) { + if (visited[i] === false) { + this.topologicalSortUtil(i, visited, stack); + } + } + + stack.unshift(v); + } } """ assert context.target_code == expected_target_code @@ -802,7 +779,7 @@ class Graph { def test_class_method_using_nested_helper_class(self, js_support, temp_project): """Test class method that uses another class as a helper - mirrors Python HelperClass test.""" code = """\ -class HelperClass { +export class HelperClass { constructor(name) { this.name = name; } @@ -816,7 +793,7 @@ class HelperClass { } } -class NestedHelper { +export class NestedHelper { constructor(name) { this.name = name; } @@ -826,11 +803,11 @@ class NestedHelper { } } -function mainMethod() { +export function mainMethod() { return 'hello'; } -class MainClass { +export class MainClass { constructor(name) { this.name = name; } @@ -890,7 +867,7 @@ def test_helper_from_another_file_commonjs(self, js_support, temp_project): main_code = """\ const { sorter } = require('./bubble_sort_with_math'); -function sortFromAnotherFile(arr) { +export function sortFromAnotherFile(arr) { const sortedArr = sorter(arr); return sortedArr; } @@ -906,7 +883,7 @@ def test_helper_from_another_file_commonjs(self, js_support, temp_project): context = js_support.extract_code_context(main_func, temp_project, temp_project) expected_target_code = """\ -function sortFromAnotherFile(arr) { +export function sortFromAnotherFile(arr) { const sortedArr = sorter(arr); return sortedArr; } @@ -943,12 +920,10 @@ def test_helper_from_another_file_esm(self, js_support, temp_project): main_code = """\ import identity, { double, triple } from './utils'; -function processNumber(n) { +export function 
processNumber(n) { const base = identity(n); return double(base) + triple(base); } - -export { processNumber }; """ main_path = temp_project / "main.js" main_path.write_text(main_code, encoding="utf-8") @@ -959,7 +934,7 @@ def test_helper_from_another_file_esm(self, js_support, temp_project): context = js_support.extract_code_context(process_func, temp_project, temp_project) expected_target_code = """\ -function processNumber(n) { +export function processNumber(n) { const base = identity(n); return double(base) + triple(base); } @@ -1007,7 +982,7 @@ def test_chained_imports_across_three_files(self, js_support, temp_project): main_code = """\ import { transformInput } from './middleware'; -function handleUserInput(rawInput) { +export function handleUserInput(rawInput) { try { const result = transformInput(rawInput); return { success: true, data: result }; @@ -1015,8 +990,6 @@ def test_chained_imports_across_three_files(self, js_support, temp_project): return { success: false, error: error.message }; } } - -export { handleUserInput }; """ main_path = temp_project / "main.js" main_path.write_text(main_code, encoding="utf-8") @@ -1027,7 +1000,7 @@ def test_chained_imports_across_three_files(self, js_support, temp_project): context = js_support.extract_code_context(handle_func, temp_project, temp_project) expected_target_code = """\ -function handleUserInput(rawInput) { +export function handleUserInput(rawInput) { try { const result = transformInput(rawInput); return { success: true, data: result }; @@ -1059,7 +1032,7 @@ def test_function_with_complex_generic_types(self, ts_support, temp_project): type Entity = T & Identifiable & Timestamped; -function createEntity(data: T): Entity { +export function createEntity(data: T): Entity { const now = new Date(); return { ...data, @@ -1078,7 +1051,7 @@ def test_function_with_complex_generic_types(self, ts_support, temp_project): context = ts_support.extract_code_context(func, temp_project, temp_project) expected_target_code = 
"""\ -function createEntity(data: T): Entity { +export function createEntity(data: T): Entity { const now = new Date(); return { ...data, @@ -1117,7 +1090,7 @@ def test_class_with_private_fields_and_typed_methods(self, ts_support, temp_proj maxSize: number; } -class TypedCache { +export class TypedCache { private readonly cache: Map>; private readonly config: CacheConfig; @@ -1235,15 +1208,13 @@ def test_typescript_with_type_imports(self, ts_support, temp_project): const DEFAULT_ROLE: UserRole = 'user'; -function createUser(input: CreateUserInput, role: UserRole = DEFAULT_ROLE): User { +export function createUser(input: CreateUserInput, role: UserRole = DEFAULT_ROLE): User { return { id: Math.random().toString(36).substring(2), name: input.name, email: input.email }; } - -export { createUser }; """ service_path = temp_project / "service.ts" service_path.write_text(service_code, encoding="utf-8") @@ -1254,7 +1225,7 @@ def test_typescript_with_type_imports(self, ts_support, temp_project): context = ts_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function createUser(input: CreateUserInput, role: UserRole = DEFAULT_ROLE): User { +export function createUser(input: CreateUserInput, role: UserRole = DEFAULT_ROLE): User { return { id: Math.random().toString(36).substring(2), name: input.name, @@ -1294,7 +1265,7 @@ class TestRecursionAndCircularDependencies: def test_self_recursive_factorial(self, js_support, temp_project): """Test self-recursive function does not list itself as helper.""" code = """\ -function factorial(n) { +export function factorial(n) { if (n <= 1) return 1; return n * factorial(n - 1); } @@ -1308,7 +1279,7 @@ def test_self_recursive_factorial(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function factorial(n) { +export function factorial(n) { if (n <= 1) return 1; return n * factorial(n - 1); } @@ -1319,12 +1290,12 
@@ def test_self_recursive_factorial(self, js_support, temp_project): def test_mutually_recursive_even_odd(self, js_support, temp_project): """Test mutually recursive functions.""" code = """\ -function isEven(n) { +export function isEven(n) { if (n === 0) return true; return isOdd(n - 1); } -function isOdd(n) { +export function isOdd(n) { if (n === 0) return false; return isEven(n - 1); } @@ -1338,7 +1309,7 @@ def test_mutually_recursive_even_odd(self, js_support, temp_project): context = js_support.extract_code_context(is_even, temp_project, temp_project) expected_target_code = """\ -function isEven(n) { +export function isEven(n) { if (n === 0) return true; return isOdd(n - 1); } @@ -1351,7 +1322,7 @@ def test_mutually_recursive_even_odd(self, js_support, temp_project): # Verify helper source assert context.helper_functions[0].source_code == """\ -function isOdd(n) { +export function isOdd(n) { if (n === 0) return false; return isEven(n - 1); } @@ -1360,28 +1331,28 @@ def test_mutually_recursive_even_odd(self, js_support, temp_project): def test_complex_recursive_tree_traversal(self, js_support, temp_project): """Test complex recursive tree traversal with multiple recursive calls.""" code = """\ -function traversePreOrder(node, visit) { +export function traversePreOrder(node, visit) { if (!node) return; visit(node.value); traversePreOrder(node.left, visit); traversePreOrder(node.right, visit); } -function traverseInOrder(node, visit) { +export function traverseInOrder(node, visit) { if (!node) return; traverseInOrder(node.left, visit); visit(node.value); traverseInOrder(node.right, visit); } -function traversePostOrder(node, visit) { +export function traversePostOrder(node, visit) { if (!node) return; traversePostOrder(node.left, visit); traversePostOrder(node.right, visit); visit(node.value); } -function collectAllValues(root) { +export function collectAllValues(root) { const values = { pre: [], in: [], post: [] }; traversePreOrder(root, v => 
values.pre.push(v)); @@ -1400,7 +1371,7 @@ def test_complex_recursive_tree_traversal(self, js_support, temp_project): context = js_support.extract_code_context(collect_func, temp_project, temp_project) expected_target_code = """\ -function collectAllValues(root) { +export function collectAllValues(root) { const values = { pre: [], in: [], post: [] }; traversePreOrder(root, v => values.pre.push(v)); @@ -1423,7 +1394,7 @@ class TestAsyncPatternsAndPromises: def test_async_function_chain(self, js_support, temp_project): """Test async function that calls other async functions.""" code = """\ -async function fetchUserById(id) { +export async function fetchUserById(id) { const response = await fetch(`/api/users/${id}`); if (!response.ok) { throw new Error(`User ${id} not found`); @@ -1431,17 +1402,17 @@ def test_async_function_chain(self, js_support, temp_project): return response.json(); } -async function fetchUserPosts(userId) { +export async function fetchUserPosts(userId) { const response = await fetch(`/api/users/${userId}/posts`); return response.json(); } -async function fetchUserComments(userId) { +export async function fetchUserComments(userId) { const response = await fetch(`/api/users/${userId}/comments`); return response.json(); } -async function fetchUserProfile(userId) { +export async function fetchUserProfile(userId) { const user = await fetchUserById(userId); const [posts, comments] = await Promise.all([ fetchUserPosts(userId), @@ -1465,7 +1436,7 @@ def test_async_function_chain(self, js_support, temp_project): context = js_support.extract_code_context(profile_func, temp_project, temp_project) expected_target_code = """\ -async function fetchUserProfile(userId) { +export async function fetchUserProfile(userId) { const user = await fetchUserById(userId); const [posts, comments] = await Promise.all([ fetchUserPosts(userId), @@ -1493,7 +1464,7 @@ class TestExtractionReplacementRoundTrip: def test_extract_and_replace_class_method(self, js_support, 
temp_project): """Test extracting code context and then replacing the method.""" original_source = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -1536,7 +1507,7 @@ class Counter { # Step 2: Simulate AI returning optimized code optimized_code_from_ai = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -1551,7 +1522,7 @@ class Counter { result = js_support.replace_function(original_source, increment_func, optimized_code_from_ai) expected_result = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -1578,7 +1549,7 @@ class TestEdgeCases: def test_function_with_complex_destructuring(self, js_support, temp_project): """Test function with complex nested destructuring parameters.""" code = """\ -function processApiResponse({ +export function processApiResponse({ data: { users = [], meta: { total, page } = {} } = {}, status, headers: { 'content-type': contentType } = {} @@ -1600,7 +1571,7 @@ def test_function_with_complex_destructuring(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function processApiResponse({ +export function processApiResponse({ data: { users = [], meta: { total, page } = {} } = {}, status, headers: { 'content-type': contentType } = {} @@ -1619,13 +1590,13 @@ def test_function_with_complex_destructuring(self, js_support, temp_project): def test_generator_function(self, js_support, temp_project): """Test generator function extraction.""" code = """\ -function* range(start, end, step = 1) { +export function* range(start, end, step = 1) { for (let i = start; i < end; i += step) { yield i; } } -function* fibonacci(limit) { +export function* fibonacci(limit) { let [a, b] = [0, 1]; while (a < limit) { yield a; @@ -1642,7 +1613,7 @@ def test_generator_function(self, js_support, temp_project): context = 
js_support.extract_code_context(range_func, temp_project, temp_project) expected_target_code = """\ -function* range(start, end, step = 1) { +export function* range(start, end, step = 1) { for (let i = start; i < end; i += step) { yield i; } @@ -1660,7 +1631,7 @@ def test_function_with_computed_property_names(self, js_support, temp_project): AGE: 'user_age' }; -function createUserObject(name, email, age) { +export function createUserObject(name, email, age) { return { [FIELD_KEYS.NAME]: name, [FIELD_KEYS.EMAIL]: email, @@ -1677,7 +1648,7 @@ def test_function_with_computed_property_names(self, js_support, temp_project): context = js_support.extract_code_context(func, temp_project, temp_project) expected_target_code = """\ -function createUserObject(name, email, age) { +export function createUserObject(name, email, age) { return { [FIELD_KEYS.NAME]: name, [FIELD_KEYS.EMAIL]: email, @@ -1937,7 +1908,7 @@ class TestContextProperties: def test_javascript_context_has_correct_language(self, js_support, temp_project): """Test that JavaScript context has correct language property.""" code = """\ -function test() { +export function test() { return 1; } """ @@ -1956,7 +1927,7 @@ def test_javascript_context_has_correct_language(self, js_support, temp_project) def test_typescript_context_has_javascript_language(self, ts_support, temp_project): """Test that TypeScript context uses JavaScript language enum.""" code = """\ -function test(): number { +export function test(): number { return 1; } """ @@ -1977,7 +1948,7 @@ class TestContextValidation: def test_all_class_methods_produce_valid_syntax(self, js_support, temp_project): """Test that all extracted class methods are syntactically valid JavaScript.""" code = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } diff --git a/tests/test_languages/test_function_discovery_integration.py b/tests/test_languages/test_function_discovery_integration.py index 621a00d79..c91f91fe5 
100644 --- a/tests/test_languages/test_function_discovery_integration.py +++ b/tests/test_languages/test_function_discovery_integration.py @@ -89,11 +89,11 @@ def test_javascript_file_routes_to_js_handler(self): """Test that JavaScript files use the JavaScript handler.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function add(a, b) { +export function add(a, b) { return a + b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } """) @@ -124,7 +124,7 @@ def test_function_to_optimize_has_correct_fields(self): """Test that FunctionToOptimize has all required fields populated.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -162,7 +162,7 @@ def add(a, b): def test_discovers_javascript_files_when_specified(self, tmp_path): """Test that JavaScript files are discovered when language is specified.""" (tmp_path / "module.js").write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } """) @@ -177,7 +177,7 @@ def py_func(): return 1 """) (tmp_path / "js_module.js").write_text(""" -function jsFunc() { +export function jsFunc() { return 1; } """) diff --git a/tests/test_languages/test_javascript_e2e.py b/tests/test_languages/test_javascript_e2e.py index 2fe25c18a..017e8f66e 100644 --- a/tests/test_languages/test_javascript_e2e.py +++ b/tests/test_languages/test_javascript_e2e.py @@ -155,16 +155,16 @@ def test_replace_function_in_javascript_file(self): from codeflash.languages.base import FunctionInfo original_source = """ -function add(a, b) { +export function add(a, b) { return a + b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } """ - new_function = """function add(a, b) { + new_function = """export function add(a, b) { // Optimized version return a + b; }""" @@ -178,12 +178,12 @@ def 
test_replace_function_in_javascript_file(self): result = js_support.replace_function(original_source, func_info, new_function) expected_result = """ -function add(a, b) { +export function add(a, b) { // Optimized version return a + b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } """ @@ -234,7 +234,7 @@ def test_function_to_optimize_has_correct_fields(self): with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -244,7 +244,7 @@ class Calculator { } } -function standalone(x) { +export function standalone(x) { return x * 2; } """) diff --git a/tests/test_languages/test_javascript_instrumentation.py b/tests/test_languages/test_javascript_instrumentation.py index ba25a3af5..e3457c231 100644 --- a/tests/test_languages/test_javascript_instrumentation.py +++ b/tests/test_languages/test_javascript_instrumentation.py @@ -663,4 +663,314 @@ def test_this_method_call_exact_output(self): expected = " return codeflash.capture('Class.fibonacci', '1', this.fibonacci.bind(this), n - 1);" assert transformed == expected, f"Expected:\n{expected}\nGot:\n{transformed}" - assert counter == 1 \ No newline at end of file + assert counter == 1 + + +class TestFixImportsInsideTestBlocks: + """Tests for fix_imports_inside_test_blocks function.""" + + def test_fix_named_import_inside_test_block(self): + """Test fixing named import inside test function.""" + from codeflash.languages.javascript.instrument import fix_imports_inside_test_blocks + + code = """ +test('should work', () => { + const mock = jest.fn(); + import { foo } from '../src/module'; + expect(foo()).toBe(true); +}); +""" + fixed = fix_imports_inside_test_blocks(code) + + assert "const { foo } = require('../src/module');" in fixed + assert "import { foo }" not in fixed + + def test_fix_default_import_inside_test_block(self): + """Test fixing default import inside test function.""" + from 
codeflash.languages.javascript.instrument import fix_imports_inside_test_blocks + + code = """ +test('should work', () => { + env.isTest.mockReturnValue(false); + import queuesModule from '../src/queue/queue'; + expect(queuesModule).toBeDefined(); +}); +""" + fixed = fix_imports_inside_test_blocks(code) + + assert "const queuesModule = require('../src/queue/queue');" in fixed + assert "import queuesModule from" not in fixed + + def test_fix_namespace_import_inside_test_block(self): + """Test fixing namespace import inside test function.""" + from codeflash.languages.javascript.instrument import fix_imports_inside_test_blocks + + code = """ +test('should work', () => { + import * as utils from '../src/utils'; + expect(utils.foo()).toBe(true); +}); +""" + fixed = fix_imports_inside_test_blocks(code) + + assert "const utils = require('../src/utils');" in fixed + assert "import * as utils" not in fixed + + def test_preserve_top_level_imports(self): + """Test that top-level imports are not modified.""" + from codeflash.languages.javascript.instrument import fix_imports_inside_test_blocks + + code = """ +import { jest, describe, test, expect } from '@jest/globals'; +import { foo } from '../src/module'; + +describe('test suite', () => { + test('should work', () => { + expect(foo()).toBe(true); + }); +}); +""" + fixed = fix_imports_inside_test_blocks(code) + + # Top-level imports should remain unchanged + assert "import { jest, describe, test, expect } from '@jest/globals';" in fixed + assert "import { foo } from '../src/module';" in fixed + + def test_empty_code(self): + """Test handling empty code.""" + from codeflash.languages.javascript.instrument import fix_imports_inside_test_blocks + + assert fix_imports_inside_test_blocks("") == "" + assert fix_imports_inside_test_blocks(" ") == " " + + +class TestFixJestMockPaths: + """Tests for fix_jest_mock_paths function.""" + + def test_fix_mock_path_when_source_relative(self): + """Test fixing mock path that's relative to 
source file.""" + from codeflash.languages.javascript.instrument import fix_jest_mock_paths + + with tempfile.TemporaryDirectory() as tmpdir: + # Create directory structure + src_dir = Path(tmpdir) / "src" / "queue" + tests_dir = Path(tmpdir) / "tests" + env_file = Path(tmpdir) / "src" / "environment.ts" + + src_dir.mkdir(parents=True) + tests_dir.mkdir(parents=True) + env_file.parent.mkdir(parents=True, exist_ok=True) + env_file.write_text("export const env = {};") + + source_file = src_dir / "queue.ts" + source_file.write_text("import env from '../environment';") + + test_file = tests_dir / "test_queue.test.ts" + + # Test code with incorrect mock path (relative to source, not test) + test_code = """ +import { jest, describe, test, expect } from '@jest/globals'; +jest.mock('../environment'); +jest.mock('../redis/utils'); + +describe('queue', () => { + test('works', () => {}); +}); +""" + fixed = fix_jest_mock_paths(test_code, test_file, source_file, tests_dir) + + # Should fix the path to be relative to the test file + assert "jest.mock('../src/environment')" in fixed + + def test_preserve_valid_mock_path(self): + """Test that valid mock paths are not modified.""" + from codeflash.languages.javascript.instrument import fix_jest_mock_paths + + with tempfile.TemporaryDirectory() as tmpdir: + # Create directory structure + src_dir = Path(tmpdir) / "src" + tests_dir = Path(tmpdir) / "tests" + + src_dir.mkdir(parents=True) + tests_dir.mkdir(parents=True) + + # Create the file being mocked at the correct location + mock_file = src_dir / "utils.ts" + mock_file.write_text("export const utils = {};") + + source_file = src_dir / "main.ts" + source_file.write_text("") + test_file = tests_dir / "test_main.test.ts" + + # Test code with correct mock path (valid from test location) + test_code = """ +jest.mock('../src/utils'); + +describe('main', () => { + test('works', () => {}); +}); +""" + fixed = fix_jest_mock_paths(test_code, test_file, source_file, tests_dir) + + # Should 
keep the path unchanged since it's valid + assert "jest.mock('../src/utils')" in fixed + + def test_fix_doMock_path(self): + """Test fixing jest.doMock path.""" + from codeflash.languages.javascript.instrument import fix_jest_mock_paths + + with tempfile.TemporaryDirectory() as tmpdir: + # Create directory structure: src/queue/queue.ts imports ../environment (-> src/environment.ts) + src_dir = Path(tmpdir) / "src" + queue_dir = src_dir / "queue" + tests_dir = Path(tmpdir) / "tests" + env_file = src_dir / "environment.ts" + + queue_dir.mkdir(parents=True) + tests_dir.mkdir(parents=True) + env_file.write_text("export const env = {};") + + source_file = queue_dir / "queue.ts" + source_file.write_text("") + test_file = tests_dir / "test_queue.test.ts" + + # From src/queue/queue.ts, ../environment resolves to src/environment.ts + # Test file is at tests/test_queue.test.ts + # So the correct mock path from test should be ../src/environment + test_code = """ +jest.doMock('../environment', () => ({ isTest: jest.fn() })); +""" + fixed = fix_jest_mock_paths(test_code, test_file, source_file, tests_dir) + + # Should fix the doMock path + assert "jest.doMock('../src/environment'" in fixed + + def test_empty_code(self): + """Test handling empty code.""" + from codeflash.languages.javascript.instrument import fix_jest_mock_paths + + with tempfile.TemporaryDirectory() as tmpdir: + tests_dir = Path(tmpdir) / "tests" + tests_dir.mkdir() + source_file = Path(tmpdir) / "src" / "main.ts" + test_file = tests_dir / "test.ts" + + assert fix_jest_mock_paths("", test_file, source_file, tests_dir) == "" + assert fix_jest_mock_paths(" ", test_file, source_file, tests_dir) == " " + + +class TestFunctionCallsInStrings: + """Tests for skipping function calls inside string literals.""" + + def test_skip_function_in_test_description_single_quotes(self): + """Test that function calls in single-quoted test descriptions are not transformed.""" + from codeflash.languages.javascript.instrument import 
transform_standalone_calls + + func = make_func("fibonacci") + code = """ +test('should compute fibonacci(20) and fibonacci(30) to known values', () => { + const result = fibonacci(10); + expect(result).toBe(55); +}); +""" + transformed, _counter = transform_standalone_calls(code, func, "capture") + + # The function call in the test description should NOT be transformed + assert "fibonacci(20)" in transformed + assert "fibonacci(30)" in transformed + # The actual call should be transformed + assert "codeflash.capture('fibonacci'" in transformed + + def test_skip_function_in_test_description_double_quotes(self): + """Test that function calls in double-quoted test descriptions are not transformed.""" + from codeflash.languages.javascript.instrument import transform_standalone_calls + + func = make_func("fibonacci") + code = ''' +test("should compute fibonacci(20) correctly", () => { + const result = fibonacci(10); +}); +''' + transformed, _counter = transform_standalone_calls(code, func, "capture") + + # The function call in the test description should NOT be transformed + assert 'fibonacci(20)' in transformed + # The actual call should be transformed + assert "codeflash.capture('fibonacci'" in transformed + + def test_skip_function_in_template_literal(self): + """Test that function calls in template literals are not transformed.""" + from codeflash.languages.javascript.instrument import transform_standalone_calls + + func = make_func("fibonacci") + code = """ +test(`should compute fibonacci(20) correctly`, () => { + const result = fibonacci(10); +}); +""" + transformed, _counter = transform_standalone_calls(code, func, "capture") + + # The function call in the template literal should NOT be transformed + assert "fibonacci(20)" in transformed + # The actual call should be transformed + assert "codeflash.capture('fibonacci'" in transformed + + def test_skip_expect_in_string_literal(self): + """Test that expect(func()) in string literals is not transformed.""" + from 
codeflash.languages.javascript.instrument import transform_expect_calls + + func = make_func("fibonacci") + code = """ +describe('testing expect(fibonacci(n)) patterns', () => { + test('works', () => { + expect(fibonacci(10)).toBe(55); + }); +}); +""" + transformed, _counter = transform_expect_calls(code, func, "capture") + + # The expect in the describe string should NOT be transformed + assert "expect(fibonacci(n))" in transformed + # The actual expect call should be transformed + assert "codeflash.capture('fibonacci'" in transformed + + def test_handle_escaped_quotes_in_string(self): + """Test that escaped quotes in strings are handled correctly.""" + from codeflash.languages.javascript.instrument import transform_standalone_calls + + func = make_func("fibonacci") + code = """ +test('test \\'fibonacci(5)\\' escaping', () => { + const result = fibonacci(10); +}); +""" + transformed, _counter = transform_standalone_calls(code, func, "capture") + + # The function call in the escaped string should NOT be transformed + assert "fibonacci(5)" in transformed + # The actual call should be transformed + assert "codeflash.capture('fibonacci'" in transformed + + def test_is_inside_string_helper(self): + """Test the is_inside_string helper function directly.""" + from codeflash.languages.javascript.instrument import is_inside_string + + # Position inside single-quoted string + code1 = "test('fibonacci(5)', () => {})" + assert is_inside_string(code1, 10) is True # Inside the string + + # Position outside string + assert is_inside_string(code1, 0) is False # Before string + assert is_inside_string(code1, 25) is False # After string + + # Double quotes + code2 = 'test("fibonacci(5)", () => {})' + assert is_inside_string(code2, 10) is True + + # Template literal + code3 = "test(`fibonacci(5)`, () => {})" + assert is_inside_string(code3, 10) is True + + # Escaped quote doesn't end string + code4 = "test('fib\\'s result', () => {})" + assert is_inside_string(code4, 15) is True # 
Still inside after escaped quote \ No newline at end of file diff --git a/tests/test_languages/test_javascript_optimization_flow.py b/tests/test_languages/test_javascript_optimization_flow.py index 7c7ba5aa6..26d2db140 100644 --- a/tests/test_languages/test_javascript_optimization_flow.py +++ b/tests/test_languages/test_javascript_optimization_flow.py @@ -60,6 +60,7 @@ def test_function_to_optimize_has_correct_language_for_javascript(self, tmp_path function add(a, b) { return a + b; } +module.exports = { add }; """) functions = find_all_functions_in_file(js_file) diff --git a/tests/test_languages/test_javascript_support.py b/tests/test_languages/test_javascript_support.py index 7a6868a66..8a7f9afe1 100644 --- a/tests/test_languages/test_javascript_support.py +++ b/tests/test_languages/test_javascript_support.py @@ -46,7 +46,7 @@ def test_discover_simple_function(self, js_support): """Test discovering a simple function declaration.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function add(a, b) { +export function add(a, b) { return a + b; } """) @@ -62,15 +62,15 @@ def test_discover_multiple_functions(self, js_support): """Test discovering multiple functions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function add(a, b) { +export function add(a, b) { return a + b; } -function subtract(a, b) { +export function subtract(a, b) { return a - b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } """) @@ -86,11 +86,11 @@ def test_discover_arrow_function(self, js_support): """Test discovering arrow functions assigned to variables.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -const add = (a, b) => { +export const add = (a, b) => { return a + b; }; -const multiply = (x, y) => x * y; +export const multiply = (x, y) => x * y; """) f.flush() @@ -104,11 +104,11 @@ def 
test_discover_function_without_return_excluded(self, js_support): """Test that functions without return are excluded by default.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function withReturn() { +export function withReturn() { return 1; } -function withoutReturn() { +export function withoutReturn() { console.log("hello"); } """) @@ -124,7 +124,7 @@ def test_discover_class_methods(self, js_support): """Test discovering class methods.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -147,11 +147,11 @@ def test_discover_async_functions(self, js_support): """Test discovering async functions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -async function fetchData(url) { +export async function fetchData(url) { return await fetch(url); } -function syncFunction() { +export function syncFunction() { return 1; } """) @@ -171,11 +171,11 @@ def test_discover_with_filter_exclude_async(self, js_support): """Test filtering out async functions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -async function asyncFunc() { +export async function asyncFunc() { return 1; } -function syncFunc() { +export function syncFunc() { return 2; } """) @@ -191,11 +191,11 @@ def test_discover_with_filter_exclude_methods(self, js_support): """Test filtering out class methods.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function standalone() { +export function standalone() { return 1; } -class MyClass { +export class MyClass { method() { return 2; } @@ -212,11 +212,11 @@ class MyClass { def test_discover_line_numbers(self, js_support): """Test that line numbers are correctly captured.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""function func1() 
{ + f.write("""export function func1() { return 1; } -function func2() { +export function func2() { const x = 1; const y = 2; return x + y; @@ -238,7 +238,7 @@ def test_discover_generator_function(self, js_support): """Test discovering generator functions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -function* numberGenerator() { +export function* numberGenerator() { yield 1; yield 2; return 3; @@ -271,7 +271,7 @@ def test_discover_function_expression(self, js_support): """Test discovering function expressions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -const add = function(a, b) { +export const add = function(a, b) { return a + b; }; """) @@ -290,7 +290,7 @@ def test_discover_immediately_invoked_function_excluded(self, js_support): return 1; })(); -function named() { +export function named() { return 2; } """) @@ -476,7 +476,7 @@ class TestExtractCodeContext: def test_extract_simple_function(self, js_support): """Test extracting context for a simple function.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""function add(a, b) { + f.write("""export function add(a, b) { return a + b; } """) @@ -495,11 +495,11 @@ def test_extract_simple_function(self, js_support): def test_extract_with_helper(self, js_support): """Test extracting context with helper functions.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""function helper(x) { + f.write("""export function helper(x) { return x * 2; } -function main(a) { +export function main(a) { return helper(a) + 1; } """) @@ -523,7 +523,7 @@ class TestIntegration: def test_discover_and_replace_workflow(self, js_support): """Test full discover -> replace workflow.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - original_code = """function fibonacci(n) { + original_code = """export function fibonacci(n) { if (n <= 
1) { return n; } @@ -541,7 +541,7 @@ def test_discover_and_replace_workflow(self, js_support): assert func.function_name == "fibonacci" # Replace - optimized_code = """function fibonacci(n) { + optimized_code = """export function fibonacci(n) { // Memoized version const memo = {0: 0, 1: 1}; for (let i = 2; i <= n; i++) { @@ -561,7 +561,7 @@ def test_multiple_classes_and_functions(self, js_support): """Test discovering and working with complex file.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -571,13 +571,13 @@ class Calculator { } } -class StringUtils { +export class StringUtils { reverse(s) { return s.split('').reverse().join(''); } } -function standalone() { +export function standalone() { return 42; } """) @@ -605,11 +605,11 @@ def test_jsx_file(self, js_support): f.write(""" import React from 'react'; -function Button({ onClick, children }) { +export function Button({ onClick, children }) { return ; } -const Card = ({ title, content }) => { +export const Card = ({ title, content }) => { return (

{title}

@@ -673,7 +673,7 @@ class TestClassMethodExtraction: def test_extract_class_method_wraps_in_class(self, js_support): """Test that extracting a class method wraps it in a class definition.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Calculator { + f.write("""export class Calculator { add(a, b) { return a + b; } @@ -694,6 +694,7 @@ def test_extract_class_method_wraps_in_class(self, js_support): context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) # Full string equality check for exact extraction output + # Note: export keyword is not included in extracted class wrapper expected_code = """class Calculator { add(a, b) { return a + b; @@ -709,7 +710,7 @@ def test_extract_class_method_with_jsdoc(self, js_support): f.write("""/** * A simple calculator class. */ -class Calculator { +export class Calculator { /** * Adds two numbers. * @param {number} a - First number @@ -730,10 +731,9 @@ class Calculator { context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) # Full string equality check - includes class JSDoc, class definition, method JSDoc, and method - expected_code = """/** - * A simple calculator class. - */ -class Calculator { + # Note: export keyword is not included in extracted class wrapper + # Note: Class-level JSDoc is not included when extracting a method + expected_code = """class Calculator { /** * Adds two numbers. 
* @param {number} a - First number @@ -751,7 +751,7 @@ class Calculator { def test_extract_class_method_syntax_valid(self, js_support): """Test that extracted class method code is always syntactically valid.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class FibonacciCalculator { + f.write("""export class FibonacciCalculator { fibonacci(n) { if (n <= 1) { return n; @@ -769,6 +769,7 @@ def test_extract_class_method_syntax_valid(self, js_support): context = js_support.extract_code_context(fib_method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class FibonacciCalculator { fibonacci(n) { if (n <= 1) { @@ -784,7 +785,7 @@ def test_extract_class_method_syntax_valid(self, js_support): def test_extract_nested_class_method(self, js_support): """Test extracting a method from a nested class structure.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Outer { + f.write("""export class Outer { createInner() { return class Inner { getValue() { @@ -808,6 +809,7 @@ def test_extract_nested_class_method(self, js_support): context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class Outer { add(a, b) { return a + b; @@ -820,7 +822,7 @@ def test_extract_nested_class_method(self, js_support): def test_extract_async_class_method(self, js_support): """Test extracting an async class method.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class ApiClient { + f.write("""export class ApiClient { async fetchData(url) { const response = await fetch(url); return response.json(); @@ -836,6 +838,7 @@ def test_extract_async_class_method(self, js_support): context = 
js_support.extract_code_context(fetch_method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class ApiClient { async fetchData(url) { const response = await fetch(url); @@ -849,7 +852,7 @@ def test_extract_async_class_method(self, js_support): def test_extract_static_class_method(self, js_support): """Test extracting a static class method.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class MathUtils { + f.write("""export class MathUtils { static add(a, b) { return a + b; } @@ -869,6 +872,7 @@ def test_extract_static_class_method(self, js_support): context = js_support.extract_code_context(add_method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class MathUtils { static add(a, b) { return a + b; @@ -881,7 +885,7 @@ def test_extract_static_class_method(self, js_support): def test_extract_class_method_without_class_jsdoc(self, js_support): """Test extracting a method from a class without JSDoc.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class SimpleClass { + f.write("""export class SimpleClass { simpleMethod() { return "hello"; } @@ -896,6 +900,7 @@ def test_extract_class_method_without_class_jsdoc(self, js_support): context = js_support.extract_code_context(method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class SimpleClass { simpleMethod() { return "hello"; @@ -1061,7 +1066,7 @@ class TestClassMethodEdgeCases: def test_class_with_constructor(self, js_support): """Test handling classes with constructors.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Counter { + f.write("""export class Counter { 
constructor(start = 0) { this.value = start; } @@ -1083,7 +1088,7 @@ def test_class_with_constructor(self, js_support): def test_class_with_getters_setters(self, js_support): """Test handling classes with getters and setters.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Person { + f.write("""export class Person { constructor(name) { this._name = name; } @@ -1113,13 +1118,13 @@ def test_class_with_getters_setters(self, js_support): def test_class_extending_another(self, js_support): """Test handling classes that extend another class.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Animal { + f.write("""export class Animal { speak() { return 'sound'; } } -class Dog extends Animal { +export class Dog extends Animal { speak() { return 'bark'; } @@ -1141,6 +1146,7 @@ class Dog extends Animal { context = js_support.extract_code_context(fetch_method, file_path.parent, file_path.parent) # Full string equality check + # Note: export keyword is not included in extracted class wrapper expected_code = """class Dog { fetch() { return 'ball'; @@ -1153,7 +1159,7 @@ class Dog extends Animal { def test_class_with_private_method(self, js_support): """Test handling classes with private methods (ES2022+).""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class SecureClass { + f.write("""export class SecureClass { #privateMethod() { return 'secret'; } @@ -1175,7 +1181,7 @@ def test_class_with_private_method(self, js_support): def test_commonjs_class_export(self, js_support): """Test handling CommonJS exported classes.""" with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: - f.write("""class Calculator { + f.write("""export class Calculator { add(a, b) { return a + b; } @@ -1236,7 +1242,7 @@ def test_extract_context_then_replace_method(self, js_support): 3. 
Replace extracts just the method body and replaces in original """ original_source = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -1303,7 +1309,7 @@ class Counter { # Verify result with exact string equality expected_result = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -1333,7 +1339,7 @@ def test_typescript_extract_context_then_replace_method(self): ts_support = TypeScriptSupport() original_source = """\ -class User { +export class User { private name: string; private age: number; @@ -1350,8 +1356,6 @@ class User { return this.age; } } - -export { User }; """ with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: f.write(original_source) @@ -1408,7 +1412,7 @@ class User { # Verify result with exact string equality expected_result = """\ -class User { +export class User { private name: string; private age: number; @@ -1426,8 +1430,6 @@ class User { return this.age; } } - -export { User }; """ assert result == expected_result, ( f"Replacement result does not match expected.\nExpected:\n{expected_result}\n\nGot:\n{result}" @@ -1437,7 +1439,7 @@ class User { def test_extract_replace_preserves_other_methods(self, js_support): """Test that replacing one method doesn't affect others.""" original_source = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } @@ -1499,7 +1501,7 @@ class Calculator { # Verify result with exact string equality expected_result = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } @@ -1525,7 +1527,7 @@ class Calculator { def test_extract_static_method_then_replace(self, js_support): """Test extracting and replacing a static method.""" original_source = """\ -class MathUtils { +export class MathUtils { constructor() { this.cache = {}; } @@ -1538,8 +1540,6 @@ class MathUtils { return a * b; } } - 
-module.exports = { MathUtils }; """ with tempfile.NamedTemporaryFile(suffix=".js", mode="w", delete=False) as f: f.write(original_source) @@ -1586,7 +1586,7 @@ class MathUtils { # Verify result with exact string equality expected_result = """\ -class MathUtils { +export class MathUtils { constructor() { this.cache = {}; } @@ -1600,8 +1600,6 @@ class MathUtils { return a * b; } } - -module.exports = { MathUtils }; """ assert result == expected_result, ( f"Replacement result does not match expected.\nExpected:\n{expected_result}\n\nGot:\n{result}" diff --git a/tests/test_languages/test_javascript_test_discovery.py b/tests/test_languages/test_javascript_test_discovery.py index 473bd330e..df697d482 100644 --- a/tests/test_languages/test_javascript_test_discovery.py +++ b/tests/test_languages/test_javascript_test_discovery.py @@ -29,7 +29,7 @@ def test_discover_tests_basic(self, js_support): # Create source file source_file = tmpdir / "math.js" source_file.write_text(""" -function add(a, b) { +export function add(a, b) { return a + b; } @@ -71,7 +71,7 @@ def test_discover_tests_spec_suffix(self, js_support): # Create source file source_file = tmpdir / "calculator.js" source_file.write_text(""" -function multiply(a, b) { +export function multiply(a, b) { return a * b; } @@ -103,7 +103,7 @@ def test_discover_tests_in_tests_directory(self, js_support): # Create source file source_file = tmpdir / "utils.js" source_file.write_text(""" -function formatDate(date) { +export function formatDate(date) { return date.toISOString(); } @@ -136,11 +136,11 @@ def test_discover_tests_nested_describe(self, js_support): source_file = tmpdir / "string_utils.js" source_file.write_text(""" -function capitalize(str) { +export function capitalize(str) { return str.charAt(0).toUpperCase() + str.slice(1); } -function lowercase(str) { +export function lowercase(str) { return str.toLowerCase(); } @@ -186,7 +186,7 @@ def test_discover_tests_with_it_block(self, js_support): source_file = tmpdir / 
"array_utils.js" source_file.write_text(""" -function sum(arr) { +export function sum(arr) { return arr.reduce((a, b) => a + b, 0); } @@ -254,7 +254,7 @@ def test_discover_tests_default_export(self, js_support): source_file = tmpdir / "greeter.js" source_file.write_text(""" -function greet(name) { +export function greet(name) { return `Hello, ${name}!`; } @@ -282,7 +282,7 @@ def test_discover_tests_class_methods(self, js_support): source_file = tmpdir / "calculator_class.js" source_file.write_text(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -333,7 +333,7 @@ def test_discover_tests_multi_level_directories(self, js_support): source_file = src_dir / "helpers.js" source_file.write_text(""" -function clamp(value, min, max) { +export function clamp(value, min, max) { return Math.min(Math.max(value, min), max); } @@ -375,11 +375,11 @@ def test_discover_tests_async_functions(self, js_support): source_file = tmpdir / "async_utils.js" source_file.write_text(""" -async function fetchData(url) { +export async function fetchData(url) { return await fetch(url).then(r => r.json()); } -async function delay(ms) { +export async function delay(ms) { return new Promise(resolve => setTimeout(resolve, ms)); } @@ -413,7 +413,7 @@ def test_discover_tests_jsx_component(self, js_support): source_file.write_text(""" import React from 'react'; -function Button({ onClick, children }) { +export function Button({ onClick, children }) { return ; } @@ -449,7 +449,7 @@ def test_discover_tests_no_matching_tests(self, js_support): source_file = tmpdir / "untested.js" source_file.write_text(""" -function untestedFunction() { +export function untestedFunction() { return 42; } @@ -479,11 +479,11 @@ def test_discover_tests_function_name_in_source(self, js_support): source_file = tmpdir / "validators.js" source_file.write_text(""" -function isEmail(str) { +export function isEmail(str) { return str.includes('@'); } -function isUrl(str) { +export function isUrl(str) { 
return str.startsWith('http'); } @@ -515,11 +515,11 @@ def test_discover_tests_multiple_test_files(self, js_support): source_file = tmpdir / "shared_utils.js" source_file.write_text(""" -function helper1() { +export function helper1() { return 1; } -function helper2() { +export function helper2() { return 2; } @@ -558,7 +558,7 @@ def test_discover_tests_template_literal_names(self, js_support): source_file = tmpdir / "format.js" source_file.write_text(""" -function formatNumber(n) { +export function formatNumber(n) { return n.toFixed(2); } @@ -587,7 +587,7 @@ def test_discover_tests_aliased_import(self, js_support): source_file = tmpdir / "transform.js" source_file.write_text(""" -function transformData(data) { +export function transformData(data) { return data.map(x => x * 2); } @@ -792,8 +792,8 @@ def test_require_named_import(self, js_support): source_file = tmpdir / "funcs.js" source_file.write_text(""" -function funcA() { return 1; } -function funcB() { return 2; } +export function funcA() { return 1; } +export function funcB() { return 2; } module.exports = { funcA, funcB }; """) @@ -846,7 +846,7 @@ def test_default_import(self, js_support): source_file = tmpdir / "default_export.js" source_file.write_text(""" -function mainFunc() { return 'main'; } +export function mainFunc() { return 'main'; } module.exports = mainFunc; """) @@ -875,7 +875,7 @@ def test_comments_in_test_file(self, js_support): source_file = tmpdir / "commented.js" source_file.write_text(""" -function compute() { return 42; } +export function compute() { return 42; } module.exports = { compute }; """) @@ -908,7 +908,7 @@ def test_test_file_with_syntax_error(self, js_support): source_file = tmpdir / "valid.js" source_file.write_text(""" -function validFunc() { return 1; } +export function validFunc() { return 1; } module.exports = { validFunc }; """) @@ -933,8 +933,8 @@ def test_function_with_same_name_as_jest_api(self, js_support): source_file = tmpdir / "conflict.js" 
source_file.write_text(""" -function test(value) { return value > 0; } -function describe(obj) { return JSON.stringify(obj); } +export function test(value) { return value > 0; } +export function describe(obj) { return JSON.stringify(obj); } module.exports = { test, describe }; """) @@ -962,7 +962,7 @@ def test_empty_test_directory(self, js_support): source_file = tmpdir / "lonely.js" source_file.write_text(""" -function lonelyFunc() { return 'alone'; } +export function lonelyFunc() { return 'alone'; } module.exports = { lonelyFunc }; """) @@ -980,14 +980,14 @@ def test_circular_imports(self, js_support): file_a = tmpdir / "moduleA.js" file_a.write_text(""" const { funcB } = require('./moduleB'); -function funcA() { return 'A' + (funcB ? funcB() : ''); } +export function funcA() { return 'A' + (funcB ? funcB() : ''); } module.exports = { funcA }; """) file_b = tmpdir / "moduleB.js" file_b.write_text(""" const { funcA } = require('./moduleA'); -function funcB() { return 'B'; } +export function funcB() { return 'B'; } module.exports = { funcB }; """) @@ -1126,17 +1126,17 @@ def test_full_discovery_workflow(self, js_support): # Source file source_file = src_dir / "utils.js" source_file.write_text(r""" -function validateEmail(email) { +export function validateEmail(email) { const re = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; return re.test(email); } -function validatePhone(phone) { +export function validatePhone(phone) { const re = /^\d{10}$/; return re.test(phone); } -function formatName(first, last) { +export function formatName(first, last) { return `${first} ${last}`.trim(); } @@ -1197,7 +1197,7 @@ def test_discovery_with_fixtures(self, js_support): source_file = tmpdir / "database.js" source_file.write_text(""" -class Database { +export class Database { constructor() { this.data = []; } @@ -1259,13 +1259,13 @@ def test_test_file_imports_different_module(self, js_support): # Create two source files source_a = tmpdir / "moduleA.js" source_a.write_text(""" -function funcA() { 
return 'A'; } +export function funcA() { return 'A'; } module.exports = { funcA }; """) source_b = tmpdir / "moduleB.js" source_b.write_text(""" -function funcB() { return 'B'; } +export function funcB() { return 'B'; } module.exports = { funcB }; """) @@ -1296,9 +1296,9 @@ def test_test_file_imports_only_specific_function(self, js_support): source_file = tmpdir / "utils.js" source_file.write_text(""" -function funcOne() { return 1; } -function funcTwo() { return 2; } -function funcThree() { return 3; } +export function funcOne() { return 1; } +export function funcTwo() { return 2; } +export function funcThree() { return 3; } module.exports = { funcOne, funcTwo, funcThree }; """) @@ -1325,7 +1325,7 @@ def test_function_name_as_string_not_import(self, js_support): source_file = tmpdir / "target.js" source_file.write_text(""" -function targetFunc() { return 'target'; } +export function targetFunc() { return 'target'; } module.exports = { targetFunc }; """) @@ -1354,7 +1354,7 @@ def test_module_import_with_method_access(self, js_support): source_file = tmpdir / "math.js" source_file.write_text(""" -function calculate(x) { return x * 2; } +export function calculate(x) { return x * 2; } module.exports = { calculate }; """) @@ -1380,7 +1380,7 @@ def test_class_method_discovery_via_class_import(self, js_support): source_file = tmpdir / "myclass.js" source_file.write_text(""" -class MyClass { +export class MyClass { methodA() { return 'A'; } methodB() { return 'B'; } } @@ -1416,7 +1416,7 @@ def test_nested_module_structure(self, js_support): source_file = src_dir / "helpers.js" source_file.write_text(""" -function deepHelper() { return 'deep'; } +export function deepHelper() { return 'deep'; } module.exports = { deepHelper }; """) @@ -1574,9 +1574,9 @@ def test_multiple_functions_same_file_different_tests(self, js_support): source_file = tmpdir / "multiple.js" source_file.write_text(""" -function addNumbers(a, b) { return a + b; } -function subtractNumbers(a, b) { return a 
- b; } -function multiplyNumbers(a, b) { return a * b; } +export function addNumbers(a, b) { return a + b; } +export function subtractNumbers(a, b) { return a - b; } +export function multiplyNumbers(a, b) { return a * b; } module.exports = { addNumbers, subtractNumbers, multiplyNumbers }; """) @@ -1613,7 +1613,7 @@ def test_test_in_wrong_describe_still_discovered(self, js_support): source_file = tmpdir / "funcs.js" source_file.write_text(""" -function targetFunc() { return 'target'; } +export function targetFunc() { return 'target'; } module.exports = { targetFunc }; """) @@ -1705,7 +1705,7 @@ def test_class_method_qualified_name(self, js_support): source_file = tmpdir / "calculator.js" source_file.write_text(""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } subtract(a, b) { return a - b; } } @@ -1726,7 +1726,7 @@ def test_nested_class_method(self, js_support): source_file = tmpdir / "nested.js" source_file.write_text(""" -class Outer { +export class Outer { innerMethod() { class Inner { deepMethod() { return 'deep'; } diff --git a/tests/test_languages/test_javascript_test_runner.py b/tests/test_languages/test_javascript_test_runner.py index 87e712038..905ef24a8 100644 --- a/tests/test_languages/test_javascript_test_runner.py +++ b/tests/test_languages/test_javascript_test_runner.py @@ -668,10 +668,10 @@ def test_create_codeflash_jest_config(self): assert result_path.exists() assert result_path.name == "jest.codeflash.config.js" - # Verify it contains the tsconfig reference + # Verify it contains ESM package transformation patterns content = result_path.read_text() - assert "tsconfig.codeflash.json" in content - assert "ts-jest" in content + assert "transformIgnorePatterns" in content + assert "node_modules" in content def test_get_jest_config_for_project_with_bundler(self): """Test that bundler projects get codeflash Jest config.""" diff --git a/tests/test_languages/test_js_code_extractor.py 
b/tests/test_languages/test_js_code_extractor.py index b1dcee81f..a21f15e2e 100644 --- a/tests/test_languages/test_js_code_extractor.py +++ b/tests/test_languages/test_js_code_extractor.py @@ -109,12 +109,7 @@ def test_extract_context_includes_direct_helpers(self, js_support, cjs_project): factorial_helper = helper_dict["factorial"] expected_factorial_code = """\ -/** - * Calculate factorial recursively. - * @param n - Non-negative integer - * @returns Factorial of n - */ -function factorial(n) { +export function factorial(n) { // Intentionally inefficient recursive implementation if (n <= 1) return 1; return n * factorial(n - 1); @@ -196,46 +191,22 @@ def test_extract_compound_interest_helpers(self, js_support, cjs_project): # STRICT: Verify each helper's code exactly expected_add_code = """\ -/** - * Add two numbers. - * @param a - First number - * @param b - Second number - * @returns Sum of a and b - */ -function add(a, b) { +export function add(a, b) { return a + b; }""" expected_multiply_code = """\ -/** - * Multiply two numbers. - * @param a - First number - * @param b - Second number - * @returns Product of a and b - */ -function multiply(a, b) { +export function multiply(a, b) { return a * b; }""" expected_format_number_code = """\ -/** - * Format a number to specified decimal places. - * @param num - Number to format - * @param decimals - Number of decimal places - * @returns Formatted number - */ -function formatNumber(num, decimals) { +export function formatNumber(num, decimals) { return Number(num.toFixed(decimals)); }""" expected_validate_input_code = """\ -/** - * Validate that input is a valid number. 
- * @param value - Value to validate - * @param name - Parameter name for error message - * @throws Error if value is not a valid number - */ -function validateInput(value, name) { +export function validateInput(value, name) { if (typeof value !== 'number' || isNaN(value)) { throw new Error(`Invalid ${name}: must be a number`); } @@ -317,13 +288,7 @@ class Calculator { assert set(helper_dict.keys()) == {"add"}, f"Expected 'add' helper, got: {list(helper_dict.keys())}" expected_add_code = """\ -/** - * Add two numbers. - * @param a - First number - * @param b - Second number - * @returns Sum of a and b - */ -function add(a, b) { +export function add(a, b) { return a + b; }""" @@ -702,7 +667,7 @@ def js_support(self): def test_standalone_function(self, js_support, tmp_path): """Test standalone function with no helpers.""" source = """\ -function standalone(x) { +export function standalone(x) { return x * 2; } @@ -718,7 +683,7 @@ def test_standalone_function(self, js_support, tmp_path): # STRICT: Exact code comparison expected_code = """\ -function standalone(x) { +export function standalone(x) { return x * 2; }""" assert context.target_code.strip() == expected_code.strip(), ( @@ -735,7 +700,7 @@ def test_external_package_excluded(self, js_support, tmp_path): source = """\ const _ = require('lodash'); -function processArray(arr) { +export function processArray(arr) { return _.map(arr, x => x * 2); } @@ -750,7 +715,7 @@ def test_external_package_excluded(self, js_support, tmp_path): context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) expected_code = """\ -function processArray(arr) { +export function processArray(arr) { return _.map(arr, x => x * 2); }""" @@ -769,7 +734,7 @@ def test_external_package_excluded(self, js_support, tmp_path): def test_recursive_function(self, js_support, tmp_path): """Test recursive function doesn't list itself as helper.""" source = """\ -function fibonacci(n) { +export function 
fibonacci(n) { if (n <= 1) return n; return fibonacci(n - 1) + fibonacci(n - 2); } @@ -786,7 +751,7 @@ def test_recursive_function(self, js_support, tmp_path): # STRICT: Exact code comparison expected_code = """\ -function fibonacci(n) { +export function fibonacci(n) { if (n <= 1) return n; return fibonacci(n - 1) + fibonacci(n - 2); }""" @@ -803,7 +768,7 @@ def test_arrow_function_helper(self, js_support, tmp_path): source = """\ const helper = (x) => x * 2; -const processValue = (value) => { +export const processValue = (value) => { return helper(value) + 1; }; @@ -818,7 +783,7 @@ def test_arrow_function_helper(self, js_support, tmp_path): context = js_support.extract_code_context(function=func, project_root=tmp_path, module_root=tmp_path) expected_code = """\ -const processValue = (value) => { +export const processValue = (value) => { return helper(value) + 1; };""" @@ -854,7 +819,7 @@ def ts_support(self): def test_method_extraction_includes_constructor(self, js_support, tmp_path): """Test that extracting a class method includes the constructor.""" source = """\ -class Counter { +export class Counter { constructor(initial = 0) { this.count = initial; } @@ -894,7 +859,7 @@ class Counter { def test_method_extraction_class_without_constructor(self, js_support, tmp_path): """Test extracting a method from a class that has no constructor.""" source = """\ -class MathUtils { +export class MathUtils { add(a, b) { return a + b; } @@ -928,7 +893,7 @@ class MathUtils { def test_typescript_method_extraction_includes_fields(self, ts_support, tmp_path): """Test that TypeScript method extraction includes class fields.""" source = """\ -class User { +export class User { private name: string; public age: number; @@ -941,8 +906,6 @@ class User { return this.name; } } - -export { User }; """ test_file = tmp_path / "user.ts" test_file.write_text(source) @@ -974,7 +937,7 @@ class User { def test_typescript_fields_only_no_constructor(self, ts_support, tmp_path): """Test TypeScript 
class with fields but no constructor.""" source = """\ -class Config { +export class Config { readonly apiUrl: string = "https://api.example.com"; timeout: number = 5000; @@ -982,8 +945,6 @@ class Config { return this.apiUrl; } } - -export { Config }; """ test_file = tmp_path / "config.ts" test_file.write_text(source) @@ -1010,7 +971,7 @@ class Config { def test_constructor_with_jsdoc(self, js_support, tmp_path): """Test that constructor with JSDoc is fully extracted.""" source = """\ -class Logger { +export class Logger { /** * Create a new Logger instance. * @param {string} prefix - The prefix to use for log messages. @@ -1056,7 +1017,7 @@ class Logger { def test_static_method_includes_constructor(self, js_support, tmp_path): """Test that static method extraction also includes constructor context.""" source = """\ -class Factory { +export class Factory { constructor(config) { this.config = config; } @@ -1212,13 +1173,11 @@ def test_extract_same_file_interface_from_parameter(self, ts_support, tmp_path): y: number; } -function distance(p1: Point, p2: Point): number { +export function distance(p1: Point, p2: Point): number { const dx = p2.x - p1.x; const dy = p2.y - p1.y; return Math.sqrt(dx * dx + dy * dy); } - -export { distance }; """ test_file = tmp_path / "geometry.ts" test_file.write_text(source) @@ -1251,7 +1210,7 @@ def test_extract_same_file_enum_from_parameter(self, ts_support, tmp_path): FAILURE = 'failure', } -function processStatus(status: Status): string { +export function processStatus(status: Status): string { switch (status) { case Status.PENDING: return 'Processing...'; @@ -1261,8 +1220,6 @@ def test_extract_same_file_enum_from_parameter(self, ts_support, tmp_path): return 'Failed!'; } } - -export { processStatus }; """ test_file = tmp_path / "status.ts" test_file.write_text(source) @@ -1295,11 +1252,9 @@ def test_extract_same_file_type_alias_from_return_type(self, ts_support, tmp_pat success: boolean; }; -function compute(x: number): Result { 
+export function compute(x: number): Result { return { value: x * 2, success: true }; } - -export { compute }; """ test_file = tmp_path / "compute.ts" test_file.write_text(source) @@ -1331,7 +1286,7 @@ def test_extract_class_field_types(self, ts_support, tmp_path): retries: number; } -class Service { +export class Service { private config: Config; constructor(config: Config) { @@ -1342,8 +1297,6 @@ class Service { return this.config.timeout; } } - -export { Service }; """ test_file = tmp_path / "service.ts" test_file.write_text(source) @@ -1372,11 +1325,9 @@ class Service { def test_primitive_types_not_included(self, ts_support, tmp_path): """Test that primitive types (number, string, etc.) are not extracted.""" source = """\ -function add(a: number, b: number): number { +export function add(a: number, b: number): number { return a + b; } - -export { add }; """ test_file = tmp_path / "add.ts" test_file.write_text(source) @@ -1405,11 +1356,9 @@ def test_extract_multiple_types(self, ts_support, tmp_path): height: number; } -function createRect(origin: Point, size: Size): { origin: Point; size: Size } { +export function createRect(origin: Point, size: Size): { origin: Point; size: Size } { return { origin, size }; } - -export { createRect }; """ test_file = tmp_path / "rect.ts" test_file.write_text(source) @@ -1447,7 +1396,7 @@ def test_extract_imported_type_definition(self, ts_support, ts_types_project): geometry_file.write_text("""\ import { Point, CalculationConfig } from './types'; -function calculateDistance(p1: Point, p2: Point, config: CalculationConfig): number { +export function calculateDistance(p1: Point, p2: Point, config: CalculationConfig): number { const dx = p2.x - p1.x; const dy = p2.y - p1.y; const distance = Math.sqrt(dx * dx + dy * dy); @@ -1458,8 +1407,6 @@ def test_extract_imported_type_definition(self, ts_support, ts_types_project): } return distance; } - -export { calculateDistance }; """) functions = 
ts_support.discover_functions(geometry_file) @@ -1506,11 +1453,9 @@ def test_type_with_jsdoc_included(self, ts_support, tmp_path): name: string; } -function greetUser(user: User): string { +export function greetUser(user: User): string { return `Hello, ${user.name}!`; } - -export { greetUser }; """ test_file = tmp_path / "user.ts" test_file.write_text(source) diff --git a/tests/test_languages/test_js_code_replacer.py b/tests/test_languages/test_js_code_replacer.py index c5b2cc001..9e251804a 100644 --- a/tests/test_languages/test_js_code_replacer.py +++ b/tests/test_languages/test_js_code_replacer.py @@ -757,7 +757,7 @@ class TestSimpleFunctionReplacement: def test_replace_simple_function_body(self, js_support, temp_project): """Test replacing a simple function body preserves structure exactly.""" original_source = """\ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -769,7 +769,7 @@ def test_replace_simple_function_body(self, js_support, temp_project): # Optimized version with different body optimized_code = """\ -function add(a, b) { +export function add(a, b) { // Optimized: direct return return a + b; } @@ -778,7 +778,7 @@ def test_replace_simple_function_body(self, js_support, temp_project): result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function add(a, b) { +export function add(a, b) { // Optimized: direct return return a + b; } @@ -789,7 +789,7 @@ def test_replace_simple_function_body(self, js_support, temp_project): def test_replace_function_with_multiple_statements(self, js_support, temp_project): """Test replacing function with complex multi-statement body.""" original_source = """\ -function processData(data) { +export function processData(data) { const result = []; for (let i = 0; i < data.length; i++) { result.push(data[i] * 2); @@ -805,7 +805,7 @@ def test_replace_function_with_multiple_statements(self, js_support, temp_projec # Optimized version using map optimized_code = 
"""\ -function processData(data) { +export function processData(data) { return data.map(x => x * 2); } """ @@ -813,7 +813,7 @@ def test_replace_function_with_multiple_statements(self, js_support, temp_projec result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function processData(data) { +export function processData(data) { return data.map(x => x * 2); } """ @@ -825,12 +825,12 @@ def test_replace_preserves_surrounding_code(self, js_support, temp_project): original_source = """\ const CONFIG = { debug: true }; -function targetFunction(x) { +export function targetFunction(x) { console.log(x); return x * 2; } -function otherFunction(y) { +export function otherFunction(y) { return y + 1; } @@ -843,7 +843,7 @@ def test_replace_preserves_surrounding_code(self, js_support, temp_project): target_func = next(f for f in functions if f.function_name == "targetFunction") optimized_code = """\ -function targetFunction(x) { +export function targetFunction(x) { return x << 1; } """ @@ -853,11 +853,11 @@ def test_replace_preserves_surrounding_code(self, js_support, temp_project): expected_result = """\ const CONFIG = { debug: true }; -function targetFunction(x) { +export function targetFunction(x) { return x << 1; } -function otherFunction(y) { +export function otherFunction(y) { return y + 1; } @@ -873,7 +873,7 @@ class TestClassMethodReplacement: def test_replace_class_method_body(self, js_support, temp_project): """Test replacing a class method body preserves class structure.""" original_source = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } @@ -896,7 +896,7 @@ class Calculator { # Optimized version provided in class context optimized_code = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } @@ -910,7 +910,7 @@ class Calculator { result = js_support.replace_function(original_source, add_method, 
optimized_code) expected_result = """\ -class Calculator { +export class Calculator { constructor(precision = 2) { this.precision = precision; } @@ -930,7 +930,7 @@ class Calculator { def test_replace_method_calling_sibling_methods(self, js_support, temp_project): """Test replacing method that calls other methods in same class.""" original_source = """\ -class DataProcessor { +export class DataProcessor { constructor() { this.cache = new Map(); } @@ -958,7 +958,7 @@ class DataProcessor { process_method = next(f for f in functions if f.function_name == "process") optimized_code = """\ -class DataProcessor { +export class DataProcessor { constructor() { this.cache = new Map(); } @@ -975,7 +975,7 @@ class DataProcessor { result = js_support.replace_function(original_source, process_method, optimized_code) expected_result = """\ -class DataProcessor { +export class DataProcessor { constructor() { this.cache = new Map(); } @@ -1008,7 +1008,7 @@ def test_replace_preserves_jsdoc_above_function(self, js_support, temp_project): * @param {number} b - Second number * @returns {number} The sum */ -function add(a, b) { +export function add(a, b) { const sum = a + b; return sum; } @@ -1020,13 +1020,7 @@ def test_replace_preserves_jsdoc_above_function(self, js_support, temp_project): func = functions[0] optimized_code = """\ -/** - * Calculates the sum of two numbers. - * @param {number} a - First number - * @param {number} b - Second number - * @returns {number} The sum - */ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -1040,7 +1034,7 @@ def test_replace_preserves_jsdoc_above_function(self, js_support, temp_project): * @param {number} b - Second number * @returns {number} The sum */ -function add(a, b) { +export function add(a, b) { return a + b; } """ @@ -1054,7 +1048,7 @@ def test_replace_class_method_with_jsdoc(self, js_support, temp_project): * A simple cache implementation. 
* @class Cache */ -class Cache { +export class Cache { constructor() { this.data = new Map(); } @@ -1103,7 +1097,7 @@ class Cache { * A simple cache implementation. * @class Cache */ -class Cache { +export class Cache { constructor() { this.data = new Map(); } @@ -1128,7 +1122,7 @@ class TestAsyncFunctionReplacement: def test_replace_async_function_body(self, js_support, temp_project): """Test replacing async function preserves async keyword.""" original_source = """\ -async function fetchData(url) { +export async function fetchData(url) { const response = await fetch(url); const data = await response.json(); return data; @@ -1141,7 +1135,7 @@ def test_replace_async_function_body(self, js_support, temp_project): func = functions[0] optimized_code = """\ -async function fetchData(url) { +export async function fetchData(url) { return (await fetch(url)).json(); } """ @@ -1149,7 +1143,7 @@ def test_replace_async_function_body(self, js_support, temp_project): result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -async function fetchData(url) { +export async function fetchData(url) { return (await fetch(url)).json(); } """ @@ -1159,7 +1153,7 @@ def test_replace_async_function_body(self, js_support, temp_project): def test_replace_async_class_method(self, js_support, temp_project): """Test replacing async class method.""" original_source = """\ -class ApiClient { +export class ApiClient { constructor(baseUrl) { this.baseUrl = baseUrl; } @@ -1198,7 +1192,7 @@ class ApiClient { result = js_support.replace_function(original_source, get_method, optimized_code) expected_result = """\ -class ApiClient { +export class ApiClient { constructor(baseUrl) { this.baseUrl = baseUrl; } @@ -1220,7 +1214,7 @@ class TestGeneratorFunctionReplacement: def test_replace_generator_function_body(self, js_support, temp_project): """Test replacing generator function preserves generator syntax.""" original_source = """\ -function* range(start, end) { 
+export function* range(start, end) { for (let i = start; i < end; i++) { yield i; } @@ -1233,7 +1227,7 @@ def test_replace_generator_function_body(self, js_support, temp_project): func = functions[0] optimized_code = """\ -function* range(start, end) { +export function* range(start, end) { let i = start; while (i < end) yield i++; } @@ -1242,7 +1236,7 @@ def test_replace_generator_function_body(self, js_support, temp_project): result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function* range(start, end) { +export function* range(start, end) { let i = start; while (i < end) yield i++; } @@ -1257,7 +1251,7 @@ class TestTypeScriptReplacement: def test_replace_typescript_function_with_types(self, ts_support, temp_project): """Test replacing TypeScript function preserves type annotations.""" original_source = """\ -function processArray(items: number[]): number { +export function processArray(items: number[]): number { let sum = 0; for (let i = 0; i < items.length; i++) { sum += items[i]; @@ -1272,7 +1266,7 @@ def test_replace_typescript_function_with_types(self, ts_support, temp_project): func = functions[0] optimized_code = """\ -function processArray(items: number[]): number { +export function processArray(items: number[]): number { return items.reduce((a, b) => a + b, 0); } """ @@ -1280,7 +1274,7 @@ def test_replace_typescript_function_with_types(self, ts_support, temp_project): result = ts_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function processArray(items: number[]): number { +export function processArray(items: number[]): number { return items.reduce((a, b) => a + b, 0); } """ @@ -1290,7 +1284,7 @@ def test_replace_typescript_function_with_types(self, ts_support, temp_project): def test_replace_typescript_class_method_with_generics(self, ts_support, temp_project): """Test replacing TypeScript generic class method.""" original_source = """\ -class Container { +export 
class Container { private items: T[] = []; add(item: T): void { @@ -1325,7 +1319,7 @@ class Container { result = ts_support.replace_function(original_source, get_all_method, optimized_code) expected_result = """\ -class Container { +export class Container { private items: T[] = []; add(item: T): void { @@ -1349,7 +1343,7 @@ def test_replace_typescript_interface_typed_function(self, ts_support, temp_proj email: string; } -function createUser(name: string, email: string): User { +export function createUser(name: string, email: string): User { const id = Math.random().toString(36).substring(2, 15); const user: User = { id: id, @@ -1366,7 +1360,7 @@ def test_replace_typescript_interface_typed_function(self, ts_support, temp_proj func = next(f for f in functions if f.function_name == "createUser") optimized_code = """\ -function createUser(name: string, email: string): User { +export function createUser(name: string, email: string): User { return { id: Math.random().toString(36).substring(2, 15), name, @@ -1384,7 +1378,7 @@ def test_replace_typescript_interface_typed_function(self, ts_support, temp_proj email: string; } -function createUser(name: string, email: string): User { +export function createUser(name: string, email: string): User { return { id: Math.random().toString(36).substring(2, 15), name, @@ -1402,7 +1396,7 @@ class TestComplexReplacements: def test_replace_function_with_nested_functions(self, js_support, temp_project): """Test replacing function that contains nested function definitions.""" original_source = """\ -function processItems(items) { +export function processItems(items) { function helper(item) { return item * 2; } @@ -1421,7 +1415,7 @@ def test_replace_function_with_nested_functions(self, js_support, temp_project): process_func = next(f for f in functions if f.function_name == "processItems") optimized_code = """\ -function processItems(items) { +export function processItems(items) { const helper = x => x * 2; return items.map(helper); } @@ 
-1430,7 +1424,7 @@ def test_replace_function_with_nested_functions(self, js_support, temp_project): result = js_support.replace_function(original_source, process_func, optimized_code) expected_result = """\ -function processItems(items) { +export function processItems(items) { const helper = x => x * 2; return items.map(helper); } @@ -1441,7 +1435,7 @@ def test_replace_function_with_nested_functions(self, js_support, temp_project): def test_replace_multiple_methods_sequentially(self, js_support, temp_project): """Test replacing multiple methods in the same class sequentially.""" original_source = """\ -class MathUtils { +export class MathUtils { static sum(arr) { let total = 0; for (let i = 0; i < arr.length; i++) { @@ -1478,7 +1472,7 @@ class MathUtils { result = js_support.replace_function(original_source, sum_method, optimized_sum) expected_after_first = """\ -class MathUtils { +export class MathUtils { static sum(arr) { return arr.reduce((a, b) => a + b, 0); } @@ -1499,7 +1493,7 @@ class MathUtils { def test_replace_function_with_complex_destructuring(self, js_support, temp_project): """Test replacing function with complex parameter destructuring.""" original_source = """\ -function processConfig({ server: { host, port }, database: { url, poolSize } }) { +export function processConfig({ server: { host, port }, database: { url, poolSize } }) { const serverUrl = host + ':' + port; const dbConnection = url + '?poolSize=' + poolSize; return { @@ -1515,7 +1509,7 @@ def test_replace_function_with_complex_destructuring(self, js_support, temp_proj func = functions[0] optimized_code = """\ -function processConfig({ server: { host, port }, database: { url, poolSize } }) { +export function processConfig({ server: { host, port }, database: { url, poolSize } }) { return { server: `${host}:${port}`, db: `${url}?poolSize=${poolSize}` @@ -1526,7 +1520,7 @@ def test_replace_function_with_complex_destructuring(self, js_support, temp_proj result = 
js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function processConfig({ server: { host, port }, database: { url, poolSize } }) { +export function processConfig({ server: { host, port }, database: { url, poolSize } }) { return { server: `${host}:${port}`, db: `${url}?poolSize=${poolSize}` @@ -1543,7 +1537,7 @@ class TestEdgeCases: def test_replace_minimal_function_body(self, js_support, temp_project): """Test replacing function with minimal body.""" original_source = """\ -function minimal() { +export function minimal() { return null; } """ @@ -1554,7 +1548,7 @@ def test_replace_minimal_function_body(self, js_support, temp_project): func = functions[0] optimized_code = """\ -function minimal() { +export function minimal() { return { initialized: true, timestamp: Date.now() }; } """ @@ -1562,7 +1556,7 @@ def test_replace_minimal_function_body(self, js_support, temp_project): result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function minimal() { +export function minimal() { return { initialized: true, timestamp: Date.now() }; } """ @@ -1572,7 +1566,7 @@ def test_replace_minimal_function_body(self, js_support, temp_project): def test_replace_single_line_function(self, js_support, temp_project): """Test replacing single-line function.""" original_source = """\ -function identity(x) { return x; } +export function identity(x) { return x; } """ file_path = temp_project / "utils.js" file_path.write_text(original_source, encoding="utf-8") @@ -1581,13 +1575,13 @@ def test_replace_single_line_function(self, js_support, temp_project): func = functions[0] optimized_code = """\ -function identity(x) { return x ?? null; } +export function identity(x) { return x ?? null; } """ result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function identity(x) { return x ?? null; } +export function identity(x) { return x ?? 
null; } """ assert result == expected_result assert js_support.validate_syntax(result) is True @@ -1595,7 +1589,7 @@ def test_replace_single_line_function(self, js_support, temp_project): def test_replace_function_with_special_characters_in_strings(self, js_support, temp_project): """Test replacing function containing special characters in strings.""" original_source = """\ -function formatMessage(name) { +export function formatMessage(name) { const greeting = 'Hello, ' + name + '!'; const special = "Contains \\"quotes\\" and \\n newlines"; return greeting + ' ' + special; @@ -1608,7 +1602,7 @@ def test_replace_function_with_special_characters_in_strings(self, js_support, t func = functions[0] optimized_code = """\ -function formatMessage(name) { +export function formatMessage(name) { return `Hello, ${name}! Contains "quotes" and newlines`; } @@ -1617,7 +1611,7 @@ def test_replace_function_with_special_characters_in_strings(self, js_support, t result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function formatMessage(name) { +export function formatMessage(name) { return `Hello, ${name}! 
Contains "quotes" and newlines`; } @@ -1628,7 +1622,7 @@ def test_replace_function_with_special_characters_in_strings(self, js_support, t def test_replace_function_with_regex(self, js_support, temp_project): """Test replacing function containing regex patterns.""" original_source = """\ -function validateEmail(email) { +export function validateEmail(email) { const pattern = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$/; if (pattern.test(email)) { return true; @@ -1643,7 +1637,7 @@ def test_replace_function_with_regex(self, js_support, temp_project): func = functions[0] optimized_code = """\ -function validateEmail(email) { +export function validateEmail(email) { return /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$/.test(email); } """ @@ -1651,7 +1645,7 @@ def test_replace_function_with_regex(self, js_support, temp_project): result = js_support.replace_function(original_source, func, optimized_code) expected_result = """\ -function validateEmail(email) { +export function validateEmail(email) { return /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$/.test(email); } """ @@ -1665,11 +1659,11 @@ class TestModuleExportHandling: def test_replace_exported_function_commonjs(self, js_support, temp_project): """Test replacing function in CommonJS module preserves exports.""" original_source = """\ -function helper(x) { +export function helper(x) { return x * 2; } -function main(data) { +export function main(data) { const results = []; for (let i = 0; i < data.length; i++) { results.push(helper(data[i])); @@ -1686,7 +1680,7 @@ def test_replace_exported_function_commonjs(self, js_support, temp_project): main_func = next(f for f in functions if f.function_name == "main") optimized_code = """\ -function main(data) { +export function main(data) { return data.map(helper); } """ @@ -1694,11 +1688,11 @@ def test_replace_exported_function_commonjs(self, js_support, temp_project): result = js_support.replace_function(original_source, main_func, optimized_code) 
expected_result = """\ -function helper(x) { +export function helper(x) { return x * 2; } -function main(data) { +export function main(data) { return data.map(helper); } @@ -1757,18 +1751,18 @@ def test_all_replacements_produce_valid_syntax(self, js_support, temp_project): test_cases = [ # (original, optimized, description) ( - "function f(x) { return x + 1; }", - "function f(x) { return ++x; }", + "export function f(x) { return x + 1; }", + "export function f(x) { return ++x; }", "increment replacement" ), ( - "function f(arr) { return arr.length > 0; }", - "function f(arr) { return !!arr.length; }", + "export function f(arr) { return arr.length > 0; }", + "export function f(arr) { return !!arr.length; }", "boolean conversion" ), ( - "function f(a, b) { if (a) { return a; } return b; }", - "function f(a, b) { return a || b; }", + "export function f(a, b) { if (a) { return a; } return b; }", + "export function f(a, b) { return a || b; }", "logical OR replacement" ), ] diff --git a/tests/test_languages/test_language_parity.py b/tests/test_languages/test_language_parity.py index ae57eb426..2b2035c84 100644 --- a/tests/test_languages/test_language_parity.py +++ b/tests/test_languages/test_language_parity.py @@ -38,7 +38,7 @@ def add(a, b): return a + b """, javascript=""" -function add(a, b) { +export function add(a, b) { return a + b; } """, @@ -58,15 +58,15 @@ def multiply(a, b): return a * b """, javascript=""" -function add(a, b) { +export function add(a, b) { return a + b; } -function subtract(a, b) { +export function subtract(a, b) { return a - b; } -function multiply(a, b) { +export function multiply(a, b) { return a * b; } """, @@ -83,11 +83,11 @@ def without_return(): print("hello") """, javascript=""" -function withReturn() { +export function withReturn() { return 1; } -function withoutReturn() { +export function withoutReturn() { console.log("hello"); } """, @@ -105,7 +105,7 @@ def multiply(self, a, b): return a * b """, javascript=""" -class Calculator { 
+export class Calculator { add(a, b) { return a + b; } @@ -128,11 +128,11 @@ def sync_function(): return 1 """, javascript=""" -async function fetchData(url) { +export async function fetchData(url) { return await fetch(url); } -function syncFunction() { +export function syncFunction() { return 1; } """, @@ -148,7 +148,7 @@ def inner(): return inner() """, javascript=""" -function outer() { +export function outer() { function inner() { return 1; } @@ -167,7 +167,7 @@ def helper(x): return x * 2 """, javascript=""" -class Utils { +export class Utils { static helper(x) { return x * 2; } @@ -194,7 +194,7 @@ def standalone(): return 42 """, javascript=""" -class Calculator { +export class Calculator { add(a, b) { return a + b; } @@ -204,13 +204,13 @@ class Calculator { } } -class StringUtils { +export class StringUtils { reverse(s) { return s.split('').reverse().join(''); } } -function standalone() { +export function standalone() { return 42; } """, @@ -227,11 +227,11 @@ def sync_func(): return 2 """, javascript=""" -async function asyncFunc() { +export async function asyncFunc() { return 1; } -function syncFunc() { +export function syncFunc() { return 2; } """, @@ -249,11 +249,11 @@ def method(self): return 2 """, javascript=""" -function standalone() { +export function standalone() { return 1; } -class MyClass { +export class MyClass { method() { return 2; } @@ -906,7 +906,7 @@ def test_discover_and_replace_workflow(self, python_support, js_support): return n return fibonacci(n - 1) + fibonacci(n - 2) """ - js_original = """function fibonacci(n) { + js_original = """export function fibonacci(n) { if (n <= 1) { return n; } @@ -933,7 +933,7 @@ def test_discover_and_replace_workflow(self, python_support, js_support): memo[i] = memo[i-1] + memo[i-2] return memo[n] """ - js_optimized = """function fibonacci(n) { + js_optimized = """export function fibonacci(n) { // Memoized version const memo = {0: 0, 1: 1}; for (let i = 2; i <= n; i++) { @@ -994,13 +994,13 @@ def 
test_function_info_fields_populated(self, python_support, js_support): def test_arrow_functions_unique_to_js(self, js_support): """JavaScript arrow functions should be discovered (no Python equivalent).""" js_code = """ -const add = (a, b) => { +export const add = (a, b) => { return a + b; }; -const multiply = (x, y) => x * y; +export const multiply = (x, y) => x * y; -const identity = x => x; +export const identity = x => x; """ js_file = write_temp_file(js_code, ".js") funcs = js_support.discover_functions(js_file) @@ -1021,7 +1021,7 @@ def number_generator(): return 3 """ js_code = """ -function* numberGenerator() { +export function* numberGenerator() { yield 1; yield 2; return 3; @@ -1065,11 +1065,11 @@ def multi_decorated(): def test_function_expressions_js(self, js_support): """JavaScript function expressions should be discovered.""" js_code = """ -const add = function(a, b) { +export const add = function(a, b) { return a + b; }; -const namedExpr = function myFunc(x) { +export const namedExpr = function myFunc(x) { return x * 2; }; """ @@ -1132,7 +1132,7 @@ def greeting(): return "Hello, 世界! 🌍" """ js_code = """ -function greeting() { +export function greeting() { return "Hello, 世界! 
🌍"; } """ diff --git a/tests/test_languages/test_typescript_code_extraction.py b/tests/test_languages/test_typescript_code_extraction.py index f97049943..b344a2492 100644 --- a/tests/test_languages/test_typescript_code_extraction.py +++ b/tests/test_languages/test_typescript_code_extraction.py @@ -119,7 +119,7 @@ def test_extract_simple_function(self, ts_support): """Test extracting code context for a simple function.""" with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: f.write(""" -function add(a: number, b: number): number { +export function add(a: number, b: number): number { return a + b; } """) @@ -147,7 +147,7 @@ def test_extract_async_function_with_template_literal(self, ts_support): const command_args = process.argv.slice(3); -async function execMongoEval(queryExpression, appsmithMongoURI) { +export async function execMongoEval(queryExpression, appsmithMongoURI) { queryExpression = queryExpression.trim(); if (command_args.includes("--pretty")) { @@ -186,7 +186,7 @@ def test_extract_function_with_complex_try_catch(self, ts_support): import fsPromises from "fs/promises"; import path from "path"; -async function figureOutContentsPath(root: string): Promise { +export async function figureOutContentsPath(root: string): Promise { const subfolders = await fsPromises.readdir(root, { withFileTypes: true }); try { @@ -238,7 +238,7 @@ def test_extracted_code_includes_imports(self, ts_support): import fs from "fs"; import path from "path"; -function readConfig(filename: string): string { +export function readConfig(filename: string): string { const fullPath = path.join(__dirname, filename); return fs.readFileSync(fullPath, "utf8"); } @@ -264,7 +264,7 @@ def test_extracted_code_includes_global_variables(self, ts_support): const CONFIG = { timeout: 5000 }; const MAX_RETRIES = 3; -async function fetchWithRetry(url: string): Promise { +export async function fetchWithRetry(url: string): Promise { for (let i = 0; i < MAX_RETRIES; i++) { try { const 
response = await fetch(url, { signal: AbortSignal.timeout(CONFIG.timeout) }); @@ -289,6 +289,164 @@ def test_extracted_code_includes_global_variables(self, ts_support): assert ts_support.validate_syntax(code_context.target_code) is True +class TestSameClassHelperExtraction: + """Tests for same-class helper method extraction. + + When a class method calls other methods from the same class, those helper + methods should be included inside the class wrapper (not appended outside), + because they may use class-specific syntax like 'private'. + """ + + def test_private_helper_method_inside_class_wrapper(self, ts_support): + """Test that private helper methods are included inside the class wrapper.""" + with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: + # Export the class and add return statements so discover_functions finds the methods + f.write(""" +export class EndpointGroup { + private endpoints: any[] = []; + + constructor() { + this.endpoints = []; + } + + post(path: string, handler: Function): EndpointGroup { + this.addEndpoint("POST", path, handler); + return this; + } + + private addEndpoint(method: string, path: string, handler: Function): void { + this.endpoints.push({ method, path, handler }); + return; + } +} +""") + f.flush() + file_path = Path(f.name) + + # Discover the 'post' method + functions = ts_support.discover_functions(file_path) + post_method = None + for func in functions: + if func.function_name == "post": + post_method = func + break + + assert post_method is not None, "post method should be discovered" + + # Extract code context + code_context = ts_support.extract_code_context( + post_method, file_path.parent, file_path.parent + ) + + # The extracted code should be syntactically valid + assert ts_support.validate_syntax(code_context.target_code) is True, ( + f"Extracted code should be valid TypeScript:\n{code_context.target_code}" + ) + + # Both post and addEndpoint should be inside the class + assert "class 
EndpointGroup" in code_context.target_code + assert "post(" in code_context.target_code + assert "private addEndpoint" in code_context.target_code + + # The private method should be inside the class, not outside + # Check that addEndpoint appears BEFORE the closing brace of the class + class_end_index = code_context.target_code.rfind("}") + add_endpoint_index = code_context.target_code.find("addEndpoint") + assert add_endpoint_index < class_end_index, ( + "addEndpoint should be inside the class wrapper" + ) + + def test_multiple_private_helpers_inside_class(self, ts_support): + """Test that multiple private helpers are all included inside the class.""" + with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: + f.write(""" +export class Router { + private routes: Map = new Map(); + + addRoute(path: string, handler: Function): boolean { + const normalizedPath = this.normalizePath(path); + this.validatePath(normalizedPath); + this.routes.set(normalizedPath, handler); + return true; + } + + private normalizePath(path: string): string { + return path.toLowerCase().trim(); + } + + private validatePath(path: string): boolean { + if (!path.startsWith("/")) { + throw new Error("Path must start with /"); + } + return true; + } +} +""") + f.flush() + file_path = Path(f.name) + + # Discover the 'addRoute' method + functions = ts_support.discover_functions(file_path) + add_route_method = None + for func in functions: + if func.function_name == "addRoute": + add_route_method = func + break + + assert add_route_method is not None + + code_context = ts_support.extract_code_context( + add_route_method, file_path.parent, file_path.parent + ) + + # Should be valid TypeScript + assert ts_support.validate_syntax(code_context.target_code) is True + + # All methods should be inside the class + assert "private normalizePath" in code_context.target_code + assert "private validatePath" in code_context.target_code + + def 
test_same_class_helpers_filtered_from_helper_list(self, ts_support): + """Test that same-class helpers are not duplicated in the helpers list.""" + with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: + f.write(""" +export class Calculator { + add(a: number, b: number): number { + return this.compute(a, b, "+"); + } + + private compute(a: number, b: number, op: string): number { + if (op === "+") return a + b; + return 0; + } +} +""") + f.flush() + file_path = Path(f.name) + + functions = ts_support.discover_functions(file_path) + add_method = None + for func in functions: + if func.function_name == "add": + add_method = func + break + + assert add_method is not None + + code_context = ts_support.extract_code_context( + add_method, file_path.parent, file_path.parent + ) + + # 'compute' should be in target_code (inside class) + assert "compute" in code_context.target_code + + # 'compute' should NOT be in helper_functions (would be duplicate) + helper_names = [h.name for h in code_context.helper_functions] + assert "compute" not in helper_names, ( + "Same-class helper 'compute' should not be in helper_functions list" + ) + + class TestTypeScriptLanguageProperties: """Tests for TypeScript language support properties.""" diff --git a/tests/test_languages/test_typescript_e2e.py b/tests/test_languages/test_typescript_e2e.py index 199094a1d..a638f01a1 100644 --- a/tests/test_languages/test_typescript_e2e.py +++ b/tests/test_languages/test_typescript_e2e.py @@ -285,7 +285,7 @@ def test_function_to_optimize_has_correct_fields(self): with tempfile.NamedTemporaryFile(suffix=".ts", mode="w", delete=False) as f: f.write(""" -class Calculator { +export class Calculator { add(a: number, b: number): number { return a + b; } @@ -295,7 +295,7 @@ class Calculator { } } -function standalone(x: number): number { +export function standalone(x: number): number { return x * 2; } """) diff --git a/tiles/codeflash-docs/docs/ai-service.md 
b/tiles/codeflash-docs/docs/ai-service.md new file mode 100644 index 000000000..4197a97d0 --- /dev/null +++ b/tiles/codeflash-docs/docs/ai-service.md @@ -0,0 +1,108 @@ +# AI Service + +How codeflash communicates with the AI optimization backend. + +## `AiServiceClient` (`api/aiservice.py`) + +The client connects to the AI service at `https://app.codeflash.ai` (or `http://localhost:8000` when `CODEFLASH_AIS_SERVER=local`). + +Authentication uses Bearer token from `get_codeflash_api_key()`. All requests go through `make_ai_service_request()` which handles JSON serialization via Pydantic encoder. + +Timeout: 90s for production, 300s for local. + +## Endpoints + +### `/ai/optimize` — Generate Candidates + +Method: `optimize_code()` + +Sends source code + dependency context to generate optimization candidates. + +Payload: +- `source_code` — The read-writable code (markdown format) +- `dependency_code` — Read-only context code +- `trace_id` — Unique trace ID for the optimization run +- `language` — `"python"`, `"javascript"`, or `"typescript"` +- `n_candidates` — Number of candidates to generate (controlled by effort level) +- `is_async` — Whether the function is async +- `is_numerical_code` — Whether the code is numerical (affects optimization strategy) + +Returns: `list[OptimizedCandidate]` with `source=OptimizedCandidateSource.OPTIMIZE` + +### `/ai/optimize_line_profiler` — Line-Profiler-Guided Candidates + +Method: `optimize_python_code_line_profiler()` + +Like `/optimize` but includes `line_profiler_results` to guide the LLM toward hot lines. + +Returns: candidates with `source=OptimizedCandidateSource.OPTIMIZE_LP` + +### `/ai/refine` — Refine Existing Candidate + +Method: `refine_code()` + +Request type: `AIServiceRefinerRequest` + +Sends an existing candidate with runtime data and line profiler results to generate an improved version. 
+ +Key fields: +- `original_source_code` / `optimized_source_code` — Before and after +- `original_code_runtime` / `optimized_code_runtime` — Timing data +- `speedup` — Current speedup ratio +- `original_line_profiler_results` / `optimized_line_profiler_results` + +Returns: candidates with `source=OptimizedCandidateSource.REFINE` and `parent_id` set to the refined candidate's ID + +### `/ai/repair` — Fix Failed Candidate + +Method: `repair_code()` + +Request type: `AIServiceCodeRepairRequest` + +Sends a failed candidate with test diffs showing what went wrong. + +Key fields: +- `original_source_code` / `modified_source_code` +- `test_diffs: list[TestDiff]` — Each with `scope` (return_value/stdout/did_pass), original vs candidate values, and test source code + +Returns: candidates with `source=OptimizedCandidateSource.REPAIR` and `parent_id` set + +### `/ai/adaptive_optimize` — Multi-Candidate Adaptive + +Method: `adaptive_optimize()` + +Request type: `AIServiceAdaptiveOptimizeRequest` + +Sends multiple previous candidates with their speedups for the LLM to learn from and generate better candidates. + +Key fields: +- `candidates: list[AdaptiveOptimizedCandidate]` — Previous candidates with source code, explanation, source type, and speedup + +Returns: candidates with `source=OptimizedCandidateSource.ADAPTIVE` + +### `/ai/rewrite_jit` — JIT Rewrite + +Method: `get_jit_rewritten_code()` + +Rewrites code to use JIT compilation (e.g., Numba). + +Returns: candidates with `source=OptimizedCandidateSource.JIT_REWRITE` + +## Candidate Parsing + +All endpoints return JSON with an `optimizations` array. Each entry has: +- `source_code` — Markdown-formatted code blocks +- `explanation` — LLM explanation +- `optimization_id` — Unique ID +- `parent_id` — Optional parent reference +- `model` — Which LLM model was used + +`_get_valid_candidates()` parses the markdown code via `CodeStringsMarkdown.parse_markdown_code()` and filters out entries with empty code blocks. 
+ +## `LocalAiServiceClient` + +Used when `CODEFLASH_EXPERIMENT_ID` is set. Mirrors `AiServiceClient` but sends to a separate experimental endpoint for A/B testing optimization strategies. + +## LLM Call Sequencing + +`AiServiceClient` tracks call sequence via `llm_call_counter` (itertools.count). Each request includes a `call_sequence` number, used by the backend to maintain conversation context across multiple calls for the same function. diff --git a/tiles/codeflash-docs/docs/configuration.md b/tiles/codeflash-docs/docs/configuration.md new file mode 100644 index 000000000..32dd8d53d --- /dev/null +++ b/tiles/codeflash-docs/docs/configuration.md @@ -0,0 +1,79 @@ +# Configuration + +Key configuration constants, effort levels, and thresholds. + +## Constants (`code_utils/config_consts.py`) + +### Test Execution + +| Constant | Value | Description | +|----------|-------|-------------| +| `MAX_TEST_RUN_ITERATIONS` | 5 | Maximum test loop iterations | +| `INDIVIDUAL_TESTCASE_TIMEOUT` | 15s | Timeout per individual test case | +| `MAX_FUNCTION_TEST_SECONDS` | 60s | Max total time for function testing | +| `MAX_TEST_FUNCTION_RUNS` | 50 | Max test function executions | +| `MAX_CUMULATIVE_TEST_RUNTIME_NANOSECONDS` | 100ms | Max cumulative test runtime | +| `TOTAL_LOOPING_TIME` | 10s | Candidate benchmarking budget | +| `MIN_TESTCASE_PASSED_THRESHOLD` | 6 | Minimum test cases that must pass | + +### Performance Thresholds + +| Constant | Value | Description | +|----------|-------|-------------| +| `MIN_IMPROVEMENT_THRESHOLD` | 0.05 (5%) | Minimum speedup to accept a candidate | +| `MIN_THROUGHPUT_IMPROVEMENT_THRESHOLD` | 0.10 (10%) | Minimum async throughput improvement | +| `MIN_CONCURRENCY_IMPROVEMENT_THRESHOLD` | 0.20 (20%) | Minimum concurrency ratio improvement | +| `COVERAGE_THRESHOLD` | 60.0% | Minimum test coverage | + +### Stability Thresholds + +| Constant | Value | Description | +|----------|-------|-------------| +| `STABILITY_WINDOW_SIZE` | 0.35 | 35% of 
total iteration window | +| `STABILITY_CENTER_TOLERANCE` | 0.0025 | ±0.25% around median | +| `STABILITY_SPREAD_TOLERANCE` | 0.0025 | 0.25% window spread | + +### Context Limits + +| Constant | Value | Description | +|----------|-------|-------------| +| `OPTIMIZATION_CONTEXT_TOKEN_LIMIT` | 16000 | Max tokens for optimization context | +| `TESTGEN_CONTEXT_TOKEN_LIMIT` | 16000 | Max tokens for test generation context | +| `MAX_CONTEXT_LEN_REVIEW` | 1000 | Max context length for optimization review | + +### Other + +| Constant | Value | Description | +|----------|-------|-------------| +| `MIN_CORRECT_CANDIDATES` | 2 | Min correct candidates before skipping repair | +| `REPEAT_OPTIMIZATION_PROBABILITY` | 0.1 | Probability of re-optimizing a function | +| `DEFAULT_IMPORTANCE_THRESHOLD` | 0.001 | Minimum addressable time to consider a function | +| `CONCURRENCY_FACTOR` | 10 | Number of concurrent executions for concurrency benchmark | +| `REFINED_CANDIDATE_RANKING_WEIGHTS` | (2, 1) | (runtime, diff) weights — runtime 2x more important | + +## Effort Levels + +`EffortLevel` enum: `LOW`, `MEDIUM`, `HIGH` + +Effort controls the number of candidates, repairs, and refinements: + +| Key | LOW | MEDIUM | HIGH | +|-----|-----|--------|------| +| `N_OPTIMIZER_CANDIDATES` | 3 | 5 | 6 | +| `N_OPTIMIZER_LP_CANDIDATES` | 4 | 6 | 7 | +| `N_GENERATED_TESTS` | 2 | 2 | 2 | +| `MAX_CODE_REPAIRS_PER_TRACE` | 2 | 3 | 5 | +| `REPAIR_UNMATCHED_PERCENTAGE_LIMIT` | 0.2 | 0.3 | 0.4 | +| `TOP_VALID_CANDIDATES_FOR_REFINEMENT` | 2 | 3 | 4 | +| `ADAPTIVE_OPTIMIZATION_THRESHOLD` | 0 | 0 | 2 | +| `MAX_ADAPTIVE_OPTIMIZATIONS_PER_TRACE` | 0 | 0 | 4 | + +Use `get_effort_value(EffortKeys.KEY, effort_level)` to retrieve values. + +## Project Configuration + +Configuration is read from `pyproject.toml` under `[tool.codeflash]`. 
Key settings are auto-detected by `setup/detector.py`: +- `module-root` — Root of the module to optimize +- `tests-root` — Root of test files +- `test-framework` — pytest, unittest, jest, etc. +- `formatter-cmds` — Code formatting commands diff --git a/tiles/codeflash-docs/docs/context-extraction.md b/tiles/codeflash-docs/docs/context-extraction.md new file mode 100644 index 000000000..8e0f366c9 --- /dev/null +++ b/tiles/codeflash-docs/docs/context-extraction.md @@ -0,0 +1,60 @@ +# Context Extraction + +How codeflash extracts and limits code context for optimization and test generation. + +## Overview + +Context extraction (`context/code_context_extractor.py`) builds a `CodeOptimizationContext` containing all code needed for the LLM to understand and optimize a function, split into: + +- **Read-writable code** (`CodeContextType.READ_WRITABLE`): The function being optimized plus its helper functions — code the LLM is allowed to modify +- **Read-only context** (`CodeContextType.READ_ONLY`): Dependency code for reference — imports, type definitions, base classes +- **Testgen context** (`CodeContextType.TESTGEN`): Context for test generation, may include imported class definitions and external base class inits +- **Hashing context** (`CodeContextType.HASHING`): Used for deduplication of optimization runs + +## Token Limits + +Both optimization and test generation contexts are token-limited: +- `OPTIMIZATION_CONTEXT_TOKEN_LIMIT = 16000` tokens +- `TESTGEN_CONTEXT_TOKEN_LIMIT = 16000` tokens + +Token counting uses `encoded_tokens_len()` from `code_utils/code_utils.py`. Functions whose context exceeds these limits are skipped. + +## Context Building Process + +### 1. 
Helper Discovery + +For the target function (`FunctionToOptimize`), the extractor finds: +- **Helpers of the function**: Functions/classes in the same file that the target function calls +- **Helpers of helpers**: Transitive dependencies of the helper functions + +These are organized as `dict[Path, set[FunctionSource]]` — mapping file paths to the set of helper functions found in each file. + +### 2. Code Extraction + +`extract_code_markdown_context_from_files()` builds `CodeStringsMarkdown` from the helper dictionaries. Each file's relevant code is extracted as a `CodeString` with its file path. + +### 3. Testgen Context Enrichment + +`build_testgen_context()` extends the basic context with: +- Imported class definitions (resolved from imports) +- External base class `__init__` methods +- External class `__init__` methods referenced in the context + +### 4. Unused Definition Removal + +`detect_unused_helper_functions()` and `remove_unused_definitions_by_function_names()` from `context/unused_definition_remover.py` prune definitions that are not transitively reachable from the target function, reducing token usage. + +### 5. Deduplication + +The hashing context (`hashing_code_context`) generates a hash (`hashing_code_context_hash`) used to detect when the same function context has already been optimized in a previous run, avoiding redundant work. 
+ +## Key Functions + +| Function | Location | Purpose | +|----------|----------|---------| +| `build_testgen_context()` | `context/code_context_extractor.py` | Build enriched testgen context | +| `extract_code_markdown_context_from_files()` | `context/code_context_extractor.py` | Convert helper dicts to `CodeStringsMarkdown` | +| `detect_unused_helper_functions()` | `context/unused_definition_remover.py` | Find unused definitions | +| `remove_unused_definitions_by_function_names()` | `context/unused_definition_remover.py` | Remove unused definitions | +| `collect_top_level_defs_with_usages()` | `context/unused_definition_remover.py` | Analyze definition usage | +| `encoded_tokens_len()` | `code_utils/code_utils.py` | Count tokens in code | diff --git a/tiles/codeflash-docs/docs/domain-types.md b/tiles/codeflash-docs/docs/domain-types.md new file mode 100644 index 000000000..7bc2dd868 --- /dev/null +++ b/tiles/codeflash-docs/docs/domain-types.md @@ -0,0 +1,153 @@ +# Domain Types + +Core data types used throughout the codeflash optimization pipeline. + +## Function Representation + +### `FunctionToOptimize` (`models/function_types.py`) + +The canonical dataclass representing a function candidate for optimization. Works across Python, JavaScript, and TypeScript. 
+ +Key fields: +- `function_name: str` — The function name +- `file_path: Path` — Absolute file path where the function is located +- `parents: list[FunctionParent]` — Parent scopes (classes/functions), each with `name` and `type` +- `starting_line / ending_line: Optional[int]` — Line range (1-indexed) +- `is_async: bool` — Whether the function is async +- `is_method: bool` — Whether it belongs to a class +- `language: str` — Programming language (default: `"python"`) + +Key properties: +- `qualified_name` — Full dotted name including parent classes (e.g., `MyClass.my_method`) +- `top_level_parent_name` — Name of outermost parent, or function name if no parents +- `class_name` — Immediate parent class name, or `None` + +### `FunctionParent` (`models/function_types.py`) + +Represents a parent scope: `name: str` (e.g., `"MyClass"`) and `type: str` (e.g., `"ClassDef"`). + +### `FunctionSource` (`models/models.py`) + +Represents a resolved function with source code. Used for helper functions in context extraction. + +Fields: `file_path`, `qualified_name`, `fully_qualified_name`, `only_function_name`, `source_code`, `jedi_definition`. + +## Code Representation + +### `CodeString` (`models/models.py`) + +A single code block with validated syntax: +- `code: str` — The source code +- `file_path: Optional[Path]` — Origin file path +- `language: str` — Language for validation (default: `"python"`) + +Validates syntax on construction via `model_validator`. + +### `CodeStringsMarkdown` (`models/models.py`) + +A collection of `CodeString` blocks — the primary format for passing code through the pipeline. 
+ +Key properties: +- `.flat` — Combined source code with file-path comment prefixes (e.g., `# file: path/to/file.py`) +- `.markdown` — Markdown-formatted with fenced code blocks: `` ```python:filepath\ncode\n``` `` +- `.file_to_path()` — Dict mapping file path strings to code + +Static method: +- `parse_markdown_code(markdown_code, expected_language)` — Parses markdown code blocks back into `CodeStringsMarkdown` + +## Optimization Context + +### `CodeOptimizationContext` (`models/models.py`) + +Holds all code context needed for optimization: +- `read_writable_code: CodeStringsMarkdown` — Code the LLM can modify +- `read_only_context_code: str` — Reference-only dependency code +- `testgen_context: CodeStringsMarkdown` — Context for test generation +- `hashing_code_context: str` / `hashing_code_context_hash: str` — For deduplication +- `helper_functions: list[FunctionSource]` — Helper functions in the writable code +- `preexisting_objects: set[tuple[str, tuple[FunctionParent, ...]]]` — Objects that already exist in the code + +### `CodeContextType` enum (`models/models.py`) + +Defines context categories: `READ_WRITABLE`, `READ_ONLY`, `TESTGEN`, `HASHING`. + +## Candidates + +### `OptimizedCandidate` (`models/models.py`) + +A generated code variant: +- `source_code: CodeStringsMarkdown` — The optimized code +- `explanation: str` — LLM explanation of the optimization +- `optimization_id: str` — Unique identifier +- `source: OptimizedCandidateSource` — How it was generated +- `parent_id: str | None` — ID of parent candidate (for refinements/repairs) +- `model: str | None` — Which LLM model generated it + +### `OptimizedCandidateSource` enum (`models/models.py`) + +How a candidate was generated: `OPTIMIZE`, `OPTIMIZE_LP` (line profiler), `REFINE`, `REPAIR`, `ADAPTIVE`, `JIT_REWRITE`. 
+ +### `CandidateEvaluationContext` (`models/models.py`) + +Tracks state during candidate evaluation: +- `speedup_ratios` / `optimized_runtimes` / `is_correct` — Per-candidate results +- `ast_code_to_id` — Deduplication map (normalized AST → first seen candidate) +- `valid_optimizations` — Candidates that passed all checks + +Key methods: `record_failed_candidate()`, `record_successful_candidate()`, `handle_duplicate_candidate()`, `register_new_candidate()`. + +## Baseline & Results + +### `OriginalCodeBaseline` (`models/models.py`) + +Baseline measurements for the original code: +- `behavior_test_results: TestResults` / `benchmarking_test_results: TestResults` +- `line_profile_results: dict` +- `runtime: int` — Total runtime in nanoseconds +- `coverage_results: Optional[CoverageData]` + +### `BestOptimization` (`models/models.py`) + +The winning candidate after evaluation: +- `candidate: OptimizedCandidate` +- `helper_functions: list[FunctionSource]` +- `code_context: CodeOptimizationContext` +- `runtime: int` +- `winning_behavior_test_results` / `winning_benchmarking_test_results: TestResults` + +## Test Types + +### `TestType` enum (`models/test_type.py`) + +- `EXISTING_UNIT_TEST` (1) — Pre-existing tests from the codebase +- `INSPIRED_REGRESSION` (2) — Tests inspired by existing tests +- `GENERATED_REGRESSION` (3) — AI-generated regression tests +- `REPLAY_TEST` (4) — Tests from recorded benchmark data +- `CONCOLIC_COVERAGE_TEST` (5) — Coverage-guided tests +- `INIT_STATE_TEST` (6) — Class init state verification + +### `TestFile` / `TestFiles` (`models/models.py`) + +`TestFile` represents a single test file with `instrumented_behavior_file_path`, optional `benchmarking_file_path`, `original_file_path`, `test_type`, and `tests_in_file`. + +`TestFiles` is a collection with lookup methods: `get_by_type()`, `get_by_original_file_path()`, `get_test_type_by_instrumented_file_path()`. 
+ +### `TestResults` (`models/models.py`) + +Collection of `FunctionTestInvocation` results with indexed lookup. Key methods: +- `add(invocation)` — Deduplicated insert +- `total_passed_runtime()` — Sum of minimum runtimes per test case (nanoseconds) +- `number_of_loops()` — Max loop index across all results +- `usable_runtime_data_by_test_case()` — Dict of invocation ID → list of runtimes + +## Result Type + +### `Result[L, R]` / `Success` / `Failure` (`either.py`) + +Functional error handling type: +- `Success(value)` — Wraps a successful result +- `Failure(error)` — Wraps an error +- `result.is_successful()` / `result.is_failure()` — Check type +- `result.unwrap()` — Get success value (raises if Failure) +- `result.failure()` — Get failure value (raises if Success) +- `is_successful(result)` — Module-level helper function diff --git a/tiles/codeflash-docs/docs/index.md b/tiles/codeflash-docs/docs/index.md new file mode 100644 index 000000000..930e287eb --- /dev/null +++ b/tiles/codeflash-docs/docs/index.md @@ -0,0 +1,41 @@ +# Codeflash Internal Documentation + +CodeFlash is an AI-powered code optimizer for Python, JavaScript, and TypeScript that automatically improves code performance while maintaining correctness. It uses LLMs to generate optimization candidates, verifies correctness through test execution, and benchmarks performance improvements. + +## Pipeline Overview + +``` +Discovery → Ranking → Context Extraction → Test Gen + Optimization → Baseline → Candidate Evaluation → PR +``` + +1. **Discovery** (`discovery/`): Find optimizable functions across the codebase using `FunctionVisitor` +2. **Ranking** (`benchmarking/function_ranker.py`): Rank functions by addressable time using trace data +3. **Context** (`context/`): Extract code dependencies — split into read-writable (modifiable) and read-only (reference) +4. **Optimization** (`optimization/`, `api/`): Generate candidates via AI service, runs concurrently with test generation +5.
**Verification** (`verification/`): Run candidates against tests via custom pytest plugin, compare outputs +6. **Benchmarking** (`benchmarking/`): Measure performance, select best candidate by speedup +7. **Result** (`result/`, `github/`): Create PR with winning optimization + +## Key Entry Points + +| Task | File | +|------|------| +| CLI arguments & commands | `cli_cmds/cli.py` | +| Optimization orchestration | `optimization/optimizer.py` → `Optimizer.run()` | +| Per-function optimization | `optimization/function_optimizer.py` → `FunctionOptimizer` | +| Function discovery | `discovery/functions_to_optimize.py` | +| Context extraction | `context/code_context_extractor.py` | +| Test execution | `verification/test_runner.py`, `verification/pytest_plugin.py` | +| Performance ranking | `benchmarking/function_ranker.py` | +| Domain types | `models/models.py`, `models/function_types.py` | +| AI service | `api/aiservice.py` → `AiServiceClient` | +| Configuration | `code_utils/config_consts.py` | + +## Documentation Pages + +- [Domain Types](domain-types.md) — Core data types and their relationships +- [Optimization Pipeline](optimization-pipeline.md) — Step-by-step data flow through the pipeline +- [Context Extraction](context-extraction.md) — How code context is extracted and token-limited +- [Verification](verification.md) — Test execution, pytest plugin, deterministic patches +- [AI Service](ai-service.md) — AI service client endpoints and request types +- [Configuration](configuration.md) — Config schema, effort levels, thresholds diff --git a/tiles/codeflash-docs/docs/optimization-pipeline.md b/tiles/codeflash-docs/docs/optimization-pipeline.md new file mode 100644 index 000000000..9a3879ccc --- /dev/null +++ b/tiles/codeflash-docs/docs/optimization-pipeline.md @@ -0,0 +1,84 @@ +# Optimization Pipeline + +Step-by-step data flow from function discovery to PR creation. + +## 1. 
Entry Point: `Optimizer.run()` (`optimization/optimizer.py`) + +The `Optimizer` class is initialized with CLI args and creates: +- `TestConfig` with test roots, project root, pytest command +- `AiServiceClient` for AI service communication +- Optional `LocalAiServiceClient` for experiments + +`run()` orchestrates the full pipeline: discovers functions, optionally ranks them, then optimizes each in turn. + +## 2. Function Discovery (`discovery/functions_to_optimize.py`) + +`FunctionVisitor` traverses source files to find optimizable functions, producing `FunctionToOptimize` instances. Filters include: +- Skipping functions that are too small or trivial +- Skipping previously optimized functions (via `was_function_previously_optimized()`) +- Applying user-configured include/exclude patterns + +## 3. Function Ranking (`benchmarking/function_ranker.py`) + +When trace data is available, `FunctionRanker` ranks functions by **addressable time** — the time a function spends that could be optimized (own time + callee time / call count). Functions below `DEFAULT_IMPORTANCE_THRESHOLD=0.001` are skipped. + +## 4. Per-Function Optimization: `FunctionOptimizer` (`optimization/function_optimizer.py`) + +For each function, `FunctionOptimizer.optimize_function()` runs the full optimization loop: + +### 4a. Context Extraction (`context/code_context_extractor.py`) + +Extracts `CodeOptimizationContext` containing: +- `read_writable_code` — Code the LLM can modify (the function + helpers) +- `read_only_context_code` — Dependency code for reference only +- `testgen_context` — Context for test generation (may include imported class definitions) + +Token limits are enforced: `OPTIMIZATION_CONTEXT_TOKEN_LIMIT=16000` and `TESTGEN_CONTEXT_TOKEN_LIMIT=16000`. Functions exceeding these are rejected. + +### 4b. 
Concurrent Test Generation + LLM Optimization + +These run in parallel using `concurrent.futures`: +- **Test generation**: Generates regression tests from the function context +- **LLM optimization**: Sends `read_writable_code.markdown` + `read_only_context_code` to the AI service + +The number of candidates depends on effort level (see Configuration docs). + +### 4c. Candidate Evaluation + +For each `OptimizedCandidate`: + +1. **Deduplication**: Normalize code AST and check against `CandidateEvaluationContext.ast_code_to_id`. If duplicate, copy results from previous evaluation. + +2. **Code replacement**: Replace the original function with the candidate using `replace_function_definitions_in_module()`. + +3. **Behavioral testing**: Run instrumented tests in subprocess. The custom pytest plugin applies deterministic patches. Compare return values, stdout, and pass/fail status against the original baseline. + +4. **Benchmarking**: If behavior matches, run performance tests with looping (`TOTAL_LOOPING_TIME=10s`). Calculate speedup ratio. + +5. **Validation**: Candidate must beat `MIN_IMPROVEMENT_THRESHOLD=0.05` (5% speedup) and pass stability checks. + +### 4d. Refinement & Repair + +- **Repair**: If fewer than `MIN_CORRECT_CANDIDATES=2` pass, failed candidates can be repaired via `AIServiceCodeRepairRequest` (sends test diffs to LLM). +- **Refinement**: Top valid candidates are refined via `AIServiceRefinerRequest` (sends runtime data, line profiler results). +- **Adaptive**: At HIGH effort, additional adaptive optimization rounds via `AIServiceAdaptiveOptimizeRequest`. + +### 4e. Best Candidate Selection + +The winning candidate is selected by: +1. Highest speedup ratio +2. For tied speedups, shortest diff length from original +3. Refinement candidates use weighted ranking: `(2 * runtime_rank + 1 * diff_rank)` + +Result is a `BestOptimization` with the candidate, context, test results, and runtime. + +## 5. 
PR Creation (`github/`) + +If a winning candidate is found, a PR is created with: +- The optimized code diff +- Performance benchmark details +- Explanation from the LLM + +## Worktree Mode + +When `--worktree` is enabled, optimization runs in an isolated git worktree (`code_utils/git_worktree_utils.py`). This allows parallel optimization without affecting the working tree. Changes are captured as patch files. diff --git a/tiles/codeflash-docs/docs/verification.md b/tiles/codeflash-docs/docs/verification.md new file mode 100644 index 000000000..2a84f9340 --- /dev/null +++ b/tiles/codeflash-docs/docs/verification.md @@ -0,0 +1,93 @@ +# Verification + +How codeflash verifies candidate correctness and measures performance. + +## Test Execution Architecture + +Tests are executed in a **subprocess** to isolate the test environment from the main codeflash process. The test runner (`verification/test_runner.py`) invokes pytest (or Jest for JS/TS) with specific plugin configurations. + +### Plugin Blocklists + +- **Behavioral tests**: Block `benchmark`, `codspeed`, `xdist`, `sugar` +- **Benchmarking tests**: Block `codspeed`, `cov`, `benchmark`, `profiling`, `xdist`, `sugar` + +These are defined as `BEHAVIORAL_BLOCKLISTED_PLUGINS` and `BENCHMARKING_BLOCKLISTED_PLUGINS` in `verification/test_runner.py`. 
+ +## Custom Pytest Plugin (`verification/pytest_plugin.py`) + +The plugin is loaded into the test subprocess and provides: + +### Deterministic Patches + +`_apply_deterministic_patches()` replaces non-deterministic functions with fixed values to ensure reproducible test output: + +| Module | Function | Fixed Value | +|--------|----------|-------------| +| `time` | `time()` | `1761717605.108106` | +| `time` | `perf_counter()` | Incrementing by 1ms per call | +| `datetime` | `datetime.now()` | `2021-01-01 02:05:10 UTC` | +| `datetime` | `datetime.utcnow()` | `2021-01-01 02:05:10 UTC` | +| `uuid` | `uuid4()` / `uuid1()` | `12345678-1234-5678-9abc-123456789012` | +| `random` | `random()` | `0.123456789` (seeded with 42) | +| `os` | `urandom(n)` | `b"\x42" * n` | +| `numpy.random` | seed | `42` | + +Patches call the original function first to maintain performance characteristics (same call overhead). + +### Timing Markers + +Test results include timing markers in stdout: `!######:######!` + +The pattern `_TIMING_MARKER_PATTERN` extracts timing data for calculating function utilization fraction. + +### Loop Stability + +Performance benchmarking uses configurable stability thresholds: +- `STABILITY_WINDOW_SIZE = 0.35` (35% of total iterations) +- `STABILITY_CENTER_TOLERANCE = 0.0025` (±0.25% around median) +- `STABILITY_SPREAD_TOLERANCE = 0.0025` (0.25% window spread) + +### Memory Limits (Linux) + +On Linux, the plugin sets `RLIMIT_AS` to 85% of total system memory (RAM + swap) to prevent OOM kills. 
+ +## Test Result Processing + +### `TestResults` (`models/models.py`) + +Collects `FunctionTestInvocation` results with: +- Deduplicated insertion via `unique_invocation_loop_id` +- `total_passed_runtime()` — Sum of minimum runtimes per test case (nanoseconds) +- `number_of_loops()` — Max loop index +- `usable_runtime_data_by_test_case()` — Grouped timing data + +### `FunctionTestInvocation` + +Each invocation records: +- `loop_index` — Iteration number (starts at 1) +- `id: InvocationId` — Fully qualified test identifier +- `did_pass: bool` — Pass/fail status +- `runtime: Optional[int]` — Time in nanoseconds +- `return_value: Optional[object]` — Captured return value +- `test_type: TestType` — Which test category + +### Behavioral vs Performance Testing + +1. **Behavioral**: Runs with `TestingMode.BEHAVIOR`. Compares return values and stdout between original and candidate. Any difference = candidate rejected. +2. **Performance**: Runs with `TestingMode.PERFORMANCE`. Loops for `TOTAL_LOOPING_TIME=10s` to get stable timing. Calculates speedup ratio. +3. **Line Profile**: Runs with `TestingMode.LINE_PROFILE`. Collects per-line timing data for refinement. + +## Test Types + +| TestType | Value | Description | +|----------|-------|-------------| +| `EXISTING_UNIT_TEST` | 1 | Pre-existing tests from the codebase | +| `INSPIRED_REGRESSION` | 2 | Tests inspired by existing tests | +| `GENERATED_REGRESSION` | 3 | AI-generated regression tests | +| `REPLAY_TEST` | 4 | Tests from recorded benchmark data | +| `CONCOLIC_COVERAGE_TEST` | 5 | Coverage-guided tests | +| `INIT_STATE_TEST` | 6 | Class init state verification | + +## Coverage + +Coverage is measured via `CoverageData` with a threshold of `COVERAGE_THRESHOLD=60.0%`. Low coverage may affect confidence in the optimization's correctness. 
diff --git a/tiles/codeflash-docs/evals/capabilities.json b/tiles/codeflash-docs/evals/capabilities.json new file mode 100644 index 000000000..1e39768a4 --- /dev/null +++ b/tiles/codeflash-docs/evals/capabilities.json @@ -0,0 +1,118 @@ +{ + "package_name": "codeflash-docs", + "total_capabilities": 16, + "capabilities": [ + { + "id": 0, + "name": "pipeline-stage-ordering", + "description": "Know the correct ordering of codeflash pipeline stages: Discovery → Ranking → Context Extraction → Test Gen + Optimization (concurrent) → Baseline → Candidate Evaluation → PR", + "complexity": "basic", + "api_elements": ["Optimizer.run()", "FunctionOptimizer.optimize_function()"] + }, + { + "id": 1, + "name": "function-to-optimize-fields", + "description": "Know FunctionToOptimize key fields (function_name, file_path, parents, starting_line/ending_line, is_async, is_method, language) and properties (qualified_name, top_level_parent_name, class_name)", + "complexity": "intermediate", + "api_elements": ["FunctionToOptimize", "FunctionParent", "models/function_types.py"] + }, + { + "id": 2, + "name": "code-strings-markdown-format", + "description": "Know that code is serialized as markdown fenced blocks with language:filepath syntax (```python:filepath\\ncode\\n```) and parsed via CodeStringsMarkdown.parse_markdown_code()", + "complexity": "intermediate", + "api_elements": ["CodeStringsMarkdown", "CodeString", ".markdown", ".flat", "parse_markdown_code()"] + }, + { + "id": 3, + "name": "read-writable-vs-read-only", + "description": "Distinguish read_writable_code (LLM can modify) from read_only_context_code (reference only) in CodeOptimizationContext", + "complexity": "basic", + "api_elements": ["CodeOptimizationContext", "read_writable_code", "read_only_context_code"] + }, + { + "id": 4, + "name": "candidate-source-types", + "description": "Know OptimizedCandidateSource variants: OPTIMIZE, OPTIMIZE_LP, REFINE, REPAIR, ADAPTIVE, JIT_REWRITE and when each is used", + "complexity": 
"intermediate", + "api_elements": ["OptimizedCandidateSource", "OptimizedCandidate"] + }, + { + "id": 5, + "name": "candidate-forest-dag", + "description": "Know that candidates form a forest/DAG via parent_id references where refinements and repairs build on previous candidates", + "complexity": "intermediate", + "api_elements": ["parent_id", "OptimizedCandidate", "CandidateForest"] + }, + { + "id": 6, + "name": "concurrent-testgen-optimization", + "description": "Know that test generation and LLM optimization run concurrently using concurrent.futures, not sequentially", + "complexity": "intermediate", + "api_elements": ["concurrent.futures", "FunctionOptimizer.optimize_function()"] + }, + { + "id": 7, + "name": "deterministic-patch-values", + "description": "Know the specific fixed values used by deterministic patches: time=1761717605.108106, datetime=2021-01-01 02:05:10 UTC, uuid=12345678-1234-5678-9abc-123456789012, random seeded with 42", + "complexity": "advanced", + "api_elements": ["_apply_deterministic_patches()", "pytest_plugin.py"] + }, + { + "id": 8, + "name": "test-type-enum", + "description": "Know the 6 TestType variants: EXISTING_UNIT_TEST, INSPIRED_REGRESSION, GENERATED_REGRESSION, REPLAY_TEST, CONCOLIC_COVERAGE_TEST, INIT_STATE_TEST", + "complexity": "basic", + "api_elements": ["TestType", "models/test_type.py"] + }, + { + "id": 9, + "name": "ai-service-endpoints", + "description": "Know the AI service endpoints: /ai/optimize, /ai/optimize_line_profiler, /ai/refine, /ai/repair, /ai/adaptive_optimize, /ai/rewrite_jit", + "complexity": "intermediate", + "api_elements": ["AiServiceClient", "api/aiservice.py"] + }, + { + "id": 10, + "name": "repair-request-structure", + "description": "Know that AIServiceCodeRepairRequest includes TestDiff objects with scope (RETURN_VALUE/STDOUT/DID_PASS), original vs candidate values, and test source code", + "complexity": "advanced", + "api_elements": ["AIServiceCodeRepairRequest", "TestDiff", "TestDiffScope"] + }, 
+ { + "id": 11, + "name": "effort-level-values", + "description": "Know specific effort level values: LOW gets 3 candidates, MEDIUM gets 5, HIGH gets 6 (N_OPTIMIZER_CANDIDATES)", + "complexity": "intermediate", + "api_elements": ["EffortLevel", "N_OPTIMIZER_CANDIDATES", "EFFORT_VALUES"] + }, + { + "id": 12, + "name": "context-token-limits", + "description": "Know OPTIMIZATION_CONTEXT_TOKEN_LIMIT=16000 and TESTGEN_CONTEXT_TOKEN_LIMIT=16000 and that encoded_tokens_len() is used for counting", + "complexity": "basic", + "api_elements": ["OPTIMIZATION_CONTEXT_TOKEN_LIMIT", "TESTGEN_CONTEXT_TOKEN_LIMIT", "encoded_tokens_len()"] + }, + { + "id": 13, + "name": "best-candidate-selection", + "description": "Know the selection criteria: highest speedup, then shortest diff for ties, and refinement weighted ranking (2*runtime + 1*diff)", + "complexity": "advanced", + "api_elements": ["BestOptimization", "REFINED_CANDIDATE_RANKING_WEIGHTS"] + }, + { + "id": 14, + "name": "plugin-blocklists", + "description": "Know behavioral test blocklisted plugins (benchmark, codspeed, xdist, sugar) and benchmarking blocklist (adds cov, profiling)", + "complexity": "intermediate", + "api_elements": ["BEHAVIORAL_BLOCKLISTED_PLUGINS", "BENCHMARKING_BLOCKLISTED_PLUGINS"] + }, + { + "id": 15, + "name": "result-type-usage", + "description": "Know that Result[L,R] from either.py uses Success(value)/Failure(error) with is_successful() check before unwrap()", + "complexity": "basic", + "api_elements": ["Result", "Success", "Failure", "is_successful", "either.py"] + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-1/capability.txt b/tiles/codeflash-docs/evals/scenario-1/capability.txt new file mode 100644 index 000000000..5bd3f0115 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-1/capability.txt @@ -0,0 +1 @@ +Code serialization format and context splitting \ No newline at end of file diff --git a/tiles/codeflash-docs/evals/scenario-1/criteria.json 
b/tiles/codeflash-docs/evals/scenario-1/criteria.json new file mode 100644 index 000000000..48a4eb178 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-1/criteria.json @@ -0,0 +1,21 @@ +{ + "context": "Tests whether the agent knows the CodeStringsMarkdown serialization format and the distinction between read-writable and read-only code context in the codeflash pipeline.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Markdown code block format", + "description": "Uses the correct fenced code block format with language:filepath syntax (```python:path/to/file.py) when constructing code for the AI service, NOT plain code blocks without file paths", + "max_score": 30 + }, + { + "name": "Read-writable vs read-only split", + "description": "Correctly separates code into read_writable_code (code the LLM can modify) and read_only_context_code (reference-only dependency code), NOT treating all code as modifiable", + "max_score": 35 + }, + { + "name": "parse_markdown_code usage", + "description": "Uses CodeStringsMarkdown.parse_markdown_code() to parse AI service responses back into structured code, NOT manual string splitting or regex", + "max_score": 35 + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-1/task.md b/tiles/codeflash-docs/evals/scenario-1/task.md new file mode 100644 index 000000000..93761be4b --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-1/task.md @@ -0,0 +1,35 @@ +# Format Code for AI Service Request + +## Context + +You are working on the codeflash optimization engine. The AI service accepts optimization requests with source code and dependency context. A function `calculate_total` in `analytics/metrics.py` needs to be optimized. It calls a helper `normalize_values` in the same file (both modifiable), and imports `BaseMetric` from `analytics/base.py` (not modifiable, just for reference). 
+ +```python +# analytics/metrics.py +from analytics.base import BaseMetric + +def normalize_values(data: list[float]) -> list[float]: + max_val = max(data) + return [x / max_val for x in data] + +def calculate_total(metrics: list[BaseMetric]) -> float: + values = [m.value for m in metrics] + normalized = normalize_values(values) + return sum(normalized) +``` + +```python +# analytics/base.py +class BaseMetric: + def __init__(self, name: str, value: float): + self.name = name + self.value = value +``` + +## Task + +Write a Python function `prepare_optimization_payload` that constructs the code payload for an AI service optimization request for `calculate_total`. It should properly format the source code and dependency code, and include a function to parse the AI service response back into structured code objects. + +## Expected Outputs + +- A Python file `payload_builder.py` with the payload construction and response parsing logic diff --git a/tiles/codeflash-docs/evals/scenario-2/capability.txt b/tiles/codeflash-docs/evals/scenario-2/capability.txt new file mode 100644 index 000000000..5afa5a2e4 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-2/capability.txt @@ -0,0 +1 @@ +Candidate source types and DAG relationships \ No newline at end of file diff --git a/tiles/codeflash-docs/evals/scenario-2/criteria.json b/tiles/codeflash-docs/evals/scenario-2/criteria.json new file mode 100644 index 000000000..8460c1420 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-2/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent knows the different OptimizedCandidateSource types and how candidates form a DAG via parent_id references in the codeflash pipeline.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Lists source types", + "description": "Identifies at least 4 of the 6 OptimizedCandidateSource variants: OPTIMIZE, OPTIMIZE_LP, REFINE, REPAIR, ADAPTIVE, JIT_REWRITE", + "max_score": 25 + }, + { + "name": "Parent ID linkage", + 
"description": "Explains that REFINE and REPAIR candidates reference their parent via parent_id, creating a DAG/forest structure, NOT independent candidates", + "max_score": 25 + }, + { + "name": "Refinement uses runtime data", + "description": "States that refinement sends runtime data and line profiler results to the AI service (AIServiceRefinerRequest), NOT just the source code", + "max_score": 25 + }, + { + "name": "Repair uses test diffs", + "description": "States that repair sends test failure diffs (TestDiff with scope: RETURN_VALUE/STDOUT/DID_PASS) to the AI service, NOT just error messages", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-2/task.md b/tiles/codeflash-docs/evals/scenario-2/task.md new file mode 100644 index 000000000..f55b25e3e --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-2/task.md @@ -0,0 +1,13 @@ +# Document the Candidate Lifecycle + +## Context + +A new engineer is joining the codeflash team and needs to understand how optimization candidates are generated, improved, and related to each other throughout the pipeline. They've asked for a clear explanation of the different ways candidates are produced and how the system iterates on them. + +## Task + +Write a technical document explaining the full lifecycle of an optimization candidate in codeflash — from initial generation through improvement iterations. Cover all the different ways candidates can be created, what data is sent to the AI service for each type, and how candidates relate to each other structurally. 
+ +## Expected Outputs + +- A markdown file `candidate-lifecycle.md` diff --git a/tiles/codeflash-docs/evals/scenario-3/capability.txt b/tiles/codeflash-docs/evals/scenario-3/capability.txt new file mode 100644 index 000000000..707dd8109 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-3/capability.txt @@ -0,0 +1 @@ +Deterministic patch values and test execution architecture \ No newline at end of file diff --git a/tiles/codeflash-docs/evals/scenario-3/criteria.json b/tiles/codeflash-docs/evals/scenario-3/criteria.json new file mode 100644 index 000000000..bf5c9f34f --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-3/criteria.json @@ -0,0 +1,31 @@ +{ + "context": "Tests whether the agent knows the specific deterministic patch values used in codeflash's pytest plugin and the subprocess-based test execution architecture.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Subprocess isolation", + "description": "States that tests run in a subprocess to isolate the test environment from the main codeflash process, NOT in the same process", + "max_score": 20 + }, + { + "name": "Fixed time value", + "description": "References the specific fixed timestamp 1761717605.108106 for time.time() or the fixed datetime 2021-01-01 02:05:10 UTC for datetime.now()", + "max_score": 20 + }, + { + "name": "Fixed UUID value", + "description": "References the specific fixed UUID 12345678-1234-5678-9abc-123456789012 for uuid4/uuid1", + "max_score": 20 + }, + { + "name": "Random seed", + "description": "States that random is seeded with 42 (NOT a different seed value)", + "max_score": 20 + }, + { + "name": "Plugin blocklists", + "description": "Mentions that behavioral tests block specific pytest plugins (at least 2 of: benchmark, codspeed, xdist, sugar) to ensure deterministic execution", + "max_score": 20 + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-3/task.md b/tiles/codeflash-docs/evals/scenario-3/task.md new file mode 100644 index 
000000000..b3970b839 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-3/task.md @@ -0,0 +1,13 @@ +# Explain Test Reproducibility Guarantees + +## Context + +A codeflash user notices that their optimization candidate passes behavioral tests on one run but fails on the next. They suspect non-determinism in the test execution. They want to understand what guarantees codeflash provides for test reproducibility and how the system ensures consistent results. + +## Task + +Write a technical explanation of how codeflash ensures deterministic test execution. Cover the execution environment setup, what sources of non-determinism are controlled, and any specific values or configurations used. Also explain the test execution architecture. + +## Expected Outputs + +- A markdown file `test-reproducibility.md` diff --git a/tiles/codeflash-docs/evals/scenario-4/capability.txt b/tiles/codeflash-docs/evals/scenario-4/capability.txt new file mode 100644 index 000000000..64848618a --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-4/capability.txt @@ -0,0 +1 @@ +Effort level configuration and candidate selection criteria \ No newline at end of file diff --git a/tiles/codeflash-docs/evals/scenario-4/criteria.json b/tiles/codeflash-docs/evals/scenario-4/criteria.json new file mode 100644 index 000000000..4fdc078ae --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-4/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent knows the specific effort level values for candidate generation and the criteria used to select the best optimization candidate.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Candidate counts by effort", + "description": "States correct N_OPTIMIZER_CANDIDATES values: LOW=3, MEDIUM=5, HIGH=6 (at least 2 of 3 correct)", + "max_score": 25 + }, + { + "name": "Speedup as primary selector", + "description": "States that the winning candidate is selected primarily by highest speedup ratio", + "max_score": 25 + }, + { + 
"name": "Diff length as tiebreaker", + "description": "States that for tied speedups, shortest diff length from original is used as tiebreaker", + "max_score": 25 + }, + { + "name": "Refinement ranking weights", + "description": "States that refinement candidates use weighted ranking with runtime weighted more heavily than diff (2:1 ratio or REFINED_CANDIDATE_RANKING_WEIGHTS=(2,1))", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-4/task.md b/tiles/codeflash-docs/evals/scenario-4/task.md new file mode 100644 index 000000000..e44e2738d --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-4/task.md @@ -0,0 +1,18 @@ +# Design a Candidate Selection Dashboard + +## Context + +The codeflash team wants to build a dashboard that shows users how optimization candidates were evaluated and why a particular candidate won. The dashboard needs to display the selection process at each stage, from initial candidate pool through to the final winner. + +## Task + +Write a specification document for the dashboard that explains: +1. How many candidates are generated at each effort level +2. The exact criteria and order of operations used to pick the winning candidate +3. How refinement candidates are ranked differently from initial candidates + +Include concrete examples showing how two hypothetical candidates would be compared. 
+ +## Expected Outputs + +- A markdown file `selection-dashboard-spec.md` diff --git a/tiles/codeflash-docs/evals/scenario-5/capability.txt b/tiles/codeflash-docs/evals/scenario-5/capability.txt new file mode 100644 index 000000000..0ec01e24f --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-5/capability.txt @@ -0,0 +1 @@ +Pipeline concurrency and FunctionToOptimize structure \ No newline at end of file diff --git a/tiles/codeflash-docs/evals/scenario-5/criteria.json b/tiles/codeflash-docs/evals/scenario-5/criteria.json new file mode 100644 index 000000000..13887ac34 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-5/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent knows the FunctionToOptimize data structure and the concurrent execution model for test generation and optimization.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "FunctionToOptimize fields", + "description": "Includes at least 4 of: function_name, file_path, parents (list of FunctionParent), starting_line, ending_line, is_async, is_method, language", + "max_score": 25 + }, + { + "name": "Qualified name property", + "description": "Mentions qualified_name as a property that produces the full dotted name including parent classes (e.g., MyClass.my_method)", + "max_score": 25 + }, + { + "name": "Concurrent execution", + "description": "States that test generation and LLM optimization run concurrently (in parallel), NOT sequentially one after the other", + "max_score": 25 + }, + { + "name": "Entry point identification", + "description": "Correctly identifies Optimizer.run() as the top-level entry point and FunctionOptimizer.optimize_function() as the per-function entry point", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-docs/evals/scenario-5/task.md b/tiles/codeflash-docs/evals/scenario-5/task.md new file mode 100644 index 000000000..42cb34653 --- /dev/null +++ b/tiles/codeflash-docs/evals/scenario-5/task.md @@ -0,0 +1,17 @@ +# Implement a 
Function Optimization Status Tracker + +## Context + +The codeflash team needs a status tracker that logs what happens to each function during an optimization run. For each function, it should record the function identity, which pipeline stages it passed through, and how long each stage took. + +## Task + +Write a design document explaining: +1. What data structure represents a function being optimized, including its identity fields and how nested functions (methods inside classes) are represented +2. The full name resolution strategy for identifying functions uniquely +3. Which stages of the pipeline operate on a single function at a time vs. operating on multiple functions +4. Where in the codebase the per-function optimization is orchestrated and what the top-level entry point is + +## Expected Outputs + +- A markdown file `status-tracker-design.md` diff --git a/tiles/codeflash-docs/evals/summary.json b/tiles/codeflash-docs/evals/summary.json new file mode 100644 index 000000000..38e0ca577 --- /dev/null +++ b/tiles/codeflash-docs/evals/summary.json @@ -0,0 +1,40 @@ +{ + "total_scenarios": 5, + "capabilities_coverage": { + "total_capabilities": 16, + "capabilities_tested": 12, + "coverage_percentage": 75.0 + }, + "complexity_distribution": { + "basic": 1, + "intermediate": 3, + "advanced": 1 + }, + "scenarios": [ + { + "index": 1, + "capability": "code-strings-markdown-format, read-writable-vs-read-only", + "complexity": "intermediate" + }, + { + "index": 2, + "capability": "candidate-source-types, candidate-forest-dag, repair-request-structure", + "complexity": "intermediate" + }, + { + "index": 3, + "capability": "deterministic-patch-values, plugin-blocklists", + "complexity": "advanced" + }, + { + "index": 4, + "capability": "effort-level-values, best-candidate-selection", + "complexity": "intermediate" + }, + { + "index": 5, + "capability": "function-to-optimize-fields, concurrent-testgen-optimization, pipeline-stage-ordering", + "complexity": "basic" + } + ] 
+} diff --git a/tiles/codeflash-docs/evals/summary_infeasible.json b/tiles/codeflash-docs/evals/summary_infeasible.json new file mode 100644 index 000000000..7450bd0b1 --- /dev/null +++ b/tiles/codeflash-docs/evals/summary_infeasible.json @@ -0,0 +1,25 @@ +{ + "total_infeasible": 4, + "infeasible_capabilities": [ + { + "capability": "ai-service-endpoints", + "complexity": "intermediate", + "reasoning": "Testing knowledge of specific API endpoints requires actual HTTP requests or mocking that bypasses the capability being tested" + }, + { + "capability": "context-token-limits", + "complexity": "basic", + "reasoning": "Already covered by the skills tile eval (scenario-1). Testing token counting requires the actual tokenizer library" + }, + { + "capability": "test-type-enum", + "complexity": "basic", + "reasoning": "Simple enum knowledge is better verified through skills that use test types rather than isolated recall" + }, + { + "capability": "result-type-usage", + "complexity": "basic", + "reasoning": "Already covered by the skills tile eval (scenario-2). 
Testing Result type usage is better done through implementation tasks" + } + ] +} diff --git a/tiles/codeflash-docs/tile.json b/tiles/codeflash-docs/tile.json new file mode 100644 index 000000000..8d18aa129 --- /dev/null +++ b/tiles/codeflash-docs/tile.json @@ -0,0 +1,7 @@ +{ + "name": "codeflash/codeflash-docs", + "version": "0.1.0", + "summary": "Internal documentation for the codeflash optimization engine", + "private": true, + "docs": "docs/index.md" +} diff --git a/tiles/codeflash-rules/rules/architecture.md b/tiles/codeflash-rules/rules/architecture.md new file mode 100644 index 000000000..3aaf78507 --- /dev/null +++ b/tiles/codeflash-rules/rules/architecture.md @@ -0,0 +1,45 @@ +# Architecture + +``` +codeflash/ +├── main.py # CLI entry point +├── cli_cmds/ # Command handling, console output (Rich) +├── discovery/ # Find optimizable functions +├── context/ # Extract code dependencies and imports +├── optimization/ # Generate optimized code via AI +│ ├── optimizer.py # Main optimization orchestration +│ └── function_optimizer.py # Per-function optimization logic +├── verification/ # Run deterministic tests (pytest plugin) +├── benchmarking/ # Performance measurement +├── github/ # PR creation +├── api/ # AI service communication +├── code_utils/ # Code parsing, git utilities +├── models/ # Pydantic models and types +├── languages/ # Multi-language support (Python, JavaScript/TypeScript) +├── setup/ # Config schema, auto-detection, first-run experience +├── picklepatch/ # Serialization/deserialization utilities +├── tracing/ # Function call tracing +├── tracer.py # Root-level tracer entry point for profiling +├── lsp/ # IDE integration (Language Server Protocol) +├── telemetry/ # Sentry, PostHog +├── either.py # Functional Result type for error handling +├── result/ # Result types and handling +└── version.py # Version information +``` + +## Key Entry Points + +| Task | Start here | +|------|------------| +| CLI arguments & commands | `cli_cmds/cli.py` | +| 
Optimization orchestration | `optimization/optimizer.py` → `Optimizer.run()` | +| Per-function optimization | `optimization/function_optimizer.py` → `FunctionOptimizer` | +| Function discovery | `discovery/functions_to_optimize.py` | +| Context extraction | `context/code_context_extractor.py` | +| Test execution | `verification/test_runner.py`, `verification/pytest_plugin.py` | +| Performance ranking | `benchmarking/function_ranker.py` | +| Domain types | `models/models.py`, `models/function_types.py` | +| Result handling | `either.py` (`Result`, `Success`, `Failure`, `is_successful`) | +| AI service communication | `api/aiservice.py` → `AiServiceClient` | +| Configuration constants | `code_utils/config_consts.py` | +| Language support | `languages/registry.py` → `get_language_support()` | diff --git a/tiles/codeflash-rules/rules/code-style.md b/tiles/codeflash-rules/rules/code-style.md new file mode 100644 index 000000000..2a2fbdf6b --- /dev/null +++ b/tiles/codeflash-rules/rules/code-style.md @@ -0,0 +1,11 @@ +# Code Style + +- **Line length**: 120 characters +- **Python**: 3.9+ syntax (use `from __future__ import annotations` for type hints) +- **Package management**: Always use `uv`, never `pip` — run commands via `uv run` +- **Tooling**: Ruff for linting/formatting, mypy strict mode, prek for pre-commit checks (`uv run prek run`) +- **Comments**: Minimal — only explain "why", not "what" +- **Docstrings**: Do not add unless explicitly requested +- **Naming**: NEVER use leading underscores (`_function_name`) — Python has no true private functions, use public names +- **Paths**: Always use absolute `Path` objects, handle encoding explicitly (UTF-8) +- **Source transforms**: Use `libcst` for code modification/transformation to preserve formatting; `ast` is acceptable for read-only analysis and parsing diff --git a/tiles/codeflash-rules/rules/git-conventions.md b/tiles/codeflash-rules/rules/git-conventions.md new file mode 100644 index 000000000..1835dfdca --- 
/dev/null +++ b/tiles/codeflash-rules/rules/git-conventions.md @@ -0,0 +1,9 @@ +# Git Conventions + +- **Always create a new branch from `main`** — never commit directly to `main` or reuse an existing feature branch for unrelated changes +- Use conventional commit format: `fix:`, `feat:`, `refactor:`, `docs:`, `test:`, `chore:` +- Keep commits atomic — one logical change per commit +- Commit message body should be concise (1-2 sentences max) +- PR titles should also use conventional format +- Branch naming: `cf-#-title` (lowercase, hyphenated) where `#` is the Linear issue number +- If related to a Linear issue, include `CF-#` in the PR body diff --git a/tiles/codeflash-rules/rules/language-rules.md b/tiles/codeflash-rules/rules/language-rules.md new file mode 100644 index 000000000..3b045a4f4 --- /dev/null +++ b/tiles/codeflash-rules/rules/language-rules.md @@ -0,0 +1,9 @@ +# Language Support Rules + +- Current language is a module-level singleton in `languages/current.py` — use `set_current_language()` / `current_language()`, never pass language as a parameter through call chains +- Use `get_language_support(identifier)` from `languages/registry.py` to get a `LanguageSupport` instance — accepts `Path`, `Language` enum, or string; never import language classes directly +- New language support classes must use the `@register_language` decorator to register with the extension and language registries +- `languages/__init__.py` uses `__getattr__` for lazy imports to avoid circular dependencies — follow this pattern when adding new exports +- `is_javascript()` returns `True` for both JavaScript and TypeScript +- Language modules are lazily imported on first `get_language_support()` call via `_ensure_languages_registered()` — the `@register_language` decorator fires on import and populates `_EXTENSION_REGISTRY` and `_LANGUAGE_REGISTRY` +- `LanguageSupport` instances are cached in `_SUPPORT_CACHE` — use `clear_cache()` only in tests diff --git 
a/tiles/codeflash-rules/rules/optimization-patterns.md b/tiles/codeflash-rules/rules/optimization-patterns.md new file mode 100644 index 000000000..7b879d227 --- /dev/null +++ b/tiles/codeflash-rules/rules/optimization-patterns.md @@ -0,0 +1,11 @@ +# Optimization Pipeline Patterns + +- All major operations return `Result[SuccessType, ErrorType]` — construct with `Success(value)` / `Failure(error)`, check with `is_successful()` before calling `unwrap()` +- Code context has token limits (`OPTIMIZATION_CONTEXT_TOKEN_LIMIT=16000`, `TESTGEN_CONTEXT_TOKEN_LIMIT=16000` in `code_utils/config_consts.py`) — exceeding them rejects the function +- `read_writable_code` (modifiable code) can span multiple files; `read_only_context_code` is reference-only dependency code +- Code is serialized as markdown code blocks: `` ```language:filepath\ncode\n``` `` — see `CodeStringsMarkdown` in `models/models.py` +- Candidates form a forest (DAG): refinements/repairs reference `parent_id` on previous candidates via `OptimizedCandidateSource` (OPTIMIZE, REFINE, REPAIR, ADAPTIVE, JIT_REWRITE) +- Test generation and optimization run concurrently — coordinate through `CandidateEvaluationContext` +- Generated tests are instrumented with `codeflash_capture.py` to record return values and traces +- Minimum improvement threshold is 5% (`MIN_IMPROVEMENT_THRESHOLD=0.05`) — candidates below this are rejected +- Stability thresholds: `STABILITY_WINDOW_SIZE=0.35`, `STABILITY_CENTER_TOLERANCE=0.0025`, `STABILITY_SPREAD_TOLERANCE=0.0025` diff --git a/tiles/codeflash-rules/rules/testing-rules.md b/tiles/codeflash-rules/rules/testing-rules.md new file mode 100644 index 000000000..780b48d60 --- /dev/null +++ b/tiles/codeflash-rules/rules/testing-rules.md @@ -0,0 +1,13 @@ +# Testing Rules + +- Code context extraction and replacement tests must assert full string equality — no substring matching +- Use pytest's `tmp_path` fixture for temp directories (it's a `Path` object) +- Write temp files inside 
`tmp_path`, never use `NamedTemporaryFile` (causes Windows file contention) +- Always call `.resolve()` on Path objects to ensure absolute paths and resolve symlinks +- Use `.as_posix()` when converting resolved paths to strings (normalizes to forward slashes) +- Any new feature or bug fix that can be tested automatically must have test cases +- If changes affect existing test expectations, update the tests accordingly — tests must always pass after changes +- The pytest plugin patches `time`, `random`, `uuid`, `datetime`, `os.urandom`, and `numpy.random` for deterministic test execution — never assume real randomness or real time in verification tests +- `conftest.py` uses an autouse fixture that calls `reset_current_language()` — tests always start with Python as the default language +- Test types are defined by the `TestType` enum: `EXISTING_UNIT_TEST`, `INSPIRED_REGRESSION`, `GENERATED_REGRESSION`, `REPLAY_TEST`, `CONCOLIC_COVERAGE_TEST`, `INIT_STATE_TEST` +- Verification runs tests in a subprocess using a custom pytest plugin (`verification/pytest_plugin.py`) — behavioral tests use blocklisted plugins (`benchmark`, `codspeed`, `xdist`, `sugar`), benchmarking tests additionally block `cov` and `profiling` diff --git a/tiles/codeflash-rules/tile.json b/tiles/codeflash-rules/tile.json new file mode 100644 index 000000000..a286ba09b --- /dev/null +++ b/tiles/codeflash-rules/tile.json @@ -0,0 +1,26 @@ +{ + "name": "codeflash/codeflash-rules", + "version": "0.1.0", + "summary": "Coding standards and conventions for the codeflash codebase", + "private": true, + "rules": { + "code-style": { + "rules": "rules/code-style.md" + }, + "architecture": { + "rules": "rules/architecture.md" + }, + "optimization-patterns": { + "rules": "rules/optimization-patterns.md" + }, + "git-conventions": { + "rules": "rules/git-conventions.md" + }, + "testing-rules": { + "rules": "rules/testing-rules.md" + }, + "language-rules": { + "rules": "rules/language-rules.md" + } + } +} diff --git 
a/tiles/codeflash-skills/evals/capabilities.json b/tiles/codeflash-skills/evals/capabilities.json new file mode 100644 index 000000000..cda33c968 --- /dev/null +++ b/tiles/codeflash-skills/evals/capabilities.json @@ -0,0 +1,104 @@ +{ + "package_name": "codeflash-skills", + "total_capabilities": 14, + "capabilities": [ + { + "id": 0, + "name": "sequential-pipeline-debugging", + "description": "Debug optimization failures by walking through pipeline stages sequentially and stopping at the first failure found", + "complexity": "intermediate", + "api_elements": ["discovery", "ranking", "context", "AI service", "verification", "deduplication", "repair"] + }, + { + "id": 1, + "name": "token-limit-awareness", + "description": "Know that OPTIMIZATION_CONTEXT_TOKEN_LIMIT and TESTGEN_CONTEXT_TOKEN_LIMIT are both 16000 tokens and that exceeding them causes function rejection", + "complexity": "basic", + "api_elements": ["OPTIMIZATION_CONTEXT_TOKEN_LIMIT", "TESTGEN_CONTEXT_TOKEN_LIMIT", "encoded_tokens_len()"] + }, + { + "id": 2, + "name": "improvement-threshold", + "description": "Know that MIN_IMPROVEMENT_THRESHOLD is 0.05 (5%) and candidates below this speedup are rejected", + "complexity": "basic", + "api_elements": ["MIN_IMPROVEMENT_THRESHOLD", "STABILITY_WINDOW_SIZE"] + }, + { + "id": 3, + "name": "ast-deduplication", + "description": "Know that candidates are deduplicated via AST normalization using normalize_code() and CandidateEvaluationContext.ast_code_to_id", + "complexity": "intermediate", + "api_elements": ["normalize_code()", "CandidateEvaluationContext.ast_code_to_id", "code_utils/deduplicate_code.py"] + }, + { + "id": 4, + "name": "repair-trigger-conditions", + "description": "Know that repair only triggers when fewer than MIN_CORRECT_CANDIDATES=2 pass, and is skipped when REPAIR_UNMATCHED_PERCENTAGE_LIMIT is exceeded", + "complexity": "advanced", + "api_elements": ["MIN_CORRECT_CANDIDATES", "REPAIR_UNMATCHED_PERCENTAGE_LIMIT", "AIServiceCodeRepairRequest"] + 
}, + { + "id": 5, + "name": "ai-service-error-patterns", + "description": "Know specific log patterns to search for when AI service fails: 'Error generating optimized candidates', 'cli-optimize-error-caught', 'cli-optimize-error-response'", + "complexity": "intermediate", + "api_elements": ["AiServiceClient", "api/aiservice.py"] + }, + { + "id": 6, + "name": "behavioral-vs-benchmark-failures", + "description": "Distinguish between behavioral test failures (return value/stdout/pass-fail mismatches via TestDiffScope) and benchmark failures (speedup below threshold)", + "complexity": "intermediate", + "api_elements": ["TestDiffScope", "RETURN_VALUE", "STDOUT", "DID_PASS"] + }, + { + "id": 7, + "name": "result-type-pattern", + "description": "Use Result[L, R] from either.py with Success/Failure constructors and is_successful() checks before unwrap()", + "complexity": "basic", + "api_elements": ["Result", "Success", "Failure", "is_successful", "unwrap()", "either.py"] + }, + { + "id": 8, + "name": "effort-config-pattern", + "description": "Add effort-dependent config via EffortKeys enum, EFFORT_VALUES dict with LOW/MEDIUM/HIGH levels, and get_effort_value()", + "complexity": "intermediate", + "api_elements": ["EffortKeys", "EffortLevel", "EFFORT_VALUES", "get_effort_value()", "config_consts.py"] + }, + { + "id": 9, + "name": "module-to-feature-mapping", + "description": "Know which codeflash module to modify for different feature types (optimization/ for strategies, api/ for endpoints, languages/ for language support, etc.)", + "complexity": "basic", + "api_elements": ["MODULE_REFERENCE.md"] + }, + { + "id": 10, + "name": "domain-type-conventions", + "description": "Use @dataclass(frozen=True) for immutable data, BaseModel for serializable models, and keep function_types.py dependency-free", + "complexity": "intermediate", + "api_elements": ["@dataclass(frozen=True)", "BaseModel", "models/models.py", "models/function_types.py"] + }, + { + "id": 11, + "name": 
"test-patterns", + "description": "Use tmp_path fixture, .resolve() on Paths, .as_posix() for string conversion, full string equality assertions, and awareness of deterministic patches", + "complexity": "basic", + "api_elements": ["tmp_path", ".resolve()", ".as_posix()", "pytest_plugin.py"] + }, + { + "id": 12, + "name": "quality-check-commands", + "description": "Run uv run prek run for formatting/linting, uv run mypy for type checking, and uv run pytest for tests", + "complexity": "basic", + "api_elements": ["uv run prek run", "uv run mypy", "uv run pytest"] + }, + { + "id": 13, + "name": "language-support-patterns", + "description": "Use @register_language decorator, get_language_support() for lookup, singleton pattern via set_current_language()/current_language(), and is_python()/is_javascript() guards", + "complexity": "advanced", + "api_elements": ["@register_language", "get_language_support()", "set_current_language()", "is_python()", "is_javascript()"] + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-1/capability.txt b/tiles/codeflash-skills/evals/scenario-1/capability.txt new file mode 100644 index 000000000..c4d34b1aa --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-1/capability.txt @@ -0,0 +1 @@ +Sequential pipeline debugging with specific thresholds \ No newline at end of file diff --git a/tiles/codeflash-skills/evals/scenario-1/criteria.json b/tiles/codeflash-skills/evals/scenario-1/criteria.json new file mode 100644 index 000000000..cec7afda7 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-1/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent follows the sequential debugging workflow from the skill, checking pipeline stages in order and using correct threshold values when diagnosing an optimization that produced no results.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Sequential stage order", + "description": "Investigates pipeline stages in order: discovery before ranking before 
context before AI service before test failures. Does NOT jump to later stages without checking earlier ones first.", + "max_score": 25 + }, + { + "name": "Token limit value", + "description": "References the specific token limit of 16000 for OPTIMIZATION_CONTEXT_TOKEN_LIMIT or TESTGEN_CONTEXT_TOKEN_LIMIT when checking context extraction", + "max_score": 25 + }, + { + "name": "Importance threshold", + "description": "References DEFAULT_IMPORTANCE_THRESHOLD=0.001 when checking function ranking", + "max_score": 25 + }, + { + "name": "Stops at failure", + "description": "Identifies the failing stage and focuses investigation there rather than continuing through all remaining stages", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-1/task.md b/tiles/codeflash-skills/evals/scenario-1/task.md new file mode 100644 index 000000000..17c74d8cb --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-1/task.md @@ -0,0 +1,13 @@ +# Diagnose Silent Optimization Skip + +## Context + +A user reports that when running codeflash on their project, a specific function `calculate_metrics` in `analytics/processor.py` never appears in the optimization results. The function exists in the module root, is not in the exclude list, and has not been previously optimized. Trace data shows the function is called frequently but with very short execution times (averaging 0.0005 seconds total addressable time). The function has moderate dependencies. + +## Task + +Write a diagnostic report explaining why this function is being skipped and at which stage in the pipeline the function is filtered out. Include the specific threshold or condition that causes the skip. + +## Expected Outputs + +A markdown file `diagnostic-report.md` explaining the root cause. 
diff --git a/tiles/codeflash-skills/evals/scenario-2/capability.txt b/tiles/codeflash-skills/evals/scenario-2/capability.txt new file mode 100644 index 000000000..72b283863 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-2/capability.txt @@ -0,0 +1 @@ +Result type pattern and effort-dependent configuration \ No newline at end of file diff --git a/tiles/codeflash-skills/evals/scenario-2/criteria.json b/tiles/codeflash-skills/evals/scenario-2/criteria.json new file mode 100644 index 000000000..9c49891b8 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-2/criteria.json @@ -0,0 +1,31 @@ +{ + "context": "Tests whether the agent uses the codeflash Result type pattern from either.py and the effort-dependent configuration pattern when implementing a new pipeline feature.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Imports from either.py", + "description": "Imports Success, Failure, and is_successful from codeflash.either (NOT from a different error handling module)", + "max_score": 20 + }, + { + "name": "Result return type", + "description": "Function returns Result type using Success() for success and Failure() for errors, not exceptions or None", + "max_score": 20 + }, + { + "name": "is_successful check", + "description": "Calls is_successful() or .is_successful() before calling unwrap() on the result", + "max_score": 20 + }, + { + "name": "EffortKeys enum entry", + "description": "Adds a new entry to the EffortKeys enum in config_consts.py", + "max_score": 20 + }, + { + "name": "Three effort levels", + "description": "Adds values for all three EffortLevel variants (LOW, MEDIUM, HIGH) in EFFORT_VALUES dict", + "max_score": 20 + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-2/task.md b/tiles/codeflash-skills/evals/scenario-2/task.md new file mode 100644 index 000000000..dfe684d14 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-2/task.md @@ -0,0 +1,21 @@ +# Add Candidate Timeout Feature + +## Context + +The 
codeflash optimization engine currently has no per-candidate timeout. Some candidates take too long during verification, wasting the optimization budget. A new feature is needed to skip candidates that exceed a configurable time limit during behavioral testing. + +The timeout should vary based on the optimization effort setting — shorter timeouts for low effort runs (to save time) and longer for high effort runs (to allow more complex optimizations). + +## Task + +Implement a `check_candidate_timeout` function in `codeflash/optimization/function_optimizer.py` that: +1. Takes a candidate runtime and returns whether the candidate should be skipped +2. Uses a configurable timeout threshold that scales with optimization effort +3. Handles the error case where the runtime measurement is unavailable + +Also add the necessary configuration constant to `codeflash/code_utils/config_consts.py`. + +## Expected Outputs + +- Modified `function_optimizer.py` with the new function +- Modified `config_consts.py` with the new configuration diff --git a/tiles/codeflash-skills/evals/scenario-3/capability.txt b/tiles/codeflash-skills/evals/scenario-3/capability.txt new file mode 100644 index 000000000..1fa504dee --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-3/capability.txt @@ -0,0 +1 @@ +Test patterns and deterministic patch awareness \ No newline at end of file diff --git a/tiles/codeflash-skills/evals/scenario-3/criteria.json b/tiles/codeflash-skills/evals/scenario-3/criteria.json new file mode 100644 index 000000000..ccf96e3fa --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-3/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent follows codeflash test conventions when writing tests, including path handling, temp directory patterns, and awareness of the deterministic patching system.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Uses tmp_path fixture", + "description": "Test function uses pytest tmp_path fixture parameter, 
NOT tempfile.NamedTemporaryFile or tempfile.mkdtemp", + "max_score": 25 + }, + { + "name": "Calls resolve on paths", + "description": "Calls .resolve() on Path objects before using them in assertions or function calls", + "max_score": 25 + }, + { + "name": "Full string equality", + "description": "Uses exact equality assertions (== or assert_equal) for code string comparisons, NOT substring checks like 'in' or assertIn or contains", + "max_score": 25 + }, + { + "name": "No real time dependency", + "description": "Test does NOT depend on real time.time(), datetime.now(), random values, or uuid generation for correctness. Acknowledges or accounts for deterministic patches if time/random values are involved.", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-3/task.md b/tiles/codeflash-skills/evals/scenario-3/task.md new file mode 100644 index 000000000..5b13a15d6 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-3/task.md @@ -0,0 +1,24 @@ +# Write Tests for Context Hash Comparison + +## Context + +The codeflash context extraction module has a function `compare_context_hashes(context_a, context_b)` that takes two `CodeOptimizationContext` objects and returns whether their hashing contexts are identical. This is used to detect when the same function has already been optimized. + +```python +# In codeflash/context/code_context_extractor.py +def compare_context_hashes(context_a: CodeOptimizationContext, context_b: CodeOptimizationContext) -> bool: + return context_a.hashing_code_context_hash == context_b.hashing_code_context_hash +``` + +## Task + +Write a test file `tests/test_context/test_hash_comparison.py` with tests for this function. Include tests for: +1. Two contexts with identical code producing the same hash +2. Two contexts with different code producing different hashes +3. A context compared with itself + +The tests should create temporary Python source files to build realistic context objects. 
+ +## Expected Outputs + +- `tests/test_context/test_hash_comparison.py` diff --git a/tiles/codeflash-skills/evals/scenario-4/capability.txt b/tiles/codeflash-skills/evals/scenario-4/capability.txt new file mode 100644 index 000000000..c0d3fea71 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-4/capability.txt @@ -0,0 +1 @@ +Domain type conventions and module identification \ No newline at end of file diff --git a/tiles/codeflash-skills/evals/scenario-4/criteria.json b/tiles/codeflash-skills/evals/scenario-4/criteria.json new file mode 100644 index 000000000..20861011c --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-4/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent follows codeflash domain type conventions and correctly identifies the right module when adding a new data type for the optimization pipeline.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "Placed in models/models.py", + "description": "New data type is added to codeflash/models/models.py (NOT models/function_types.py, since it has dependencies on other codeflash modules)", + "max_score": 25 + }, + { + "name": "Uses frozen dataclass", + "description": "Immutable data type uses @dataclass(frozen=True) decorator, NOT a regular class or unfrozen dataclass", + "max_score": 25 + }, + { + "name": "BaseModel for serializable", + "description": "If a serializable model is needed, uses Pydantic BaseModel (NOT dataclass or dict)", + "max_score": 25 + }, + { + "name": "Correct module for feature", + "description": "Places the main logic in the correct module for the feature type (e.g., verification/ for test-related, optimization/ for candidate-related, api/ for service-related)", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-4/task.md b/tiles/codeflash-skills/evals/scenario-4/task.md new file mode 100644 index 000000000..61299a115 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-4/task.md @@ -0,0 +1,21 @@ +# 
Add Optimization Confidence Score + +## Context + +The codeflash team wants to add a confidence score to each optimization result. The score should capture how confident the system is that an optimization is both correct and beneficial. It combines test coverage percentage, number of passing test cases, and speedup stability into a single metric. + +The score needs to be: +- Attached to each candidate during evaluation (immutable once computed) +- Included in the final PR report (needs JSON serialization) +- Computed during the candidate evaluation phase + +## Task + +1. Define the data types needed for the confidence score +2. Write a `compute_confidence_score` function that takes coverage percentage (float), passing test count (int), and stability ratio (float) and returns the confidence result +3. Place all code in the appropriate codeflash modules + +## Expected Outputs + +- New/modified type definitions in the appropriate models file +- New function in the appropriate module diff --git a/tiles/codeflash-skills/evals/scenario-5/capability.txt b/tiles/codeflash-skills/evals/scenario-5/capability.txt new file mode 100644 index 000000000..28a3fe8ee --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-5/capability.txt @@ -0,0 +1 @@ +Deduplication mechanics and repair trigger conditions \ No newline at end of file diff --git a/tiles/codeflash-skills/evals/scenario-5/criteria.json b/tiles/codeflash-skills/evals/scenario-5/criteria.json new file mode 100644 index 000000000..8c3f8e817 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-5/criteria.json @@ -0,0 +1,26 @@ +{ + "context": "Tests whether the agent understands codeflash's candidate deduplication via AST normalization and the specific conditions under which code repair is triggered vs skipped.", + "type": "weighted_checklist", + "checklist": [ + { + "name": "AST normalization", + "description": "Mentions that deduplication uses AST normalization (normalize_code from code_utils/deduplicate_code.py), 
NOT simple string comparison", + "max_score": 25 + }, + { + "name": "Duplicate result copying", + "description": "Explains that duplicate candidates copy results from the first-seen candidate rather than being re-tested", + "max_score": 25 + }, + { + "name": "Repair trigger threshold", + "description": "States that repair triggers when fewer than 2 candidates pass (MIN_CORRECT_CANDIDATES=2), NOT when zero candidates pass or when any candidate fails", + "max_score": 25 + }, + { + "name": "Unmatched percentage limit", + "description": "Mentions REPAIR_UNMATCHED_PERCENTAGE_LIMIT as a condition that can cause repair to be skipped entirely, with effort-dependent values (0.2/0.3/0.4)", + "max_score": 25 + } + ] +} diff --git a/tiles/codeflash-skills/evals/scenario-5/task.md b/tiles/codeflash-skills/evals/scenario-5/task.md new file mode 100644 index 000000000..19995f3e6 --- /dev/null +++ b/tiles/codeflash-skills/evals/scenario-5/task.md @@ -0,0 +1,17 @@ +# Investigate Low Candidate Diversity + +## Context + +A codeflash user is optimizing a data processing function at medium effort level. The AI service returns 5 candidates, but the optimization log shows only 1 candidate was actually benchmarked. Of the 5 candidates, 1 passed behavioral tests but didn't meet the performance threshold. The user wants to understand what happened to the other 4 candidates and why no repair attempts were made. + +## Task + +Write an analysis document explaining: +1. Why only 1 out of 5 candidates was benchmarked +2. How the system determines which candidates to actually test +3. Under what conditions the system would have attempted to repair the failing candidates +4. What the user could change to get more diverse results + +## Expected Outputs + +A markdown file `analysis.md` with the explanation. 
diff --git a/tiles/codeflash-skills/evals/summary.json b/tiles/codeflash-skills/evals/summary.json new file mode 100644 index 000000000..c5929299f --- /dev/null +++ b/tiles/codeflash-skills/evals/summary.json @@ -0,0 +1,40 @@ +{ + "total_scenarios": 5, + "capabilities_coverage": { + "total_capabilities": 14, + "capabilities_tested": 10, + "coverage_percentage": 71.4 + }, + "complexity_distribution": { + "basic": 2, + "intermediate": 2, + "advanced": 1 + }, + "scenarios": [ + { + "index": 1, + "capability": "sequential-pipeline-debugging, token-limit-awareness, improvement-threshold", + "complexity": "intermediate" + }, + { + "index": 2, + "capability": "result-type-pattern, effort-config-pattern", + "complexity": "intermediate" + }, + { + "index": 3, + "capability": "test-patterns", + "complexity": "basic" + }, + { + "index": 4, + "capability": "domain-type-conventions, module-to-feature-mapping", + "complexity": "basic" + }, + { + "index": 5, + "capability": "ast-deduplication, repair-trigger-conditions", + "complexity": "advanced" + } + ] +} diff --git a/tiles/codeflash-skills/evals/summary_infeasible.json b/tiles/codeflash-skills/evals/summary_infeasible.json new file mode 100644 index 000000000..36da50727 --- /dev/null +++ b/tiles/codeflash-skills/evals/summary_infeasible.json @@ -0,0 +1,25 @@ +{ + "total_infeasible": 4, + "infeasible_capabilities": [ + { + "capability": "ai-service-error-patterns", + "complexity": "intermediate", + "reasoning": "Requires actual AI service API responses and log output that cannot be meaningfully mocked without bypassing the capability being tested" + }, + { + "capability": "behavioral-vs-benchmark-failures", + "complexity": "intermediate", + "reasoning": "Requires actual test execution results with JUnit XML output and timing data that cannot be generated in a one-shot file-based eval" + }, + { + "capability": "language-support-patterns", + "complexity": "advanced", + "reasoning": "Requires the full 
language registry system with imports and decorators that would need the codeflash runtime to verify" + }, + { + "capability": "quality-check-commands", + "complexity": "basic", + "reasoning": "Requires running actual uv/prek/mypy commands which need the project environment and dependencies installed" + } + ] +} diff --git a/tiles/codeflash-skills/skills/add-codeflash-feature/MODULE_REFERENCE.md b/tiles/codeflash-skills/skills/add-codeflash-feature/MODULE_REFERENCE.md new file mode 100644 index 000000000..9012fb294 --- /dev/null +++ b/tiles/codeflash-skills/skills/add-codeflash-feature/MODULE_REFERENCE.md @@ -0,0 +1,13 @@ +# Module Reference + +| Feature area | Primary module | Key files | +|-------------|----------------|-----------| +| New optimization strategy | `optimization/` | `function_optimizer.py`, `optimizer.py` | +| New test type | `verification/`, `models/` | `test_runner.py`, `pytest_plugin.py`, `test_type.py` | +| New AI service endpoint | `api/` | `aiservice.py` | +| New language support | `languages/` | Create new `languages/<language>/support.py` | +| Context extraction change | `context/` | `code_context_extractor.py` | +| New CLI command | `cli_cmds/` | `cli.py` | +| New config option | `setup/`, `code_utils/` | `config_consts.py`, `setup/detector.py` | +| Discovery filter | `discovery/` | `functions_to_optimize.py` | +| PR/result changes | `github/`, `result/` | Relevant handlers | diff --git a/tiles/codeflash-skills/skills/add-codeflash-feature/SKILL.md b/tiles/codeflash-skills/skills/add-codeflash-feature/SKILL.md new file mode 100644 index 000000000..f61abfe83 --- /dev/null +++ b/tiles/codeflash-skills/skills/add-codeflash-feature/SKILL.md @@ -0,0 +1,146 @@ +--- +name: add-codeflash-feature +description: > + Guides implementation of new functionality in the codeflash optimization engine. 
+ Use when adding a feature, building new functionality, implementing a new + optimization strategy, adding a language backend, creating an API endpoint, + extending the verification pipeline, or developing any new codeflash capability. + Covers module identification, Result type patterns, config, types, tests, and + quality checks. +--- + +# Add Codeflash Feature + +Use this workflow when implementing new functionality in the codeflash codebase — new optimization strategies, language backends, API endpoints, CLI commands, config options, or pipeline extensions. + +## Step 1: Identify Target Modules + +Determine which module(s) need modification. See [MODULE_REFERENCE.md](MODULE_REFERENCE.md) for the full mapping of feature areas to modules and key files. + +**Checkpoint**: Read the target files and understand existing patterns before writing any code. Look for similar features already implemented as reference. + +## Step 2: Follow Result Type Pattern + +Use the `Result[L, R]` type from `either.py` for error handling in pipeline operations: + +```python +from codeflash.either import Success, Failure, is_successful + +def my_operation() -> Result[str, MyResultType]: + if error_condition: + return Failure("descriptive error message") + return Success(result_value) + +# Usage: +result = my_operation() +if not is_successful(result): + logger.error(result.failure()) + return +value = result.unwrap() +``` + +**Checkpoint**: Verify your function signatures match the `Result` pattern used in surrounding code. Not all functions use `Result` — match the convention of the module you're modifying. + +## Step 3: Add Configuration Constants + +If the feature needs configurable thresholds or limits: + +1. Add constants to `code_utils/config_consts.py` +2. 
If effort-dependent, add to `EFFORT_VALUES` dict with values for all three levels: + ```python + # In config_consts.py: + class EffortKeys(str, Enum): + MY_NEW_KEY = "MY_NEW_KEY" + + EFFORT_VALUES: dict[str, dict[EffortLevel, Any]] = { + # ... existing entries ... + EffortKeys.MY_NEW_KEY.value: { + EffortLevel.LOW: 1, + EffortLevel.MEDIUM: 3, + EffortLevel.HIGH: 5, + }, + } + ``` +3. Access via `get_effort_value(EffortKeys.MY_NEW_KEY, effort_level)` + +**Checkpoint**: Skip this step if the feature doesn't need configuration. Not every feature requires new constants. + +## Step 4: Add Domain Types + +If new data structures are needed: + +1. Add Pydantic models or frozen dataclasses to `models/models.py` or `models/function_types.py` +2. Use `@dataclass(frozen=True)` for immutable data, `BaseModel` for models that need serialization +3. Keep `function_types.py` dependency-free — no imports from other codeflash modules + +Example following existing patterns: +```python +# In models/models.py: +@dataclass(frozen=True) +class MyNewType: + name: str + value: int + source: OptimizedCandidateSource + +# For serializable models: +class MyNewModel(BaseModel): + items: list[MyNewType] = [] +``` + +**Checkpoint**: Skip this step if you can reuse existing types. Check `models/models.py` for types that already fit your needs. + +## Step 5: Write Tests + +Follow existing test patterns: + +1. Create test files in `tests/` mirroring the source structure (e.g., `tests/test_optimization/test_my_feature.py`) +2. Use pytest's `tmp_path` fixture for temp directories — never `NamedTemporaryFile` +3. Always call `.resolve()` on Path objects and `.as_posix()` for string conversion +4. Assert full string equality for code context tests — no substring matching +5. 
The pytest plugin patches `time`, `random`, `uuid`, `datetime` — never rely on real values in verification tests + +```python +def test_my_feature(tmp_path: Path) -> None: + test_file = tmp_path / "test_module.py" + test_file.write_text("def foo(): return 1", encoding="utf-8") + result = my_operation(test_file.resolve()) + assert is_successful(result) + assert result.unwrap() == expected_value +``` + +**Checkpoint**: Run the new tests in isolation before proceeding: `uv run pytest tests/path/to/test_file.py -x` + +## Step 6: Run Quality Checks + +Run all validation before committing: + +```bash +# Pre-commit checks (ruff format + lint) +uv run prek run + +# Type checking +uv run mypy codeflash/ + +# Run relevant tests +uv run pytest tests/path/to/relevant/tests -x +``` + +**If checks fail**: +- `prek run` failures: Fix formatting/lint issues reported by ruff, then re-run +- `mypy` failures: Fix type errors — common issues are missing return types, wrong `Optional` usage, or missing imports in `TYPE_CHECKING` block +- Test failures: Fix the failing test or the implementation, then re-run + +## Step 7: Language Support Considerations + +If the feature needs to work across languages: + +1. Use `get_language_support(identifier)` from `languages/registry.py` — never import language classes directly +2. Current language is a singleton: `set_current_language()` / `current_language()` from `languages/current.py` +3. Use `is_python()` / `is_javascript()` guards for language-specific branches +4. New language support classes must use `@register_language` decorator and be instantiable without arguments + +**Checkpoint**: Skip this step if the feature is Python-only. Most features don't need multi-language support. + +## Troubleshooting + +If you run into issues, see [TROUBLESHOOTING.md](TROUBLESHOOTING.md) for common problems and fixes (circular imports, `UnsupportedLanguageError`, CI path failures, Pydantic validation errors, token limit exceeded). 
diff --git a/tiles/codeflash-skills/skills/add-codeflash-feature/TROUBLESHOOTING.md b/tiles/codeflash-skills/skills/add-codeflash-feature/TROUBLESHOOTING.md new file mode 100644 index 000000000..6c56f8d0b --- /dev/null +++ b/tiles/codeflash-skills/skills/add-codeflash-feature/TROUBLESHOOTING.md @@ -0,0 +1,9 @@ +# Troubleshooting + +| Problem | Likely cause | Fix | +|---------|-------------|-----| +| Circular import at startup | Importing from `models/` in a module loaded early | Move import into `TYPE_CHECKING` block or use lazy import | +| `UnsupportedLanguageError` | Language modules not registered yet | Call `_ensure_languages_registered()` or use `get_language_support()` which does it automatically | +| Tests pass locally but fail in CI | Path differences (absolute vs relative) | Always use `.resolve()` on Path objects | +| `ValidationError` from Pydantic | Invalid code passed to `CodeString` | Check that generated code passes syntax validation for the target language | +| `encoded_tokens_len` exceeds limit | Context too large | Reduce helper functions or split into read-only vs read-writable | diff --git a/tiles/codeflash-skills/skills/debug-optimization-failure/SKILL.md b/tiles/codeflash-skills/skills/debug-optimization-failure/SKILL.md new file mode 100644 index 000000000..f85c56641 --- /dev/null +++ b/tiles/codeflash-skills/skills/debug-optimization-failure/SKILL.md @@ -0,0 +1,124 @@ +--- +name: debug-optimization-failure +description: > + Diagnose why a codeflash optimization produced no results or failed silently. + Use when an optimization run errors out, returns no candidates, or all candidates + are rejected. Walks through discovery, ranking, context limits, AI service, + test verification, deduplication, and repair stages. +--- + +# Debug Optimization Failure + +Use this workflow when an optimization run fails or produces no results. Work through the stages sequentially — stop at the first failure found. 
+ +## Step 1: Check Function Discovery + +Determine if the function was discovered by `FunctionVisitor`. + +1. Search logs for the function name in discovery output: + ```python + # In discovery/functions_to_optimize.py, FunctionVisitor filters out: + # - Functions matching exclude patterns in pyproject.toml [tool.codeflash] + # - Functions already optimized (was_function_previously_optimized()) + # - Functions outside the configured module-root + ``` +2. Verify the function file is under the configured `module-root` in `pyproject.toml` +3. Check if the function was previously optimized — look for it in the optimization history + +**Checkpoint**: If the function doesn't appear in discovery output, fix config patterns or file location before proceeding. + +## Step 2: Check Ranking + +If trace data is used, check if the function was ranked high enough. + +1. Look at `benchmarking/function_ranker.py` output for the function's addressable time +2. The function must exceed `DEFAULT_IMPORTANCE_THRESHOLD=0.001`: + ```python + # Addressable time = own time + callee time / call count + # Grep for the function in ranking output: + # grep -i "function_name" in ranking logs + ``` +3. Functions below the threshold are silently skipped + +**Checkpoint**: If ranked too low, the function doesn't spend enough time to be worth optimizing. No fix needed — this is expected. + +## Step 3: Check Context Token Limits + +Verify the function's context fits within token limits. + +1. Check thresholds in `code_utils/config_consts.py`: + ```python + OPTIMIZATION_CONTEXT_TOKEN_LIMIT = 16000 # tokens + TESTGEN_CONTEXT_TOKEN_LIMIT = 16000 # tokens + ``` +2. Token counting uses `encoded_tokens_len()` from `code_utils/code_utils.py` +3. Common causes: large helper function chains, deep dependency trees, large class hierarchies + +**Checkpoint**: If context exceeds limits, the function is rejected. Consider refactoring to reduce dependencies or splitting large modules. 
+ +## Step 4: Check AI Service Response + +Verify the AI service returned valid candidates. + +1. Look for HTTP errors in logs: + ``` + # Error patterns to search for: + "Error generating optimized candidates" + "Error generating jit rewritten candidate" + "cli-optimize-error-caught" + "cli-optimize-error-response" + ``` +2. Check `_get_valid_candidates()` in `api/aiservice.py` — empty `code_strings` after `CodeStringsMarkdown.parse_markdown_code()` means the LLM returned malformed code blocks +3. Verify API key is valid (`get_codeflash_api_key()`) + +**Checkpoint**: If no candidates returned, check API key, network, and service status before proceeding. + +## Step 5: Check Test Failures + +Determine if candidates failed behavioral or benchmark tests. + +1. **Behavioral failures** — compare return values, stdout, pass/fail between baseline and candidate: + ```python + # TestDiffScope enum values to look for: + # RETURN_VALUE - function returned different value + # STDOUT - different stdout output + # DID_PASS - test passed/failed differently + ``` +2. **Benchmark failures** — candidate must beat `MIN_IMPROVEMENT_THRESHOLD=0.05` (5% speedup) +3. **Stability failures** — timing must be stable within `STABILITY_WINDOW_SIZE=0.35` (35% of iterations) +4. Check JUnit XML test results in the temp directory for specific failure messages + +**Checkpoint**: Behavioral failure = optimization changed behavior (check test diffs). Benchmark failure = not fast enough. Stability failure = noisy timing environment. + +## Step 6: Check Deduplication + +Verify candidates weren't deduplicated away. + +1. `CandidateEvaluationContext.ast_code_to_id` tracks normalized AST → candidate mapping +2. `normalize_code()` from `code_utils/deduplicate_code.py` strips comments/whitespace and normalizes the AST +3. 
If all candidates normalize to identical code, only the first is tested — the rest copy its results + +**Checkpoint**: If all duplicates, the LLM generated the same optimization repeatedly. Try a higher effort level for more diverse candidates. + +## Step 7: Check Repair/Refinement + +If initial candidates failed, check repair and refinement stages. + +1. Repair only triggers if fewer than `MIN_CORRECT_CANDIDATES=2` passed behavioral tests +2. Repair sends `AIServiceCodeRepairRequest` with `TestDiff` objects showing what went wrong +3. Check `REPAIR_UNMATCHED_PERCENTAGE_LIMIT` (effort-dependent: 0.2/0.3/0.4) — if too many tests failed, repair is skipped entirely +4. Refinement only runs on the top valid candidates (count depends on effort level) + +**Checkpoint**: If repair also fails, the optimization approach likely doesn't work for this function. The function may rely on side effects or external state that the LLM can't safely optimize. + +## Key Files Reference + +| File | What to check | +|------|---------------| +| `optimization/function_optimizer.py` | Main loop, `determine_best_candidate()` | +| `verification/test_runner.py` | Test subprocess execution | +| `api/aiservice.py` | AI service requests/responses | +| `code_utils/config_consts.py` | All thresholds and limits | +| `context/code_context_extractor.py` | Context extraction and token counting | +| `models/models.py` | `CandidateEvaluationContext`, `TestResults`, `TestDiff` | +| `code_utils/deduplicate_code.py` | AST normalization for deduplication | diff --git a/tiles/codeflash-skills/tile.json b/tiles/codeflash-skills/tile.json new file mode 100644 index 000000000..01d7a9481 --- /dev/null +++ b/tiles/codeflash-skills/tile.json @@ -0,0 +1,14 @@ +{ + "name": "codeflash/codeflash-skills", + "version": "0.2.0", + "summary": "Procedural workflows for developing and debugging codeflash", + "private": true, + "skills": { + "debug-optimization-failure": { + "path": 
"skills/debug-optimization-failure/SKILL.md" + }, + "add-codeflash-feature": { + "path": "skills/add-codeflash-feature/SKILL.md" + } + } +} diff --git a/uv.lock b/uv.lock index a55ae4db2..f8dd0a855 100644 --- a/uv.lock +++ b/uv.lock @@ -200,7 +200,7 @@ dependencies = [ { name = "mypy-extensions", marker = "python_full_version >= '3.10'" }, { name = "packaging", marker = "python_full_version >= '3.10'" }, { name = "pathspec", marker = "python_full_version >= '3.10'" }, - { name = "platformdirs", version = "4.5.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs", version = "4.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "pytokens", marker = "python_full_version >= '3.10'" }, { name = "tomli", marker = "python_full_version == '3.10.*'" }, { name = "typing-extensions", marker = "python_full_version == '3.10.*'" }, @@ -431,7 +431,7 @@ dependencies = [ { name = "crosshair-tool" }, { name = "dill" }, { name = "filelock", version = "3.19.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "filelock", version = "3.20.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "filelock", version = "3.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "gitpython" }, { name = "humanize", version = "4.13.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "humanize", version = "4.15.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, @@ -446,9 +446,9 @@ dependencies = [ { name = "lxml" }, { name = "parameterized" }, { name = "platformdirs", version = "4.4.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "platformdirs", 
version = "4.5.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "platformdirs", version = "4.7.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "posthog", version = "6.9.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "posthog", version = "7.8.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "posthog", version = "7.8.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "pydantic" }, { name = "pygls" }, { name = "pytest", version = "8.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, @@ -942,10 +942,10 @@ wheels = [ [[package]] name = "cuda-pathfinder" -version = "1.3.3" +version = "1.3.4" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/02/4dbe7568a42e46582248942f54dc64ad094769532adbe21e525e4edf7bc4/cuda_pathfinder-1.3.3-py3-none-any.whl", hash = "sha256:9984b664e404f7c134954a771be8775dfd6180ea1e1aef4a5a37d4be05d9bbb1", size = 27154, upload-time = "2025-12-04T22:35:08.996Z" }, + { url = "https://files.pythonhosted.org/packages/b8/5e/db279a3bfbd18d59d0598922a3b3c1454908d0969e8372260afec9736376/cuda_pathfinder-1.3.4-py3-none-any.whl", hash = "sha256:fb983f6e0d43af27ef486e14d5989b5f904ef45cedf40538bfdcbffa6bb01fb2", size = 30878, upload-time = "2026-02-11T18:50:31.008Z" }, ] [[package]] @@ -1065,7 +1065,7 @@ wheels = [ [[package]] name = "filelock" -version = "3.20.3" +version = "3.21.2" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -1082,9 +1082,9 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", 
"python_full_version == '3.10.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } +sdist = { url = "https://files.pythonhosted.org/packages/73/71/74364ff065ca78914d8bd90b312fe78ddc5e11372d38bc9cb7104f887ce1/filelock-3.21.2.tar.gz", hash = "sha256:cfd218cfccf8b947fce7837da312ec3359d10ef2a47c8602edd59e0bacffb708", size = 31486, upload-time = "2026-02-13T01:27:15.223Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, + { url = "https://files.pythonhosted.org/packages/98/73/3a18f1e1276810e81477c431009b55eeccebbd7301d28a350b77aacf3c33/filelock-3.21.2-py3-none-any.whl", hash = "sha256:d6cd4dbef3e1bb63bc16500fc5aa100f16e405bbff3fb4231711851be50c1560", size = 21479, upload-time = "2026-02-13T01:27:13.611Z" }, ] [[package]] @@ -2055,85 +2055,99 @@ wheels = [ [[package]] name = "librt" -version = "0.7.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/24/5f3646ff414285e0f7708fa4e946b9bf538345a41d1c375c439467721a5e/librt-0.7.8.tar.gz", hash = "sha256:1a4ede613941d9c3470b0368be851df6bb78ab218635512d0370b27a277a0862", size = 148323, upload-time = "2026-01-14T12:56:16.876Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/13/57b06758a13550c5f09563893b004f98e9537ee6ec67b7df85c3571c8832/librt-0.7.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b45306a1fc5f53c9330fbee134d8b3227fe5da2ab09813b892790400aa49352d", size = 56521, upload-time = "2026-01-14T12:54:40.066Z" }, - { url = 
"https://files.pythonhosted.org/packages/c2/24/bbea34d1452a10612fb45ac8356f95351ba40c2517e429602160a49d1fd0/librt-0.7.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:864c4b7083eeee250ed55135d2127b260d7eb4b5e953a9e5df09c852e327961b", size = 58456, upload-time = "2026-01-14T12:54:41.471Z" }, - { url = "https://files.pythonhosted.org/packages/04/72/a168808f92253ec3a810beb1eceebc465701197dbc7e865a1c9ceb3c22c7/librt-0.7.8-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6938cc2de153bc927ed8d71c7d2f2ae01b4e96359126c602721340eb7ce1a92d", size = 164392, upload-time = "2026-01-14T12:54:42.843Z" }, - { url = "https://files.pythonhosted.org/packages/14/5c/4c0d406f1b02735c2e7af8ff1ff03a6577b1369b91aa934a9fa2cc42c7ce/librt-0.7.8-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66daa6ac5de4288a5bbfbe55b4caa7bf0cd26b3269c7a476ffe8ce45f837f87d", size = 172959, upload-time = "2026-01-14T12:54:44.602Z" }, - { url = "https://files.pythonhosted.org/packages/82/5f/3e85351c523f73ad8d938989e9a58c7f59fb9c17f761b9981b43f0025ce7/librt-0.7.8-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4864045f49dc9c974dadb942ac56a74cd0479a2aafa51ce272c490a82322ea3c", size = 186717, upload-time = "2026-01-14T12:54:45.986Z" }, - { url = "https://files.pythonhosted.org/packages/08/f8/18bfe092e402d00fe00d33aa1e01dda1bd583ca100b393b4373847eade6d/librt-0.7.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a36515b1328dc5b3ffce79fe204985ca8572525452eacabee2166f44bb387b2c", size = 184585, upload-time = "2026-01-14T12:54:47.139Z" }, - { url = "https://files.pythonhosted.org/packages/4e/fc/f43972ff56fd790a9fa55028a52ccea1875100edbb856b705bd393b601e3/librt-0.7.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b7e7f140c5169798f90b80d6e607ed2ba5059784968a004107c88ad61fb3641d", size = 180497, upload-time = "2026-01-14T12:54:48.946Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/3a/25e36030315a410d3ad0b7d0f19f5f188e88d1613d7d3fd8150523ea1093/librt-0.7.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ff71447cb778a4f772ddc4ce360e6ba9c95527ed84a52096bd1bbf9fee2ec7c0", size = 200052, upload-time = "2026-01-14T12:54:50.382Z" }, - { url = "https://files.pythonhosted.org/packages/fc/b8/f3a5a1931ae2a6ad92bf6893b9ef44325b88641d58723529e2c2935e8abe/librt-0.7.8-cp310-cp310-win32.whl", hash = "sha256:047164e5f68b7a8ebdf9fae91a3c2161d3192418aadd61ddd3a86a56cbe3dc85", size = 43477, upload-time = "2026-01-14T12:54:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/fe/91/c4202779366bc19f871b4ad25db10fcfa1e313c7893feb942f32668e8597/librt-0.7.8-cp310-cp310-win_amd64.whl", hash = "sha256:d6f254d096d84156a46a84861183c183d30734e52383602443292644d895047c", size = 49806, upload-time = "2026-01-14T12:54:53.149Z" }, - { url = "https://files.pythonhosted.org/packages/1b/a3/87ea9c1049f2c781177496ebee29430e4631f439b8553a4969c88747d5d8/librt-0.7.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ff3e9c11aa260c31493d4b3197d1e28dd07768594a4f92bec4506849d736248f", size = 56507, upload-time = "2026-01-14T12:54:54.156Z" }, - { url = "https://files.pythonhosted.org/packages/5e/4a/23bcef149f37f771ad30203d561fcfd45b02bc54947b91f7a9ac34815747/librt-0.7.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddb52499d0b3ed4aa88746aaf6f36a08314677d5c346234c3987ddc506404eac", size = 58455, upload-time = "2026-01-14T12:54:55.978Z" }, - { url = "https://files.pythonhosted.org/packages/22/6e/46eb9b85c1b9761e0f42b6e6311e1cc544843ac897457062b9d5d0b21df4/librt-0.7.8-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e9c0afebbe6ce177ae8edba0c7c4d626f2a0fc12c33bb993d163817c41a7a05c", size = 164956, upload-time = "2026-01-14T12:54:57.311Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/3f/aa7c7f6829fb83989feb7ba9aa11c662b34b4bd4bd5b262f2876ba3db58d/librt-0.7.8-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:631599598e2c76ded400c0a8722dec09217c89ff64dc54b060f598ed68e7d2a8", size = 174364, upload-time = "2026-01-14T12:54:59.089Z" }, - { url = "https://files.pythonhosted.org/packages/3f/2d/d57d154b40b11f2cb851c4df0d4c4456bacd9b1ccc4ecb593ddec56c1a8b/librt-0.7.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c1ba843ae20db09b9d5c80475376168feb2640ce91cd9906414f23cc267a1ff", size = 188034, upload-time = "2026-01-14T12:55:00.141Z" }, - { url = "https://files.pythonhosted.org/packages/59/f9/36c4dad00925c16cd69d744b87f7001792691857d3b79187e7a673e812fb/librt-0.7.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b5b007bb22ea4b255d3ee39dfd06d12534de2fcc3438567d9f48cdaf67ae1ae3", size = 186295, upload-time = "2026-01-14T12:55:01.303Z" }, - { url = "https://files.pythonhosted.org/packages/23/9b/8a9889d3df5efb67695a67785028ccd58e661c3018237b73ad081691d0cb/librt-0.7.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dbd79caaf77a3f590cbe32dc2447f718772d6eea59656a7dcb9311161b10fa75", size = 181470, upload-time = "2026-01-14T12:55:02.492Z" }, - { url = "https://files.pythonhosted.org/packages/43/64/54d6ef11afca01fef8af78c230726a9394759f2addfbf7afc5e3cc032a45/librt-0.7.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:87808a8d1e0bd62a01cafc41f0fd6818b5a5d0ca0d8a55326a81643cdda8f873", size = 201713, upload-time = "2026-01-14T12:55:03.919Z" }, - { url = "https://files.pythonhosted.org/packages/2d/29/73e7ed2991330b28919387656f54109139b49e19cd72902f466bd44415fd/librt-0.7.8-cp311-cp311-win32.whl", hash = "sha256:31724b93baa91512bd0a376e7cf0b59d8b631ee17923b1218a65456fa9bda2e7", size = 43803, upload-time = "2026-01-14T12:55:04.996Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/de/66766ff48ed02b4d78deea30392ae200bcbd99ae61ba2418b49fd50a4831/librt-0.7.8-cp311-cp311-win_amd64.whl", hash = "sha256:978e8b5f13e52cf23a9e80f3286d7546baa70bc4ef35b51d97a709d0b28e537c", size = 50080, upload-time = "2026-01-14T12:55:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/6f/e3/33450438ff3a8c581d4ed7f798a70b07c3206d298cf0b87d3806e72e3ed8/librt-0.7.8-cp311-cp311-win_arm64.whl", hash = "sha256:20e3946863d872f7cabf7f77c6c9d370b8b3d74333d3a32471c50d3a86c0a232", size = 43383, upload-time = "2026-01-14T12:55:07.49Z" }, - { url = "https://files.pythonhosted.org/packages/56/04/79d8fcb43cae376c7adbab7b2b9f65e48432c9eced62ac96703bcc16e09b/librt-0.7.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9b6943885b2d49c48d0cff23b16be830ba46b0152d98f62de49e735c6e655a63", size = 57472, upload-time = "2026-01-14T12:55:08.528Z" }, - { url = "https://files.pythonhosted.org/packages/b4/ba/60b96e93043d3d659da91752689023a73981336446ae82078cddf706249e/librt-0.7.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46ef1f4b9b6cc364b11eea0ecc0897314447a66029ee1e55859acb3dd8757c93", size = 58986, upload-time = "2026-01-14T12:55:09.466Z" }, - { url = "https://files.pythonhosted.org/packages/7c/26/5215e4cdcc26e7be7eee21955a7e13cbf1f6d7d7311461a6014544596fac/librt-0.7.8-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:907ad09cfab21e3c86e8f1f87858f7049d1097f77196959c033612f532b4e592", size = 168422, upload-time = "2026-01-14T12:55:10.499Z" }, - { url = "https://files.pythonhosted.org/packages/0f/84/e8d1bc86fa0159bfc24f3d798d92cafd3897e84c7fea7fe61b3220915d76/librt-0.7.8-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2991b6c3775383752b3ca0204842743256f3ad3deeb1d0adc227d56b78a9a850", size = 177478, upload-time = "2026-01-14T12:55:11.577Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/11/d0268c4b94717a18aa91df1100e767b010f87b7ae444dafaa5a2d80f33a6/librt-0.7.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03679b9856932b8c8f674e87aa3c55ea11c9274301f76ae8dc4d281bda55cf62", size = 192439, upload-time = "2026-01-14T12:55:12.7Z" }, - { url = "https://files.pythonhosted.org/packages/8d/56/1e8e833b95fe684f80f8894ae4d8b7d36acc9203e60478fcae599120a975/librt-0.7.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3968762fec1b2ad34ce57458b6de25dbb4142713e9ca6279a0d352fa4e9f452b", size = 191483, upload-time = "2026-01-14T12:55:13.838Z" }, - { url = "https://files.pythonhosted.org/packages/17/48/f11cf28a2cb6c31f282009e2208312aa84a5ee2732859f7856ee306176d5/librt-0.7.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:bb7a7807523a31f03061288cc4ffc065d684c39db7644c676b47d89553c0d714", size = 185376, upload-time = "2026-01-14T12:55:15.017Z" }, - { url = "https://files.pythonhosted.org/packages/b8/6a/d7c116c6da561b9155b184354a60a3d5cdbf08fc7f3678d09c95679d13d9/librt-0.7.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad64a14b1e56e702e19b24aae108f18ad1bf7777f3af5fcd39f87d0c5a814449", size = 206234, upload-time = "2026-01-14T12:55:16.571Z" }, - { url = "https://files.pythonhosted.org/packages/61/de/1975200bb0285fc921c5981d9978ce6ce11ae6d797df815add94a5a848a3/librt-0.7.8-cp312-cp312-win32.whl", hash = "sha256:0241a6ed65e6666236ea78203a73d800dbed896cf12ae25d026d75dc1fcd1dac", size = 44057, upload-time = "2026-01-14T12:55:18.077Z" }, - { url = "https://files.pythonhosted.org/packages/8e/cd/724f2d0b3461426730d4877754b65d39f06a41ac9d0a92d5c6840f72b9ae/librt-0.7.8-cp312-cp312-win_amd64.whl", hash = "sha256:6db5faf064b5bab9675c32a873436b31e01d66ca6984c6f7f92621656033a708", size = 50293, upload-time = "2026-01-14T12:55:19.179Z" }, - { url = 
"https://files.pythonhosted.org/packages/bd/cf/7e899acd9ee5727ad8160fdcc9994954e79fab371c66535c60e13b968ffc/librt-0.7.8-cp312-cp312-win_arm64.whl", hash = "sha256:57175aa93f804d2c08d2edb7213e09276bd49097611aefc37e3fa38d1fb99ad0", size = 43574, upload-time = "2026-01-14T12:55:20.185Z" }, - { url = "https://files.pythonhosted.org/packages/a1/fe/b1f9de2829cf7fc7649c1dcd202cfd873837c5cc2fc9e526b0e7f716c3d2/librt-0.7.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4c3995abbbb60b3c129490fa985dfe6cac11d88fc3c36eeb4fb1449efbbb04fc", size = 57500, upload-time = "2026-01-14T12:55:21.219Z" }, - { url = "https://files.pythonhosted.org/packages/eb/d4/4a60fbe2e53b825f5d9a77325071d61cd8af8506255067bf0c8527530745/librt-0.7.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:44e0c2cbc9bebd074cf2cdbe472ca185e824be4e74b1c63a8e934cea674bebf2", size = 59019, upload-time = "2026-01-14T12:55:22.256Z" }, - { url = "https://files.pythonhosted.org/packages/6a/37/61ff80341ba5159afa524445f2d984c30e2821f31f7c73cf166dcafa5564/librt-0.7.8-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d2f1e492cae964b3463a03dc77a7fe8742f7855d7258c7643f0ee32b6651dd3", size = 169015, upload-time = "2026-01-14T12:55:23.24Z" }, - { url = "https://files.pythonhosted.org/packages/1c/86/13d4f2d6a93f181ebf2fc953868826653ede494559da8268023fe567fca3/librt-0.7.8-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:451e7ffcef8f785831fdb791bd69211f47e95dc4c6ddff68e589058806f044c6", size = 178161, upload-time = "2026-01-14T12:55:24.826Z" }, - { url = "https://files.pythonhosted.org/packages/88/26/e24ef01305954fc4d771f1f09f3dd682f9eb610e1bec188ffb719374d26e/librt-0.7.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3469e1af9f1380e093ae06bedcbdd11e407ac0b303a56bbe9afb1d6824d4982d", size = 193015, upload-time = "2026-01-14T12:55:26.04Z" }, - { url = 
"https://files.pythonhosted.org/packages/88/a0/92b6bd060e720d7a31ed474d046a69bd55334ec05e9c446d228c4b806ae3/librt-0.7.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f11b300027ce19a34f6d24ebb0a25fd0e24a9d53353225a5c1e6cadbf2916b2e", size = 192038, upload-time = "2026-01-14T12:55:27.208Z" }, - { url = "https://files.pythonhosted.org/packages/06/bb/6f4c650253704279c3a214dad188101d1b5ea23be0606628bc6739456624/librt-0.7.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4adc73614f0d3c97874f02f2c7fd2a27854e7e24ad532ea6b965459c5b757eca", size = 186006, upload-time = "2026-01-14T12:55:28.594Z" }, - { url = "https://files.pythonhosted.org/packages/dc/00/1c409618248d43240cadf45f3efb866837fa77e9a12a71481912135eb481/librt-0.7.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:60c299e555f87e4c01b2eca085dfccda1dde87f5a604bb45c2906b8305819a93", size = 206888, upload-time = "2026-01-14T12:55:30.214Z" }, - { url = "https://files.pythonhosted.org/packages/d9/83/b2cfe8e76ff5c1c77f8a53da3d5de62d04b5ebf7cf913e37f8bca43b5d07/librt-0.7.8-cp313-cp313-win32.whl", hash = "sha256:b09c52ed43a461994716082ee7d87618096851319bf695d57ec123f2ab708951", size = 44126, upload-time = "2026-01-14T12:55:31.44Z" }, - { url = "https://files.pythonhosted.org/packages/a9/0b/c59d45de56a51bd2d3a401fc63449c0ac163e4ef7f523ea8b0c0dee86ec5/librt-0.7.8-cp313-cp313-win_amd64.whl", hash = "sha256:f8f4a901a3fa28969d6e4519deceab56c55a09d691ea7b12ca830e2fa3461e34", size = 50262, upload-time = "2026-01-14T12:55:33.01Z" }, - { url = "https://files.pythonhosted.org/packages/fc/b9/973455cec0a1ec592395250c474164c4a58ebf3e0651ee920fef1a2623f1/librt-0.7.8-cp313-cp313-win_arm64.whl", hash = "sha256:43d4e71b50763fcdcf64725ac680d8cfa1706c928b844794a7aa0fa9ac8e5f09", size = 43600, upload-time = "2026-01-14T12:55:34.054Z" }, - { url = "https://files.pythonhosted.org/packages/1a/73/fa8814c6ce2d49c3827829cadaa1589b0bf4391660bd4510899393a23ebc/librt-0.7.8-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:be927c3c94c74b05128089a955fba86501c3b544d1d300282cc1b4bd370cb418", size = 57049, upload-time = "2026-01-14T12:55:35.056Z" }, - { url = "https://files.pythonhosted.org/packages/53/fe/f6c70956da23ea235fd2e3cc16f4f0b4ebdfd72252b02d1164dd58b4e6c3/librt-0.7.8-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:7b0803e9008c62a7ef79058233db7ff6f37a9933b8f2573c05b07ddafa226611", size = 58689, upload-time = "2026-01-14T12:55:36.078Z" }, - { url = "https://files.pythonhosted.org/packages/1f/4d/7a2481444ac5fba63050d9abe823e6bc16896f575bfc9c1e5068d516cdce/librt-0.7.8-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:79feb4d00b2a4e0e05c9c56df707934f41fcb5fe53fd9efb7549068d0495b758", size = 166808, upload-time = "2026-01-14T12:55:37.595Z" }, - { url = "https://files.pythonhosted.org/packages/ac/3c/10901d9e18639f8953f57c8986796cfbf4c1c514844a41c9197cf87cb707/librt-0.7.8-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9122094e3f24aa759c38f46bd8863433820654927370250f460ae75488b66ea", size = 175614, upload-time = "2026-01-14T12:55:38.756Z" }, - { url = "https://files.pythonhosted.org/packages/db/01/5cbdde0951a5090a80e5ba44e6357d375048123c572a23eecfb9326993a7/librt-0.7.8-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7e03bea66af33c95ce3addf87a9bf1fcad8d33e757bc479957ddbc0e4f7207ac", size = 189955, upload-time = "2026-01-14T12:55:39.939Z" }, - { url = "https://files.pythonhosted.org/packages/6a/b4/e80528d2f4b7eaf1d437fcbd6fc6ba4cbeb3e2a0cb9ed5a79f47c7318706/librt-0.7.8-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f1ade7f31675db00b514b98f9ab9a7698c7282dad4be7492589109471852d398", size = 189370, upload-time = "2026-01-14T12:55:41.057Z" }, - { url = "https://files.pythonhosted.org/packages/c1/ab/938368f8ce31a9787ecd4becb1e795954782e4312095daf8fd22420227c8/librt-0.7.8-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:a14229ac62adcf1b90a15992f1ab9c69ae8b99ffb23cb64a90878a6e8a2f5b81", size = 183224, upload-time = "2026-01-14T12:55:42.328Z" }, - { url = "https://files.pythonhosted.org/packages/3c/10/559c310e7a6e4014ac44867d359ef8238465fb499e7eb31b6bfe3e3f86f5/librt-0.7.8-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5bcaaf624fd24e6a0cb14beac37677f90793a96864c67c064a91458611446e83", size = 203541, upload-time = "2026-01-14T12:55:43.501Z" }, - { url = "https://files.pythonhosted.org/packages/f8/db/a0db7acdb6290c215f343835c6efda5b491bb05c3ddc675af558f50fdba3/librt-0.7.8-cp314-cp314-win32.whl", hash = "sha256:7aa7d5457b6c542ecaed79cec4ad98534373c9757383973e638ccced0f11f46d", size = 40657, upload-time = "2026-01-14T12:55:44.668Z" }, - { url = "https://files.pythonhosted.org/packages/72/e0/4f9bdc2a98a798511e81edcd6b54fe82767a715e05d1921115ac70717f6f/librt-0.7.8-cp314-cp314-win_amd64.whl", hash = "sha256:3d1322800771bee4a91f3b4bd4e49abc7d35e65166821086e5afd1e6c0d9be44", size = 46835, upload-time = "2026-01-14T12:55:45.655Z" }, - { url = "https://files.pythonhosted.org/packages/f9/3d/59c6402e3dec2719655a41ad027a7371f8e2334aa794ed11533ad5f34969/librt-0.7.8-cp314-cp314-win_arm64.whl", hash = "sha256:5363427bc6a8c3b1719f8f3845ea53553d301382928a86e8fab7984426949bce", size = 39885, upload-time = "2026-01-14T12:55:47.138Z" }, - { url = "https://files.pythonhosted.org/packages/4e/9c/2481d80950b83085fb14ba3c595db56330d21bbc7d88a19f20165f3538db/librt-0.7.8-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ca916919793a77e4a98d4a1701e345d337ce53be4a16620f063191f7322ac80f", size = 59161, upload-time = "2026-01-14T12:55:48.45Z" }, - { url = "https://files.pythonhosted.org/packages/96/79/108df2cfc4e672336765d54e3ff887294c1cc36ea4335c73588875775527/librt-0.7.8-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:54feb7b4f2f6706bb82325e836a01be805770443e2400f706e824e91f6441dde", size = 61008, upload-time = "2026-01-14T12:55:49.527Z" }, - { url = 
"https://files.pythonhosted.org/packages/46/f2/30179898f9994a5637459d6e169b6abdc982012c0a4b2d4c26f50c06f911/librt-0.7.8-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39a4c76fee41007070f872b648cc2f711f9abf9a13d0c7162478043377b52c8e", size = 187199, upload-time = "2026-01-14T12:55:50.587Z" }, - { url = "https://files.pythonhosted.org/packages/b4/da/f7563db55cebdc884f518ba3791ad033becc25ff68eb70902b1747dc0d70/librt-0.7.8-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ac9c8a458245c7de80bc1b9765b177055efff5803f08e548dd4bb9ab9a8d789b", size = 198317, upload-time = "2026-01-14T12:55:51.991Z" }, - { url = "https://files.pythonhosted.org/packages/b3/6c/4289acf076ad371471fa86718c30ae353e690d3de6167f7db36f429272f1/librt-0.7.8-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b67aa7eff150f075fda09d11f6bfb26edffd300f6ab1666759547581e8f666", size = 210334, upload-time = "2026-01-14T12:55:53.682Z" }, - { url = "https://files.pythonhosted.org/packages/4a/7f/377521ac25b78ac0a5ff44127a0360ee6d5ddd3ce7327949876a30533daa/librt-0.7.8-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:535929b6eff670c593c34ff435d5440c3096f20fa72d63444608a5aef64dd581", size = 211031, upload-time = "2026-01-14T12:55:54.827Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b1/e1e96c3e20b23d00cf90f4aad48f0deb4cdfec2f0ed8380d0d85acf98bbf/librt-0.7.8-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:63937bd0f4d1cb56653dc7ae900d6c52c41f0015e25aaf9902481ee79943b33a", size = 204581, upload-time = "2026-01-14T12:55:56.811Z" }, - { url = "https://files.pythonhosted.org/packages/43/71/0f5d010e92ed9747e14bef35e91b6580533510f1e36a8a09eb79ee70b2f0/librt-0.7.8-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf243da9e42d914036fd362ac3fa77d80a41cadcd11ad789b1b5eec4daaf67ca", size = 224731, upload-time = "2026-01-14T12:55:58.175Z" }, - { url = 
"https://files.pythonhosted.org/packages/22/f0/07fb6ab5c39a4ca9af3e37554f9d42f25c464829254d72e4ebbd81da351c/librt-0.7.8-cp314-cp314t-win32.whl", hash = "sha256:171ca3a0a06c643bd0a2f62a8944e1902c94aa8e5da4db1ea9a8daf872685365", size = 41173, upload-time = "2026-01-14T12:55:59.315Z" }, - { url = "https://files.pythonhosted.org/packages/24/d4/7e4be20993dc6a782639625bd2f97f3c66125c7aa80c82426956811cfccf/librt-0.7.8-cp314-cp314t-win_amd64.whl", hash = "sha256:445b7304145e24c60288a2f172b5ce2ca35c0f81605f5299f3fa567e189d2e32", size = 47668, upload-time = "2026-01-14T12:56:00.261Z" }, - { url = "https://files.pythonhosted.org/packages/fc/85/69f92b2a7b3c0f88ffe107c86b952b397004b5b8ea5a81da3d9c04c04422/librt-0.7.8-cp314-cp314t-win_arm64.whl", hash = "sha256:8766ece9de08527deabcd7cb1b4f1a967a385d26e33e536d6d8913db6ef74f06", size = 40550, upload-time = "2026-01-14T12:56:01.542Z" }, - { url = "https://files.pythonhosted.org/packages/3b/9b/2668bb01f568bc89ace53736df950845f8adfcacdf6da087d5cef12110cb/librt-0.7.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c7e8f88f79308d86d8f39c491773cbb533d6cb7fa6476f35d711076ee04fceb6", size = 56680, upload-time = "2026-01-14T12:56:02.602Z" }, - { url = "https://files.pythonhosted.org/packages/b3/d4/dbb3edf2d0ec4ba08dcaf1865833d32737ad208962d4463c022cea6e9d3c/librt-0.7.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:389bd25a0db916e1d6bcb014f11aa9676cedaa485e9ec3752dfe19f196fd377b", size = 58612, upload-time = "2026-01-14T12:56:03.616Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c9/64b029de4ac9901fcd47832c650a0fd050555a452bd455ce8deddddfbb9f/librt-0.7.8-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73fd300f501a052f2ba52ede721232212f3b06503fa12665408ecfc9d8fd149c", size = 163654, upload-time = "2026-01-14T12:56:04.975Z" }, - { url = 
"https://files.pythonhosted.org/packages/81/5c/95e2abb1b48eb8f8c7fc2ae945321a6b82777947eb544cc785c3f37165b2/librt-0.7.8-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d772edc6a5f7835635c7562f6688e031f0b97e31d538412a852c49c9a6c92d5", size = 172477, upload-time = "2026-01-14T12:56:06.103Z" }, - { url = "https://files.pythonhosted.org/packages/7e/27/9bdf12e05b0eb089dd008d9c8aabc05748aad9d40458ade5e627c9538158/librt-0.7.8-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde8a130bd0f239e45503ab39fab239ace094d63ee1d6b67c25a63d741c0f71", size = 186220, upload-time = "2026-01-14T12:56:09.958Z" }, - { url = "https://files.pythonhosted.org/packages/53/6a/c3774f4cc95e68ed444a39f2c8bd383fd18673db7d6b98cfa709f6634b93/librt-0.7.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fdec6e2368ae4f796fc72fad7fd4bd1753715187e6d870932b0904609e7c878e", size = 183841, upload-time = "2026-01-14T12:56:11.109Z" }, - { url = "https://files.pythonhosted.org/packages/58/6b/48702c61cf83e9c04ad5cec8cad7e5e22a2cde23a13db8ef341598897ddd/librt-0.7.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:00105e7d541a8f2ee5be52caacea98a005e0478cfe78c8080fbb7b5d2b340c63", size = 179751, upload-time = "2026-01-14T12:56:12.278Z" }, - { url = "https://files.pythonhosted.org/packages/35/87/5f607fc73a131d4753f4db948833063c6aad18e18a4e6fbf64316c37ae65/librt-0.7.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c6f8947d3dfd7f91066c5b4385812c18be26c9d5a99ca56667547f2c39149d94", size = 199319, upload-time = "2026-01-14T12:56:13.425Z" }, - { url = "https://files.pythonhosted.org/packages/6e/cc/b7c5ac28ae0f0645a9681248bae4ede665bba15d6f761c291853c5c5b78e/librt-0.7.8-cp39-cp39-win32.whl", hash = "sha256:41d7bb1e07916aeb12ae4a44e3025db3691c4149ab788d0315781b4d29b86afb", size = 43434, upload-time = "2026-01-14T12:56:14.781Z" }, - { url = 
"https://files.pythonhosted.org/packages/e4/5d/dce0c92f786495adf2c1e6784d9c50a52fb7feb1cfb17af97a08281a6e82/librt-0.7.8-cp39-cp39-win_amd64.whl", hash = "sha256:e90a8e237753c83b8e484d478d9a996dc5e39fd5bd4c6ce32563bc8123f132be", size = 49801, upload-time = "2026-01-14T12:56:15.827Z" }, +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8a/3f/4ca7dd7819bf8ff303aca39c3c60e5320e46e766ab7f7dd627d3b9c11bdf/librt-0.8.0.tar.gz", hash = "sha256:cb74cdcbc0103fc988e04e5c58b0b31e8e5dd2babb9182b6f9490488eb36324b", size = 177306, upload-time = "2026-02-12T14:53:54.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/e9/018cfd60629e0404e6917943789800aa2231defbea540a17b90cc4547b97/librt-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db63cf3586a24241e89ca1ce0b56baaec9d371a328bd186c529b27c914c9a1ef", size = 65690, upload-time = "2026-02-12T14:51:57.761Z" }, + { url = "https://files.pythonhosted.org/packages/b5/80/8d39980860e4d1c9497ee50e5cd7c4766d8cfd90d105578eae418e8ffcbc/librt-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba9d9e60651615bc614be5e21a82cdb7b1769a029369cf4b4d861e4f19686fb6", size = 68373, upload-time = "2026-02-12T14:51:59.013Z" }, + { url = "https://files.pythonhosted.org/packages/2d/76/6e6f7a443af63977e421bd542551fec4072d9eaba02e671b05b238fe73bc/librt-0.8.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb4b3ad543084ed79f186741470b251b9d269cd8b03556f15a8d1a99a64b7de5", size = 197091, upload-time = "2026-02-12T14:52:00.642Z" }, + { url = "https://files.pythonhosted.org/packages/14/40/fa064181c231334c9f4cb69eb338132d39510c8928e84beba34b861d0a71/librt-0.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d2720335020219197380ccfa5c895f079ac364b4c429e96952cd6509934d8eb", size = 207350, upload-time = "2026-02-12T14:52:02.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/49/e7f8438dd226305e3e5955d495114ad01448e6a6ffc0303289b4153b5fc5/librt-0.8.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9726305d3e53419d27fc8cdfcd3f9571f0ceae22fa6b5ea1b3662c2e538f833e", size = 219962, upload-time = "2026-02-12T14:52:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/1f/2c/74086fc5d52e77107a3cc80a9a3209be6ad1c9b6bc99969d8d9bbf9fdfe4/librt-0.8.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cc3d107f603b5ee7a79b6aa6f166551b99b32fb4a5303c4dfcb4222fc6a0335e", size = 212939, upload-time = "2026-02-12T14:52:05.537Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ae/d6917c0ebec9bc2e0293903d6a5ccc7cdb64c228e529e96520b277318f25/librt-0.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:41064a0c07b4cc7a81355ccc305cb097d6027002209ffca51306e65ee8293630", size = 221393, upload-time = "2026-02-12T14:52:07.164Z" }, + { url = "https://files.pythonhosted.org/packages/04/97/15df8270f524ce09ad5c19cbbe0e8f95067582507149a6c90594e7795370/librt-0.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c6e4c10761ddbc0d67d2f6e2753daf99908db85d8b901729bf2bf5eaa60e0567", size = 216721, upload-time = "2026-02-12T14:52:08.857Z" }, + { url = "https://files.pythonhosted.org/packages/c4/52/17cbcf9b7a1bae5016d9d3561bc7169b32c3bd216c47d934d3f270602c0c/librt-0.8.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:ba581acad5ac8f33e2ff1746e8a57e001b47c6721873121bf8bbcf7ba8bd3aa4", size = 214790, upload-time = "2026-02-12T14:52:10.033Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/010a236e8dc4d717dd545c46fd036dcced2c7ede71ef85cf55325809ff92/librt-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bdab762e2c0b48bab76f1a08acb3f4c77afd2123bedac59446aeaaeed3d086cf", size = 237384, upload-time = "2026-02-12T14:52:11.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/14/f1c0eff3df8760dee761029efb72991c554d9f3282f1048e8c3d0eb60997/librt-0.8.0-cp310-cp310-win32.whl", hash = "sha256:6a3146c63220d814c4a2c7d6a1eacc8d5c14aed0ff85115c1dfea868080cd18f", size = 54289, upload-time = "2026-02-12T14:52:12.798Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0b/2684d473e64890882729f91866ed97ccc0a751a0afc3b4bf1a7b57094dbb/librt-0.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:bbebd2bba5c6ae02907df49150e55870fdd7440d727b6192c46b6f754723dde9", size = 61347, upload-time = "2026-02-12T14:52:13.793Z" }, + { url = "https://files.pythonhosted.org/packages/51/e9/42af181c89b65abfd557c1b017cba5b82098eef7bf26d1649d82ce93ccc7/librt-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ce33a9778e294507f3a0e3468eccb6a698b5166df7db85661543eca1cfc5369", size = 65314, upload-time = "2026-02-12T14:52:14.778Z" }, + { url = "https://files.pythonhosted.org/packages/9d/4a/15a847fca119dc0334a4b8012b1e15fdc5fc19d505b71e227eaf1bcdba09/librt-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8070aa3368559de81061ef752770d03ca1f5fc9467d4d512d405bd0483bfffe6", size = 68015, upload-time = "2026-02-12T14:52:15.797Z" }, + { url = "https://files.pythonhosted.org/packages/e1/87/ffc8dbd6ab68dd91b736c88529411a6729649d2b74b887f91f3aaff8d992/librt-0.8.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:20f73d4fecba969efc15cdefd030e382502d56bb6f1fc66b580cce582836c9fa", size = 194508, upload-time = "2026-02-12T14:52:16.835Z" }, + { url = "https://files.pythonhosted.org/packages/89/92/a7355cea28d6c48ff6ff5083ac4a2a866fb9b07b786aa70d1f1116680cd5/librt-0.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a512c88900bdb1d448882f5623a0b1ad27ba81a9bd75dacfe17080b72272ca1f", size = 205630, upload-time = "2026-02-12T14:52:18.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/ac/5e/54509038d7ac527828db95b8ba1c8f5d2649bc32fd8f39b1718ec9957dce/librt-0.8.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:015e2dde6e096d27c10238bf9f6492ba6c65822dfb69d2bf74c41a8e88b7ddef", size = 218289, upload-time = "2026-02-12T14:52:20.134Z" }, + { url = "https://files.pythonhosted.org/packages/6d/17/0ee0d13685cefee6d6f2d47bb643ddad3c62387e2882139794e6a5f1288a/librt-0.8.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1c25a131013eadd3c600686a0c0333eb2896483cbc7f65baa6a7ee761017aef9", size = 211508, upload-time = "2026-02-12T14:52:21.413Z" }, + { url = "https://files.pythonhosted.org/packages/4b/a8/1714ef6e9325582e3727de3be27e4c1b2f428ea411d09f1396374180f130/librt-0.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:21b14464bee0b604d80a638cf1ee3148d84ca4cc163dcdcecb46060c1b3605e4", size = 219129, upload-time = "2026-02-12T14:52:22.61Z" }, + { url = "https://files.pythonhosted.org/packages/89/d3/2d9fe353edff91cdc0ece179348054a6fa61f3de992c44b9477cb973509b/librt-0.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:05a3dd3f116747f7e1a2b475ccdc6fb637fd4987126d109e03013a79d40bf9e6", size = 213126, upload-time = "2026-02-12T14:52:23.819Z" }, + { url = "https://files.pythonhosted.org/packages/ad/8e/9f5c60444880f6ad50e3ff7475e5529e787797e7f3ad5432241633733b92/librt-0.8.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:fa37f99bff354ff191c6bcdffbc9d7cdd4fc37faccfc9be0ef3a4fd5613977da", size = 212279, upload-time = "2026-02-12T14:52:25.034Z" }, + { url = "https://files.pythonhosted.org/packages/fe/eb/d4a2cfa647da3022ae977f50d7eda1d91f70d7d1883cf958a4b6ef689eab/librt-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1566dbb9d1eb0987264c9b9460d212e809ba908d2f4a3999383a84d765f2f3f1", size = 234654, upload-time = "2026-02-12T14:52:26.204Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/31/26b978861c7983b036a3aea08bdbb2ec32bbaab1ad1d57c5e022be59afc1/librt-0.8.0-cp311-cp311-win32.whl", hash = "sha256:70defb797c4d5402166787a6b3c66dfb3fa7f93d118c0509ffafa35a392f4258", size = 54603, upload-time = "2026-02-12T14:52:27.342Z" }, + { url = "https://files.pythonhosted.org/packages/d0/78/f194ed7c48dacf875677e749c5d0d1d69a9daa7c994314a39466237fb1be/librt-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:db953b675079884ffda33d1dca7189fb961b6d372153750beb81880384300817", size = 61730, upload-time = "2026-02-12T14:52:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/97/ee/ad71095478d02137b6f49469dc808c595cfe89b50985f6b39c5345f0faab/librt-0.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:75d1a8cab20b2043f03f7aab730551e9e440adc034d776f15f6f8d582b0a5ad4", size = 52274, upload-time = "2026-02-12T14:52:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fb/53/f3bc0c4921adb0d4a5afa0656f2c0fbe20e18e3e0295e12985b9a5dc3f55/librt-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:17269dd2745dbe8e42475acb28e419ad92dfa38214224b1b01020b8cac70b645", size = 66511, upload-time = "2026-02-12T14:52:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/89/4b/4c96357432007c25a1b5e363045373a6c39481e49f6ba05234bb59a839c1/librt-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f4617cef654fca552f00ce5ffdf4f4b68770f18950e4246ce94629b789b92467", size = 68628, upload-time = "2026-02-12T14:52:31.491Z" }, + { url = "https://files.pythonhosted.org/packages/47/16/52d75374d1012e8fc709216b5eaa25f471370e2a2331b8be00f18670a6c7/librt-0.8.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5cb11061a736a9db45e3c1293cfcb1e3caf205912dfa085734ba750f2197ff9a", size = 198941, upload-time = "2026-02-12T14:52:32.489Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/11/d5dd89e5a2228567b1228d8602d896736247424484db086eea6b8010bcba/librt-0.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4bb00bd71b448f16749909b08a0ff16f58b079e2261c2e1000f2bbb2a4f0a45", size = 210009, upload-time = "2026-02-12T14:52:33.634Z" }, + { url = "https://files.pythonhosted.org/packages/49/d8/fc1a92a77c3020ee08ce2dc48aed4b42ab7c30fb43ce488d388673b0f164/librt-0.8.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95a719a049f0eefaf1952673223cf00d442952273cbd20cf2ed7ec423a0ef58d", size = 224461, upload-time = "2026-02-12T14:52:34.868Z" }, + { url = "https://files.pythonhosted.org/packages/7f/98/eb923e8b028cece924c246104aa800cf72e02d023a8ad4ca87135b05a2fe/librt-0.8.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bd32add59b58fba3439d48d6f36ac695830388e3da3e92e4fc26d2d02670d19c", size = 217538, upload-time = "2026-02-12T14:52:36.078Z" }, + { url = "https://files.pythonhosted.org/packages/fd/67/24e80ab170674a1d8ee9f9a83081dca4635519dbd0473b8321deecddb5be/librt-0.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4f764b2424cb04524ff7a486b9c391e93f93dc1bd8305b2136d25e582e99aa2f", size = 225110, upload-time = "2026-02-12T14:52:37.301Z" }, + { url = "https://files.pythonhosted.org/packages/d8/c7/6fbdcbd1a6e5243c7989c21d68ab967c153b391351174b4729e359d9977f/librt-0.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f04ca50e847abc486fa8f4107250566441e693779a5374ba211e96e238f298b9", size = 217758, upload-time = "2026-02-12T14:52:38.89Z" }, + { url = "https://files.pythonhosted.org/packages/4b/bd/4d6b36669db086e3d747434430073e14def032dd58ad97959bf7e2d06c67/librt-0.8.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9ab3a3475a55b89b87ffd7e6665838e8458e0b596c22e0177e0f961434ec474a", size = 218384, upload-time = "2026-02-12T14:52:40.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/2d/afe966beb0a8f179b132f3e95c8dd90738a23e9ebdba10f89a3f192f9366/librt-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e36a8da17134ffc29373775d88c04832f9ecfab1880470661813e6c7991ef79", size = 241187, upload-time = "2026-02-12T14:52:43.55Z" }, + { url = "https://files.pythonhosted.org/packages/02/d0/6172ea4af2b538462785ab1a68e52d5c99cfb9866a7caf00fdf388299734/librt-0.8.0-cp312-cp312-win32.whl", hash = "sha256:4eb5e06ebcc668677ed6389164f52f13f71737fc8be471101fa8b4ce77baeb0c", size = 54914, upload-time = "2026-02-12T14:52:44.676Z" }, + { url = "https://files.pythonhosted.org/packages/d4/cb/ceb6ed6175612a4337ad49fb01ef594712b934b4bc88ce8a63554832eb44/librt-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:0a33335eb59921e77c9acc05d0e654e4e32e45b014a4d61517897c11591094f8", size = 62020, upload-time = "2026-02-12T14:52:45.676Z" }, + { url = "https://files.pythonhosted.org/packages/f1/7e/61701acbc67da74ce06ddc7ba9483e81c70f44236b2d00f6a4bfee1aacbf/librt-0.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:24a01c13a2a9bdad20997a4443ebe6e329df063d1978bbe2ebbf637878a46d1e", size = 52443, upload-time = "2026-02-12T14:52:47.218Z" }, + { url = "https://files.pythonhosted.org/packages/6d/32/3edb0bcb4113a9c8bdcd1750663a54565d255027657a5df9d90f13ee07fa/librt-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7f820210e21e3a8bf8fde2ae3c3d10106d4de9ead28cbfdf6d0f0f41f5b12fa1", size = 66522, upload-time = "2026-02-12T14:52:48.219Z" }, + { url = "https://files.pythonhosted.org/packages/30/ab/e8c3d05e281f5d405ebdcc5bc8ab36df23e1a4b40ac9da8c3eb9928b72b9/librt-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4831c44b8919e75ca0dfb52052897c1ef59fdae19d3589893fbd068f1e41afbf", size = 68658, upload-time = "2026-02-12T14:52:50.351Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/d3/74a206c47b7748bbc8c43942de3ed67de4c231156e148b4f9250869593df/librt-0.8.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:88c6e75540f1f10f5e0fc5e87b4b6c290f0e90d1db8c6734f670840494764af8", size = 199287, upload-time = "2026-02-12T14:52:51.938Z" }, + { url = "https://files.pythonhosted.org/packages/fa/29/ef98a9131cf12cb95771d24e4c411fda96c89dc78b09c2de4704877ebee4/librt-0.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9646178cd794704d722306c2c920c221abbf080fede3ba539d5afdec16c46dad", size = 210293, upload-time = "2026-02-12T14:52:53.128Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3e/89b4968cb08c53d4c2d8b02517081dfe4b9e07a959ec143d333d76899f6c/librt-0.8.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e1af31a710e17891d9adf0dbd9a5fcd94901a3922a96499abdbf7ce658f4e01", size = 224801, upload-time = "2026-02-12T14:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/6d/28/f38526d501f9513f8b48d78e6be4a241e15dd4b000056dc8b3f06ee9ce5d/librt-0.8.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:507e94f4bec00b2f590fbe55f48cd518a208e2474a3b90a60aa8f29136ddbada", size = 218090, upload-time = "2026-02-12T14:52:55.758Z" }, + { url = "https://files.pythonhosted.org/packages/02/ec/64e29887c5009c24dc9c397116c680caffc50286f62bd99c39e3875a2854/librt-0.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f1178e0de0c271231a660fbef9be6acdfa1d596803464706862bef6644cc1cae", size = 225483, upload-time = "2026-02-12T14:52:57.375Z" }, + { url = "https://files.pythonhosted.org/packages/ee/16/7850bdbc9f1a32d3feff2708d90c56fc0490b13f1012e438532781aa598c/librt-0.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:71fc517efc14f75c2f74b1f0a5d5eb4a8e06aa135c34d18eaf3522f4a53cd62d", size = 218226, upload-time = "2026-02-12T14:52:58.534Z" }, 
+ { url = "https://files.pythonhosted.org/packages/1c/4a/166bffc992d65ddefa7c47052010a87c059b44a458ebaf8f5eba384b0533/librt-0.8.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:0583aef7e9a720dd40f26a2ad5a1bf2ccbb90059dac2b32ac516df232c701db3", size = 218755, upload-time = "2026-02-12T14:52:59.701Z" }, + { url = "https://files.pythonhosted.org/packages/da/5d/9aeee038bcc72a9cfaaee934463fe9280a73c5440d36bd3175069d2cb97b/librt-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5d0f76fc73480d42285c609c0ea74d79856c160fa828ff9aceab574ea4ecfd7b", size = 241617, upload-time = "2026-02-12T14:53:00.966Z" }, + { url = "https://files.pythonhosted.org/packages/64/ff/2bec6b0296b9d0402aa6ec8540aa19ebcb875d669c37800cb43d10d9c3a3/librt-0.8.0-cp313-cp313-win32.whl", hash = "sha256:e79dbc8f57de360f0ed987dc7de7be814b4803ef0e8fc6d3ff86e16798c99935", size = 54966, upload-time = "2026-02-12T14:53:02.042Z" }, + { url = "https://files.pythonhosted.org/packages/08/8d/bf44633b0182996b2c7ea69a03a5c529683fa1f6b8e45c03fe874ff40d56/librt-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:25b3e667cbfc9000c4740b282df599ebd91dbdcc1aa6785050e4c1d6be5329ab", size = 62000, upload-time = "2026-02-12T14:53:03.822Z" }, + { url = "https://files.pythonhosted.org/packages/5c/fd/c6472b8e0eac0925001f75e366cf5500bcb975357a65ef1f6b5749389d3a/librt-0.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:e9a3a38eb4134ad33122a6d575e6324831f930a771d951a15ce232e0237412c2", size = 52496, upload-time = "2026-02-12T14:53:04.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/13/79ebfe30cd273d7c0ce37a5f14dc489c5fb8b722a008983db2cfd57270bb/librt-0.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:421765e8c6b18e64d21c8ead315708a56fc24f44075059702e421d164575fdda", size = 66078, upload-time = "2026-02-12T14:53:06.085Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8f/d11eca40b62a8d5e759239a80636386ef88adecb10d1a050b38cc0da9f9e/librt-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:48f84830a8f8ad7918afd743fd7c4eb558728bceab7b0e38fd5a5cf78206a556", size = 68309, upload-time = "2026-02-12T14:53:07.121Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b4/f12ee70a3596db40ff3c88ec9eaa4e323f3b92f77505b4d900746706ec6a/librt-0.8.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9f09d4884f882baa39a7e36bbf3eae124c4ca2a223efb91e567381d1c55c6b06", size = 196804, upload-time = "2026-02-12T14:53:08.164Z" }, + { url = "https://files.pythonhosted.org/packages/8b/7e/70dbbdc0271fd626abe1671ad117bcd61a9a88cdc6a10ccfbfc703db1873/librt-0.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:693697133c3b32aa9b27f040e3691be210e9ac4d905061859a9ed519b1d5a376", size = 206915, upload-time = "2026-02-12T14:53:09.333Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/6b9e05a635d4327608d06b3c1702166e3b3e78315846373446cf90d7b0bf/librt-0.8.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5512aae4648152abaf4d48b59890503fcbe86e85abc12fb9b096fe948bdd816", size = 221200, upload-time = "2026-02-12T14:53:10.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/6c/e19a3ac53e9414de43a73d7507d2d766cd22d8ca763d29a4e072d628db42/librt-0.8.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:995d24caa6bbb34bcdd4a41df98ac6d1af637cfa8975cb0790e47d6623e70e3e", size = 214640, upload-time = "2026-02-12T14:53:12.342Z" }, + { url = "https://files.pythonhosted.org/packages/30/f0/23a78464788619e8c70f090cfd099cce4973eed142c4dccb99fc322283fd/librt-0.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b9aef96d7593584e31ef6ac1eb9775355b0099fee7651fae3a15bc8657b67b52", size = 221980, upload-time = "2026-02-12T14:53:13.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/32/38e21420c5d7aa8a8bd2c7a7d5252ab174a5a8aaec8b5551968979b747bf/librt-0.8.0-cp314-cp314-musllinux_1_2_i686.whl", 
hash = "sha256:4f6e975377fbc4c9567cb33ea9ab826031b6c7ec0515bfae66a4fb110d40d6da", size = 215146, upload-time = "2026-02-12T14:53:14.8Z" }, + { url = "https://files.pythonhosted.org/packages/bb/00/bd9ecf38b1824c25240b3ad982fb62c80f0a969e6679091ba2b3afb2b510/librt-0.8.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:daae5e955764be8fd70a93e9e5133c75297f8bce1e802e1d3683b98f77e1c5ab", size = 215203, upload-time = "2026-02-12T14:53:16.087Z" }, + { url = "https://files.pythonhosted.org/packages/b9/60/7559bcc5279d37810b98d4a52616febd7b8eef04391714fd6bdf629598b1/librt-0.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7bd68cebf3131bb920d5984f75fe302d758db33264e44b45ad139385662d7bc3", size = 237937, upload-time = "2026-02-12T14:53:17.236Z" }, + { url = "https://files.pythonhosted.org/packages/41/cc/be3e7da88f1abbe2642672af1dc00a0bccece11ca60241b1883f3018d8d5/librt-0.8.0-cp314-cp314-win32.whl", hash = "sha256:1e6811cac1dcb27ca4c74e0ca4a5917a8e06db0d8408d30daee3a41724bfde7a", size = 50685, upload-time = "2026-02-12T14:53:18.888Z" }, + { url = "https://files.pythonhosted.org/packages/38/27/e381d0df182a8f61ef1f6025d8b138b3318cc9d18ad4d5f47c3bf7492523/librt-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:178707cda89d910c3b28bf5aa5f69d3d4734e0f6ae102f753ad79edef83a83c7", size = 57872, upload-time = "2026-02-12T14:53:19.942Z" }, + { url = "https://files.pythonhosted.org/packages/c5/0c/ca9dfdf00554a44dea7d555001248269a4bab569e1590a91391feb863fa4/librt-0.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3e8b77b5f54d0937b26512774916041756c9eb3e66f1031971e626eea49d0bf4", size = 48056, upload-time = "2026-02-12T14:53:21.473Z" }, + { url = "https://files.pythonhosted.org/packages/f2/ed/6cc9c4ad24f90c8e782193c7b4a857408fd49540800613d1356c63567d7b/librt-0.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:789911e8fa40a2e82f41120c936b1965f3213c67f5a483fc5a41f5839a05dcbb", size = 68307, upload-time = "2026-02-12T14:53:22.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/84/d8/0e94292c6b3e00b6eeea39dd44d5703d1ec29b6dafce7eea19dc8f1aedbd/librt-0.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2b37437e7e4ef5e15a297b36ba9e577f73e29564131d86dd75875705e97402b5", size = 70999, upload-time = "2026-02-12T14:53:23.603Z" }, + { url = "https://files.pythonhosted.org/packages/0e/f4/6be1afcbdeedbdbbf54a7c9d73ad43e1bf36897cebf3978308cd64922e02/librt-0.8.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:671a6152edf3b924d98a5ed5e6982ec9cb30894085482acadce0975f031d4c5c", size = 220782, upload-time = "2026-02-12T14:53:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f0/8d/f306e8caa93cfaf5c6c9e0d940908d75dc6af4fd856baa5535c922ee02b1/librt-0.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8992ca186a1678107b0af3d0c9303d8c7305981b9914989b9788319ed4d89546", size = 235420, upload-time = "2026-02-12T14:53:27.047Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f2/65d86bd462e9c351326564ca805e8457442149f348496e25ccd94583ffa2/librt-0.8.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:001e5330093d887b8b9165823eca6c5c4db183fe4edea4fdc0680bbac5f46944", size = 246452, upload-time = "2026-02-12T14:53:28.341Z" }, + { url = "https://files.pythonhosted.org/packages/03/94/39c88b503b4cb3fcbdeb3caa29672b6b44ebee8dcc8a54d49839ac280f3f/librt-0.8.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d920789eca7ef71df7f31fd547ec0d3002e04d77f30ba6881e08a630e7b2c30e", size = 238891, upload-time = "2026-02-12T14:53:29.625Z" }, + { url = "https://files.pythonhosted.org/packages/e3/c6/6c0d68190893d01b71b9569b07a1c811e280c0065a791249921c83dc0290/librt-0.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:82fb4602d1b3e303a58bfe6165992b5a78d823ec646445356c332cd5f5bbaa61", size = 250249, upload-time = "2026-02-12T14:53:30.93Z" 
}, + { url = "https://files.pythonhosted.org/packages/52/7a/f715ed9e039035d0ea637579c3c0155ab3709a7046bc408c0fb05d337121/librt-0.8.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:4d3e38797eb482485b486898f89415a6ab163bc291476bd95712e42cf4383c05", size = 240642, upload-time = "2026-02-12T14:53:32.174Z" }, + { url = "https://files.pythonhosted.org/packages/c2/3c/609000a333debf5992efe087edc6467c1fdbdddca5b610355569bbea9589/librt-0.8.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a905091a13e0884701226860836d0386b88c72ce5c2fdfba6618e14c72be9f25", size = 239621, upload-time = "2026-02-12T14:53:33.39Z" }, + { url = "https://files.pythonhosted.org/packages/b9/df/87b0673d5c395a8f34f38569c116c93142d4dc7e04af2510620772d6bd4f/librt-0.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:375eda7acfce1f15f5ed56cfc960669eefa1ec8732e3e9087c3c4c3f2066759c", size = 262986, upload-time = "2026-02-12T14:53:34.617Z" }, + { url = "https://files.pythonhosted.org/packages/09/7f/6bbbe9dcda649684773aaea78b87fff4d7e59550fbc2877faa83612087a3/librt-0.8.0-cp314-cp314t-win32.whl", hash = "sha256:2ccdd20d9a72c562ffb73098ac411de351b53a6fbb3390903b2d33078ef90447", size = 51328, upload-time = "2026-02-12T14:53:36.15Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f3/e1981ab6fa9b41be0396648b5850267888a752d025313a9e929c4856208e/librt-0.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:25e82d920d4d62ad741592fcf8d0f3bda0e3fc388a184cb7d2f566c681c5f7b9", size = 58719, upload-time = "2026-02-12T14:53:37.183Z" }, + { url = "https://files.pythonhosted.org/packages/94/d1/433b3c06e78f23486fe4fdd19bc134657eb30997d2054b0dbf52bbf3382e/librt-0.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:92249938ab744a5890580d3cb2b22042f0dce71cdaa7c1369823df62bedf7cbc", size = 48753, upload-time = "2026-02-12T14:53:38.539Z" }, + { url = "https://files.pythonhosted.org/packages/c5/dd/e0c82032d11fbc535ddbd4b955104fbe8e5202c0c42d982125a74e30f802/librt-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:4b705f85311ee76acec5ee70806990a51f0deb519ea0c29c1d1652d79127604d", size = 65982, upload-time = "2026-02-12T14:53:39.597Z" }, + { url = "https://files.pythonhosted.org/packages/11/a2/55de2f768ce1f80029211bbbbedf7b22032145730b1aae92bb118a2bde40/librt-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7ce0a8cb67e702dcb06342b2aaaa3da9fb0ddc670417879adfa088b44cf7b3b6", size = 68638, upload-time = "2026-02-12T14:53:40.727Z" }, + { url = "https://files.pythonhosted.org/packages/52/fc/ae3b63d02b84f5afc06b822264d1b9d411f6286c58d8d9caa49d9cc0c68c/librt-0.8.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aaadec87f45a3612b6818d1db5fbfe93630669b7ee5d6bdb6427ae08a1aa2141", size = 196099, upload-time = "2026-02-12T14:53:42.297Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3a/c9dc547bbaaef571d5dbd8249674c4baf7ecb689e2b25c8ff6227d85c751/librt-0.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56901f1eec031396f230db71c59a01d450715cbbef9856bf636726994331195d", size = 206678, upload-time = "2026-02-12T14:53:43.652Z" }, + { url = "https://files.pythonhosted.org/packages/df/97/ccab8bea6d5d49f22df87b237fb43f194e05b46e3892ede5785824ecdc48/librt-0.8.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b055bb3abaf69abed25743d8fc1ab691e4f51a912ee0a6f9a6c84f4bbddb283d", size = 219308, upload-time = "2026-02-12T14:53:44.896Z" }, + { url = "https://files.pythonhosted.org/packages/65/2b/bf86e2a084a49b25030bd2848956e34ec2faa18c5e29e9c829f9c52dceb8/librt-0.8.0-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1ef3bd856373cf8e7382402731f43bfe978a8613b4039e49e166e1e0dc590216", size = 212212, upload-time = "2026-02-12T14:53:46.166Z" }, + { url = "https://files.pythonhosted.org/packages/17/8d/d297a8bbf20b896b114d4751e2aa0539f97923ec9c91ded2ee17bdfd043d/librt-0.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:2e0ffe88ebb5962f8fb0ddcbaaff30f1ea06a79501069310e1e030eafb1ad787", size = 220670, upload-time = "2026-02-12T14:53:47.412Z" }, + { url = "https://files.pythonhosted.org/packages/d5/50/21feb3c235e4c4c538aa6f5a45a9b736f6ff868d0733fb97bdec486a9bf8/librt-0.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82e61cd1c563745ad495387c3b65806bfd453badb4adbc019df3389dddee1bf6", size = 216182, upload-time = "2026-02-12T14:53:48.683Z" }, + { url = "https://files.pythonhosted.org/packages/29/5c/1fdaafb7062a9587a59bb01d6fac70355f0c84caa4fa14d67d847a6cd2e6/librt-0.8.0-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:667e2513cf69bfd1e1ed9a00d6c736d5108714ec071192afb737987955888a25", size = 214133, upload-time = "2026-02-12T14:53:49.983Z" }, + { url = "https://files.pythonhosted.org/packages/57/a6/001e085e16c77cfc5d7cc74c8c05dc80733251b362b3167e33c832813ad8/librt-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b6caff69e25d80c269b1952be8493b4d94ef745f438fa619d7931066bdd26de", size = 236650, upload-time = "2026-02-12T14:53:51.263Z" }, + { url = "https://files.pythonhosted.org/packages/00/03/516075b2c0dac3ff6c88221f8e4f86dc6576a6e90e694558e0b71217427b/librt-0.8.0-cp39-cp39-win32.whl", hash = "sha256:02a9fe85410cc9bef045e7cb7fd26fdde6669e6d173f99df659aa7f6335961e9", size = 54369, upload-time = "2026-02-12T14:53:52.514Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/710ab8320072000439d1b57b5ed63f6b1dc2f61345aafaff53df9ae9dc15/librt-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:de076eaba208d16efb5962f99539867f8e2c73480988cb513fcf1b5dbb0c9dcf", size = 61505, upload-time = "2026-02-12T14:53:53.658Z" }, ] [[package]] @@ -3735,7 +3749,7 @@ wheels = [ [[package]] name = "pillow" -version = "12.1.0" +version = "12.1.1" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -3752,98 +3766,98 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 
'emscripten' and sys_platform != 'win32'", "python_full_version == '3.10.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/41/f73d92b6b883a579e79600d391f2e21cb0df767b2714ecbd2952315dfeef/pillow-12.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:fb125d860738a09d363a88daa0f59c4533529a90e564785e20fe875b200b6dbd", size = 5304089, upload-time = "2026-01-02T09:10:24.953Z" }, - { url = "https://files.pythonhosted.org/packages/94/55/7aca2891560188656e4a91ed9adba305e914a4496800da6b5c0a15f09edf/pillow-12.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cad302dc10fac357d3467a74a9561c90609768a6f73a1923b0fd851b6486f8b0", size = 4657815, upload-time = "2026-01-02T09:10:27.063Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d2/b28221abaa7b4c40b7dba948f0f6a708bd7342c4d47ce342f0ea39643974/pillow-12.1.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a40905599d8079e09f25027423aed94f2823adaf2868940de991e53a449e14a8", size = 6222593, upload-time = "2026-01-02T09:10:29.115Z" }, - { url = "https://files.pythonhosted.org/packages/71/b8/7a61fb234df6a9b0b479f69e66901209d89ff72a435b49933f9122f94cac/pillow-12.1.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:92a7fe4225365c5e3a8e598982269c6d6698d3e783b3b1ae979e7819f9cd55c1", size = 8027579, upload-time = "2026-01-02T09:10:31.182Z" }, - { url = "https://files.pythonhosted.org/packages/ea/51/55c751a57cc524a15a0e3db20e5cde517582359508d62305a627e77fd295/pillow-12.1.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f10c98f49227ed8383d28174ee95155a675c4ed7f85e2e573b04414f7e371bda", size = 6335760, upload-time = 
"2026-01-02T09:10:33.02Z" }, - { url = "https://files.pythonhosted.org/packages/dc/7c/60e3e6f5e5891a1a06b4c910f742ac862377a6fe842f7184df4a274ce7bf/pillow-12.1.0-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8637e29d13f478bc4f153d8daa9ffb16455f0a6cb287da1b432fdad2bfbd66c7", size = 7027127, upload-time = "2026-01-02T09:10:35.009Z" }, - { url = "https://files.pythonhosted.org/packages/06/37/49d47266ba50b00c27ba63a7c898f1bb41a29627ced8c09e25f19ebec0ff/pillow-12.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:21e686a21078b0f9cb8c8a961d99e6a4ddb88e0fc5ea6e130172ddddc2e5221a", size = 6449896, upload-time = "2026-01-02T09:10:36.793Z" }, - { url = "https://files.pythonhosted.org/packages/f9/e5/67fd87d2913902462cd9b79c6211c25bfe95fcf5783d06e1367d6d9a741f/pillow-12.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2415373395a831f53933c23ce051021e79c8cd7979822d8cc478547a3f4da8ef", size = 7151345, upload-time = "2026-01-02T09:10:39.064Z" }, - { url = "https://files.pythonhosted.org/packages/bd/15/f8c7abf82af68b29f50d77c227e7a1f87ce02fdc66ded9bf603bc3b41180/pillow-12.1.0-cp310-cp310-win32.whl", hash = "sha256:e75d3dba8fc1ddfec0cd752108f93b83b4f8d6ab40e524a95d35f016b9683b09", size = 6325568, upload-time = "2026-01-02T09:10:41.035Z" }, - { url = "https://files.pythonhosted.org/packages/d4/24/7d1c0e160b6b5ac2605ef7d8be537e28753c0db5363d035948073f5513d7/pillow-12.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:64efdf00c09e31efd754448a383ea241f55a994fd079866b92d2bbff598aad91", size = 7032367, upload-time = "2026-01-02T09:10:43.09Z" }, - { url = "https://files.pythonhosted.org/packages/f4/03/41c038f0d7a06099254c60f618d0ec7be11e79620fc23b8e85e5b31d9a44/pillow-12.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:f188028b5af6b8fb2e9a76ac0f841a575bd1bd396e46ef0840d9b88a48fdbcea", size = 2452345, upload-time = "2026-01-02T09:10:44.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/43/c4/bf8328039de6cc22182c3ef007a2abfbbdab153661c0a9aa78af8d706391/pillow-12.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a83e0850cb8f5ac975291ebfc4170ba481f41a28065277f7f735c202cd8e0af3", size = 5304057, upload-time = "2026-01-02T09:10:46.627Z" }, - { url = "https://files.pythonhosted.org/packages/43/06/7264c0597e676104cc22ca73ee48f752767cd4b1fe084662620b17e10120/pillow-12.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b6e53e82ec2db0717eabb276aa56cf4e500c9a7cec2c2e189b55c24f65a3e8c0", size = 4657811, upload-time = "2026-01-02T09:10:49.548Z" }, - { url = "https://files.pythonhosted.org/packages/72/64/f9189e44474610daf83da31145fa56710b627b5c4c0b9c235e34058f6b31/pillow-12.1.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:40a8e3b9e8773876d6e30daed22f016509e3987bab61b3b7fe309d7019a87451", size = 6232243, upload-time = "2026-01-02T09:10:51.62Z" }, - { url = "https://files.pythonhosted.org/packages/ef/30/0df458009be6a4caca4ca2c52975e6275c387d4e5c95544e34138b41dc86/pillow-12.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:800429ac32c9b72909c671aaf17ecd13110f823ddb7db4dfef412a5587c2c24e", size = 8037872, upload-time = "2026-01-02T09:10:53.446Z" }, - { url = "https://files.pythonhosted.org/packages/e4/86/95845d4eda4f4f9557e25381d70876aa213560243ac1a6d619c46caaedd9/pillow-12.1.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0b022eaaf709541b391ee069f0022ee5b36c709df71986e3f7be312e46f42c84", size = 6345398, upload-time = "2026-01-02T09:10:55.426Z" }, - { url = "https://files.pythonhosted.org/packages/5c/1f/8e66ab9be3aaf1435bc03edd1ebdf58ffcd17f7349c1d970cafe87af27d9/pillow-12.1.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f345e7bc9d7f368887c712aa5054558bad44d2a301ddf9248599f4161abc7c0", size = 7034667, upload-time = "2026-01-02T09:10:57.11Z" }, - { url = 
"https://files.pythonhosted.org/packages/f9/f6/683b83cb9b1db1fb52b87951b1c0b99bdcfceaa75febf11406c19f82cb5e/pillow-12.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d70347c8a5b7ccd803ec0c85c8709f036e6348f1e6a5bf048ecd9c64d3550b8b", size = 6458743, upload-time = "2026-01-02T09:10:59.331Z" }, - { url = "https://files.pythonhosted.org/packages/9a/7d/de833d63622538c1d58ce5395e7c6cb7e7dce80decdd8bde4a484e095d9f/pillow-12.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fcc52d86ce7a34fd17cb04e87cfdb164648a3662a6f20565910a99653d66c18", size = 7159342, upload-time = "2026-01-02T09:11:01.82Z" }, - { url = "https://files.pythonhosted.org/packages/8c/40/50d86571c9e5868c42b81fe7da0c76ca26373f3b95a8dd675425f4a92ec1/pillow-12.1.0-cp311-cp311-win32.whl", hash = "sha256:3ffaa2f0659e2f740473bcf03c702c39a8d4b2b7ffc629052028764324842c64", size = 6328655, upload-time = "2026-01-02T09:11:04.556Z" }, - { url = "https://files.pythonhosted.org/packages/6c/af/b1d7e301c4cd26cd45d4af884d9ee9b6fab893b0ad2450d4746d74a6968c/pillow-12.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:806f3987ffe10e867bab0ddad45df1148a2b98221798457fa097ad85d6e8bc75", size = 7031469, upload-time = "2026-01-02T09:11:06.538Z" }, - { url = "https://files.pythonhosted.org/packages/48/36/d5716586d887fb2a810a4a61518a327a1e21c8b7134c89283af272efe84b/pillow-12.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:9f5fefaca968e700ad1a4a9de98bf0869a94e397fe3524c4c9450c1445252304", size = 2452515, upload-time = "2026-01-02T09:11:08.226Z" }, - { url = "https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642, upload-time = "2026-01-02T09:11:10.138Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464, upload-time = "2026-01-02T09:11:12.319Z" }, - { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 6234878, upload-time = "2026-01-02T09:11:14.096Z" }, - { url = "https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868, upload-time = "2026-01-02T09:11:15.903Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", size = 6349468, upload-time = "2026-01-02T09:11:17.631Z" }, - { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518, upload-time = "2026-01-02T09:11:19.389Z" }, - { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829, upload-time = "2026-01-02T09:11:21.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756, upload-time = "2026-01-02T09:11:23.559Z" }, - { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770, upload-time = "2026-01-02T09:11:25.661Z" }, - { url = "https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406, upload-time = "2026-01-02T09:11:27.474Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612, upload-time = "2026-01-02T09:11:29.309Z" }, - { url = "https://files.pythonhosted.org/packages/dd/c7/2530a4aa28248623e9d7f27316b42e27c32ec410f695929696f2e0e4a778/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:7b5dd7cbae20285cdb597b10eb5a2c13aa9de6cde9bb64a3c1317427b1db1ae1", size = 4062543, upload-time = "2026-01-02T09:11:31.566Z" }, - { url = "https://files.pythonhosted.org/packages/8f/1f/40b8eae823dc1519b87d53c30ed9ef085506b05281d313031755c1705f73/pillow-12.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:29a4cef9cb672363926f0470afc516dbf7305a14d8c54f7abbb5c199cd8f8179", size = 4138373, upload-time = "2026-01-02T09:11:33.367Z" }, - { url = "https://files.pythonhosted.org/packages/d4/77/6fa60634cf06e52139fd0e89e5bbf055e8166c691c42fb162818b7fda31d/pillow-12.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:681088909d7e8fa9e31b9799aaa59ba5234c58e5e4f1951b4c4d1082a2e980e0", size = 3601241, upload-time = "2026-01-02T09:11:35.011Z" }, 
- { url = "https://files.pythonhosted.org/packages/4f/bf/28ab865de622e14b747f0cd7877510848252d950e43002e224fb1c9ababf/pillow-12.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:983976c2ab753166dc66d36af6e8ec15bb511e4a25856e2227e5f7e00a160587", size = 5262410, upload-time = "2026-01-02T09:11:36.682Z" }, - { url = "https://files.pythonhosted.org/packages/1c/34/583420a1b55e715937a85bd48c5c0991598247a1fd2eb5423188e765ea02/pillow-12.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:db44d5c160a90df2d24a24760bbd37607d53da0b34fb546c4c232af7192298ac", size = 4657312, upload-time = "2026-01-02T09:11:38.535Z" }, - { url = "https://files.pythonhosted.org/packages/1d/fd/f5a0896839762885b3376ff04878f86ab2b097c2f9a9cdccf4eda8ba8dc0/pillow-12.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:6b7a9d1db5dad90e2991645874f708e87d9a3c370c243c2d7684d28f7e133e6b", size = 6232605, upload-time = "2026-01-02T09:11:40.602Z" }, - { url = "https://files.pythonhosted.org/packages/98/aa/938a09d127ac1e70e6ed467bd03834350b33ef646b31edb7452d5de43792/pillow-12.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6258f3260986990ba2fa8a874f8b6e808cf5abb51a94015ca3dc3c68aa4f30ea", size = 8041617, upload-time = "2026-01-02T09:11:42.721Z" }, - { url = "https://files.pythonhosted.org/packages/17/e8/538b24cb426ac0186e03f80f78bc8dc7246c667f58b540bdd57c71c9f79d/pillow-12.1.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e115c15e3bc727b1ca3e641a909f77f8ca72a64fff150f666fcc85e57701c26c", size = 6346509, upload-time = "2026-01-02T09:11:44.955Z" }, - { url = "https://files.pythonhosted.org/packages/01/9a/632e58ec89a32738cabfd9ec418f0e9898a2b4719afc581f07c04a05e3c9/pillow-12.1.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6741e6f3074a35e47c77b23a4e4f2d90db3ed905cb1c5e6e0d49bff2045632bc", size = 7038117, upload-time = "2026-01-02T09:11:46.736Z" }, - { url = 
"https://files.pythonhosted.org/packages/c7/a2/d40308cf86eada842ca1f3ffa45d0ca0df7e4ab33c83f81e73f5eaed136d/pillow-12.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:935b9d1aed48fcfb3f838caac506f38e29621b44ccc4f8a64d575cb1b2a88644", size = 6460151, upload-time = "2026-01-02T09:11:48.625Z" }, - { url = "https://files.pythonhosted.org/packages/f1/88/f5b058ad6453a085c5266660a1417bdad590199da1b32fb4efcff9d33b05/pillow-12.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5fee4c04aad8932da9f8f710af2c1a15a83582cfb884152a9caa79d4efcdbf9c", size = 7164534, upload-time = "2026-01-02T09:11:50.445Z" }, - { url = "https://files.pythonhosted.org/packages/19/ce/c17334caea1db789163b5d855a5735e47995b0b5dc8745e9a3605d5f24c0/pillow-12.1.0-cp313-cp313-win32.whl", hash = "sha256:a786bf667724d84aa29b5db1c61b7bfdde380202aaca12c3461afd6b71743171", size = 6332551, upload-time = "2026-01-02T09:11:52.234Z" }, - { url = "https://files.pythonhosted.org/packages/e5/07/74a9d941fa45c90a0d9465098fe1ec85de3e2afbdc15cc4766622d516056/pillow-12.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:461f9dfdafa394c59cd6d818bdfdbab4028b83b02caadaff0ffd433faf4c9a7a", size = 7040087, upload-time = "2026-01-02T09:11:54.822Z" }, - { url = "https://files.pythonhosted.org/packages/88/09/c99950c075a0e9053d8e880595926302575bc742b1b47fe1bbcc8d388d50/pillow-12.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:9212d6b86917a2300669511ed094a9406888362e085f2431a7da985a6b124f45", size = 2452470, upload-time = "2026-01-02T09:11:56.522Z" }, - { url = "https://files.pythonhosted.org/packages/b5/ba/970b7d85ba01f348dee4d65412476321d40ee04dcb51cd3735b9dc94eb58/pillow-12.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:00162e9ca6d22b7c3ee8e61faa3c3253cd19b6a37f126cad04f2f88b306f557d", size = 5264816, upload-time = "2026-01-02T09:11:58.227Z" }, - { url = 
"https://files.pythonhosted.org/packages/10/60/650f2fb55fdba7a510d836202aa52f0baac633e50ab1cf18415d332188fb/pillow-12.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:7d6daa89a00b58c37cb1747ec9fb7ac3bc5ffd5949f5888657dfddde6d1312e0", size = 4660472, upload-time = "2026-01-02T09:12:00.798Z" }, - { url = "https://files.pythonhosted.org/packages/2b/c0/5273a99478956a099d533c4f46cbaa19fd69d606624f4334b85e50987a08/pillow-12.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e2479c7f02f9d505682dc47df8c0ea1fc5e264c4d1629a5d63fe3e2334b89554", size = 6268974, upload-time = "2026-01-02T09:12:02.572Z" }, - { url = "https://files.pythonhosted.org/packages/b4/26/0bf714bc2e73d5267887d47931d53c4ceeceea6978148ed2ab2a4e6463c4/pillow-12.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f188d580bd870cda1e15183790d1cc2fa78f666e76077d103edf048eed9c356e", size = 8073070, upload-time = "2026-01-02T09:12:04.75Z" }, - { url = "https://files.pythonhosted.org/packages/43/cf/1ea826200de111a9d65724c54f927f3111dc5ae297f294b370a670c17786/pillow-12.1.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0fde7ec5538ab5095cc02df38ee99b0443ff0e1c847a045554cf5f9af1f4aa82", size = 6380176, upload-time = "2026-01-02T09:12:06.626Z" }, - { url = "https://files.pythonhosted.org/packages/03/e0/7938dd2b2013373fd85d96e0f38d62b7a5a262af21ac274250c7ca7847c9/pillow-12.1.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ed07dca4a8464bada6139ab38f5382f83e5f111698caf3191cb8dbf27d908b4", size = 7067061, upload-time = "2026-01-02T09:12:08.624Z" }, - { url = "https://files.pythonhosted.org/packages/86/ad/a2aa97d37272a929a98437a8c0ac37b3cf012f4f8721e1bd5154699b2518/pillow-12.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f45bd71d1fa5e5749587613037b172e0b3b23159d1c00ef2fc920da6f470e6f0", size = 6491824, upload-time = "2026-01-02T09:12:10.488Z" }, - { url = 
"https://files.pythonhosted.org/packages/a4/44/80e46611b288d51b115826f136fb3465653c28f491068a72d3da49b54cd4/pillow-12.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:277518bf4fe74aa91489e1b20577473b19ee70fb97c374aa50830b279f25841b", size = 7190911, upload-time = "2026-01-02T09:12:12.772Z" }, - { url = "https://files.pythonhosted.org/packages/86/77/eacc62356b4cf81abe99ff9dbc7402750044aed02cfd6a503f7c6fc11f3e/pillow-12.1.0-cp313-cp313t-win32.whl", hash = "sha256:7315f9137087c4e0ee73a761b163fc9aa3b19f5f606a7fc08d83fd3e4379af65", size = 6336445, upload-time = "2026-01-02T09:12:14.775Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3c/57d81d0b74d218706dafccb87a87ea44262c43eef98eb3b164fd000e0491/pillow-12.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:0ddedfaa8b5f0b4ffbc2fa87b556dc59f6bb4ecb14a53b33f9189713ae8053c0", size = 7045354, upload-time = "2026-01-02T09:12:16.599Z" }, - { url = "https://files.pythonhosted.org/packages/ac/82/8b9b97bba2e3576a340f93b044a3a3a09841170ab4c1eb0d5c93469fd32f/pillow-12.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:80941e6d573197a0c28f394753de529bb436b1ca990ed6e765cf42426abc39f8", size = 2454547, upload-time = "2026-01-02T09:12:18.704Z" }, - { url = "https://files.pythonhosted.org/packages/8c/87/bdf971d8bbcf80a348cc3bacfcb239f5882100fe80534b0ce67a784181d8/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:5cb7bc1966d031aec37ddb9dcf15c2da5b2e9f7cc3ca7c54473a20a927e1eb91", size = 4062533, upload-time = "2026-01-02T09:12:20.791Z" }, - { url = "https://files.pythonhosted.org/packages/ff/4f/5eb37a681c68d605eb7034c004875c81f86ec9ef51f5be4a63eadd58859a/pillow-12.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:97e9993d5ed946aba26baf9c1e8cf18adbab584b99f452ee72f7ee8acb882796", size = 4138546, upload-time = "2026-01-02T09:12:23.664Z" }, - { url = 
"https://files.pythonhosted.org/packages/11/6d/19a95acb2edbace40dcd582d077b991646b7083c41b98da4ed7555b59733/pillow-12.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:414b9a78e14ffeb98128863314e62c3f24b8a86081066625700b7985b3f529bd", size = 3601163, upload-time = "2026-01-02T09:12:26.338Z" }, - { url = "https://files.pythonhosted.org/packages/fc/36/2b8138e51cb42e4cc39c3297713455548be855a50558c3ac2beebdc251dd/pillow-12.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:e6bdb408f7c9dd2a5ff2b14a3b0bb6d4deb29fb9961e6eb3ae2031ae9a5cec13", size = 5266086, upload-time = "2026-01-02T09:12:28.782Z" }, - { url = "https://files.pythonhosted.org/packages/53/4b/649056e4d22e1caa90816bf99cef0884aed607ed38075bd75f091a607a38/pillow-12.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3413c2ae377550f5487991d444428f1a8ae92784aac79caa8b1e3b89b175f77e", size = 4657344, upload-time = "2026-01-02T09:12:31.117Z" }, - { url = "https://files.pythonhosted.org/packages/6c/6b/c5742cea0f1ade0cd61485dc3d81f05261fc2276f537fbdc00802de56779/pillow-12.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e5dcbe95016e88437ecf33544ba5db21ef1b8dd6e1b434a2cb2a3d605299e643", size = 6232114, upload-time = "2026-01-02T09:12:32.936Z" }, - { url = "https://files.pythonhosted.org/packages/bf/8f/9f521268ce22d63991601aafd3d48d5ff7280a246a1ef62d626d67b44064/pillow-12.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d0a7735df32ccbcc98b98a1ac785cc4b19b580be1bdf0aeb5c03223220ea09d5", size = 8042708, upload-time = "2026-01-02T09:12:34.78Z" }, - { url = "https://files.pythonhosted.org/packages/1a/eb/257f38542893f021502a1bbe0c2e883c90b5cff26cc33b1584a841a06d30/pillow-12.1.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c27407a2d1b96774cbc4a7594129cc027339fd800cd081e44497722ea1179de", size = 6347762, upload-time = "2026-01-02T09:12:36.748Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/5a/8ba375025701c09b309e8d5163c5a4ce0102fa86bbf8800eb0d7ac87bc51/pillow-12.1.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15c794d74303828eaa957ff8070846d0efe8c630901a1c753fdc63850e19ecd9", size = 7039265, upload-time = "2026-01-02T09:12:39.082Z" }, - { url = "https://files.pythonhosted.org/packages/cf/dc/cf5e4cdb3db533f539e88a7bbf9f190c64ab8a08a9bc7a4ccf55067872e4/pillow-12.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c990547452ee2800d8506c4150280757f88532f3de2a58e3022e9b179107862a", size = 6462341, upload-time = "2026-01-02T09:12:40.946Z" }, - { url = "https://files.pythonhosted.org/packages/d0/47/0291a25ac9550677e22eda48510cfc4fa4b2ef0396448b7fbdc0a6946309/pillow-12.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b63e13dd27da389ed9475b3d28510f0f954bca0041e8e551b2a4eb1eab56a39a", size = 7165395, upload-time = "2026-01-02T09:12:42.706Z" }, - { url = "https://files.pythonhosted.org/packages/4f/4c/e005a59393ec4d9416be06e6b45820403bb946a778e39ecec62f5b2b991e/pillow-12.1.0-cp314-cp314-win32.whl", hash = "sha256:1a949604f73eb07a8adab38c4fe50791f9919344398bdc8ac6b307f755fc7030", size = 6431413, upload-time = "2026-01-02T09:12:44.944Z" }, - { url = "https://files.pythonhosted.org/packages/1c/af/f23697f587ac5f9095d67e31b81c95c0249cd461a9798a061ed6709b09b5/pillow-12.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:4f9f6a650743f0ddee5593ac9e954ba1bdbc5e150bc066586d4f26127853ab94", size = 7176779, upload-time = "2026-01-02T09:12:46.727Z" }, - { url = "https://files.pythonhosted.org/packages/b3/36/6a51abf8599232f3e9afbd16d52829376a68909fe14efe29084445db4b73/pillow-12.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:808b99604f7873c800c4840f55ff389936ef1948e4e87645eaf3fccbc8477ac4", size = 2543105, upload-time = "2026-01-02T09:12:49.243Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/54/2e1dd20c8749ff225080d6ba465a0cab4387f5db0d1c5fb1439e2d99923f/pillow-12.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bc11908616c8a283cf7d664f77411a5ed2a02009b0097ff8abbba5e79128ccf2", size = 5268571, upload-time = "2026-01-02T09:12:51.11Z" }, - { url = "https://files.pythonhosted.org/packages/57/61/571163a5ef86ec0cf30d265ac2a70ae6fc9e28413d1dc94fa37fae6bda89/pillow-12.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:896866d2d436563fa2a43a9d72f417874f16b5545955c54a64941e87c1376c61", size = 4660426, upload-time = "2026-01-02T09:12:52.865Z" }, - { url = "https://files.pythonhosted.org/packages/5e/e1/53ee5163f794aef1bf84243f755ee6897a92c708505350dd1923f4afec48/pillow-12.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8e178e3e99d3c0ea8fc64b88447f7cac8ccf058af422a6cedc690d0eadd98c51", size = 6269908, upload-time = "2026-01-02T09:12:54.884Z" }, - { url = "https://files.pythonhosted.org/packages/bc/0b/b4b4106ff0ee1afa1dc599fde6ab230417f800279745124f6c50bcffed8e/pillow-12.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:079af2fb0c599c2ec144ba2c02766d1b55498e373b3ac64687e43849fbbef5bc", size = 8074733, upload-time = "2026-01-02T09:12:56.802Z" }, - { url = "https://files.pythonhosted.org/packages/19/9f/80b411cbac4a732439e629a26ad3ef11907a8c7fc5377b7602f04f6fe4e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bdec5e43377761c5dbca620efb69a77f6855c5a379e32ac5b158f54c84212b14", size = 6381431, upload-time = "2026-01-02T09:12:58.823Z" }, - { url = "https://files.pythonhosted.org/packages/8f/b7/d65c45db463b66ecb6abc17c6ba6917a911202a07662247e1355ce1789e7/pillow-12.1.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:565c986f4b45c020f5421a4cea13ef294dde9509a8577f29b2fc5edc7587fff8", size = 7068529, upload-time = "2026-01-02T09:13:00.885Z" }, - { url = 
"https://files.pythonhosted.org/packages/50/96/dfd4cd726b4a45ae6e3c669fc9e49deb2241312605d33aba50499e9d9bd1/pillow-12.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:43aca0a55ce1eefc0aefa6253661cb54571857b1a7b2964bd8a1e3ef4b729924", size = 6492981, upload-time = "2026-01-02T09:13:03.314Z" }, - { url = "https://files.pythonhosted.org/packages/4d/1c/b5dc52cf713ae46033359c5ca920444f18a6359ce1020dd3e9c553ea5bc6/pillow-12.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0deedf2ea233722476b3a81e8cdfbad786f7adbed5d848469fa59fe52396e4ef", size = 7191878, upload-time = "2026-01-02T09:13:05.276Z" }, - { url = "https://files.pythonhosted.org/packages/53/26/c4188248bd5edaf543864fe4834aebe9c9cb4968b6f573ce014cc42d0720/pillow-12.1.0-cp314-cp314t-win32.whl", hash = "sha256:b17fbdbe01c196e7e159aacb889e091f28e61020a8abeac07b68079b6e626988", size = 6438703, upload-time = "2026-01-02T09:13:07.491Z" }, - { url = "https://files.pythonhosted.org/packages/b8/0e/69ed296de8ea05cb03ee139cee600f424ca166e632567b2d66727f08c7ed/pillow-12.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27b9baecb428899db6c0de572d6d305cfaf38ca1596b5c0542a5182e3e74e8c6", size = 7182927, upload-time = "2026-01-02T09:13:09.841Z" }, - { url = "https://files.pythonhosted.org/packages/fc/f5/68334c015eed9b5cff77814258717dec591ded209ab5b6fb70e2ae873d1d/pillow-12.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f61333d817698bdcdd0f9d7793e365ac3d2a21c1f1eb02b32ad6aefb8d8ea831", size = 2545104, upload-time = "2026-01-02T09:13:12.068Z" }, - { url = "https://files.pythonhosted.org/packages/8b/bc/224b1d98cffd7164b14707c91aac83c07b047fbd8f58eba4066a3e53746a/pillow-12.1.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ca94b6aac0d7af2a10ba08c0f888b3d5114439b6b3ef39968378723622fed377", size = 5228605, upload-time = "2026-01-02T09:13:14.084Z" }, - { url = 
"https://files.pythonhosted.org/packages/0c/ca/49ca7769c4550107de049ed85208240ba0f330b3f2e316f24534795702ce/pillow-12.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:351889afef0f485b84078ea40fe33727a0492b9af3904661b0abbafee0355b72", size = 4622245, upload-time = "2026-01-02T09:13:15.964Z" }, - { url = "https://files.pythonhosted.org/packages/73/48/fac807ce82e5955bcc2718642b94b1bd22a82a6d452aea31cbb678cddf12/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb0984b30e973f7e2884362b7d23d0a348c7143ee559f38ef3eaab640144204c", size = 5247593, upload-time = "2026-01-02T09:13:17.913Z" }, - { url = "https://files.pythonhosted.org/packages/d2/95/3e0742fe358c4664aed4fd05d5f5373dcdad0b27af52aa0972568541e3f4/pillow-12.1.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:84cabc7095dd535ca934d57e9ce2a72ffd216e435a84acb06b2277b1de2689bd", size = 6989008, upload-time = "2026-01-02T09:13:20.083Z" }, - { url = "https://files.pythonhosted.org/packages/5a/74/fe2ac378e4e202e56d50540d92e1ef4ff34ed687f3c60f6a121bcf99437e/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53d8b764726d3af1a138dd353116f774e3862ec7e3794e0c8781e30db0f35dfc", size = 5313824, upload-time = "2026-01-02T09:13:22.405Z" }, - { url = "https://files.pythonhosted.org/packages/f3/77/2a60dee1adee4e2655ac328dd05c02a955c1cd683b9f1b82ec3feb44727c/pillow-12.1.0-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5da841d81b1a05ef940a8567da92decaa15bc4d7dedb540a8c219ad83d91808a", size = 5963278, upload-time = "2026-01-02T09:13:24.706Z" }, - { url = "https://files.pythonhosted.org/packages/2d/71/64e9b1c7f04ae0027f788a248e6297d7fcc29571371fe7d45495a78172c0/pillow-12.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:75af0b4c229ac519b155028fa1be632d812a519abba9b46b20e50c6caa184f19", size = 7029809, upload-time = "2026-01-02T09:13:26.541Z" }, +sdist = 
{ url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 46980264, upload-time = "2026-02-11T04:23:07.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/30/5bd3d794762481f8c8ae9c80e7b76ecea73b916959eb587521358ef0b2f9/pillow-12.1.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f1625b72740fdda5d77b4def688eb8fd6490975d06b909fd19f13f391e077e0", size = 5304099, upload-time = "2026-02-11T04:20:06.13Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c1/aab9e8f3eeb4490180e357955e15c2ef74b31f64790ff356c06fb6cf6d84/pillow-12.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:178aa072084bd88ec759052feca8e56cbb14a60b39322b99a049e58090479713", size = 4657880, upload-time = "2026-02-11T04:20:09.291Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0a/9879e30d56815ad529d3985aeff5af4964202425c27261a6ada10f7cbf53/pillow-12.1.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b66e95d05ba806247aaa1561f080abc7975daf715c30780ff92a20e4ec546e1b", size = 6222587, upload-time = "2026-02-11T04:20:10.82Z" }, + { url = "https://files.pythonhosted.org/packages/5a/5f/a1b72ff7139e4f89014e8d451442c74a774d5c43cd938fb0a9f878576b37/pillow-12.1.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89c7e895002bbe49cdc5426150377cbbc04767d7547ed145473f496dfa40408b", size = 8027678, upload-time = "2026-02-11T04:20:12.455Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c2/c7cb187dac79a3d22c3ebeae727abee01e077c8c7d930791dc592f335153/pillow-12.1.1-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a5cbdcddad0af3da87cb16b60d23648bc3b51967eb07223e9fed77a82b457c4", size = 6335777, upload-time = "2026-02-11T04:20:14.441Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/7b/f9b09a7804ec7336effb96c26d37c29d27225783dc1501b7d62dcef6ae25/pillow-12.1.1-cp310-cp310-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f51079765661884a486727f0729d29054242f74b46186026582b4e4769918e4", size = 7027140, upload-time = "2026-02-11T04:20:16.387Z" }, + { url = "https://files.pythonhosted.org/packages/98/b2/2fa3c391550bd421b10849d1a2144c44abcd966daadd2f7c12e19ea988c4/pillow-12.1.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:99c1506ea77c11531d75e3a412832a13a71c7ebc8192ab9e4b2e355555920e3e", size = 6449855, upload-time = "2026-02-11T04:20:18.554Z" }, + { url = "https://files.pythonhosted.org/packages/96/ff/9caf4b5b950c669263c39e96c78c0d74a342c71c4f43fd031bb5cb7ceac9/pillow-12.1.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36341d06738a9f66c8287cf8b876d24b18db9bd8740fa0672c74e259ad408cff", size = 7151329, upload-time = "2026-02-11T04:20:20.646Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f8/4b24841f582704da675ca535935bccb32b00a6da1226820845fac4a71136/pillow-12.1.1-cp310-cp310-win32.whl", hash = "sha256:6c52f062424c523d6c4db85518774cc3d50f5539dd6eed32b8f6229b26f24d40", size = 6325574, upload-time = "2026-02-11T04:20:22.43Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/9f6b01c0881d7036063aa6612ef04c0e2cad96be21325a1e92d0203f8e91/pillow-12.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6008de247150668a705a6338156efb92334113421ceecf7438a12c9a12dab23", size = 7032347, upload-time = "2026-02-11T04:20:23.932Z" }, + { url = "https://files.pythonhosted.org/packages/79/13/c7922edded3dcdaf10c59297540b72785620abc0538872c819915746757d/pillow-12.1.1-cp310-cp310-win_arm64.whl", hash = "sha256:1a9b0ee305220b392e1124a764ee4265bd063e54a751a6b62eff69992f457fa9", size = 2453457, upload-time = "2026-02-11T04:20:25.392Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/46/5da1ec4a5171ee7bf1a0efa064aba70ba3d6e0788ce3f5acd1375d23c8c0/pillow-12.1.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32", size = 5304084, upload-time = "2026-02-11T04:20:27.501Z" }, + { url = "https://files.pythonhosted.org/packages/78/93/a29e9bc02d1cf557a834da780ceccd54e02421627200696fcf805ebdc3fb/pillow-12.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38", size = 4657866, upload-time = "2026-02-11T04:20:29.827Z" }, + { url = "https://files.pythonhosted.org/packages/13/84/583a4558d492a179d31e4aae32eadce94b9acf49c0337c4ce0b70e0a01f2/pillow-12.1.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5", size = 6232148, upload-time = "2026-02-11T04:20:31.329Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e2/53c43334bbbb2d3b938978532fbda8e62bb6e0b23a26ce8592f36bcc4987/pillow-12.1.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090", size = 8038007, upload-time = "2026-02-11T04:20:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/b8/a6/3d0e79c8a9d58150dd98e199d7c1c56861027f3829a3a60b3c2784190180/pillow-12.1.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af", size = 6345418, upload-time = "2026-02-11T04:20:35.858Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c8/46dfeac5825e600579157eea177be43e2f7ff4a99da9d0d0a49533509ac5/pillow-12.1.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b", size = 7034590, upload-time = "2026-02-11T04:20:37.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/bf/e6f65d3db8a8bbfeaf9e13cc0417813f6319863a73de934f14b2229ada18/pillow-12.1.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5", size = 6458655, upload-time = "2026-02-11T04:20:39.496Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c2/66091f3f34a25894ca129362e510b956ef26f8fb67a0e6417bc5744e56f1/pillow-12.1.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d", size = 7159286, upload-time = "2026-02-11T04:20:41.139Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5a/24bc8eb526a22f957d0cec6243146744966d40857e3d8deb68f7902ca6c1/pillow-12.1.1-cp311-cp311-win32.whl", hash = "sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c", size = 6328663, upload-time = "2026-02-11T04:20:43.184Z" }, + { url = "https://files.pythonhosted.org/packages/31/03/bef822e4f2d8f9d7448c133d0a18185d3cce3e70472774fffefe8b0ed562/pillow-12.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563", size = 7031448, upload-time = "2026-02-11T04:20:44.696Z" }, + { url = "https://files.pythonhosted.org/packages/49/70/f76296f53610bd17b2e7d31728b8b7825e3ac3b5b3688b51f52eab7c0818/pillow-12.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80", size = 2453651, upload-time = "2026-02-11T04:20:46.243Z" }, + { url = "https://files.pythonhosted.org/packages/07/d3/8df65da0d4df36b094351dce696f2989bec731d4f10e743b1c5f4da4d3bf/pillow-12.1.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052", size = 5262803, upload-time = "2026-02-11T04:20:47.653Z" }, + { url = "https://files.pythonhosted.org/packages/d6/71/5026395b290ff404b836e636f51d7297e6c83beceaa87c592718747e670f/pillow-12.1.1-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984", size = 4657601, upload-time = "2026-02-11T04:20:49.328Z" }, + { url = "https://files.pythonhosted.org/packages/b1/2e/1001613d941c67442f745aff0f7cc66dd8df9a9c084eb497e6a543ee6f7e/pillow-12.1.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79", size = 6234995, upload-time = "2026-02-11T04:20:51.032Z" }, + { url = "https://files.pythonhosted.org/packages/07/26/246ab11455b2549b9233dbd44d358d033a2f780fa9007b61a913c5b2d24e/pillow-12.1.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293", size = 8045012, upload-time = "2026-02-11T04:20:52.882Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8b/07587069c27be7535ac1fe33874e32de118fbd34e2a73b7f83436a88368c/pillow-12.1.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397", size = 6349638, upload-time = "2026-02-11T04:20:54.444Z" }, + { url = "https://files.pythonhosted.org/packages/ff/79/6df7b2ee763d619cda2fb4fea498e5f79d984dae304d45a8999b80d6cf5c/pillow-12.1.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0", size = 7041540, upload-time = "2026-02-11T04:20:55.97Z" }, + { url = "https://files.pythonhosted.org/packages/2c/5e/2ba19e7e7236d7529f4d873bdaf317a318896bac289abebd4bb00ef247f0/pillow-12.1.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3", size = 6462613, upload-time = "2026-02-11T04:20:57.542Z" }, + { url = "https://files.pythonhosted.org/packages/03/03/31216ec124bb5c3dacd74ce8efff4cc7f52643653bad4825f8f08c697743/pillow-12.1.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35", size = 7166745, upload-time = "2026-02-11T04:20:59.196Z" }, + { url = "https://files.pythonhosted.org/packages/1f/e7/7c4552d80052337eb28653b617eafdef39adfb137c49dd7e831b8dc13bc5/pillow-12.1.1-cp312-cp312-win32.whl", hash = "sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a", size = 6328823, upload-time = "2026-02-11T04:21:01.385Z" }, + { url = "https://files.pythonhosted.org/packages/3d/17/688626d192d7261bbbf98846fc98995726bddc2c945344b65bec3a29d731/pillow-12.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6", size = 7033367, upload-time = "2026-02-11T04:21:03.536Z" }, + { url = "https://files.pythonhosted.org/packages/ed/fe/a0ef1f73f939b0eca03ee2c108d0043a87468664770612602c63266a43c4/pillow-12.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523", size = 2453811, upload-time = "2026-02-11T04:21:05.116Z" }, + { url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" }, + { url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" }, 
+ { url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" }, + { url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" }, + { url = "https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" }, + { url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" }, + { url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" }, + { url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" }, + { url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" }, + { url = "https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" }, + { url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" }, + { url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", 
hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" }, + { url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" }, + { url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" }, + { url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" }, + { url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" }, + { url = "https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" }, + { url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" }, + { url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" }, + { url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" 
}, + { url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" }, + { url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" }, + { url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" }, + { url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" }, + { url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" }, + { url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" }, + { url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" }, + { url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" }, + { url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", 
hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" }, + { url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" }, + { url = "https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" }, + { url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" }, + { url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" }, + { url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" }, + { url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" }, + { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, + { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/5d43209aa4cb58e0cc80127956ff1796a68b928e6324bbf06ef4db34367b/pillow-12.1.1-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f", size = 5228606, upload-time = "2026-02-11T04:22:52.106Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d5/3b005b4e4fda6698b371fa6c21b097d4707585d7db99e98d9b0b87ac612a/pillow-12.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9", size = 4622321, upload-time = "2026-02-11T04:22:53.827Z" }, + { url = "https://files.pythonhosted.org/packages/df/36/ed3ea2d594356fd8037e5a01f6156c74bc8d92dbb0fa60746cc96cabb6e8/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e", size = 5247579, upload-time = 
"2026-02-11T04:22:56.094Z" }, + { url = "https://files.pythonhosted.org/packages/54/9a/9cc3e029683cf6d20ae5085da0dafc63148e3252c2f13328e553aaa13cfb/pillow-12.1.1-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9", size = 6989094, upload-time = "2026-02-11T04:22:58.288Z" }, + { url = "https://files.pythonhosted.org/packages/00/98/fc53ab36da80b88df0967896b6c4b4cd948a0dc5aa40a754266aa3ae48b3/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3", size = 5313850, upload-time = "2026-02-11T04:23:00.554Z" }, + { url = "https://files.pythonhosted.org/packages/30/02/00fa585abfd9fe9d73e5f6e554dc36cc2b842898cbfc46d70353dae227f8/pillow-12.1.1-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735", size = 5963343, upload-time = "2026-02-11T04:23:02.934Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/c56ce33ca856e358d27fda9676c055395abddb82c35ac0f593877ed4562e/pillow-12.1.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e", size = 7029880, upload-time = "2026-02-11T04:23:04.783Z" }, ] [[package]] @@ -3861,7 +3875,7 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.5.1" +version = "4.7.0" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -3878,9 +3892,9 @@ resolution-markers = [ "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", "python_full_version == '3.10.*'", ] -sdist = { url = "https://files.pythonhosted.org/packages/cf/86/0248f086a84f01b37aaec0fa567b397df1a119f73c16f6c7a9aac73ea309/platformdirs-4.5.1.tar.gz", hash = 
"sha256:61d5cdcc6065745cdd94f0f878977f8de9437be93de97c1c12f853c9c0cdcbda", size = 21715, upload-time = "2025-12-05T13:52:58.638Z" } +sdist = { url = "https://files.pythonhosted.org/packages/71/25/ccd8e88fcd16a4eb6343a8b4b9635e6f3928a7ebcd82822a14d20e3ca29f/platformdirs-4.7.0.tar.gz", hash = "sha256:fd1a5f8599c85d49b9ac7d6e450bc2f1aaf4a23f1fe86d09952fe20ad365cf36", size = 23118, upload-time = "2026-02-12T22:21:53.764Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/cb/28/3bfe2fa5a7b9c46fe7e13c97bda14c895fb10fa2ebf1d0abb90e0cea7ee1/platformdirs-4.5.1-py3-none-any.whl", hash = "sha256:d03afa3963c806a9bed9d5125c8f4cb2fdaf74a55ab60e5d59b3fde758104d31", size = 18731, upload-time = "2025-12-05T13:52:56.823Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e3/1eddccb2c39ecfbe09b3add42a04abcc3fa5b468aa4224998ffb8a7e9c8f/platformdirs-4.7.0-py3-none-any.whl", hash = "sha256:1ed8db354e344c5bb6039cd727f096af975194b508e37177719d562b2b540ee6", size = 18983, upload-time = "2026-02-12T22:21:52.237Z" }, ] [[package]] @@ -3915,7 +3929,7 @@ wheels = [ [[package]] name = "posthog" -version = "7.8.3" +version = "7.8.6" source = { registry = "https://pypi.org/simple" } resolution-markers = [ "python_full_version >= '3.14' and sys_platform == 'win32'", @@ -3940,9 +3954,9 @@ dependencies = [ { name = "six", marker = "python_full_version >= '3.10'" }, { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/ad/2f116cd9b83dc83ece4328a4efe0bcb80e5c2993837f89a788467d261da8/posthog-7.8.3.tar.gz", hash = "sha256:2b85e818bf818ac2768a890b772b7c12d4f909797226acd9327d66a319dbcf83", size = 167083, upload-time = "2026-02-06T13:16:22.938Z" } +sdist = { url = "https://files.pythonhosted.org/packages/21/c9/a7c67c039f23f16a0b87d17561ba2a1c863b01f054a226c92437c539a7b6/posthog-7.8.6.tar.gz", hash = "sha256:6f67e18b5f19bf20d7ef2e1a80fa1ad879a5cd309ca13cfb300f45a8105968c4", size = 169304, upload-time = 
"2026-02-11T13:59:42.558Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e5/5a4b060cbb9aa9defb8bfd55d15899b3146fece14147f4d66be80e81955a/posthog-7.8.3-py3-none-any.whl", hash = "sha256:1840796e4f7e14dd91ec5fdeb939712c3383fe9e758cfcdeb0317d8f30f7b901", size = 192528, upload-time = "2026-02-06T13:16:21.385Z" }, + { url = "https://files.pythonhosted.org/packages/56/c7/41664398a838f52ddfc89141e4c38b88eaa01b9e9a269c5ac184bd8586c6/posthog-7.8.6-py3-none-any.whl", hash = "sha256:21809f73e8e8f09d2bc273b09582f1a9f997b66f51fc626ef5bd3c5bdffd8bcd", size = 194801, upload-time = "2026-02-11T13:59:41.26Z" }, ] [[package]] @@ -5011,27 +5025,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" }, - { url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" }, - { url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" }, - { url = 
"https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" }, - { url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" }, - { url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" }, - { url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" }, - { url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" }, - { url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" }, - { url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" }, - { url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" }, - { url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" }, - { url = "https://files.pythonhosted.org/packages/6f/b0/69adf22f4e24f3677208adb715c578266842e6e6a3cc77483f48dd999ede/ruff-0.15.0-py3-none-win32.whl", hash = "sha256:238a717ef803e501b6d51e0bdd0d2c6e8513fe9eec14002445134d3907cd46c3", size = 10465945, upload-time = "2026-02-03T17:53:12.591Z" }, - { url = "https://files.pythonhosted.org/packages/51/ad/f813b6e2c97e9b4598be25e94a9147b9af7e60523b0cb5d94d307c15229d/ruff-0.15.0-py3-none-win_amd64.whl", hash = 
"sha256:dd5e4d3301dc01de614da3cdffc33d4b1b96fb89e45721f1598e5532ccf78b18", size = 11564657, upload-time = "2026-02-03T17:52:51.893Z" }, - { url = "https://files.pythonhosted.org/packages/f6/b0/2d823f6e77ebe560f4e397d078487e8d52c1516b331e3521bc75db4272ca/ruff-0.15.0-py3-none-win_arm64.whl", hash = "sha256:c480d632cc0ca3f0727acac8b7d053542d9e114a462a145d0b00e7cd658c515a", size = 10865753, upload-time = "2026-02-03T17:53:03.014Z" }, +version = "0.15.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/dc/4e6ac71b511b141cf626357a3946679abeba4cf67bc7cc5a17920f31e10d/ruff-0.15.1.tar.gz", hash = "sha256:c590fe13fb57c97141ae975c03a1aedb3d3156030cabd740d6ff0b0d601e203f", size = 4540855, upload-time = "2026-02-12T23:09:09.998Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/bf/e6e4324238c17f9d9120a9d60aa99a7daaa21204c07fcd84e2ef03bb5fd1/ruff-0.15.1-py3-none-linux_armv6l.whl", hash = "sha256:b101ed7cf4615bda6ffe65bdb59f964e9f4a0d3f85cbf0e54f0ab76d7b90228a", size = 10367819, upload-time = "2026-02-12T23:09:03.598Z" }, + { url = "https://files.pythonhosted.org/packages/b3/ea/c8f89d32e7912269d38c58f3649e453ac32c528f93bb7f4219258be2e7ed/ruff-0.15.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:939c995e9277e63ea632cc8d3fae17aa758526f49a9a850d2e7e758bfef46602", size = 10798618, upload-time = "2026-02-12T23:09:22.928Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0f/1d0d88bc862624247d82c20c10d4c0f6bb2f346559d8af281674cf327f15/ruff-0.15.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1d83466455fdefe60b8d9c8df81d3c1bbb2115cede53549d3b522ce2bc703899", size = 10148518, upload-time = "2026-02-12T23:08:58.339Z" }, + { url = "https://files.pythonhosted.org/packages/f5/c8/291c49cefaa4a9248e986256df2ade7add79388fe179e0691be06fae6f37/ruff-0.15.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9457e3c3291024866222b96108ab2d8265b477e5b1534c7ddb1810904858d16", size = 
10518811, upload-time = "2026-02-12T23:09:31.865Z" }, + { url = "https://files.pythonhosted.org/packages/c3/1a/f5707440e5ae43ffa5365cac8bbb91e9665f4a883f560893829cf16a606b/ruff-0.15.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:92c92b003e9d4f7fbd33b1867bb15a1b785b1735069108dfc23821ba045b29bc", size = 10196169, upload-time = "2026-02-12T23:09:17.306Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ff/26ddc8c4da04c8fd3ee65a89c9fb99eaa5c30394269d424461467be2271f/ruff-0.15.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fe5c41ab43e3a06778844c586251eb5a510f67125427625f9eb2b9526535779", size = 10990491, upload-time = "2026-02-12T23:09:25.503Z" }, + { url = "https://files.pythonhosted.org/packages/fc/00/50920cb385b89413f7cdb4bb9bc8fc59c1b0f30028d8bccc294189a54955/ruff-0.15.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66a6dd6df4d80dc382c6484f8ce1bcceb55c32e9f27a8b94c32f6c7331bf14fb", size = 11843280, upload-time = "2026-02-12T23:09:19.88Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6d/2f5cad8380caf5632a15460c323ae326f1e1a2b5b90a6ee7519017a017ca/ruff-0.15.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a4a42cbb8af0bda9bcd7606b064d7c0bc311a88d141d02f78920be6acb5aa83", size = 11274336, upload-time = "2026-02-12T23:09:14.907Z" }, + { url = "https://files.pythonhosted.org/packages/a3/1d/5f56cae1d6c40b8a318513599b35ea4b075d7dc1cd1d04449578c29d1d75/ruff-0.15.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ab064052c31dddada35079901592dfba2e05f5b1e43af3954aafcbc1096a5b2", size = 11137288, upload-time = "2026-02-12T23:09:07.475Z" }, + { url = "https://files.pythonhosted.org/packages/cd/20/6f8d7d8f768c93b0382b33b9306b3b999918816da46537d5a61635514635/ruff-0.15.1-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5631c940fe9fe91f817a4c2ea4e81f47bee3ca4aa646134a24374f3c19ad9454", size = 11070681, upload-time = 
"2026-02-12T23:08:55.43Z" }, + { url = "https://files.pythonhosted.org/packages/9a/67/d640ac76069f64cdea59dba02af2e00b1fa30e2103c7f8d049c0cff4cafd/ruff-0.15.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:68138a4ba184b4691ccdc39f7795c66b3c68160c586519e7e8444cf5a53e1b4c", size = 10486401, upload-time = "2026-02-12T23:09:27.927Z" }, + { url = "https://files.pythonhosted.org/packages/65/3d/e1429f64a3ff89297497916b88c32a5cc88eeca7e9c787072d0e7f1d3e1e/ruff-0.15.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:518f9af03bfc33c03bdb4cb63fabc935341bb7f54af500f92ac309ecfbba6330", size = 10197452, upload-time = "2026-02-12T23:09:12.147Z" }, + { url = "https://files.pythonhosted.org/packages/78/83/e2c3bade17dad63bf1e1c2ffaf11490603b760be149e1419b07049b36ef2/ruff-0.15.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:da79f4d6a826caaea95de0237a67e33b81e6ec2e25fc7e1993a4015dffca7c61", size = 10693900, upload-time = "2026-02-12T23:09:34.418Z" }, + { url = "https://files.pythonhosted.org/packages/a1/27/fdc0e11a813e6338e0706e8b39bb7a1d61ea5b36873b351acee7e524a72a/ruff-0.15.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3dd86dccb83cd7d4dcfac303ffc277e6048600dfc22e38158afa208e8bf94a1f", size = 11227302, upload-time = "2026-02-12T23:09:36.536Z" }, + { url = "https://files.pythonhosted.org/packages/f6/58/ac864a75067dcbd3b95be5ab4eb2b601d7fbc3d3d736a27e391a4f92a5c1/ruff-0.15.1-py3-none-win32.whl", hash = "sha256:660975d9cb49b5d5278b12b03bb9951d554543a90b74ed5d366b20e2c57c2098", size = 10462555, upload-time = "2026-02-12T23:09:29.899Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5e/d4ccc8a27ecdb78116feac4935dfc39d1304536f4296168f91ed3ec00cd2/ruff-0.15.1-py3-none-win_amd64.whl", hash = "sha256:c820fef9dd5d4172a6570e5721704a96c6679b80cf7be41659ed439653f62336", size = 11599956, upload-time = "2026-02-12T23:09:01.157Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/07/5bda6a85b220c64c65686bc85bd0bbb23b29c62b3a9f9433fa55f17cda93/ruff-0.15.1-py3-none-win_arm64.whl", hash = "sha256:5ff7d5f0f88567850f45081fac8f4ec212be8d0b963e385c3f7d0d2eb4899416", size = 10874604, upload-time = "2026-02-12T23:09:05.515Z" }, ] [[package]] @@ -5341,7 +5355,7 @@ dependencies = [ { name = "numpy", version = "2.3.5", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, { name = "packaging" }, { name = "pillow", version = "11.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, - { name = "pillow", version = "12.1.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pillow", version = "12.1.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "protobuf" }, { name = "setuptools" }, { name = "tensorboard-data-server" }, @@ -5597,7 +5611,7 @@ resolution-markers = [ ] dependencies = [ { name = "cuda-bindings", marker = "python_full_version >= '3.10' and platform_machine == 'x86_64' and sys_platform == 'linux'" }, - { name = "filelock", version = "3.20.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "filelock", version = "3.21.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "fsspec", version = "2026.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "jinja2", marker = "python_full_version >= '3.10'" }, { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, @@ -5623,10 +5637,10 @@ dependencies = [ { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, ] wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e3/ea/304cf7afb744aa626fa9855245526484ee55aba610d9973a0521c552a843/torch-2.10.0-1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:c37fc46eedd9175f9c81814cc47308f1b42cfe4987e532d4b423d23852f2bf63", size = 79411450, upload-time = "2026-02-06T17:37:35.75Z" }, - { url = "https://files.pythonhosted.org/packages/25/d8/9e6b8e7df981a1e3ea3907fd5a74673e791da483e8c307f0b6ff012626d0/torch-2.10.0-1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:f699f31a236a677b3118bc0a3ef3d89c0c29b5ec0b20f4c4bf0b110378487464", size = 79423460, upload-time = "2026-02-06T17:37:39.657Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2f/0b295dd8d199ef71e6f176f576473d645d41357b7b8aa978cc6b042575df/torch-2.10.0-1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:6abb224c2b6e9e27b592a1c0015c33a504b00a0e0938f1499f7f514e9b7bfb5c", size = 79498197, upload-time = "2026-02-06T17:37:27.627Z" }, - { url = "https://files.pythonhosted.org/packages/a4/1b/af5fccb50c341bd69dc016769503cb0857c1423fbe9343410dfeb65240f2/torch-2.10.0-1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:7350f6652dfd761f11f9ecb590bfe95b573e2961f7a242eccb3c8e78348d26fe", size = 79498248, upload-time = "2026-02-06T17:37:31.982Z" }, + { url = "https://files.pythonhosted.org/packages/5b/30/bfebdd8ec77db9a79775121789992d6b3b75ee5494971294d7b4b7c999bc/torch-2.10.0-2-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:2b980edd8d7c0a68c4e951ee1856334a43193f98730d97408fbd148c1a933313", size = 79411457, upload-time = "2026-02-10T21:44:59.189Z" }, + { url = "https://files.pythonhosted.org/packages/0f/8b/4b61d6e13f7108f36910df9ab4b58fd389cc2520d54d81b88660804aad99/torch-2.10.0-2-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:418997cb02d0a0f1497cf6a09f63166f9f5df9f3e16c8a716ab76a72127c714f", size = 79423467, upload-time = "2026-02-10T21:44:48.711Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/54/a2ba279afcca44bbd320d4e73675b282fcee3d81400ea1b53934efca6462/torch-2.10.0-2-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:13ec4add8c3faaed8d13e0574f5cd4a323c11655546f91fbe6afa77b57423574", size = 79498202, upload-time = "2026-02-10T21:44:52.603Z" }, + { url = "https://files.pythonhosted.org/packages/ec/23/2c9fe0c9c27f7f6cb865abcea8a4568f29f00acaeadfc6a37f6801f84cb4/torch-2.10.0-2-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:e521c9f030a3774ed770a9c011751fb47c4d12029a3d6522116e48431f2ff89e", size = 79498254, upload-time = "2026-02-10T21:44:44.095Z" }, { url = "https://files.pythonhosted.org/packages/0c/1a/c61f36cfd446170ec27b3a4984f072fd06dab6b5d7ce27e11adb35d6c838/torch-2.10.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5276fa790a666ee8becaffff8acb711922252521b28fbce5db7db5cf9cb2026d", size = 145992962, upload-time = "2026-01-21T16:24:14.04Z" }, { url = "https://files.pythonhosted.org/packages/b5/60/6662535354191e2d1555296045b63e4279e5a9dbad49acf55a5d38655a39/torch-2.10.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:aaf663927bcd490ae971469a624c322202a2a1e68936eb952535ca4cd3b90444", size = 915599237, upload-time = "2026-01-21T16:23:25.497Z" }, { url = "https://files.pythonhosted.org/packages/40/b8/66bbe96f0d79be2b5c697b2e0b187ed792a15c6c4b8904613454651db848/torch-2.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:a4be6a2a190b32ff5c8002a0977a25ea60e64f7ba46b1be37093c141d9c49aeb", size = 113720931, upload-time = "2026-01-21T16:24:23.743Z" }, @@ -5900,26 +5914,26 @@ wheels = [ [[package]] name = "ty" -version = "0.0.15" +version = "0.0.17" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4e/25/257602d316b9333089b688a7a11b33ebc660b74e8dacf400dc3dfdea1594/ty-0.0.15.tar.gz", hash = "sha256:4f9a5b8df208c62dba56e91b93bed8b5bb714839691b8cff16d12c983bfa1174", size = 5101936, upload-time = "2026-02-05T01:06:34.922Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/66/c3/41ae6346443eedb65b96761abfab890a48ce2aa5a8a27af69c5c5d99064d/ty-0.0.17.tar.gz", hash = "sha256:847ed6c120913e280bf9b54d8eaa7a1049708acb8824ad234e71498e8ad09f97", size = 5167209, upload-time = "2026-02-13T13:26:36.835Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/c5/35626e732b79bf0e6213de9f79aff59b5f247c0a1e3ce0d93e675ab9b728/ty-0.0.15-py3-none-linux_armv6l.whl", hash = "sha256:68e092458516c61512dac541cde0a5e4e5842df00b4e81881ead8f745ddec794", size = 10138374, upload-time = "2026-02-05T01:07:03.804Z" }, - { url = "https://files.pythonhosted.org/packages/d5/8a/48fd81664604848f79d03879b3ca3633762d457a069b07e09fb1b87edd6e/ty-0.0.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:79f2e75289eae3cece94c51118b730211af4ba5762906f52a878041b67e54959", size = 9947858, upload-time = "2026-02-05T01:06:47.453Z" }, - { url = "https://files.pythonhosted.org/packages/b6/85/c1ac8e97bcd930946f4c94db85b675561d590b4e72703bf3733419fc3973/ty-0.0.15-py3-none-macosx_11_0_arm64.whl", hash = "sha256:112a7b26e63e48cc72c8c5b03227d1db280cfa57a45f2df0e264c3a016aa8c3c", size = 9443220, upload-time = "2026-02-05T01:06:44.98Z" }, - { url = "https://files.pythonhosted.org/packages/3c/d9/244bc02599d950f7a4298fbc0c1b25cc808646b9577bdf7a83470b2d1cec/ty-0.0.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71f62a2644972975a657d9dc867bf901235cde51e8d24c20311067e7afd44a56", size = 9949976, upload-time = "2026-02-05T01:07:01.515Z" }, - { url = "https://files.pythonhosted.org/packages/7e/ab/3a0daad66798c91a33867a3ececf17d314ac65d4ae2bbbd28cbfde94da63/ty-0.0.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e48b42be2d257317c85b78559233273b655dd636fc61e7e1d69abd90fd3cba4", size = 9965918, upload-time = "2026-02-05T01:06:54.283Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/4e/e62b01338f653059a7c0cd09d1a326e9a9eedc351a0f0de9db0601658c3d/ty-0.0.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27dd5b52a421e6871c5bfe9841160331b60866ed2040250cb161886478ab3e4f", size = 10424943, upload-time = "2026-02-05T01:07:08.777Z" }, - { url = "https://files.pythonhosted.org/packages/65/b5/7aa06655ce69c0d4f3e845d2d85e79c12994b6d84c71699cfb437e0bc8cf/ty-0.0.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76b85c9ec2219e11c358a7db8e21b7e5c6674a1fb9b6f633836949de98d12286", size = 10964692, upload-time = "2026-02-05T01:06:37.103Z" }, - { url = "https://files.pythonhosted.org/packages/13/04/36fdfe1f3c908b471e246e37ce3d011175584c26d3853e6c5d9a0364564c/ty-0.0.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9e8204c61d8ede4f21f2975dce74efdb80fafb2fae1915c666cceb33ea3c90b", size = 10692225, upload-time = "2026-02-05T01:06:49.714Z" }, - { url = "https://files.pythonhosted.org/packages/13/41/5bf882649bd8b64ded5fbce7fb8d77fb3b868de1a3b1a6c4796402b47308/ty-0.0.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af87c3be7c944bb4d6609d6c63e4594944b0028c7bd490a525a82b88fe010d6d", size = 10516776, upload-time = "2026-02-05T01:06:52.047Z" }, - { url = "https://files.pythonhosted.org/packages/56/75/66852d7e004f859839c17ffe1d16513c1e7cc04bcc810edb80ca022a9124/ty-0.0.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50dccf7398505e5966847d366c9e4c650b8c225411c2a68c32040a63b9521eea", size = 9928828, upload-time = "2026-02-05T01:06:56.647Z" }, - { url = "https://files.pythonhosted.org/packages/65/72/96bc16c7b337a3ef358fd227b3c8ef0c77405f3bfbbfb59ee5915f0d9d71/ty-0.0.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:bd797b8f231a4f4715110259ad1ad5340a87b802307f3e06d92bfb37b858a8f3", size = 9978960, upload-time = "2026-02-05T01:06:29.567Z" }, - { url = 
"https://files.pythonhosted.org/packages/a0/18/d2e316a35b626de2227f832cd36d21205e4f5d96fd036a8af84c72ecec1b/ty-0.0.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9deb7f20e18b25440a9aa4884f934ba5628ef456dbde91819d5af1a73da48af3", size = 10135903, upload-time = "2026-02-05T01:06:59.256Z" }, - { url = "https://files.pythonhosted.org/packages/02/d3/b617a79c9dad10c888d7c15cd78859e0160b8772273637b9c4241a049491/ty-0.0.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:7b31b3de031255b90a5f4d9cb3d050feae246067c87130e5a6861a8061c71754", size = 10615879, upload-time = "2026-02-05T01:07:06.661Z" }, - { url = "https://files.pythonhosted.org/packages/fb/b0/2652a73c71c77296a6343217063f05745da60c67b7e8a8e25f2064167fce/ty-0.0.15-py3-none-win32.whl", hash = "sha256:9362c528ceb62c89d65c216336d28d500bc9f4c10418413f63ebc16886e16cc1", size = 9578058, upload-time = "2026-02-05T01:06:42.928Z" }, - { url = "https://files.pythonhosted.org/packages/84/6e/08a4aedebd2a6ce2784b5bc3760e43d1861f1a184734a78215c2d397c1df/ty-0.0.15-py3-none-win_amd64.whl", hash = "sha256:4db040695ae67c5524f59cb8179a8fa277112e69042d7dfdac862caa7e3b0d9c", size = 10457112, upload-time = "2026-02-05T01:06:39.885Z" }, - { url = "https://files.pythonhosted.org/packages/b3/be/1991f2bc12847ae2d4f1e3ac5dcff8bb7bc1261390645c0755bb55616355/ty-0.0.15-py3-none-win_arm64.whl", hash = "sha256:e5a98d4119e77d6136461e16ae505f8f8069002874ab073de03fbcb1a5e8bf25", size = 9937490, upload-time = "2026-02-05T01:06:32.388Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/0ef15c22a1c54b0f728ceff3f62d478dbf8b0dcf8ff7b80b954f79584f3e/ty-0.0.17-py3-none-linux_armv6l.whl", hash = "sha256:64a9a16555cc8867d35c2647c2f1afbd3cae55f68fd95283a574d1bb04fe93e0", size = 10192793, upload-time = "2026-02-13T13:27:13.943Z" }, + { url = "https://files.pythonhosted.org/packages/0f/2c/f4c322d9cded56edc016b1092c14b95cf58c8a33b4787316ea752bb9418e/ty-0.0.17-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:eb2dbd8acd5c5a55f4af0d479523e7c7265a88542efe73ed3d696eb1ba7b6454", size = 10051977, upload-time = "2026-02-13T13:26:57.741Z" }, + { url = "https://files.pythonhosted.org/packages/4c/a5/43746c1ff81e784f5fc303afc61fe5bcd85d0fcf3ef65cb2cef78c7486c7/ty-0.0.17-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f18f5fd927bc628deb9ea2df40f06b5f79c5ccf355db732025a3e8e7152801f6", size = 9564639, upload-time = "2026-02-13T13:26:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/d6/b8/280b04e14a9c0474af574f929fba2398b5e1c123c1e7735893b4cd73d13c/ty-0.0.17-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5383814d1d7a5cc53b3b07661856bab04bb2aac7a677c8d33c55169acdaa83df", size = 10061204, upload-time = "2026-02-13T13:27:00.152Z" }, + { url = "https://files.pythonhosted.org/packages/2a/d7/493e1607d8dfe48288d8a768a2adc38ee27ef50e57f0af41ff273987cda0/ty-0.0.17-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c20423b8744b484f93e7bf2ef8a9724bca2657873593f9f41d08bd9f83444c9", size = 10013116, upload-time = "2026-02-13T13:26:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/80/ef/22f3ed401520afac90dbdf1f9b8b7755d85b0d5c35c1cb35cf5bd11b59c2/ty-0.0.17-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6f5b1aba97db9af86517b911674b02f5bc310750485dc47603a105bd0e83ddd", size = 10533623, upload-time = "2026-02-13T13:26:31.449Z" }, + { url = "https://files.pythonhosted.org/packages/75/ce/744b15279a11ac7138832e3a55595706b4a8a209c9f878e3ab8e571d9032/ty-0.0.17-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:488bce1a9bea80b851a97cd34c4d2ffcd69593d6c3f54a72ae02e5c6e47f3d0c", size = 11069750, upload-time = "2026-02-13T13:26:48.638Z" }, + { url = "https://files.pythonhosted.org/packages/f2/be/1133c91f15a0e00d466c24f80df486d630d95d1b2af63296941f7473812f/ty-0.0.17-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8df66b91ec84239420985ec215e7f7549bfda2ac036a3b3c065f119d1c06825a", size = 10870862, upload-time = "2026-02-13T13:26:54.715Z" }, + { url = "https://files.pythonhosted.org/packages/3e/4a/a2ed209ef215b62b2d3246e07e833081e07d913adf7e0448fc204be443d6/ty-0.0.17-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:002139e807c53002790dfefe6e2f45ab0e04012e76db3d7c8286f96ec121af8f", size = 10628118, upload-time = "2026-02-13T13:26:45.439Z" }, + { url = "https://files.pythonhosted.org/packages/b3/0c/87476004cb5228e9719b98afffad82c3ef1f84334bde8527bcacba7b18cb/ty-0.0.17-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6c4e01f05ce82e5d489ab3900ca0899a56c4ccb52659453780c83e5b19e2b64c", size = 10038185, upload-time = "2026-02-13T13:27:02.693Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/98f0b3ba9aef53c1f0305519536967a4aa793a69ed72677b0a625c5313ac/ty-0.0.17-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2b226dd1e99c0d2152d218c7e440150d1a47ce3c431871f0efa073bbf899e881", size = 10047644, upload-time = "2026-02-13T13:27:05.474Z" }, + { url = "https://files.pythonhosted.org/packages/93/e0/06737bb80aa1a9103b8651d2eb691a7e53f1ed54111152be25f4a02745db/ty-0.0.17-py3-none-musllinux_1_2_i686.whl", hash = "sha256:8b11f1da7859e0ad69e84b3c5ef9a7b055ceed376a432fad44231bdfc48061c2", size = 10231140, upload-time = "2026-02-13T13:27:10.844Z" }, + { url = "https://files.pythonhosted.org/packages/7c/79/e2a606bd8852383ba9abfdd578f4a227bd18504145381a10a5f886b4e751/ty-0.0.17-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:c04e196809ff570559054d3e011425fd7c04161529eb551b3625654e5f2434cb", size = 10718344, upload-time = "2026-02-13T13:26:51.66Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2d/2663984ac11de6d78f74432b8b14ba64d170b45194312852b7543cf7fd56/ty-0.0.17-py3-none-win32.whl", hash = "sha256:305b6ed150b2740d00a817b193373d21f0767e10f94ac47abfc3b2e5a5aec809", size = 9672932, upload-time = "2026-02-13T13:27:08.522Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/b5/39be78f30b31ee9f5a585969930c7248354db90494ff5e3d0756560fb731/ty-0.0.17-py3-none-win_amd64.whl", hash = "sha256:531828267527aee7a63e972f54e5eee21d9281b72baf18e5c2850c6b862add83", size = 10542138, upload-time = "2026-02-13T13:27:17.084Z" }, + { url = "https://files.pythonhosted.org/packages/40/b7/f875c729c5d0079640c75bad2c7e5d43edc90f16ba242f28a11966df8f65/ty-0.0.17-py3-none-win_arm64.whl", hash = "sha256:de9810234c0c8d75073457e10a84825b9cd72e6629826b7f01c7a0b266ae25b1", size = 10023068, upload-time = "2026-02-13T13:26:39.637Z" }, ] [[package]] @@ -5927,7 +5941,8 @@ name = "types-cffi" version = "1.17.0.20250915" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "types-setuptools" }, + { name = "types-setuptools", version = "81.0.0.20260209", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "types-setuptools", version = "82.0.0.20260210", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" } wheels = [ @@ -6078,11 +6093,39 @@ wheels = [ name = "types-setuptools" version = "81.0.0.20260209" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.9.2' and python_full_version < '3.10'", + "python_full_version < '3.9.2'", +] sdist = { url = "https://files.pythonhosted.org/packages/9e/57/f1f7992d6d7bded78d1f14dc23d59e87601920852bf10ece2325e49bacae/types_setuptools-81.0.0.20260209.tar.gz", hash = "sha256:2c2eb64499b41b672c387f6f45678a28d20a143a81b45a5c77acbfd4da0df3e1", size = 43201, upload-time = "2026-02-09T04:14:15.505Z" } wheels = [ { url = 
"https://files.pythonhosted.org/packages/3f/87/90c9143af95850bdaf7eb0d47c59e5c3a8b55fc5a49aca0eb7f98cb964d5/types_setuptools-81.0.0.20260209-py3-none-any.whl", hash = "sha256:4facf71e3f953f8f5ac0020cd6c1b5e493aaff0183e85830bc34870b6abf8475", size = 64194, upload-time = "2026-02-09T04:14:14.278Z" }, ] +[[package]] +name = "types-setuptools" +version = "82.0.0.20260210" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.14' and sys_platform == 'win32'", + "python_full_version >= '3.14' and sys_platform == 'emscripten'", + "python_full_version >= '3.14' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'win32'", + "python_full_version == '3.13.*' and sys_platform == 'emscripten'", + "python_full_version == '3.13.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'win32'", + "python_full_version == '3.11.*' and sys_platform == 'win32'", + "python_full_version == '3.12.*' and sys_platform == 'emscripten'", + "python_full_version == '3.11.*' and sys_platform == 'emscripten'", + "python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.11.*' and sys_platform != 'emscripten' and sys_platform != 'win32'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/90/796ac8c774a7f535084aacbaa6b7053d16fff5c630eff87c3ecff7896c37/types_setuptools-82.0.0.20260210.tar.gz", hash = "sha256:d9719fbbeb185254480ade1f25327c4654f8c00efda3fec36823379cebcdee58", size = 44768, upload-time = "2026-02-10T04:22:02.107Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/54/3489432b1d9bc713c9d8aa810296b8f5b0088403662959fb63a8acdbd4fc/types_setuptools-82.0.0.20260210-py3-none-any.whl", hash = "sha256:5124a7daf67f195c6054e0f00f1d97c69caad12fdcf9113eba33eff0bce8cd2b", size = 68433, 
upload-time = "2026-02-10T04:22:00.876Z" }, +] + [[package]] name = "types-six" version = "1.17.0.20251009" @@ -6177,27 +6220,27 @@ wheels = [ [[package]] name = "uv" -version = "0.10.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/09/36/f7fe4de0ad81234ac43938fe39c6ba84595c6b3a1868d786a4d7ad19e670/uv-0.10.0.tar.gz", hash = "sha256:ad01dd614a4bb8eb732da31ade41447026427397c5ad171cc98bd59579ef57ea", size = 3854103, upload-time = "2026-02-05T20:57:55.248Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/69/33fb64aee6ba138b1aaf957e20778e94a8c23732e41cdf68e6176aa2cf4e/uv-0.10.0-py3-none-linux_armv6l.whl", hash = "sha256:38dc0ccbda6377eb94095688c38e5001b8b40dfce14b9654949c1f0b6aa889df", size = 21984662, upload-time = "2026-02-05T20:57:19.076Z" }, - { url = "https://files.pythonhosted.org/packages/1a/5a/e3ff8a98cfbabc5c2d09bf304d2d9d2d7b2e7d60744241ac5ed762015e5c/uv-0.10.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a165582c1447691109d49d09dccb065d2a23852ff42bf77824ff169909aa85da", size = 21057249, upload-time = "2026-02-05T20:56:48.921Z" }, - { url = "https://files.pythonhosted.org/packages/ee/77/ec8f24f8d0f19c4fda0718d917bb78b9e6f02a4e1963b401f1c4f4614a54/uv-0.10.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:aefea608971f4f23ac3dac2006afb8eb2b2c1a2514f5fee1fac18e6c45fd70c4", size = 19827174, upload-time = "2026-02-05T20:57:10.581Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/09b38b93208906728f591f66185a425be3acdb97c448460137d0e6ecb30a/uv-0.10.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:d4b621bcc5d0139502789dc299bae8bf55356d07b95cb4e57e50e2afcc5f43e1", size = 21629522, upload-time = "2026-02-05T20:57:29.959Z" }, - { url = 
"https://files.pythonhosted.org/packages/89/f3/48d92c90e869331306979efaa29a44c3e7e8376ae343edc729df0d534dfb/uv-0.10.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = "sha256:b4bea728a6b64826d0091f95f28de06dd2dc786384b3d336a90297f123b4da0e", size = 21614812, upload-time = "2026-02-05T20:56:58.103Z" }, - { url = "https://files.pythonhosted.org/packages/ff/43/d0dedfcd4fe6e36cabdbeeb43425cd788604db9d48425e7b659d0f7ba112/uv-0.10.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc0cc2a4bcf9efbff9a57e2aed21c2d4b5a7ec2cc0096e0c33d7b53da17f6a3b", size = 21577072, upload-time = "2026-02-05T20:57:45.455Z" }, - { url = "https://files.pythonhosted.org/packages/c5/90/b8c9320fd8d86f356e37505a02aa2978ed28f9c63b59f15933e98bce97e5/uv-0.10.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:070ca2f0e8c67ca9a8f70ce403c956b7ed9d51e0c2e9dbbcc4efa5e0a2483f79", size = 22829664, upload-time = "2026-02-05T20:57:22.689Z" }, - { url = "https://files.pythonhosted.org/packages/56/9c/2c36b30b05c74b2af0e663e0e68f1d10b91a02a145e19b6774c121120c0b/uv-0.10.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8070c66149c06f9b39092a06f593a2241345ea2b1d42badc6f884c2cc089a1b1", size = 23705815, upload-time = "2026-02-05T20:57:37.604Z" }, - { url = "https://files.pythonhosted.org/packages/6c/a1/8c7fdb14ab72e26ca872e07306e496a6b8cf42353f9bf6251b015be7f535/uv-0.10.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3db1d5390b3a624de672d7b0f9c9d8197693f3b2d3d9c4d9e34686dcbc34197a", size = 22890313, upload-time = "2026-02-05T20:57:26.35Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f8/5c152350b1a6d0af019801f91a1bdeac854c33deb36275f6c934f0113cb5/uv-0.10.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82b46db718763bf742e986ebbc7a30ca33648957a0dcad34382970b992f5e900", size = 22769440, upload-time = "2026-02-05T20:56:53.859Z" }, - { url = 
"https://files.pythonhosted.org/packages/87/44/980e5399c6f4943b81754be9b7deb87bd56430e035c507984e17267d6a97/uv-0.10.0-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:eb95d28590edd73b8fdd80c27d699c45c52f8305170c6a90b830caf7f36670a4", size = 21695296, upload-time = "2026-02-05T20:57:06.732Z" }, - { url = "https://files.pythonhosted.org/packages/ae/e7/f44ad40275be2087b3910df4678ed62cf0c82eeb3375c4a35037a79747db/uv-0.10.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:5871eef5046a81df3f1636a3d2b4ccac749c23c7f4d3a4bae5496cb2876a1814", size = 22424291, upload-time = "2026-02-05T20:57:49.067Z" }, - { url = "https://files.pythonhosted.org/packages/c2/81/31c0c0a8673140756e71a1112bf8f0fcbb48a4cf4587a7937f5bd55256b6/uv-0.10.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:1af0ec125a07edb434dfaa98969f6184c1313dbec2860c3c5ce2d533b257132a", size = 22109479, upload-time = "2026-02-05T20:57:02.258Z" }, - { url = "https://files.pythonhosted.org/packages/d7/d1/2eb51bc233bad3d13ad64a0c280fd4d1ebebf5c2939b3900a46670fa2b91/uv-0.10.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:45909b9a734250da05b10101e0a067e01ffa2d94bbb07de4b501e3cee4ae0ff3", size = 22972087, upload-time = "2026-02-05T20:57:52.847Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f7/49987207b87b5c21e1f0e81c52892813e8cdf7e318b6373d6585773ebcdd/uv-0.10.0-py3-none-win32.whl", hash = "sha256:d5498851b1f07aa9c9af75578b2029a11743cb933d741f84dcbb43109a968c29", size = 20896746, upload-time = "2026-02-05T20:57:33.426Z" }, - { url = "https://files.pythonhosted.org/packages/80/b2/1370049596c6ff7fa1fe22fccf86a093982eac81017b8c8aff541d7263b2/uv-0.10.0-py3-none-win_amd64.whl", hash = "sha256:edd469425cd62bcd8c8cc0226c5f9043a94e37ed869da8268c80fdbfd3e5015e", size = 23433041, upload-time = "2026-02-05T20:57:41.41Z" }, - { url = "https://files.pythonhosted.org/packages/e3/76/1034c46244feafec2c274ac52b094f35d47c94cdb11461c24cf4be8a0c0c/uv-0.10.0-py3-none-win_arm64.whl", hash = 
"sha256:e90c509749b3422eebb54057434b7119892330d133b9690a88f8a6b0f3116be3", size = 21880261, upload-time = "2026-02-05T20:57:14.724Z" }, +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/9a/fe74aa0127cdc26141364e07abf25e5d69b4bf9788758fad9cfecca637aa/uv-0.10.2.tar.gz", hash = "sha256:b5016f038e191cc9ef00e17be802f44363d1b1cc3ef3454d1d76839a4246c10a", size = 3858864, upload-time = "2026-02-10T19:17:51.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/b5/aea88f66284d220be56ef748ed5e1bd11d819be14656a38631f4b55bfd48/uv-0.10.2-py3-none-linux_armv6l.whl", hash = "sha256:69e35aa3e91a245b015365e5e6ca383ecf72a07280c6d00c17c9173f2d3b68ab", size = 22215714, upload-time = "2026-02-10T19:17:34.281Z" }, + { url = "https://files.pythonhosted.org/packages/7f/72/947ba7737ae6cd50de61d268781b9e7717caa3b07e18238ffd547f9fc728/uv-0.10.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:0b7eef95c36fe92e7aac399c0dce555474432cbfeaaa23975ed83a63923f78fd", size = 21276485, upload-time = "2026-02-10T19:18:15.415Z" }, + { url = "https://files.pythonhosted.org/packages/d3/38/5c3462b927a93be4ccaaa25138926a5fb6c9e1b72884efd7af77e451d82e/uv-0.10.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:acc08e420abab21de987151059991e3f04bc7f4044d94ca58b5dd547995b4843", size = 20048620, upload-time = "2026-02-10T19:17:26.481Z" }, + { url = "https://files.pythonhosted.org/packages/03/51/d4509b0f5b7740c1af82202e9c69b700d5848b8bd0faa25229e8edd2c19c/uv-0.10.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:aefbcd749ab2ad48bb533ec028607607f7b03be11c83ea152dbb847226cd6285", size = 21870454, upload-time = "2026-02-10T19:17:21.838Z" }, + { url = "https://files.pythonhosted.org/packages/cd/7e/2bcbafcb424bb885817a7e58e6eec9314c190c55935daaafab1858bb82cd/uv-0.10.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.musllinux_1_1_armv7l.whl", hash = 
"sha256:fad554c38d9988409ceddfac69a465e6e5f925a8b689e7606a395c20bb4d1d78", size = 21839508, upload-time = "2026-02-10T19:17:59.211Z" }, + { url = "https://files.pythonhosted.org/packages/60/08/16df2c1f8ad121a595316b82f6e381447e8974265b2239c9135eb874f33b/uv-0.10.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6dd2dc41043e92b3316d7124a7bf48c2affe7117c93079419146f083df71933c", size = 21841283, upload-time = "2026-02-10T19:17:41.419Z" }, + { url = "https://files.pythonhosted.org/packages/76/27/a869fec4c03af5e43db700fabe208d8ee8dbd56e0ff568ba792788d505cd/uv-0.10.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111c05182c5630ac523764e0ec2e58d7b54eb149dbe517b578993a13c2f71aff", size = 23111967, upload-time = "2026-02-10T19:18:11.764Z" }, + { url = "https://files.pythonhosted.org/packages/2a/4a/fb38515d966acfbd80179e626985aab627898ffd02c70205850d6eb44df1/uv-0.10.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45c3deaba0343fd27ab5385d6b7cde0765df1a15389ee7978b14a51c32895662", size = 23911019, upload-time = "2026-02-10T19:18:26.947Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5f/51bcbb490ddb1dcb06d767f0bde649ad2826686b9e30efa57f8ab2750a1d/uv-0.10.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bb2cac4f3be60b64a23d9f035019c30a004d378b563c94f60525c9591665a56b", size = 23030217, upload-time = "2026-02-10T19:17:37.789Z" }, + { url = "https://files.pythonhosted.org/packages/46/69/144f6db851d49aa6f25b040dc5c8c684b8f92df9e8d452c7abc619c6ec23/uv-0.10.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:937687df0380d636ceafcb728cf6357f0432588e721892128985417b283c3b54", size = 23036452, upload-time = "2026-02-10T19:18:18.97Z" }, + { url = "https://files.pythonhosted.org/packages/66/29/3c7c4559c9310ed478e3d6c585ee0aad2852dc4d5fb14f4d92a2a12d1728/uv-0.10.2-py3-none-manylinux_2_28_aarch64.whl", hash = 
"sha256:f90bca8703ae66bccfcfb7313b4b697a496c4d3df662f4a1a2696a6320c47598", size = 21941903, upload-time = "2026-02-10T19:17:30.575Z" }, + { url = "https://files.pythonhosted.org/packages/9a/5a/42883b5ef2ef0b1bc5b70a1da12a6854a929ff824aa8eb1a5571fb27a39b/uv-0.10.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:cca026c2e584788e1264879a123bf499dd8f169b9cafac4a2065a416e09d3823", size = 22651571, upload-time = "2026-02-10T19:18:22.74Z" }, + { url = "https://files.pythonhosted.org/packages/e8/b8/e4f1dda1b3b0cc6c8ac06952bfe7bc28893ff016fb87651c8fafc6dfca96/uv-0.10.2-py3-none-musllinux_1_1_i686.whl", hash = "sha256:9f878837938103ee1307ed3ed5d9228118e3932816ab0deb451e7e16dc8ce82a", size = 22321279, upload-time = "2026-02-10T19:17:49.402Z" }, + { url = "https://files.pythonhosted.org/packages/2c/4b/baa16d46469e024846fc1a8aa0cfa63f1f89ad0fd3eaa985359a168c3fb0/uv-0.10.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:6ec75cfe638b316b329474aa798c3988e5946ead4d9e977fe4dc6fc2ea3e0b8b", size = 23252208, upload-time = "2026-02-10T19:17:54.46Z" }, + { url = "https://files.pythonhosted.org/packages/d6/84/6a74e5ec2ee90e4314905e6d1d1708d473e06405e492ec38868b42645388/uv-0.10.2-py3-none-win32.whl", hash = "sha256:f7f3c7e09bf53b81f55730a67dd86299158f470dffb2bd279b6432feb198d231", size = 21118543, upload-time = "2026-02-10T19:18:07.296Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f9/e5cc6cf3a578b87004e857274df97d3cdecd8e19e965869b9b67c094c20c/uv-0.10.2-py3-none-win_amd64.whl", hash = "sha256:7b3685aa1da15acbe080b4cba8684afbb6baf11c9b04d4d4b347cc18b7b9cfa0", size = 23620790, upload-time = "2026-02-10T19:17:45.204Z" }, + { url = "https://files.pythonhosted.org/packages/df/7a/99979dc08ae6a65f4f7a44c5066699016c6eecdc4e695b7512c2efb53378/uv-0.10.2-py3-none-win_arm64.whl", hash = "sha256:abdd5b3c6b871b17bf852a90346eb7af881345706554fd082346b000a9393afd", size = 22035199, upload-time = "2026-02-10T19:18:03.679Z" }, ] [[package]] @@ -6416,15 +6459,17 @@ wheels = [ [[package]] 
name = "z3-solver" -version = "4.15.7.0" +version = "4.15.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fd/5d/810ba04f7e7f2f2e5f019dd75237d1a16b7388a0c72f7e532b27dde9f7e2/z3_solver-4.15.7.0.tar.gz", hash = "sha256:a26b91f861b6d13bb76f0ac568d3ef1c0a4801e70a135f80e66b49628565a460", size = 5071448, upload-time = "2026-02-09T01:08:40.767Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/46/5ab514528111418ed5b93df48a572fecb3e8fe2ed9108d5563a951f3a7d6/z3_solver-4.15.8.0.tar.gz", hash = "sha256:fbb5ebb43e4f59335d415fc78074000953dcf9963b7ad2230fa68293ca25e9cb", size = 5072381, upload-time = "2026-02-12T20:59:04.352Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/1b/d21f292b473c1c40bedf41d113577ae2bb7fcc715f54d42c10b7f2b3a186/z3_solver-4.15.7.0-py3-none-macosx_15_0_arm64.whl", hash = "sha256:a6c967677c67296a8b7c97dff68107f029c576a94cfb4abc9e08bf72e5499e5d", size = 36987369, upload-time = "2026-02-09T01:08:27.585Z" }, - { url = "https://files.pythonhosted.org/packages/77/36/132c3d03de2eed160fad123207c981507193b2621e05b2909563775e0ad9/z3_solver-4.15.7.0-py3-none-macosx_15_0_x86_64.whl", hash = "sha256:a9644e958252dfdbdae2f787a8192fe4b8c156e7cf7b0e00a6a59e896a27569d", size = 47560235, upload-time = "2026-02-09T01:08:30.415Z" }, - { url = "https://files.pythonhosted.org/packages/61/49/40b0ee7cd2425dfa05bde5776f6aa7e892460a5ca8016171204f9b2d42df/z3_solver-4.15.7.0-py3-none-win32.whl", hash = "sha256:2dd09ac8afde63035d9c0a63b23d448726e374ec588b67b5f5edce9d7e9b1a13", size = 13342998, upload-time = "2026-02-09T01:08:33.84Z" }, - { url = "https://files.pythonhosted.org/packages/6c/ab/5a60c6ed712eb97749cd758162842cec771cfbe2c37ea43a251dc6fe583b/z3_solver-4.15.7.0-py3-none-win_amd64.whl", hash = "sha256:17f5ccea921d6a11bba5880281048c9f4a1e0c35f76e8ce69e72826c90c230bd", size = 16427563, upload-time = "2026-02-09T01:08:35.884Z" }, - { url = 
"https://files.pythonhosted.org/packages/f0/1f/ea28f6b3dec9cbab32cf851b3a529c9fb8332300c7419a55ab68ef5b40ac/z3_solver-4.15.7.0-py3-none-win_arm64.whl", hash = "sha256:9bf1a350598bc92ece90220073fe47c0b0f8cbbeaaf62974de736bd79947f8bd", size = 15082309, upload-time = "2026-02-09T01:08:38.832Z" }, + { url = "https://files.pythonhosted.org/packages/4a/f5/625c056c0d86b3f3ae8c1779c9314a9fa7bf74cd863b6f92d5d9c74e197b/z3_solver-4.15.8.0-py3-none-macosx_15_0_arm64.whl", hash = "sha256:24434ff39a86f3f580130380d341796b19ada49e68f139ec05b82ae0cc46b384", size = 36964743, upload-time = "2026-02-12T20:58:34.145Z" }, + { url = "https://files.pythonhosted.org/packages/e6/56/f5553c5ceaa50c0a1927d58aee4f1ab63ae830fee1d0ae3a8302c92d3465/z3_solver-4.15.8.0-py3-none-macosx_15_0_x86_64.whl", hash = "sha256:f60da7b1da62ba7e2d0b5852395ecf50f095d46c004286a51ddc0c75d4d5132a", size = 47526198, upload-time = "2026-02-12T20:58:38.806Z" }, + { url = "https://files.pythonhosted.org/packages/c1/d6/beb88db135980497db93ec0211285e83bf4d04fde99925309cb0f5dc9fbb/z3_solver-4.15.8.0-py3-none-manylinux_2_27_x86_64.whl", hash = "sha256:05fbd0b2644131c83c535505a26db8057728e45f3de9ce07af2c99d3be365713", size = 31748580, upload-time = "2026-02-12T20:58:43.18Z" }, + { url = "https://files.pythonhosted.org/packages/63/12/fa348373f437601349b4233c6681d0b8e7f2e8f0f8f63d130f406a4c888e/z3_solver-4.15.8.0-py3-none-manylinux_2_38_aarch64.whl", hash = "sha256:b35ac727aa9e769de0ddbea94be4f1bf382abe49903ea455b1512cc959fc1ac9", size = 27321039, upload-time = "2026-02-12T20:58:47.549Z" }, + { url = "https://files.pythonhosted.org/packages/70/67/a440ce9386b3c8c6d30929cbaacd35cfb26802471e888595cc633e1976e0/z3_solver-4.15.8.0-py3-none-win32.whl", hash = "sha256:b98df38ceabcae8dd4f5e7d8705d0ffb6e80cde3428d73850f398cdfbf7579bf", size = 13341721, upload-time = "2026-02-12T20:58:55.289Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/0a/836ab4e4bbe490cc94472da42001cfcdda9c75b518869b98d4b0097a308e/z3_solver-4.15.8.0-py3-none-win_amd64.whl", hash = "sha256:8f630d5bf139e0c20fea8c09b8b10a4ee52e99666951468e3e365b594690da7f", size = 16419862, upload-time = "2026-02-12T20:58:58.486Z" }, + { url = "https://files.pythonhosted.org/packages/eb/34/5f361d9320fcf1ce334ecdd77f85858084d7681687809ac10c64ca6a9636/z3_solver-4.15.8.0-py3-none-win_arm64.whl", hash = "sha256:87d5c4a0400ee5dbcaf5b86c6d507525a9fd2d0adb2b64622ebcd29eef59207a", size = 15086043, upload-time = "2026-02-12T20:59:01.957Z" }, ] [[package]]