2 changes: 0 additions & 2 deletions .gitignore
@@ -12,8 +12,6 @@ examples/
.venv
venv
requirements.txt
.pyi_generator_last_run
.pyi_generator_diff
reflex.db
.codspeed
.env
1 change: 1 addition & 0 deletions .pre-commit-config.yaml
@@ -31,6 +31,7 @@ repos:
entry: python3 scripts/make_pyi.py
language: system
always_run: true
pass_filenames: false
require_serial: true
- repo: local
hooks:
2 changes: 1 addition & 1 deletion AGENTS.md
@@ -85,7 +85,7 @@ Playwright tests use the `page` fixture and navigate to `harness.frontend_url`.

## .pyi stubs

When adding/modifying components: `uv run python scripts/make_pyi.py`. Commit `pyi_hashes.json` (not `.pyi` files). If the diff removes many modules, run `uv sync`, delete `.pyi_generator_last_run`, and regenerate.
When components change, run `uv run python scripts/make_pyi.py` and commit only `pyi_hashes.json`. It incrementally regenerates changed stubs and dependent subclasses.
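
For orientation, `pyi_hashes.json` maps workspace-relative `.pyi` paths to md5 digests of the generated stub content (see `_update_pyi_hashes_file` in the generator diff below). A toy sketch of its shape, with made-up entries:

```python
import json

# Made-up entries illustrating the shape of pyi_hashes.json:
# workspace-relative .pyi paths -> md5 of the generated stub content.
example = {
    "reflex/components/base/fragment.pyi": "0f1e2d3c4b5a69788796a5b4c3d2e1f0",
    "reflex/experimental/layout.pyi": "d41d8cd98f00b204e9800998ecf8427e",
}
print(json.dumps(example, indent=2, sort_keys=True))
```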

## Breaking changes and deprecation

11 changes: 10 additions & 1 deletion packages/hatch-reflex-pyi/src/hatch_reflex_pyi/plugin.py
@@ -67,8 +67,17 @@ def initialize(self, version: str, build_data: dict[str, Any]) -> None:
# Run from src/ so _path_to_module_name produces valid import names
# (e.g. "reflex_components_core.core.banner" instead of
# "packages.reflex-components-core.src.reflex_components_core.core.banner").
# --no-update-hashes: parallel workspace builds would race on the shared
# pyi_hashes.json; that file is a dev-tooling artifact and isn't needed
# at install/wheel-build time.
subprocess.run(
[sys.executable, "-m", "reflex_base.utils.pyi_generator", src_dir.name],
[
sys.executable,
"-m",
"reflex_base.utils.pyi_generator",
"--no-update-hashes",
src_dir.name,
],
cwd=src_dir.parent,
check=True,
)
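
To make the cwd choice concrete, here is a rough stand-in for `_path_to_module_name` (the real helper lives in the generator; this sketch only assumes it dot-joins the path parts relative to the working directory):

```python
from pathlib import Path

def path_to_module_name(path: Path) -> str:
    # Rough stand-in: drop the suffix, join the path parts with dots.
    return ".".join(path.with_suffix("").parts)

# Invoked with cwd == packages/reflex-components-core/src/:
print(path_to_module_name(Path("reflex_components_core/core/banner.py")))
# -> reflex_components_core.core.banner (importable)

# Invoked from the repo root, the same file would yield a dotted path
# that is not a valid module name:
print(path_to_module_name(
    Path("packages/reflex-components-core/src/reflex_components_core/core/banner.py")
))
# -> packages.reflex-components-core.src.reflex_components_core.core.banner
```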
161 changes: 79 additions & 82 deletions packages/reflex-base/src/reflex_base/utils/pyi_generator.py
@@ -1599,15 +1599,72 @@ def _scan_file(module_path: Path) -> tuple[str, str] | None:
return str(module_path.with_suffix(".pyi").resolve()), content_hash


def _update_pyi_hashes_file(
written_files: list[tuple[str, str]],
scanned_sources: list[Path],
) -> None:
"""Merge a scan's results into the workspace ``pyi_hashes.json``.

Walks up from the current working directory to find an existing
``pyi_hashes.json`` (the file is workspace-scoped — one per repo). If none
exists, a fresh one is created in cwd. Existing entries survive unless
their source file disappeared, or their source was scanned this run but
no longer produced a stub. Newly produced hashes are written in.

Args:
written_files: ``(absolute pyi path, content md5)`` tuples for stubs
written this run.
scanned_sources: Absolute ``.py`` paths that were scanned this run,
including those that produced no stub. An entry whose source was
scanned but produced nothing this run is dropped from the file.
"""
written_paths = [Path(p) for p, _ in written_files]
hashes = [h for _, h in written_files]

pyi_hashes_parent = Path.cwd().resolve()
while (
pyi_hashes_parent != pyi_hashes_parent.parent
and not (pyi_hashes_parent / PYI_HASHES).exists()
):
pyi_hashes_parent = pyi_hashes_parent.parent

pyi_hashes_file = pyi_hashes_parent / PYI_HASHES
if pyi_hashes_file.exists():
existing = json.loads(pyi_hashes_file.read_text())
else:
pyi_hashes_file = (Path.cwd() / PYI_HASHES).resolve()
pyi_hashes_parent = pyi_hashes_file.parent
existing = {}

produced = {
p.relative_to(pyi_hashes_parent).as_posix(): h
for p, h in zip(written_paths, hashes, strict=True)
}
scanned = {
source.with_suffix(".pyi").relative_to(pyi_hashes_parent).as_posix()
for source in scanned_sources
if source.with_suffix(".pyi").is_relative_to(pyi_hashes_parent)
}
pyi_hashes = {
entry: produced.get(entry, current)
for entry, current in existing.items()
if (entry in produced or entry not in scanned)
and (pyi_hashes_parent / entry).with_suffix(".py").exists()
}
for entry, hashed in produced.items():
pyi_hashes.setdefault(entry, hashed)

pyi_hashes_file.write_text(json.dumps(pyi_hashes, indent=2, sort_keys=True) + "\n")
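
A toy walk-through of the merge rules the docstring describes (made-up paths and hashes; the source-file-exists check is elided):

```python
# Made-up data illustrating the merge semantics above.
existing = {"a.pyi": "h1", "b.pyi": "h2", "c.pyi": "h3"}  # current file contents
produced = {"a.pyi": "h1-new"}  # stub (re)generated this run
scanned = {"a.pyi", "b.pyi"}    # sources scanned; b.py yielded no stub

merged = {
    entry: produced.get(entry, current)
    for entry, current in existing.items()
    # Keep entries that were regenerated, or that this scan never touched.
    # (The real function also drops entries whose .py source is gone.)
    if entry in produced or entry not in scanned
}
for entry, hashed in produced.items():
    merged.setdefault(entry, hashed)

print(merged)  # {'a.pyi': 'h1-new', 'c.pyi': 'h3'}
```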


class PyiGenerator:
"""A .pyi file generator that will scan all defined Component in Reflex and
generate the appropriate stub.
"""

modules: list = []
root: str = ""
current_module: Any = {}
written_files: list[tuple[str, str]] = []
def __init__(self) -> None:
"""Initialize per-instance scan state."""
self.written_files: list[tuple[str, str]] = []

def _scan_files(self, files: list[Path]):
max_workers = min(multiprocessing.cpu_count() or 1, len(files), 8)
@@ -1673,18 +1730,15 @@ def scan_all(
and target_path.suffix == ".py"
and target_path.name not in EXCLUDED_FILES
):
file_targets.append(target_path)
file_targets.append(target_path.resolve())
continue
if not target_path.is_dir():
continue
for file_path in _walk_files(target_path):
relative = _relative_to_pwd(file_path)
if relative.name in EXCLUDED_FILES or file_path.suffix != ".py":
continue
if (
changed_files is not None
and _relative_to_pwd(file_path) not in changed_files
):
if changed_files is not None and relative not in changed_files:
continue
file_targets.append(file_path)

@@ -1702,79 +1756,14 @@

self._scan_files(file_targets)

file_paths, hashes = (
[f[0] for f in self.written_files],
[f[1] for f in self.written_files],
)

# Fix generated pyi files with ruff.
if file_paths:
subprocess.run(["ruff", "format", *file_paths])
subprocess.run(["ruff", "check", "--fix", *file_paths])

if use_json:
if file_paths and changed_files is None:
file_paths = list(map(Path, file_paths))
top_dir = file_paths[0].parent
for file_path in file_paths:
file_parent = file_path.parent
while len(file_parent.parts) > len(top_dir.parts):
file_parent = file_parent.parent
while len(top_dir.parts) > len(file_parent.parts):
top_dir = top_dir.parent
while not file_parent.samefile(top_dir):
file_parent = file_parent.parent
top_dir = top_dir.parent

while (
not top_dir.samefile(top_dir.parent)
and not (top_dir / PYI_HASHES).exists()
):
top_dir = top_dir.parent

pyi_hashes_file = top_dir / PYI_HASHES

if pyi_hashes_file.exists():
pyi_hashes_file.write_text(
json.dumps(
dict(
zip(
[
f.relative_to(pyi_hashes_file.parent).as_posix()
for f in file_paths
],
hashes,
strict=True,
)
),
indent=2,
sort_keys=True,
)
+ "\n",
)
elif file_paths:
file_paths = list(map(Path, file_paths))
pyi_hashes_parent = file_paths[0].parent
while (
not pyi_hashes_parent.samefile(pyi_hashes_parent.parent)
and not (pyi_hashes_parent / PYI_HASHES).exists()
):
pyi_hashes_parent = pyi_hashes_parent.parent

pyi_hashes_file = pyi_hashes_parent / PYI_HASHES
if pyi_hashes_file.exists():
pyi_hashes = json.loads(pyi_hashes_file.read_text())
for file_path, hashed_content in zip(
file_paths, hashes, strict=False
):
formatted_path = file_path.relative_to(
pyi_hashes_parent
).as_posix()
pyi_hashes[formatted_path] = hashed_content

pyi_hashes_file.write_text(
json.dumps(pyi_hashes, indent=2, sort_keys=True) + "\n"
)
if self.written_files:
written_paths = [p for p, _ in self.written_files]
subprocess.run(["ruff", "format", *written_paths])
subprocess.run(["ruff", "check", "--fix", *written_paths])

if use_json and (self.written_files or file_targets):
_update_pyi_hashes_file(self.written_files, file_targets)


if __name__ == "__main__":
@@ -1787,10 +1776,18 @@ def scan_all(
default=["reflex/components", "reflex/experimental", "reflex/__init__.py"],
help="Target directories/files to process",
)
parser.add_argument(
"--no-update-hashes",
dest="update_hashes",
action="store_false",
help="Do not read or write the workspace pyi_hashes.json. "
"Use this from build hooks where parallel package builds would race "
"on a single shared hash file.",
)
args = parser.parse_args()

logging.basicConfig(level=logging.INFO)
logging.getLogger("blib2to3.pgen2.driver").setLevel(logging.INFO)

gen = PyiGenerator()
gen.scan_all(args.targets, None, use_json=True)
gen.scan_all(args.targets, None, use_json=args.update_hashes)
7 changes: 6 additions & 1 deletion scripts/hatch_build.py
@@ -41,7 +41,12 @@ def initialize(self, version: str, build_data: dict[str, Any]) -> None:
file.unlink(missing_ok=True)

subprocess.run(
[sys.executable, "-m", "reflex_base.utils.pyi_generator"],
[
sys.executable,
"-m",
"reflex_base.utils.pyi_generator",
"--no-update-hashes",
],
check=True,
)
self.marker().touch()