From 91c996981f11a0af403b7f0331c48490da4efb91 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 03:12:00 +0530 Subject: [PATCH 01/47] feat: issue #8 project scaffolding --- .github/workflows/ci.yml | 29 ++++ .gitignore | 14 ++ Makefile | 21 +++ README.md | 265 +++++++---------------------------- pyproject.toml | 39 ++++++ src/minichain/__init__.py | 4 + src/minichain/__main__.py | 23 +++ src/minichain/block.py | 1 + src/minichain/consensus.py | 1 + src/minichain/crypto.py | 1 + src/minichain/mempool.py | 1 + src/minichain/network.py | 1 + src/minichain/node.py | 8 ++ src/minichain/state.py | 1 + src/minichain/storage.py | 1 + src/minichain/transaction.py | 1 + tests/test_scaffold.py | 31 ++++ 17 files changed, 227 insertions(+), 215 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 Makefile create mode 100644 pyproject.toml create mode 100644 src/minichain/__init__.py create mode 100644 src/minichain/__main__.py create mode 100644 src/minichain/block.py create mode 100644 src/minichain/consensus.py create mode 100644 src/minichain/crypto.py create mode 100644 src/minichain/mempool.py create mode 100644 src/minichain/network.py create mode 100644 src/minichain/node.py create mode 100644 src/minichain/state.py create mode 100644 src/minichain/storage.py create mode 100644 src/minichain/transaction.py create mode 100644 tests/test_scaffold.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..a41da18 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,29 @@ +name: CI + +on: + push: + branches: [main] + pull_request: + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install -e .[dev] + + - name: Lint + run: make lint + + - name: Run tests + run: 
make test diff --git a/.gitignore b/.gitignore index 9308a4b..d40d7cf 100644 --- a/.gitignore +++ b/.gitignore @@ -258,6 +258,17 @@ pythontex-files-*/ # easy-todo *.lod +# MiniChain local planning docs (do not commit) +issues.md +architectureProposal.md + +# Python caches and virtualenvs +__pycache__/ +*.py[cod] +.pytest_cache/ +.ruff_cache/ +.venv/ + # xcolor *.xcp @@ -324,3 +335,6 @@ TSWLatexianTemp* # option is specified. Footnotes are the stored in a file with suffix Notes.bib. # Uncomment the next line to have this generated file ignored. #*Notes.bib + + +docs/ diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..a99220a --- /dev/null +++ b/Makefile @@ -0,0 +1,21 @@ +PYTHON ?= python3 + +.PHONY: install dev-install test lint format start-node + +install: + $(PYTHON) -m pip install . + +dev-install: + $(PYTHON) -m pip install -e .[dev] + +test: + $(PYTHON) -m pytest + +lint: + $(PYTHON) -m ruff check src tests + +format: + $(PYTHON) -m ruff format src tests + +start-node: + PYTHONPATH=src $(PYTHON) -m minichain --host 127.0.0.1 --port 7000 diff --git a/README.md b/README.md index 0ea906d..7706e0a 100644 --- a/README.md +++ b/README.md @@ -1,236 +1,71 @@ - -
+# MiniChain - -
- Stability Nexus - -
+MiniChain is a minimal, research-oriented blockchain implementation in Python. This repository currently contains the project scaffolding and development environment for the v0 core chain roadmap. -  +## Current Status - -
+Issue #1 (project scaffolding) is implemented with: -[![Static Badge](https://img.shields.io/badge/Stability_Nexus-/TODO-228B22?style=for-the-badge&labelColor=FFC517)](https://TODO.stability.nexus/) +- Python package layout under `src/minichain` +- Placeholder component modules for: + - `crypto`, `transaction`, `block`, `state`, `mempool`, `consensus`, `network`, `storage`, `node` +- `pyproject.toml` project configuration +- `Makefile` for common developer tasks +- Basic CI workflow (`.github/workflows/ci.yml`) +- Baseline tests under `tests/` - +## Requirements -
+- Python 3.11+ - -

- - -Telegram Badge -   - - -X (formerly Twitter) Badge -   - - -Discord Badge -   - - - Medium Badge -   - - - LinkedIn Badge -   - - - Youtube Badge -

- ---- - -
-

MiniChain

-
- -MiniChain is a minimal fully functional blockchain implemented in Python. - -### Background and Motivation - -* Most well-known blockchains are now several years old and have accumulated a lot of technical debt. - Simply forking their codebases is not an optimal option for starting a new chain. - -* MiniChain will be focused on research. Its primary purpose is not to be yet another blockchain - trying to be the one blockchain to kill them all, but rather to serve as a clean codebase that can be a benchmark for research of - variations of the technology. (We hope that MiniChain will be as valuable for blockchain research as, for instance, - MiniSat was valuable for satisfiability and automated reasoning research. MiniSat had less than 600 lines of C++ code.) - -* MiniChain will be focused on education. By having a clean and small codebase, devs will be able to understand - blockchains by looking at the codebase. - -* The blockchain space is again going through a phase where many new blockchains are being launched. - Back in 2017 and 2018, such an expansion period led to many general frameworks for blockchains, - such as Scorex and various Hyperledger frameworks. But most of these frameworks suffered from speculative generality and - were bloated. They focused on extensibility and configurability. MiniChain has a different philosophy: - focus on minimality and, therefore, ease of modification. - -* Recent advances in networking and crypto libraries for Python make it possible to develop MiniChain in Python. - Given that Python is one of the easiest languages to learn and results in usually boilerplate-minimized and easy to read code, - implementing MiniChain in Python aligns with MiniChain's educational goal. - - -### Overview of Tasks - -* Develop a fully functional minimal blockchain in Python, with all the expected components: - peer-to-peer networking, consensus, mempool, ledger, ... - -* Bonus task: add smart contracts to the blockchain. 
- -Candidates are expected to refine these tasks in their GSoC proposals. -It is encouraged that you develop an initial prototype during the application phase. - -### Requirements - -* Use [PyNaCl](https://pynacl.readthedocs.io/en/latest/) library for hashing, signing transactions and verifying signatures. -* Use [Py-libp2p](https://github.com/libp2p/py-libp2p/tree/main) for p2p networking. -* Implement Proof-of-Work as the consensus protocol. -* Use accounts (instead of UTxO) as the accounting model for the ledger. -* Use as few lines of code as possible without compromising readability and understandability. -* For the bonus task, make Python itself be the language used for smart contracts, but watch out for security concerns related to executing arbitrary code from untrusted sources. - -### Resources - -* Read this book: https://www.marabu.dev/blockchain-foundations.pdf - - ---- - -## Project Maturity - -TODO: In the checklist below, mark the items that have been completed and delete items that are not applicable to the current project: - -* [ ] The project has a logo. -* [ ] The project has a favicon. -* [ ] The protocol: - - [ ] has been described and formally specified in a paper. - - [ ] has had its main properties mathematically proven. - - [ ] has been formally verified. -* [ ] The smart contracts: - - [ ] were thoroughly reviewed by at least two knights of The Stable Order. - - [ ] were deployed to: - - [ ] Ergo - - [ ] Cardano - - [ ] EVM Chains: - - [ ] Ethereum Classic - - [ ] Ethereum - - [ ] Polygon - - [ ] BSC - - [ ] Base -* [ ] The mobile app: - - [ ] has an _About_ page containing the Stability Nexus's logo and pointing to the social media accounts of the Stability Nexus. - - [ ] is available for download as a release in this repo. - - [ ] is available in the relevant app stores. -* [ ] The web frontend: - - [ ] has proper title and metadata. 
- - [ ] has proper open graph metadata, to ensure that it is shown well when shared in social media (Discord, Telegram, Twitter, LinkedIn). - - [ ] has a footer, containing the Stability Nexus's logo and pointing to the social media accounts of the Stability Nexus. - - [ ] is fully static and client-side. - - [ ] is deployed to Github Pages via a Github Workflow. - - [ ] is accessible through the https://TODO:PROJECT-NAME.stability.nexus domain. -* [ ] the project is listed in [https://stability.nexus/protocols](https://stability.nexus/protocols). - ---- - -## Tech Stack - -TODO: - -### Frontend - -TODO: - -- Next.js 14+ (React) -- TypeScript -- TailwindCSS -- shadcn/ui - -### Blockchain - -TODO: - -- Wagmi -- Solidity Smart Contracts -- Ethers.js - ---- - -## Getting Started - -### Prerequisites - -TODO - -- Node.js 18+ -- npm/yarn/pnpm -- MetaMask or any other web3 wallet browser extension - -### Installation - -TODO - -#### 1. Clone the Repository +## Setup ```bash -git clone https://github.com/StabilityNexus/TODO.git -cd TODO +python3 -m venv .venv +source .venv/bin/activate +python -m pip install --upgrade pip +make dev-install ``` -#### 2. Install Dependencies - -Using your preferred package manager: +If you also want networking dependencies: ```bash -npm install -# or -yarn install -# or -pnpm install +python -m pip install -e .[network] ``` -#### 3. Run the Development Server - -Start the app locally: +## Common Commands ```bash -npm run dev -# or -yarn dev -# or -pnpm dev +make test # run unit tests +make lint # run ruff checks +make format # format with ruff +make start-node # run scaffold node entrypoint ``` -#### 4. Open your Browser - -Navigate to [http://localhost:3000](http://localhost:3000) to see the application. - ---- - -## Contributing - -We welcome contributions of all kinds! To contribute: - -1. Fork the repository and create your feature branch (`git checkout -b feature/AmazingFeature`). -2. 
Commit your changes (`git commit -m 'Add some AmazingFeature'`). -3. Run the development workflow commands to ensure code quality: - - `npm run format:write` - - `npm run lint:fix` - - `npm run typecheck` -4. Push your branch (`git push origin feature/AmazingFeature`). -5. Open a Pull Request for review. +## Run the Node Entrypoint -If you encounter bugs, need help, or have feature requests: - -- Please open an issue in this repository providing detailed information. -- Describe the problem clearly and include any relevant logs or screenshots. - -We appreciate your feedback and contributions! +```bash +PYTHONPATH=src python -m minichain --host 127.0.0.1 --port 7000 +``` -© 2025 The Stable Order. +## Repository Layout + +```text +.github/workflows/ci.yml +src/minichain/ + __init__.py + __main__.py + crypto.py + transaction.py + block.py + state.py + mempool.py + consensus.py + network.py + storage.py + node.py +tests/ + test_scaffold.py +issues.md +architectureProposal.md +``` diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..b22da7e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] +requires = ["hatchling>=1.24"] +build-backend = "hatchling.build" + +[project] +name = "minichain" +version = "0.1.0" +description = "Minimal, research-oriented blockchain in Python" +readme = "README.md" +requires-python = ">=3.11" +authors = [{ name = "MiniChain Contributors" }] +dependencies = [ + "PyNaCl>=1.5.0", +] + +[project.optional-dependencies] +network = [ + "py-libp2p>=0.2.0", +] +dev = [ + "pytest>=8.0", + "ruff>=0.7.0", +] + +[project.scripts] +minichain-node = "minichain.__main__:main" + +[tool.pytest.ini_options] +minversion = "8.0" +addopts = "-q" +testpaths = ["tests"] +pythonpath = ["src"] + +[tool.ruff] +line-length = 100 +target-version = "py311" + +[tool.ruff.lint] +select = ["E", "F", "I"] diff --git a/src/minichain/__init__.py b/src/minichain/__init__.py new file mode 100644 index 0000000..3bc48b2 --- /dev/null +++ 
b/src/minichain/__init__.py @@ -0,0 +1,4 @@ +"""MiniChain package.""" + +__all__ = ["__version__"] +__version__ = "0.1.0" diff --git a/src/minichain/__main__.py b/src/minichain/__main__.py new file mode 100644 index 0000000..614289f --- /dev/null +++ b/src/minichain/__main__.py @@ -0,0 +1,23 @@ +"""CLI entrypoint for running a MiniChain node.""" + +from __future__ import annotations + +import argparse + +from minichain.node import start_node + + +def build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description="Run a MiniChain node.") + parser.add_argument("--host", default="127.0.0.1", help="Host interface for the node") + parser.add_argument("--port", default=7000, type=int, help="Port for the node") + return parser + + +def main() -> None: + args = build_parser().parse_args() + start_node(host=args.host, port=args.port) + + +if __name__ == "__main__": + main() diff --git a/src/minichain/block.py b/src/minichain/block.py new file mode 100644 index 0000000..bc33b77 --- /dev/null +++ b/src/minichain/block.py @@ -0,0 +1 @@ +"""Block primitives and block-level validation logic (to be implemented).""" diff --git a/src/minichain/consensus.py b/src/minichain/consensus.py new file mode 100644 index 0000000..41953b9 --- /dev/null +++ b/src/minichain/consensus.py @@ -0,0 +1 @@ +"""Consensus and mining primitives (to be implemented).""" diff --git a/src/minichain/crypto.py b/src/minichain/crypto.py new file mode 100644 index 0000000..104239f --- /dev/null +++ b/src/minichain/crypto.py @@ -0,0 +1 @@ +"""Cryptographic identity and signature helpers (to be implemented).""" diff --git a/src/minichain/mempool.py b/src/minichain/mempool.py new file mode 100644 index 0000000..3e15d3b --- /dev/null +++ b/src/minichain/mempool.py @@ -0,0 +1 @@ +"""Mempool data structures and transaction selection logic (to be implemented).""" diff --git a/src/minichain/network.py b/src/minichain/network.py new file mode 100644 index 0000000..7245a33 --- /dev/null +++ 
b/src/minichain/network.py @@ -0,0 +1 @@ +"""P2P networking layer built on py-libp2p (to be implemented).""" diff --git a/src/minichain/node.py b/src/minichain/node.py new file mode 100644 index 0000000..8922753 --- /dev/null +++ b/src/minichain/node.py @@ -0,0 +1,8 @@ +"""Node orchestration layer for MiniChain.""" + +from __future__ import annotations + + +def start_node(host: str, port: int) -> None: + """Start a MiniChain node (placeholder for Issue #20 integration).""" + print(f"MiniChain node scaffold started on {host}:{port}") diff --git a/src/minichain/state.py b/src/minichain/state.py new file mode 100644 index 0000000..16dc1a0 --- /dev/null +++ b/src/minichain/state.py @@ -0,0 +1 @@ +"""Account state and ledger transitions (to be implemented).""" diff --git a/src/minichain/storage.py b/src/minichain/storage.py new file mode 100644 index 0000000..0b8f8ee --- /dev/null +++ b/src/minichain/storage.py @@ -0,0 +1 @@ +"""Persistent storage integration (to be implemented).""" diff --git a/src/minichain/transaction.py b/src/minichain/transaction.py new file mode 100644 index 0000000..0957177 --- /dev/null +++ b/src/minichain/transaction.py @@ -0,0 +1 @@ +"""Transaction data structures and validation rules (to be implemented).""" diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py new file mode 100644 index 0000000..3ddcaec --- /dev/null +++ b/tests/test_scaffold.py @@ -0,0 +1,31 @@ +"""Scaffolding checks for Issue #1.""" + +from __future__ import annotations + +import importlib + +COMPONENT_MODULES = [ + "crypto", + "transaction", + "block", + "state", + "mempool", + "consensus", + "network", + "storage", + "node", +] + + +def test_component_modules_are_importable() -> None: + for module in COMPONENT_MODULES: + imported = importlib.import_module(f"minichain.{module}") + assert imported is not None + + +def test_cli_parser_defaults() -> None: + from minichain.__main__ import build_parser + + args = build_parser().parse_args([]) + assert args.host == 
"127.0.0.1" + assert args.port == 7000 From 1a26ae79de51fac526a958ddfc34f13d9e3aea5d Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 03:49:38 +0530 Subject: [PATCH 02/47] feat(crypto): add Ed25519 identity and signature helpers --- src/minichain/crypto.py | 79 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 78 insertions(+), 1 deletion(-) diff --git a/src/minichain/crypto.py b/src/minichain/crypto.py index 104239f..aec33d7 100644 --- a/src/minichain/crypto.py +++ b/src/minichain/crypto.py @@ -1 +1,78 @@ -"""Cryptographic identity and signature helpers (to be implemented).""" +"""Cryptographic identity and signature helpers.""" + +from __future__ import annotations + +from typing import Any + +try: + from nacl.encoding import HexEncoder, RawEncoder + from nacl.exceptions import BadSignatureError + from nacl.hash import blake2b + from nacl.signing import SigningKey, VerifyKey +except ModuleNotFoundError as exc: # pragma: no cover - exercised in dependency-light envs + _NACL_IMPORT_ERROR = exc + HexEncoder = RawEncoder = None # type: ignore[assignment] + BadSignatureError = Exception # type: ignore[assignment] + SigningKey = VerifyKey = Any # type: ignore[assignment] + +ADDRESS_LENGTH_BYTES = 20 + + +def _require_nacl() -> None: + if "blake2b" not in globals(): + msg = "PyNaCl is required for minichain.crypto. 
Install with: pip install PyNaCl" + raise RuntimeError(msg) from _NACL_IMPORT_ERROR + + +def generate_key_pair() -> tuple[SigningKey, VerifyKey]: + """Generate a new Ed25519 keypair.""" + _require_nacl() + signing_key = SigningKey.generate() + return signing_key, signing_key.verify_key + + +def derive_address(verify_key: VerifyKey) -> str: + """Derive a 20-byte address from a verify key as lowercase hex.""" + _require_nacl() + digest = blake2b(verify_key.encode(), encoder=RawEncoder) + return digest[:ADDRESS_LENGTH_BYTES].hex() + + +def serialize_signing_key(signing_key: SigningKey) -> str: + """Serialize a signing key into a hex string.""" + _require_nacl() + return signing_key.encode(encoder=HexEncoder).decode("ascii") + + +def deserialize_signing_key(signing_key_hex: str) -> SigningKey: + """Deserialize a signing key from a hex string.""" + _require_nacl() + return SigningKey(signing_key_hex, encoder=HexEncoder) + + +def serialize_verify_key(verify_key: VerifyKey) -> str: + """Serialize a verify key into a hex string.""" + _require_nacl() + return verify_key.encode(encoder=HexEncoder).decode("ascii") + + +def deserialize_verify_key(verify_key_hex: str) -> VerifyKey: + """Deserialize a verify key from a hex string.""" + _require_nacl() + return VerifyKey(verify_key_hex, encoder=HexEncoder) + + +def sign_message(message: bytes, signing_key: SigningKey) -> bytes: + """Sign bytes and return the detached signature bytes.""" + _require_nacl() + return signing_key.sign(message).signature + + +def verify_signature(message: bytes, signature: bytes, verify_key: VerifyKey) -> bool: + """Verify a detached Ed25519 signature.""" + _require_nacl() + try: + verify_key.verify(message, signature) + except BadSignatureError: + return False + return True From fce3e7a5549127adfa11116ff38f1894c2eb979e Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 03:49:53 +0530 Subject: [PATCH 03/47] test(crypto): cover keypair, address, and signature validation --- 
tests/test_crypto.py | 63 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) create mode 100644 tests/test_crypto.py diff --git a/tests/test_crypto.py b/tests/test_crypto.py new file mode 100644 index 0000000..2b40967 --- /dev/null +++ b/tests/test_crypto.py @@ -0,0 +1,63 @@ +"""Unit tests for the cryptographic identity module.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + deserialize_verify_key, + generate_key_pair, + serialize_signing_key, + serialize_verify_key, + sign_message, + verify_signature, +) + + +def test_generated_key_pair_can_sign_and_verify() -> None: + signing_key, verify_key = generate_key_pair() + message = b"minichain-crypto-test" + + signature = sign_message(message, signing_key) + + assert verify_signature(message, signature, verify_key) + + +def test_address_derivation_is_deterministic() -> None: + signing_key, verify_key = generate_key_pair() + first = derive_address(verify_key) + second = derive_address(verify_key) + + assert first == second + assert first == derive_address(signing_key.verify_key) + assert len(first) == 40 + + +def test_invalid_signature_is_rejected() -> None: + signing_key, verify_key = generate_key_pair() + other_signing_key, _ = generate_key_pair() + message = b"minichain-message" + + wrong_signature = sign_message(message, other_signing_key) + + assert not verify_signature(message, wrong_signature, verify_key) + + +def test_key_hex_serialization_round_trip() -> None: + signing_key, verify_key = generate_key_pair() + + signing_key_hex = serialize_signing_key(signing_key) + verify_key_hex = serialize_verify_key(verify_key) + + decoded_signing_key = deserialize_signing_key(signing_key_hex) + decoded_verify_key = deserialize_verify_key(verify_key_hex) + + message = b"serialization-round-trip" + signature = sign_message(message, decoded_signing_key) + + assert 
verify_signature(message, signature, decoded_verify_key) + assert derive_address(decoded_verify_key) == derive_address(verify_key) From 15c67bd5d8c6596d4c8a64b8cde1c46648b76080 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 04:05:14 +0530 Subject: [PATCH 04/47] feat(serialization): add canonical transaction and header encoding --- src/minichain/serialization.py | 64 ++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 src/minichain/serialization.py diff --git a/src/minichain/serialization.py b/src/minichain/serialization.py new file mode 100644 index 0000000..ffff532 --- /dev/null +++ b/src/minichain/serialization.py @@ -0,0 +1,64 @@ +"""Deterministic serialization helpers for consensus-critical data.""" + +from __future__ import annotations + +import json +from typing import Any, Mapping + +TRANSACTION_FIELD_ORDER = ( + "sender", + "recipient", + "amount", + "nonce", + "fee", + "timestamp", +) + +BLOCK_HEADER_FIELD_ORDER = ( + "version", + "previous_hash", + "merkle_root", + "timestamp", + "difficulty_target", + "nonce", + "block_height", +) + + +def _to_field_map(value: Mapping[str, Any] | object, field_order: tuple[str, ...]) -> dict[str, Any]: + if isinstance(value, Mapping): + source = dict(value) + else: + source = {field: getattr(value, field) for field in field_order if hasattr(value, field)} + + missing = [field for field in field_order if field not in source] + if missing: + raise ValueError(f"Missing required fields: {', '.join(missing)}") + + extras = sorted(set(source) - set(field_order)) + if extras: + raise ValueError(f"Unexpected fields: {', '.join(extras)}") + + return {field: source[field] for field in field_order} + + +def serialize_canonical(value: Mapping[str, Any] | object, field_order: tuple[str, ...]) -> bytes: + """Serialize a structure to canonical UTF-8 JSON bytes.""" + canonical_map = _to_field_map(value, field_order) + text = json.dumps( + canonical_map, + ensure_ascii=False, + 
sort_keys=True, + separators=(",", ":"), + ) + return text.encode("utf-8") + + +def serialize_transaction(value: Mapping[str, Any] | object) -> bytes: + """Serialize a transaction using the canonical transaction field order.""" + return serialize_canonical(value, TRANSACTION_FIELD_ORDER) + + +def serialize_block_header(value: Mapping[str, Any] | object) -> bytes: + """Serialize a block header using the canonical block header field order.""" + return serialize_canonical(value, BLOCK_HEADER_FIELD_ORDER) From 313ee5d689036773f5ac70609df53f9e1e01f2b7 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 04:05:31 +0530 Subject: [PATCH 05/47] test(serialization): add deterministic encoding coverage --- tests/test_scaffold.py | 1 + tests/test_serialization.py | 102 ++++++++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) create mode 100644 tests/test_serialization.py diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py index 3ddcaec..e36ce30 100644 --- a/tests/test_scaffold.py +++ b/tests/test_scaffold.py @@ -14,6 +14,7 @@ "network", "storage", "node", + "serialization", ] diff --git a/tests/test_serialization.py b/tests/test_serialization.py new file mode 100644 index 0000000..4741fde --- /dev/null +++ b/tests/test_serialization.py @@ -0,0 +1,102 @@ +"""Tests for deterministic serialization.""" + +from __future__ import annotations + +from collections.abc import Callable + +import pytest + +from minichain.serialization import serialize_block_header, serialize_transaction + + +def test_transaction_serialization_is_deterministic() -> None: + tx_a = { + "sender": "a1" * 20, + "recipient": "b2" * 20, + "amount": 25, + "nonce": 1, + "fee": 2, + "timestamp": 1_739_749_000, + } + tx_b = { + "timestamp": 1_739_749_000, + "fee": 2, + "nonce": 1, + "amount": 25, + "recipient": "b2" * 20, + "sender": "a1" * 20, + } + + serialized_a = serialize_transaction(tx_a) + serialized_b = serialize_transaction(tx_b) + + assert serialized_a == serialized_b + 
assert b" " not in serialized_a + + +def test_changing_transaction_field_changes_serialization() -> None: + base = { + "sender": "aa" * 20, + "recipient": "bb" * 20, + "amount": 10, + "nonce": 0, + "fee": 1, + "timestamp": 123456, + } + mutated = dict(base) + mutated["amount"] = 11 + + assert serialize_transaction(base) != serialize_transaction(mutated) + + +def test_changing_block_header_field_changes_serialization() -> None: + base = { + "version": 0, + "previous_hash": "00" * 32, + "merkle_root": "11" * 32, + "timestamp": 123_456_789, + "difficulty_target": 1_000_000, + "nonce": 7, + "block_height": 3, + } + mutated = dict(base) + mutated["nonce"] = 8 + + assert serialize_block_header(base) != serialize_block_header(mutated) + + +@pytest.mark.parametrize( + "payload,serializer,expected", + [ + ( + { + "sender": "aa" * 20, + "recipient": "bb" * 20, + "amount": 1, + "nonce": 1, + "timestamp": 1, + }, + serialize_transaction, + "Missing required fields: fee", + ), + ( + { + "version": 0, + "previous_hash": "00" * 32, + "merkle_root": "11" * 32, + "timestamp": 1, + "difficulty_target": 1, + "nonce": 1, + "block_height": 1, + "extra": "x", + }, + serialize_block_header, + "Unexpected fields: extra", + ), + ], +) +def test_required_and_unexpected_fields_are_rejected( + payload: dict[str, object], serializer: Callable[[dict[str, object]], bytes], expected: str +) -> None: + with pytest.raises(ValueError, match=expected): + serializer(payload) From 58125c2ce9fdf2c2b5a48295d069df4953121c0e Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 17 Feb 2026 18:35:27 +0530 Subject: [PATCH 06/47] chore: fix serialization lint formatting --- src/minichain/serialization.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/minichain/serialization.py b/src/minichain/serialization.py index ffff532..a9d91cd 100644 --- a/src/minichain/serialization.py +++ b/src/minichain/serialization.py @@ -25,7 +25,9 @@ ) -def _to_field_map(value: Mapping[str, Any] | object, 
field_order: tuple[str, ...]) -> dict[str, Any]: +def _to_field_map( + value: Mapping[str, Any] | object, field_order: tuple[str, ...] +) -> dict[str, Any]: if isinstance(value, Mapping): source = dict(value) else: From dcc3d234b26fc8ce71ae9f133a8f98aebe1da79d Mon Sep 17 00:00:00 2001 From: Arunabha Date: Wed, 18 Feb 2026 03:29:03 +0530 Subject: [PATCH 07/47] feat: implement signed transaction model and verification --- src/minichain/transaction.py | 96 +++++++++++++++++++++++++++++++++++- 1 file changed, 95 insertions(+), 1 deletion(-) diff --git a/src/minichain/transaction.py b/src/minichain/transaction.py index 0957177..e169b3f 100644 --- a/src/minichain/transaction.py +++ b/src/minichain/transaction.py @@ -1 +1,95 @@ -"""Transaction data structures and validation rules (to be implemented).""" +"""Transaction data structures and validation rules.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from minichain.crypto import ( + derive_address, + deserialize_verify_key, + serialize_verify_key, + sign_message, + verify_signature, +) +from minichain.serialization import serialize_transaction + +ADDRESS_HEX_LENGTH = 40 +PUBLIC_KEY_HEX_LENGTH = 64 +SIGNATURE_HEX_LENGTH = 128 + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +@dataclass +class Transaction: + """A signed account-transfer transaction.""" + + sender: str + recipient: str + amount: int + nonce: int + fee: int + timestamp: int + signature: str = "" + public_key: str = "" + + def signing_payload(self) -> dict[str, int | str]: + """Return the canonical transaction payload that is signed.""" + return { + "sender": self.sender, + "recipient": self.recipient, + "amount": self.amount, + "nonce": self.nonce, + "fee": self.fee, + "timestamp": self.timestamp, + } + + def signing_bytes(self) -> bytes: + """Return canonical bytes for signature 
generation/verification.""" + return serialize_transaction(self.signing_payload()) + + def _validate_common_fields(self) -> bool: + if not _is_lower_hex(self.sender, ADDRESS_HEX_LENGTH): + return False + if not _is_lower_hex(self.recipient, ADDRESS_HEX_LENGTH): + return False + if not isinstance(self.amount, int) or self.amount < 0: + return False + if not isinstance(self.nonce, int) or self.nonce < 0: + return False + if not isinstance(self.fee, int) or self.fee < 0: + return False + if not isinstance(self.timestamp, int) or self.timestamp < 0: + return False + return True + + def sign(self, signing_key: object) -> None: + """Sign this transaction in-place and populate auth fields.""" + if not self._validate_common_fields(): + raise ValueError("Invalid transaction fields") + verify_key = signing_key.verify_key + self.public_key = serialize_verify_key(verify_key) + self.signature = sign_message(self.signing_bytes(), signing_key).hex() + + def verify(self) -> bool: + """Verify transaction structure, signer identity, and signature.""" + if not self._validate_common_fields(): + return False + if not _is_lower_hex(self.public_key, PUBLIC_KEY_HEX_LENGTH): + return False + if not _is_lower_hex(self.signature, SIGNATURE_HEX_LENGTH): + return False + + try: + verify_key = deserialize_verify_key(self.public_key) + except Exception: + return False + + if derive_address(verify_key) != self.sender: + return False + signature_bytes = bytes.fromhex(self.signature) + return verify_signature(self.signing_bytes(), signature_bytes, verify_key) From ca2fd8e0e68f0bd0733e42b604ca6e5d8ffbac4d Mon Sep 17 00:00:00 2001 From: Arunabha Date: Wed, 18 Feb 2026 03:29:33 +0530 Subject: [PATCH 08/47] test: add transaction tamper and identity mismatch coverage --- tests/test_transaction.py | 55 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 tests/test_transaction.py diff --git a/tests/test_transaction.py b/tests/test_transaction.py new file mode 100644 
index 0000000..7688d54 --- /dev/null +++ b/tests/test_transaction.py @@ -0,0 +1,55 @@ +"""Unit tests for transaction signing and verification.""" + +from __future__ import annotations + +from dataclasses import replace + +import pytest + +pytest.importorskip("nacl") + +from minichain.crypto import derive_address, generate_key_pair, serialize_verify_key +from minichain.transaction import Transaction + + +def _build_signed_transaction() -> tuple[Transaction, object]: + signing_key, verify_key = generate_key_pair() + tx = Transaction( + sender=derive_address(verify_key), + recipient="ab" * 20, + amount=25, + nonce=0, + fee=2, + timestamp=1_739_760_000, + ) + tx.sign(signing_key) + return tx, signing_key + + +def test_valid_transaction_signing_and_verification() -> None: + tx, _ = _build_signed_transaction() + + assert tx.verify() + + +def test_tampered_transaction_amount_is_rejected() -> None: + tx, _ = _build_signed_transaction() + tampered = replace(tx, amount=tx.amount + 1) + + assert not tampered.verify() + + +def test_tampered_transaction_recipient_is_rejected() -> None: + tx, _ = _build_signed_transaction() + tampered = replace(tx, recipient="cd" * 20) + + assert not tampered.verify() + + +def test_mismatched_public_key_and_sender_is_rejected() -> None: + tx, _ = _build_signed_transaction() + other_signing_key, other_verify_key = generate_key_pair() + _ = other_signing_key + tampered = replace(tx, public_key=serialize_verify_key(other_verify_key)) + + assert not tampered.verify() From 92e96a4f993528855b287848c5461d5ac71a9d3e Mon Sep 17 00:00:00 2001 From: Arunabha Date: Sat, 21 Feb 2026 17:01:08 +0530 Subject: [PATCH 09/47] feat: add merkle root computation using blake2b --- src/minichain/crypto.py | 8 +++++++- src/minichain/merkle.py | 27 +++++++++++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) create mode 100644 src/minichain/merkle.py diff --git a/src/minichain/crypto.py b/src/minichain/crypto.py index aec33d7..ede02d6 100644 --- 
# --- src/minichain/crypto.py: hashing helpers ---

def derive_address(verify_key: VerifyKey) -> str:
    """Derive a 20-byte address from a verify key as lowercase hex."""
    _require_nacl()
    return blake2b_digest(verify_key.encode())[:ADDRESS_LENGTH_BYTES].hex()


def blake2b_digest(data: bytes) -> bytes:
    """Compute a 32-byte BLAKE2b digest."""
    _require_nacl()
    return blake2b(data, encoder=RawEncoder)


# --- src/minichain/merkle.py: transaction commitments ---

def _hash_pair(left: bytes, right: bytes) -> bytes:
    """Hash two sibling nodes into their parent node."""
    return blake2b_digest(left + right)


def compute_merkle_root(leaves: list[bytes]) -> bytes:
    """Compute the Merkle root from pre-hashed leaf bytes.

    An empty leaf list maps to the hash of the empty string. On odd-sized
    levels the last node is paired with itself (Bitcoin-style).

    NOTE(review): duplicating the last leaf means e.g. [a, b, c] and
    [a, b, c, c] share a root (the CVE-2012-2459-style malleability) —
    presumably block validation must reject duplicate transaction ids;
    confirm at the consensus layer.
    """
    if not leaves:
        return blake2b_digest(b"")

    level = list(map(bytes, leaves))
    while len(level) > 1:
        if len(level) & 1:
            level = level + level[-1:]  # pair the last node with itself
        level = [_hash_pair(a, b) for a, b in zip(level[::2], level[1::2])]
    return level[0]
"""Unit tests for Merkle tree construction."""

from __future__ import annotations

import pytest

# The BLAKE2b helper requires PyNaCl; skip the module when it is missing.
pytest.importorskip("nacl")

from minichain.crypto import blake2b_digest
from minichain.merkle import compute_merkle_root


def test_empty_leaf_list_has_well_defined_root() -> None:
    # Convention: the root of an empty tree is the hash of the empty string.
    assert compute_merkle_root([]) == blake2b_digest(b"")


def test_merkle_root_is_deterministic() -> None:
    # Same leaves (even through a fresh list object) must yield the same root.
    leaves = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")]
    first = compute_merkle_root(leaves)
    second = compute_merkle_root(list(leaves))
    assert first == second


def test_merkle_root_changes_when_leaf_changes() -> None:
    # Any single-leaf mutation must propagate to the root.
    base = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")]
    modified = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b-mutated"), blake2b_digest(b"tx-c")]
    assert compute_merkle_root(base) != compute_merkle_root(modified)


def test_odd_leaf_count_duplicates_last_leaf() -> None:
    # Pins the Bitcoin-style odd-level rule: the last leaf pairs with itself.
    leaves = [blake2b_digest(b"tx-a"), blake2b_digest(b"tx-b"), blake2b_digest(b"tx-c")]

    left = blake2b_digest(leaves[0] + leaves[1])
    right = blake2b_digest(leaves[2] + leaves[2])
    expected = blake2b_digest(left + right)

    assert compute_merkle_root(leaves) == expected


# tests/test_scaffold.py: "merkle" appended to the expected submodule list.
# --- Method of Transaction (src/minichain/transaction.py) ---

def transaction_id(self) -> bytes:
    """Return a deterministic transaction hash for Merkle commitments.

    Covers the signing payload plus (when present) the signature and
    public key, so re-signing the same payload yields a different id.
    """
    payload = self.signing_bytes()
    if self.signature:
        payload += bytes.fromhex(self.signature)
    if self.public_key:
        payload += bytes.fromhex(self.public_key)
    return blake2b_digest(payload)


# --- src/minichain/block.py: block primitives ---

@dataclass
class BlockHeader:
    """Consensus-critical block header."""

    version: int
    previous_hash: str
    merkle_root: str
    timestamp: int
    difficulty_target: int
    nonce: int
    block_height: int

    def hash(self) -> bytes:
        """Compute the canonical hash over the serialized header."""
        return blake2b_digest(serialize_block_header(self))

    def hash_hex(self) -> str:
        """Header hash rendered as lowercase hex."""
        return self.hash().hex()


@dataclass
class Block:
    """A block: one header plus an ordered transaction list."""

    header: BlockHeader
    transactions: list[Transaction] = field(default_factory=list)

    def transaction_hashes(self) -> list[bytes]:
        """Ids of the body's transactions, in block order."""
        return [tx.transaction_id() for tx in self.transactions]

    def computed_merkle_root(self) -> bytes:
        """Merkle root recomputed from the current body."""
        return compute_merkle_root(self.transaction_hashes())

    def computed_merkle_root_hex(self) -> str:
        return self.computed_merkle_root().hex()

    def update_header_merkle_root(self) -> None:
        """Commit the current body into the header."""
        self.header.merkle_root = self.computed_merkle_root_hex()

    def has_valid_merkle_root(self) -> bool:
        """True when the header commitment matches the body."""
        return self.computed_merkle_root_hex() == self.header.merkle_root

    def hash(self) -> bytes:
        """Block identity is the header hash."""
        return self.header.hash()
# (continuation of tests/test_block.py — _make_signed_transaction's def line
#  and first keyword arguments appear in the previous hunk)
        nonce=nonce,
        fee=1,
        timestamp=1_739_800_000 + nonce,  # unique timestamp per nonce
    )
    tx.sign(signing_key)
    return tx


def _make_block() -> Block:
    # Helper: two-transaction block with a freshly committed merkle root.
    transactions = [
        _make_signed_transaction(amount=10, nonce=0),
        _make_signed_transaction(amount=11, nonce=1),
    ]
    header = BlockHeader(
        version=0,
        previous_hash="00" * 32,
        merkle_root="",  # filled in by update_header_merkle_root() below
        timestamp=1_739_800_111,
        difficulty_target=1_000_000,
        nonce=7,
        block_height=1,
    )
    block = Block(header=header, transactions=transactions)
    block.update_header_merkle_root()
    return block


def test_block_hash_is_deterministic() -> None:
    block = _make_block()
    assert block.hash() == block.hash()


@pytest.mark.parametrize(
    ("field", "value"),
    [
        ("version", 1),
        ("previous_hash", "11" * 32),
        ("merkle_root", "22" * 32),
        ("timestamp", 1_739_800_222),
        ("difficulty_target", 2_000_000),
        ("nonce", 8),
        ("block_height", 2),
    ],
)
def test_changing_header_field_changes_hash(field: str, value: int | str) -> None:
    # Every header field must be committed to by the header hash.
    block = _make_block()
    mutated_header = replace(block.header, **{field: value})

    assert block.header.hash() != mutated_header.hash()


def test_header_merkle_root_matches_transaction_body() -> None:
    block = _make_block()
    assert block.has_valid_merkle_root()

    # Mutating any transaction must break the header's merkle commitment.
    block.transactions[0].amount += 1
    assert not block.has_valid_merkle_root()


# tests/test_transaction.py (appended in the same patch):
def test_transaction_id_changes_when_signature_changes() -> None:
    # transaction_id must cover the signature, not just the signing payload.
    tx, _ = _build_signed_transaction()
    original_id = tx.transaction_id()
    tampered = replace(tx, signature="00" * 64)

    assert tampered.transaction_id() != original_id
# --- src/minichain/state.py: account state and ledger transitions ---

@dataclass
class Account:
    """Balance/nonce record for a single address."""

    balance: int = 0
    nonce: int = 0


class StateTransitionError(ValueError):
    """Raised when a transaction or block cannot be applied."""


class State:
    """Mutable address -> Account mapping plus the transition rules."""

    def __init__(self) -> None:
        self.accounts: dict[str, Account] = {}

    def copy(self) -> State:
        """Snapshot with fresh Account objects, so mutations stay isolated."""
        duplicate = State()
        for address, account in self.accounts.items():
            duplicate.accounts[address] = Account(
                balance=account.balance, nonce=account.nonce
            )
        return duplicate

    def set_account(self, address: str, account: Account) -> None:
        self.accounts[address] = account

    def get_account(self, address: str) -> Account:
        # NOTE: creates an empty account on first read; callers rely on this
        # implicit-creation behavior (e.g. paying a brand-new recipient).
        return self.accounts.setdefault(address, Account())

    def apply_transaction(self, transaction: Transaction) -> None:
        """Validate and apply one transfer; raises StateTransitionError."""
        if not transaction.verify():
            raise StateTransitionError("Transaction signature/identity verification failed")

        sender = self.get_account(transaction.sender)
        recipient = self.get_account(transaction.recipient)

        # Strict nonce equality prevents replay and out-of-order application.
        if sender.nonce != transaction.nonce:
            raise StateTransitionError(
                f"Nonce mismatch for sender {transaction.sender}: "
                f"expected {sender.nonce}, got {transaction.nonce}"
            )

        total_cost = transaction.amount + transaction.fee
        if sender.balance < total_cost:
            raise StateTransitionError(
                f"Insufficient balance for sender {transaction.sender}: "
                f"required {total_cost}, available {sender.balance}"
            )

        sender.balance -= total_cost
        sender.nonce += 1
        recipient.balance += transaction.amount

    def apply_block(self, block: Block) -> None:
        """Apply all block transactions atomically; roll back on failure."""
        snapshot = self.copy()
        try:
            for transaction in block.transactions:
                self.apply_transaction(transaction)
        except StateTransitionError as exc:
            self.accounts = snapshot.accounts  # restore pre-block state
            raise StateTransitionError(f"Block application failed: {exc}") from exc


def apply_transaction(state: State, transaction: Transaction) -> None:
    """Apply a transaction to state with validation."""
    state.apply_transaction(transaction)


def apply_block(state: State, block: Block) -> None:
    """Apply all block transactions atomically, rolling back on failure."""
    state.apply_block(block)
# (continuation of tests/test_state.py — _signed_transaction's signature
#  starts in the previous hunk; the default timestamp keyword ends here)
    timestamp: int = 1_739_900_000,
) -> Transaction:
    tx = Transaction(
        sender=sender_address,
        recipient=recipient,
        amount=amount,
        nonce=nonce,
        fee=fee,
        timestamp=timestamp + nonce,  # keep timestamps unique per nonce
    )
    tx.sign(sender_key)
    return tx


def _block_with_transactions(transactions: list[Transaction]) -> Block:
    # Helper: wrap transactions in a height-1 block with a valid merkle root.
    header = BlockHeader(
        version=0,
        previous_hash="00" * 32,
        merkle_root="",
        timestamp=1_739_900_100,
        difficulty_target=1_000_000,
        nonce=0,
        block_height=1,
    )
    block = Block(header=header, transactions=transactions)
    block.update_header_merkle_root()
    return block


def test_successful_transfer_updates_balances_and_nonce() -> None:
    sender_key, sender_verify = generate_key_pair()
    recipient_key, recipient_verify = generate_key_pair()
    _ = recipient_key

    sender_address = derive_address(sender_verify)
    recipient_address = derive_address(recipient_verify)

    state = State()
    state.set_account(sender_address, Account(balance=100, nonce=0))

    tx = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=25, nonce=0, fee=2
    )
    state.apply_transaction(tx)

    # Sender pays amount + fee (100 - 27 = 73) and its nonce advances.
    assert state.get_account(sender_address).balance == 73
    assert state.get_account(sender_address).nonce == 1
    assert state.get_account(recipient_address).balance == 25
    assert state.get_account(recipient_address).nonce == 0


def test_insufficient_balance_is_rejected() -> None:
    sender_key, sender_verify = generate_key_pair()
    recipient_key, recipient_verify = generate_key_pair()
    _ = recipient_key

    sender_address = derive_address(sender_verify)
    recipient_address = derive_address(recipient_verify)

    state = State()
    state.set_account(sender_address, Account(balance=5, nonce=0))

    tx = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1
    )

    with pytest.raises(StateTransitionError, match="Insufficient balance"):
        state.apply_transaction(tx)


def test_nonce_mismatch_is_rejected() -> None:
    sender_key, sender_verify = generate_key_pair()
    recipient_key, recipient_verify = generate_key_pair()
    _ = recipient_key

    sender_address = derive_address(sender_verify)
    recipient_address = derive_address(recipient_verify)

    state = State()
    # Account nonce already at 1; a nonce-0 transaction must be rejected.
    state.set_account(sender_address, Account(balance=100, nonce=1))

    tx = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1
    )

    with pytest.raises(StateTransitionError, match="Nonce mismatch"):
        state.apply_transaction(tx)


def test_transfer_to_new_address_creates_recipient_account() -> None:
    sender_key, sender_verify = generate_key_pair()
    recipient_key, recipient_verify = generate_key_pair()
    _ = recipient_key

    sender_address = derive_address(sender_verify)
    recipient_address = derive_address(recipient_verify)

    state = State()
    state.set_account(sender_address, Account(balance=50, nonce=0))
    assert recipient_address not in state.accounts

    tx = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1
    )
    state.apply_transaction(tx)

    # Recipient account is created implicitly on first credit.
    assert recipient_address in state.accounts
    assert state.get_account(recipient_address).balance == 10


def test_apply_block_is_atomic_and_rolls_back_on_failure() -> None:
    sender_key, sender_verify = generate_key_pair()
    recipient_key, recipient_verify = generate_key_pair()
    _ = recipient_key

    sender_address = derive_address(sender_verify)
    recipient_address = derive_address(recipient_verify)

    state = State()
    state.set_account(sender_address, Account(balance=100, nonce=0))

    # First transfer succeeds; the second overdraws (95 + 10 > 89 remaining),
    # so the whole block must be rolled back.
    tx_ok = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1
    )
    tx_fail = _signed_transaction(
        sender_key, sender_address, recipient_address, amount=95, nonce=1, fee=10
    )
    block = _block_with_transactions([tx_ok, tx_fail])

    with pytest.raises(StateTransitionError, match="Block application failed"):
        state.apply_block(block)
    # (rollback assertions continue in the next hunk)
state.get_account(sender_address).balance == 100 + assert state.get_account(sender_address).nonce == 0 + assert state.get_account(recipient_address).balance == 0 + assert state.get_account(recipient_address).nonce == 0 From 1fb521f1d0d7d9035a53d261f746b65222bfacf9 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Sun, 22 Feb 2026 02:48:10 +0530 Subject: [PATCH 16/47] feat: add configurable genesis block and state initialization --- src/minichain/genesis.py | 81 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 src/minichain/genesis.py diff --git a/src/minichain/genesis.py b/src/minichain/genesis.py new file mode 100644 index 0000000..e8e253d --- /dev/null +++ b/src/minichain/genesis.py @@ -0,0 +1,81 @@ +"""Genesis block/state creation and application.""" + +from __future__ import annotations + +from dataclasses import dataclass, field + +from minichain.block import Block, BlockHeader +from minichain.crypto import blake2b_digest +from minichain.state import Account, State + +GENESIS_PREVIOUS_HASH = "00" * 32 + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +@dataclass(frozen=True) +class GenesisConfig: + """Configurable parameters for building genesis artifacts.""" + + initial_balances: dict[str, int] = field(default_factory=dict) + timestamp: int = 1_739_000_000 + difficulty_target: int = (1 << 255) - 1 + version: int = 0 + + def validate(self) -> None: + if self.timestamp < 0: + raise ValueError("Genesis timestamp must be non-negative") + if self.difficulty_target <= 0: + raise ValueError("Genesis difficulty_target must be positive") + for address, balance in self.initial_balances.items(): + if not _is_lower_hex(address, 40): + raise ValueError(f"Invalid genesis address: {address}") + if balance < 0: + raise ValueError(f"Negative genesis balance for {address}") + + +def create_genesis_block(config: 
GenesisConfig) -> Block: + """Build the genesis block (height 0, no PoW check required).""" + config.validate() + header = BlockHeader( + version=config.version, + previous_hash=GENESIS_PREVIOUS_HASH, + merkle_root=blake2b_digest(b"").hex(), + timestamp=config.timestamp, + difficulty_target=config.difficulty_target, + nonce=0, + block_height=0, + ) + return Block(header=header, transactions=[]) + + +def apply_genesis_block(state: State, block: Block, config: GenesisConfig) -> None: + """Apply genesis allocations to an empty state.""" + config.validate() + if state.accounts: + raise ValueError("Genesis can only be applied to an empty state") + if block.header.block_height != 0: + raise ValueError("Genesis block height must be 0") + if block.header.previous_hash != GENESIS_PREVIOUS_HASH: + raise ValueError("Genesis previous_hash must be all zeros") + if block.transactions: + raise ValueError("Genesis block must not contain transactions") + + expected_merkle_root = blake2b_digest(b"").hex() + if block.header.merkle_root != expected_merkle_root: + raise ValueError("Genesis merkle_root must commit to an empty tx list") + + for address, balance in config.initial_balances.items(): + state.set_account(address, Account(balance=balance, nonce=0)) + + +def create_genesis_state(config: GenesisConfig) -> tuple[Block, State]: + """Create genesis block and initialized state in one step.""" + block = create_genesis_block(config) + state = State() + apply_genesis_block(state, block, config) + return block, state From b930f7e150a947fb7c66f645bdbb17907d2682df Mon Sep 17 00:00:00 2001 From: Arunabha Date: Sun, 22 Feb 2026 02:48:20 +0530 Subject: [PATCH 17/47] test: add genesis block creation and application coverage --- tests/test_genesis.py | 85 ++++++++++++++++++++++++++++++++++++++++++ tests/test_scaffold.py | 1 + 2 files changed, 86 insertions(+) create mode 100644 tests/test_genesis.py diff --git a/tests/test_genesis.py b/tests/test_genesis.py new file mode 100644 index 
"""Unit tests for genesis block and state initialization."""

from __future__ import annotations

from dataclasses import replace

from minichain.crypto import blake2b_digest
from minichain.genesis import (
    GENESIS_PREVIOUS_HASH,
    GenesisConfig,
    apply_genesis_block,
    create_genesis_block,
    create_genesis_state,
)
from minichain.state import Account, State


def test_create_genesis_block_uses_conventional_fields() -> None:
    # Genesis convention: height 0, all-zero parent, empty-tx merkle root.
    config = GenesisConfig(
        initial_balances={"11" * 20: 1_000_000},
        timestamp=1_739_123_456,
        difficulty_target=123_456,
        version=0,
    )

    block = create_genesis_block(config)

    assert block.header.block_height == 0
    assert block.header.previous_hash == GENESIS_PREVIOUS_HASH
    assert block.header.timestamp == config.timestamp
    assert block.header.difficulty_target == config.difficulty_target
    assert block.header.nonce == 0
    assert block.header.merkle_root == blake2b_digest(b"").hex()
    assert block.transactions == []


def test_apply_genesis_block_initializes_expected_balances() -> None:
    balances = {"aa" * 20: 500, "bb" * 20: 300}
    config = GenesisConfig(initial_balances=balances)
    block = create_genesis_block(config)
    state = State()

    apply_genesis_block(state, block, config)

    assert state.get_account("aa" * 20).balance == 500
    assert state.get_account("aa" * 20).nonce == 0
    assert state.get_account("bb" * 20).balance == 300
    assert state.get_account("bb" * 20).nonce == 0


def test_create_genesis_state_builds_block_and_state() -> None:
    config = GenesisConfig(initial_balances={"cc" * 20: 42})

    block, state = create_genesis_state(config)

    assert block.header.block_height == 0
    assert state.get_account("cc" * 20).balance == 42


def test_genesis_requires_empty_state() -> None:
    # apply_genesis_block must refuse to run over pre-existing accounts.
    config = GenesisConfig(initial_balances={"dd" * 20: 1})
    block = create_genesis_block(config)
    state = State()
    # (the non-empty-state setup and remaining assertions continue in the
    #  next hunk)
state.set_account("ff" * 20, Account(balance=1, nonce=0)) + + try: + apply_genesis_block(state, block, config) + except ValueError as exc: + assert "empty state" in str(exc) + else: + raise AssertionError("Expected ValueError for non-empty state") + + +def test_genesis_block_rejects_wrong_previous_hash() -> None: + config = GenesisConfig(initial_balances={"ee" * 20: 10}) + block = create_genesis_block(config) + block.header = replace(block.header, previous_hash="11" * 32) + state = State() + + try: + apply_genesis_block(state, block, config) + except ValueError as exc: + assert "previous_hash" in str(exc) + else: + raise AssertionError("Expected ValueError for invalid previous_hash") diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py index a783321..5c3d553 100644 --- a/tests/test_scaffold.py +++ b/tests/test_scaffold.py @@ -16,6 +16,7 @@ "node", "serialization", "merkle", + "genesis", ] From 42cba00140ec04d9c25ce73f48fc324b1386dec5 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Sun, 22 Feb 2026 03:52:09 +0530 Subject: [PATCH 18/47] feat: implement proof-of-work mining engine --- src/minichain/consensus.py | 61 +++++++++++++++++++++++++++++++++++++- 1 file changed, 60 insertions(+), 1 deletion(-) diff --git a/src/minichain/consensus.py b/src/minichain/consensus.py index 41953b9..94100aa 100644 --- a/src/minichain/consensus.py +++ b/src/minichain/consensus.py @@ -1 +1,60 @@ -"""Consensus and mining primitives (to be implemented).""" +"""Consensus and Proof-of-Work mining primitives.""" + +from __future__ import annotations + +from dataclasses import replace +from threading import Event + +from minichain.block import BlockHeader + +MAX_TARGET = (1 << 256) - 1 + + +class MiningInterrupted(Exception): + """Raised when mining is cancelled via a stop signal.""" + + +def hash_to_int(block_hash: bytes) -> int: + """Convert a hash digest into a big-endian integer.""" + return int.from_bytes(block_hash, byteorder="big", signed=False) + + +def 
validate_difficulty_target(target: int) -> None: + """Validate difficulty target bounds.""" + if target <= 0: + raise ValueError("difficulty_target must be positive") + if target > MAX_TARGET: + raise ValueError("difficulty_target exceeds hash space") + + +def is_valid_pow(header: BlockHeader) -> bool: + """Return whether a header satisfies its own difficulty target.""" + if header.difficulty_target <= 0 or header.difficulty_target > MAX_TARGET: + return False + return hash_to_int(header.hash()) <= header.difficulty_target + + +def mine_block_header( + header_template: BlockHeader, + *, + start_nonce: int = 0, + max_nonce: int = (1 << 64) - 1, + stop_event: Event | None = None, +) -> tuple[int, bytes]: + """Search nonces until a header hash satisfies the difficulty target.""" + validate_difficulty_target(header_template.difficulty_target) + if start_nonce < 0: + raise ValueError("start_nonce must be non-negative") + if max_nonce < start_nonce: + raise ValueError("max_nonce must be greater than or equal to start_nonce") + + for nonce in range(start_nonce, max_nonce + 1): + if stop_event is not None and stop_event.is_set(): + raise MiningInterrupted("Mining interrupted by stop event") + + candidate = replace(header_template, nonce=nonce) + digest = candidate.hash() + if hash_to_int(digest) <= candidate.difficulty_target: + return nonce, digest + + raise RuntimeError("No valid nonce found within nonce range") From 0783e6d912d5d57a61dba1a493c314b4978ea7d6 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Sun, 22 Feb 2026 03:53:12 +0530 Subject: [PATCH 19/47] test: add pow validation and mining interruption coverage --- tests/test_consensus.py | 69 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 tests/test_consensus.py diff --git a/tests/test_consensus.py b/tests/test_consensus.py new file mode 100644 index 0000000..a7f44a8 --- /dev/null +++ b/tests/test_consensus.py @@ -0,0 +1,69 @@ +"""Unit tests for Proof-of-Work mining 
primitives.""" + +from __future__ import annotations + +from threading import Event + +from minichain.block import BlockHeader +from minichain.consensus import MiningInterrupted, is_valid_pow, mine_block_header + + +def _header_template(difficulty_target: int) -> BlockHeader: + return BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="11" * 32, + timestamp=1_740_000_000, + difficulty_target=difficulty_target, + nonce=0, + block_height=10, + ) + + +def test_valid_pow_is_accepted() -> None: + header = _header_template(difficulty_target=(1 << 256) - 1) + assert is_valid_pow(header) + + +def test_invalid_pow_is_rejected() -> None: + header = _header_template(difficulty_target=1) + assert not is_valid_pow(header) + + +def test_mining_finds_valid_nonce_for_reasonable_target() -> None: + header = _header_template(difficulty_target=1 << 252) + nonce, _digest = mine_block_header(header, max_nonce=500_000) + + mined_header = BlockHeader( + version=header.version, + previous_hash=header.previous_hash, + merkle_root=header.merkle_root, + timestamp=header.timestamp, + difficulty_target=header.difficulty_target, + nonce=nonce, + block_height=header.block_height, + ) + assert is_valid_pow(mined_header) + + +def test_mining_honors_stop_event() -> None: + header = _header_template(difficulty_target=1 << 240) + stop = Event() + stop.set() + + try: + mine_block_header(header, max_nonce=1_000_000, stop_event=stop) + except MiningInterrupted as exc: + assert "interrupted" in str(exc).lower() + else: + raise AssertionError("Expected mining interruption") + + +def test_mining_raises_when_nonce_range_exhausted() -> None: + header = _header_template(difficulty_target=1) + try: + mine_block_header(header, start_nonce=0, max_nonce=10) + except RuntimeError as exc: + assert "No valid nonce found" in str(exc) + else: + raise AssertionError("Expected RuntimeError when nonce space exhausted") From 3d723fdb72d7d457c3daec87104a36314fd09669 Mon Sep 17 00:00:00 2001 From: Arunabha 
# --- src/minichain/consensus.py: bounded proportional retargeting ---

def compute_next_difficulty_target(
    chain: Sequence[BlockHeader],
    *,
    adjustment_interval: int = 10,
    target_block_time_seconds: int = 30,
) -> int:
    """Compute the next difficulty target via bounded proportional retargeting.

    The target is only adjusted when the tip height is a non-zero multiple of
    adjustment_interval and enough history is available; otherwise the tip's
    target carries over unchanged. Adjustments are clamped to [/2, x2] of the
    current target per retarget.

    Raises:
        ValueError: for non-positive tuning parameters, an empty chain, or an
            out-of-range tip target.
    """
    if adjustment_interval <= 0:
        raise ValueError("adjustment_interval must be positive")
    if target_block_time_seconds <= 0:
        raise ValueError("target_block_time_seconds must be positive")
    if not chain:
        raise ValueError("chain must contain at least one header")

    tip = chain[-1]
    validate_difficulty_target(tip.difficulty_target)

    retarget_due = (
        tip.block_height != 0
        and tip.block_height % adjustment_interval == 0
        and len(chain) > adjustment_interval
    )
    if not retarget_due:
        return tip.difficulty_target

    window_start = chain[-(adjustment_interval + 1)]
    # Clamp to 1 second so clock skew can never divide by zero or go negative.
    elapsed_seconds = max(1, tip.timestamp - window_start.timestamp)
    expected_seconds = adjustment_interval * target_block_time_seconds
    proposed = (tip.difficulty_target * elapsed_seconds) // expected_seconds

    # Bound each retarget to at most halving or doubling the current target.
    floor = max(1, tip.difficulty_target // 2)
    ceiling = min(MAX_TARGET, tip.difficulty_target * 2)
    bounded = min(max(proposed, floor), ceiling)

    validate_difficulty_target(bounded)
    return bounded
difficulty_target=1_000_000, + ) + assert ( + compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + == 1_000_000 + ) + + +def test_difficulty_target_decreases_when_blocks_are_fast() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 5, 10, 15, 20], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 500_000 + + +def test_difficulty_target_increases_when_blocks_are_slow() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 20, 40, 60, 80], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 2_000_000 + + +def test_difficulty_adjustment_is_capped_to_half_on_extreme_speed() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 1, 2, 3, 4], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 500_000 + + +def test_difficulty_adjustment_is_capped_to_double_on_extreme_delay() -> None: + chain = _make_chain( + heights=[0, 1, 2, 3, 4], + timestamps=[0, 100, 200, 300, 400], + difficulty_target=1_000_000, + ) + new_target = compute_next_difficulty_target( + chain, + adjustment_interval=4, + target_block_time_seconds=10, + ) + assert new_target == 2_000_000 From 748a959e6f4a8b856d55ae5678f0426927884b94 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Mon, 23 Feb 2026 18:38:10 +0530 Subject: [PATCH 22/47] feat: add coinbase transaction validation and state handling --- src/minichain/block.py | 28 ++++++++++++++++++++++++++ src/minichain/state.py | 26 +++++++++++++++++++----- src/minichain/transaction.py | 39 ++++++++++++++++++++++++++++++++++++ 3 files changed, 88 insertions(+), 5 deletions(-) diff 
--git a/src/minichain/block.py b/src/minichain/block.py index f6c5829..01f8a1e 100644 --- a/src/minichain/block.py +++ b/src/minichain/block.py @@ -10,6 +10,10 @@ from minichain.transaction import Transaction +class BlockValidationError(ValueError): + """Raised when a block fails structural or semantic validation.""" + + @dataclass class BlockHeader: """Consensus-critical block header.""" @@ -52,5 +56,29 @@ def update_header_merkle_root(self) -> None: def has_valid_merkle_root(self) -> bool: return self.header.merkle_root == self.computed_merkle_root_hex() + def validate_coinbase(self, *, block_reward: int) -> None: + """Validate coinbase placement and reward accounting.""" + if block_reward < 0: + raise BlockValidationError("block_reward must be non-negative") + if not self.transactions: + raise BlockValidationError("Block must contain a coinbase transaction") + if not self.has_valid_merkle_root(): + raise BlockValidationError("Block merkle_root does not match body") + + coinbase = self.transactions[0] + if not coinbase.is_coinbase(): + raise BlockValidationError("First transaction must be a valid coinbase") + + for transaction in self.transactions[1:]: + if transaction.is_coinbase(): + raise BlockValidationError("Coinbase transaction must only appear once") + + total_fees = sum(transaction.fee for transaction in self.transactions[1:]) + expected_amount = block_reward + total_fees + if coinbase.amount != expected_amount: + raise BlockValidationError( + f"Invalid coinbase amount: expected {expected_amount}, got {coinbase.amount}" + ) + def hash(self) -> bytes: return self.header.hash() diff --git a/src/minichain/state.py b/src/minichain/state.py index c38e583..9238ca4 100644 --- a/src/minichain/state.py +++ b/src/minichain/state.py @@ -4,7 +4,7 @@ from dataclasses import dataclass -from minichain.block import Block +from minichain.block import Block, BlockValidationError from minichain.transaction import Transaction @@ -43,6 +43,10 @@ def get_account(self, address: 
str) -> Account: return self.accounts[address] def apply_transaction(self, transaction: Transaction) -> None: + if transaction.is_coinbase(): + raise StateTransitionError( + "Coinbase transaction must be applied through apply_block" + ) if not transaction.verify(): raise StateTransitionError("Transaction signature/identity verification failed") @@ -66,10 +70,22 @@ def apply_transaction(self, transaction: Transaction) -> None: sender.nonce += 1 recipient.balance += transaction.amount - def apply_block(self, block: Block) -> None: + def apply_coinbase_transaction(self, transaction: Transaction) -> None: + if not transaction.is_coinbase(): + raise StateTransitionError("Invalid coinbase transaction") + miner = self.get_account(transaction.recipient) + miner.balance += transaction.amount + + def apply_block(self, block: Block, *, block_reward: int = 0) -> None: + try: + block.validate_coinbase(block_reward=block_reward) + except BlockValidationError as exc: + raise StateTransitionError(f"Block validation failed: {exc}") from exc + snapshot = self.copy() try: - for transaction in block.transactions: + self.apply_coinbase_transaction(block.transactions[0]) + for transaction in block.transactions[1:]: self.apply_transaction(transaction) except StateTransitionError as exc: self.accounts = snapshot.accounts @@ -81,6 +97,6 @@ def apply_transaction(state: State, transaction: Transaction) -> None: state.apply_transaction(transaction) -def apply_block(state: State, block: Block) -> None: +def apply_block(state: State, block: Block, *, block_reward: int = 0) -> None: """Apply all block transactions atomically, rolling back on failure.""" - state.apply_block(block) + state.apply_block(block, block_reward=block_reward) diff --git a/src/minichain/transaction.py b/src/minichain/transaction.py index 551b6e5..758a659 100644 --- a/src/minichain/transaction.py +++ b/src/minichain/transaction.py @@ -17,6 +17,7 @@ ADDRESS_HEX_LENGTH = 40 PUBLIC_KEY_HEX_LENGTH = 64 SIGNATURE_HEX_LENGTH = 
128 +COINBASE_SENDER = "00" * 20 def _is_lower_hex(value: str, expected_length: int) -> bool: @@ -38,6 +39,22 @@ class Transaction: signature: str = "" public_key: str = "" + def is_coinbase(self) -> bool: + """Return whether this transaction follows coinbase conventions.""" + if not _is_lower_hex(self.recipient, ADDRESS_HEX_LENGTH): + return False + if not isinstance(self.amount, int) or self.amount <= 0: + return False + if not isinstance(self.timestamp, int) or self.timestamp < 0: + return False + return ( + self.sender == COINBASE_SENDER + and self.nonce == 0 + and self.fee == 0 + and self.signature == "" + and self.public_key == "" + ) + def signing_payload(self) -> dict[str, int | str]: """Return the canonical transaction payload that is signed.""" return { @@ -87,6 +104,8 @@ def sign(self, signing_key: object) -> None: def verify(self) -> bool: """Verify transaction structure, signer identity, and signature.""" + if self.is_coinbase(): + return True if not self._validate_common_fields(): return False if not _is_lower_hex(self.public_key, PUBLIC_KEY_HEX_LENGTH): @@ -103,3 +122,23 @@ def verify(self) -> bool: return False signature_bytes = bytes.fromhex(self.signature) return verify_signature(self.signing_bytes(), signature_bytes, verify_key) + + +def create_coinbase_transaction( + *, + miner_address: str, + amount: int, + timestamp: int, +) -> Transaction: + """Build a canonical coinbase transaction.""" + coinbase = Transaction( + sender=COINBASE_SENDER, + recipient=miner_address, + amount=amount, + nonce=0, + fee=0, + timestamp=timestamp, + ) + if not coinbase.is_coinbase(): + raise ValueError("Invalid coinbase transaction fields") + return coinbase From 8f05a01119741688744a08db2f596fbd709f8418 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Mon, 23 Feb 2026 18:38:21 +0530 Subject: [PATCH 23/47] test: cover coinbase acceptance and rejection paths --- tests/test_block.py | 44 +++++++++++++- tests/test_state.py | 121 ++++++++++++++++++++++++++++++++++++-- 
tests/test_transaction.py | 24 +++++++- 3 files changed, 181 insertions(+), 8 deletions(-) diff --git a/tests/test_block.py b/tests/test_block.py index 8554366..0be3783 100644 --- a/tests/test_block.py +++ b/tests/test_block.py @@ -8,9 +8,9 @@ pytest.importorskip("nacl") -from minichain.block import Block, BlockHeader +from minichain.block import Block, BlockHeader, BlockValidationError from minichain.crypto import derive_address, generate_key_pair -from minichain.transaction import Transaction +from minichain.transaction import Transaction, create_coinbase_transaction def _make_signed_transaction(amount: int, nonce: int) -> Transaction: @@ -46,6 +46,32 @@ def _make_block() -> Block: return block +def _make_block_with_coinbase(*, block_reward: int = 50) -> Block: + miner_key, miner_verify = generate_key_pair() + _ = miner_key + regular_transactions = [ + _make_signed_transaction(amount=10, nonce=0), + _make_signed_transaction(amount=11, nonce=1), + ] + coinbase = create_coinbase_transaction( + miner_address=derive_address(miner_verify), + amount=block_reward + sum(tx.fee for tx in regular_transactions), + timestamp=1_739_800_111, + ) + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_800_111, + difficulty_target=1_000_000, + nonce=7, + block_height=1, + ) + block = Block(header=header, transactions=[coinbase, *regular_transactions]) + block.update_header_merkle_root() + return block + + def test_block_hash_is_deterministic() -> None: block = _make_block() assert block.hash() == block.hash() @@ -76,3 +102,17 @@ def test_header_merkle_root_matches_transaction_body() -> None: block.transactions[0].amount += 1 assert not block.has_valid_merkle_root() + + +def test_validate_coinbase_accepts_correct_amount() -> None: + block = _make_block_with_coinbase(block_reward=50) + block.validate_coinbase(block_reward=50) + + +def test_validate_coinbase_rejects_wrong_amount() -> None: + block = 
_make_block_with_coinbase(block_reward=50) + block.transactions[0].amount += 1 + block.update_header_merkle_root() + + with pytest.raises(BlockValidationError, match="Invalid coinbase amount"): + block.validate_coinbase(block_reward=50) diff --git a/tests/test_state.py b/tests/test_state.py index 94b5bcc..256ca84 100644 --- a/tests/test_state.py +++ b/tests/test_state.py @@ -9,7 +9,7 @@ from minichain.block import Block, BlockHeader from minichain.crypto import derive_address, generate_key_pair from minichain.state import Account, State, StateTransitionError -from minichain.transaction import Transaction +from minichain.transaction import Transaction, create_coinbase_transaction def _signed_transaction( @@ -33,7 +33,17 @@ def _signed_transaction( return tx -def _block_with_transactions(transactions: list[Transaction]) -> Block: +def _block_with_transactions( + *, + miner_address: str, + transactions: list[Transaction], + block_reward: int, +) -> Block: + coinbase = create_coinbase_transaction( + miner_address=miner_address, + amount=block_reward + sum(tx.fee for tx in transactions), + timestamp=1_739_900_100, + ) header = BlockHeader( version=0, previous_hash="00" * 32, @@ -43,7 +53,7 @@ def _block_with_transactions(transactions: list[Transaction]) -> Block: nonce=0, block_height=1, ) - block = Block(header=header, transactions=transactions) + block = Block(header=header, transactions=[coinbase, *transactions]) block.update_header_merkle_root() return block @@ -132,10 +142,13 @@ def test_transfer_to_new_address_creates_recipient_account() -> None: def test_apply_block_is_atomic_and_rolls_back_on_failure() -> None: sender_key, sender_verify = generate_key_pair() recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() _ = recipient_key + _ = miner_key sender_address = derive_address(sender_verify) recipient_address = derive_address(recipient_verify) + miner_address = derive_address(miner_verify) state = State() 
state.set_account(sender_address, Account(balance=100, nonce=0)) @@ -146,12 +159,110 @@ def test_apply_block_is_atomic_and_rolls_back_on_failure() -> None: tx_fail = _signed_transaction( sender_key, sender_address, recipient_address, amount=95, nonce=1, fee=10 ) - block = _block_with_transactions([tx_ok, tx_fail]) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx_ok, tx_fail], + block_reward=block_reward, + ) with pytest.raises(StateTransitionError, match="Block application failed"): - state.apply_block(block) + state.apply_block(block, block_reward=block_reward) assert state.get_account(sender_address).balance == 100 assert state.get_account(sender_address).nonce == 0 assert state.get_account(recipient_address).balance == 0 assert state.get_account(recipient_address).nonce == 0 + assert miner_address not in state.accounts + + +def test_apply_block_with_valid_coinbase_pays_reward_and_fees() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + miner_address = derive_address(miner_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=25, nonce=0, fee=3 + ) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx], + block_reward=block_reward, + ) + + state.apply_block(block, block_reward=block_reward) + + assert state.get_account(sender_address).balance == 72 + assert state.get_account(sender_address).nonce == 1 + assert state.get_account(recipient_address).balance == 25 + assert state.get_account(miner_address).balance == 53 + + +def test_block_with_incorrect_coinbase_amount_is_rejected() 
-> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + miner_address = derive_address(miner_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=2 + ) + block_reward = 50 + block = _block_with_transactions( + miner_address=miner_address, + transactions=[tx], + block_reward=block_reward, + ) + block.transactions[0].amount += 1 + block.update_header_merkle_root() + + with pytest.raises(StateTransitionError, match="Invalid coinbase amount"): + state.apply_block(block, block_reward=block_reward) + + +def test_block_without_coinbase_is_rejected() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_address = derive_address(sender_verify) + recipient_address = derive_address(recipient_verify) + + state = State() + state.set_account(sender_address, Account(balance=100, nonce=0)) + + tx = _signed_transaction( + sender_key, sender_address, recipient_address, amount=10, nonce=0, fee=1 + ) + header = BlockHeader( + version=0, + previous_hash="00" * 32, + merkle_root="", + timestamp=1_739_900_100, + difficulty_target=1_000_000, + nonce=0, + block_height=1, + ) + block = Block(header=header, transactions=[tx]) + block.update_header_merkle_root() + + with pytest.raises(StateTransitionError, match="coinbase"): + state.apply_block(block, block_reward=50) diff --git a/tests/test_transaction.py b/tests/test_transaction.py index 258586d..d5b2540 100644 --- a/tests/test_transaction.py +++ b/tests/test_transaction.py @@ -9,7 +9,7 @@ pytest.importorskip("nacl") from minichain.crypto import derive_address, 
generate_key_pair, serialize_verify_key -from minichain.transaction import Transaction +from minichain.transaction import COINBASE_SENDER, Transaction, create_coinbase_transaction def _build_signed_transaction() -> tuple[Transaction, object]: @@ -61,3 +61,25 @@ def test_transaction_id_changes_when_signature_changes() -> None: tampered = replace(tx, signature="00" * 64) assert tampered.transaction_id() != original_id + + +def test_coinbase_transaction_verifies_without_signature() -> None: + tx = create_coinbase_transaction( + miner_address="ef" * 20, + amount=55, + timestamp=1_739_760_111, + ) + + assert tx.sender == COINBASE_SENDER + assert tx.verify() + + +def test_coinbase_with_auth_fields_is_rejected() -> None: + tx = create_coinbase_transaction( + miner_address="ef" * 20, + amount=55, + timestamp=1_739_760_111, + ) + tampered = replace(tx, signature="00" * 64) + + assert not tampered.verify() From 17e53fbaa12775cac832003f55be7541aaf45afe Mon Sep 17 00:00:00 2001 From: Arunabha Date: Mon, 23 Feb 2026 18:52:39 +0530 Subject: [PATCH 24/47] feat: implement mempool queueing and mining selection --- src/minichain/mempool.py | 248 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 247 insertions(+), 1 deletion(-) diff --git a/src/minichain/mempool.py b/src/minichain/mempool.py index 3e15d3b..a9e1db1 100644 --- a/src/minichain/mempool.py +++ b/src/minichain/mempool.py @@ -1 +1,247 @@ -"""Mempool data structures and transaction selection logic (to be implemented).""" +"""Mempool data structures and transaction selection logic.""" + +from __future__ import annotations + +import time +from dataclasses import dataclass, field +from heapq import heappop, heappush +from typing import Iterable + +from minichain.state import Account, State +from minichain.transaction import Transaction + + +class MempoolValidationError(ValueError): + """Raised when a transaction cannot be accepted into the mempool.""" + + +@dataclass +class _PoolEntry: + transaction: Transaction + 
transaction_id: str + received_at: int + + @property + def fee(self) -> int: + return self.transaction.fee + + +@dataclass +class _SenderPool: + entries: dict[int, _PoolEntry] = field(default_factory=dict) + ready_nonces: set[int] = field(default_factory=set) + waiting_nonces: set[int] = field(default_factory=set) + + +class Mempool: + """Holds validated pending transactions and exposes mining selection.""" + + def __init__(self, *, max_size: int = 1_000, max_age_seconds: int = 3_600) -> None: + if max_size <= 0: + raise ValueError("max_size must be positive") + if max_age_seconds <= 0: + raise ValueError("max_age_seconds must be positive") + + self.max_size = max_size + self.max_age_seconds = max_age_seconds + self._entries_by_id: dict[str, _PoolEntry] = {} + self._sender_pools: dict[str, _SenderPool] = {} + self._id_by_sender_nonce: dict[tuple[str, int], str] = {} + + def size(self) -> int: + return len(self._entries_by_id) + + def ready_count(self) -> int: + return sum(len(pool.ready_nonces) for pool in self._sender_pools.values()) + + def waiting_count(self) -> int: + return sum(len(pool.waiting_nonces) for pool in self._sender_pools.values()) + + def contains(self, transaction_id: str) -> bool: + return transaction_id in self._entries_by_id + + def add_transaction( + self, + transaction: Transaction, + state: State, + *, + received_at: int | None = None, + ) -> str: + """Validate and enqueue a transaction, returning its transaction id.""" + if transaction.is_coinbase(): + raise MempoolValidationError("Coinbase transactions are not accepted") + if not transaction.verify(): + raise MempoolValidationError("Transaction failed signature/identity validation") + + transaction_id = transaction.transaction_id().hex() + if transaction_id in self._entries_by_id: + raise MempoolValidationError("Duplicate transaction") + + sender = transaction.sender + nonce_key = (sender, transaction.nonce) + if nonce_key in self._id_by_sender_nonce: + raise 
MempoolValidationError("Duplicate sender nonce in mempool") + + sender_account = state.accounts.get(sender, Account()) + if transaction.nonce < sender_account.nonce: + raise MempoolValidationError("Transaction nonce is stale") + + if transaction.nonce == sender_account.nonce: + immediate_cost = transaction.amount + transaction.fee + if immediate_cost > sender_account.balance: + raise MempoolValidationError("Insufficient balance for pending transaction") + + entry = _PoolEntry( + transaction=transaction, + transaction_id=transaction_id, + received_at=int(time.time()) if received_at is None else received_at, + ) + + pool = self._sender_pools.setdefault(sender, _SenderPool()) + pool.entries[transaction.nonce] = entry + self._entries_by_id[transaction_id] = entry + self._id_by_sender_nonce[nonce_key] = transaction_id + self._recompute_sender_pool(sender, state) + self.evict(state, current_time=entry.received_at) + return transaction_id + + def get_transactions_for_mining( + self, state: State, *, limit: int, current_time: int | None = None + ) -> list[Transaction]: + """Return up to `limit` ready transactions, prioritized by fee.""" + if limit <= 0: + return [] + + now = int(time.time()) if current_time is None else current_time + self.evict(state, current_time=now) + + sender_ready: dict[str, list[_PoolEntry]] = {} + for sender, pool in self._sender_pools.items(): + self._recompute_sender_pool(sender, state) + ready_entries = sorted( + (pool.entries[nonce] for nonce in pool.ready_nonces), + key=lambda entry: entry.transaction.nonce, + ) + if ready_entries: + sender_ready[sender] = ready_entries + + heap: list[tuple[int, int, str, int]] = [] + for sender, entries in sender_ready.items(): + first = entries[0] + heappush(heap, (-first.fee, first.transaction.nonce, sender, 0)) + + selected: list[Transaction] = [] + while heap and len(selected) < limit: + _neg_fee, _nonce, sender, index = heappop(heap) + entry = sender_ready[sender][index] + 
selected.append(entry.transaction) + + next_index = index + 1 + if next_index < len(sender_ready[sender]): + nxt = sender_ready[sender][next_index] + heappush(heap, (-nxt.fee, nxt.transaction.nonce, sender, next_index)) + + return selected + + def remove_confirmed_transactions( + self, + transactions: Iterable[Transaction], + state: State, + ) -> None: + """Remove transactions confirmed in a block and revalidate sender queues.""" + touched_senders: set[str] = set() + for transaction in transactions: + transaction_id = transaction.transaction_id().hex() + entry = self._entries_by_id.get(transaction_id) + if entry is None: + continue + touched_senders.add(entry.transaction.sender) + self._remove_entry(entry) + + for sender in touched_senders: + self._recompute_sender_pool(sender, state) + + for sender in list(self._sender_pools): + self._recompute_sender_pool(sender, state) + + def evict(self, state: State, *, current_time: int | None = None) -> list[str]: + """Evict stale transactions and, if oversized, low-fee transactions.""" + now = int(time.time()) if current_time is None else current_time + evicted_ids: list[str] = [] + + stale_ids = [ + tx_id + for tx_id, entry in self._entries_by_id.items() + if now - entry.received_at > self.max_age_seconds + ] + for tx_id in stale_ids: + entry = self._entries_by_id.get(tx_id) + if entry is None: + continue + evicted_ids.append(tx_id) + self._remove_entry(entry) + + while len(self._entries_by_id) > self.max_size: + entry = min( + self._entries_by_id.values(), + key=lambda item: (item.fee, item.received_at), + ) + evicted_ids.append(entry.transaction_id) + self._remove_entry(entry) + + for sender in list(self._sender_pools): + self._recompute_sender_pool(sender, state) + + return evicted_ids + + def _recompute_sender_pool(self, sender: str, state: State) -> None: + pool = self._sender_pools.get(sender) + if pool is None: + return + + account = state.accounts.get(sender, Account()) + state_nonce = account.nonce + 
available_balance = account.balance + + for nonce in [nonce for nonce in pool.entries if nonce < state_nonce]: + self._remove_entry(pool.entries[nonce]) + + pool = self._sender_pools.get(sender) + if pool is None: + return + + ready_nonces: set[int] = set() + expected_nonce = state_nonce + while expected_nonce in pool.entries: + candidate = pool.entries[expected_nonce].transaction + candidate_cost = candidate.amount + candidate.fee + if candidate_cost > available_balance: + break + ready_nonces.add(expected_nonce) + available_balance -= candidate_cost + expected_nonce += 1 + + all_nonces = set(pool.entries.keys()) + pool.ready_nonces = ready_nonces + pool.waiting_nonces = all_nonces - ready_nonces + + if not pool.entries: + self._sender_pools.pop(sender, None) + + def _remove_entry(self, entry: _PoolEntry) -> None: + transaction = entry.transaction + sender = transaction.sender + nonce = transaction.nonce + + self._entries_by_id.pop(entry.transaction_id, None) + self._id_by_sender_nonce.pop((sender, nonce), None) + + pool = self._sender_pools.get(sender) + if pool is None: + return + + pool.entries.pop(nonce, None) + pool.ready_nonces.discard(nonce) + pool.waiting_nonces.discard(nonce) + if not pool.entries: + self._sender_pools.pop(sender, None) From d5e8f92eec79abf4387c230d642a0afdf52f84fd Mon Sep 17 00:00:00 2001 From: Arunabha Date: Mon, 23 Feb 2026 18:52:42 +0530 Subject: [PATCH 25/47] test: add mempool dedup ordering and eviction coverage --- tests/test_mempool.py | 313 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 313 insertions(+) create mode 100644 tests/test_mempool.py diff --git a/tests/test_mempool.py b/tests/test_mempool.py new file mode 100644 index 0000000..8cc2d8a --- /dev/null +++ b/tests/test_mempool.py @@ -0,0 +1,313 @@ +"""Unit tests for mempool transaction queuing and selection behavior.""" + +from __future__ import annotations + +from dataclasses import replace + +import pytest + +pytest.importorskip("nacl") + +from 
minichain.crypto import derive_address, generate_key_pair +from minichain.mempool import Mempool, MempoolValidationError +from minichain.state import Account, State +from minichain.transaction import Transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int = 1_739_950_000, +) -> Transaction: + transaction = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp + nonce, + ) + transaction.sign(sender_key) + return transaction + + +def test_deduplicates_transactions_by_id() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + + mempool.add_transaction(tx, state) + with pytest.raises(MempoolValidationError, match="Duplicate transaction"): + mempool.add_transaction(tx, state) + + +def test_fee_priority_respects_sender_nonce_ordering() -> None: + a_key, a_verify = generate_key_pair() + b_key, b_verify = generate_key_pair() + c_key, c_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_a = derive_address(a_verify) + sender_b = derive_address(b_verify) + sender_c = derive_address(c_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender_a, Account(balance=100, nonce=0)) + state.set_account(sender_b, Account(balance=100, nonce=0)) + state.set_account(sender_c, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx_a0 = _signed_transaction( + sender_key=a_key, + 
sender_address=sender_a, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_a1 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=5, + nonce=1, + fee=10, + ) + tx_b0 = _signed_transaction( + sender_key=b_key, + sender_address=sender_b, + recipient=recipient, + amount=5, + nonce=0, + fee=8, + ) + tx_c0 = _signed_transaction( + sender_key=c_key, + sender_address=sender_c, + recipient=recipient, + amount=5, + nonce=0, + fee=4, + ) + + mempool.add_transaction(tx_a1, state) + mempool.add_transaction(tx_b0, state) + mempool.add_transaction(tx_a0, state) + mempool.add_transaction(tx_c0, state) + + selected = mempool.get_transactions_for_mining(state, limit=4) + + assert [tx.fee for tx in selected] == [8, 4, 1, 10] + assert selected[2].sender == sender_a and selected[2].nonce == 0 + assert selected[3].sender == sender_a and selected[3].nonce == 1 + + +def test_evicts_low_fee_when_pool_exceeds_max_size() -> None: + s1_key, s1_verify = generate_key_pair() + s2_key, s2_verify = generate_key_pair() + s3_key, s3_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender1 = derive_address(s1_verify) + sender2 = derive_address(s2_verify) + sender3 = derive_address(s3_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender1, Account(balance=100, nonce=0)) + state.set_account(sender2, Account(balance=100, nonce=0)) + state.set_account(sender3, Account(balance=100, nonce=0)) + mempool = Mempool(max_size=2, max_age_seconds=10_000) + + tx1 = _signed_transaction( + sender_key=s1_key, + sender_address=sender1, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx2 = _signed_transaction( + sender_key=s2_key, + sender_address=sender2, + recipient=recipient, + amount=5, + nonce=0, + fee=6, + ) + tx3 = _signed_transaction( + sender_key=s3_key, + sender_address=sender3, + recipient=recipient, + amount=5, + nonce=0, + 
fee=3, + ) + + id1 = mempool.add_transaction(tx1, state, received_at=1) + id2 = mempool.add_transaction(tx2, state, received_at=2) + id3 = mempool.add_transaction(tx3, state, received_at=3) + + assert mempool.size() == 2 + assert not mempool.contains(id1) + assert mempool.contains(id2) + assert mempool.contains(id3) + + +def test_nonce_gap_is_held_then_promoted_when_gap_filled() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx_nonce_1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=1, + fee=5, + ) + tx_nonce_0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + + mempool.add_transaction(tx_nonce_1, state) + assert mempool.ready_count() == 0 + assert mempool.waiting_count() == 1 + + mempool.add_transaction(tx_nonce_0, state) + assert mempool.ready_count() == 2 + assert mempool.waiting_count() == 0 + + selected = mempool.get_transactions_for_mining(state, limit=2) + assert [tx.nonce for tx in selected] == [0, 1] + + +def test_confirmed_transaction_removal_revalidates_pending_set() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=10, + nonce=0, + fee=2, + ) + tx1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + 
amount=10, + nonce=1, + fee=1, + ) + + mempool.add_transaction(tx0, state) + mempool.add_transaction(tx1, state) + assert mempool.size() == 2 + assert mempool.ready_count() == 2 + + state.apply_transaction(tx0) + mempool.remove_confirmed_transactions([tx0], state) + + assert mempool.size() == 1 + assert mempool.ready_count() == 1 + selected = mempool.get_transactions_for_mining(state, limit=1) + assert selected[0].nonce == 1 + + +def test_rejects_duplicate_sender_nonce_even_if_tx_id_differs() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_modified = replace(tx, amount=6) + tx_modified.sign(sender_key) + + mempool.add_transaction(tx, state) + with pytest.raises(MempoolValidationError, match="Duplicate sender nonce"): + mempool.add_transaction(tx_modified, state) + + +def test_evicts_stale_transactions_by_age() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + state = State() + state.set_account(sender, Account(balance=100, nonce=0)) + mempool = Mempool(max_size=10, max_age_seconds=10) + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + ) + tx_id = mempool.add_transaction(tx, state, received_at=100) + + evicted = mempool.evict(state, current_time=111) + assert tx_id in evicted + assert mempool.size() == 0 From e29ada4fe1dd6e30248fc75b1fccd960fb000591 Mon Sep 17 00:00:00 2001 From: Arunabha Date: 
Tue, 24 Feb 2026 00:57:22 +0530 Subject: [PATCH 26/47] feat: implement chain manager with fork reorg handling --- src/minichain/chain.py | 224 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 224 insertions(+) create mode 100644 src/minichain/chain.py diff --git a/src/minichain/chain.py b/src/minichain/chain.py new file mode 100644 index 0000000..acfc4a4 --- /dev/null +++ b/src/minichain/chain.py @@ -0,0 +1,224 @@ +"""Chain manager and fork-resolution logic.""" + +from __future__ import annotations + +from dataclasses import dataclass + +from minichain.block import Block, BlockHeader +from minichain.consensus import compute_next_difficulty_target, is_valid_pow +from minichain.genesis import GENESIS_PREVIOUS_HASH +from minichain.state import State, StateTransitionError + + +class ChainValidationError(ValueError): + """Raised when a block or branch is invalid.""" + + +@dataclass(frozen=True) +class ChainConfig: + """Configuration for chain validation and state transitions.""" + + block_reward: int = 50 + difficulty_adjustment_interval: int = 10 + target_block_time_seconds: int = 30 + + def validate(self) -> None: + if self.block_reward < 0: + raise ValueError("block_reward must be non-negative") + if self.difficulty_adjustment_interval <= 0: + raise ValueError("difficulty_adjustment_interval must be positive") + if self.target_block_time_seconds <= 0: + raise ValueError("target_block_time_seconds must be positive") + + +class ChainManager: + """Maintains canonical chain, block index, and current canonical state.""" + + def __init__( + self, + *, + genesis_block: Block, + genesis_state: State, + config: ChainConfig | None = None, + ) -> None: + self.config = config or ChainConfig() + self.config.validate() + self._validate_genesis(genesis_block) + + genesis_hash = genesis_block.hash().hex() + self._genesis_hash = genesis_hash + self._blocks_by_hash: dict[str, Block] = {genesis_hash: genesis_block} + self._heights: dict[str, int] = {genesis_hash: 0} + 
self._canonical_hashes: list[str] = [genesis_hash] + self._tip_hash = genesis_hash + + self._genesis_state = genesis_state.copy() + self.state = genesis_state.copy() + + @property + def tip_hash(self) -> str: + return self._tip_hash + + @property + def height(self) -> int: + return self._heights[self._tip_hash] + + @property + def tip_block(self) -> Block: + return self._blocks_by_hash[self._tip_hash] + + def contains_block(self, block_hash: str) -> bool: + return block_hash in self._blocks_by_hash + + def canonical_chain(self) -> list[Block]: + return [self._blocks_by_hash[block_hash] for block_hash in self._canonical_hashes] + + def get_block_by_hash(self, block_hash: str) -> Block | None: + return self._blocks_by_hash.get(block_hash) + + def get_canonical_block_by_height(self, height: int) -> Block | None: + if height < 0 or height >= len(self._canonical_hashes): + return None + return self._blocks_by_hash[self._canonical_hashes[height]] + + def expected_next_difficulty(self, *, parent_hash: str | None = None) -> int: + """Compute the expected next block target after the given parent.""" + path_hashes = ( + self._canonical_hashes + if parent_hash is None + else self._path_from_genesis(parent_hash) + ) + headers = [self._blocks_by_hash[block_hash].header for block_hash in path_hashes] + return compute_next_difficulty_target( + headers, + adjustment_interval=self.config.difficulty_adjustment_interval, + target_block_time_seconds=self.config.target_block_time_seconds, + ) + + def add_block(self, block: Block) -> str: + """Add a block to chain storage and update canonical tip when appropriate.""" + block_hash = block.hash().hex() + if block_hash in self._blocks_by_hash: + return "duplicate" + + parent_hash = block.header.previous_hash + if parent_hash not in self._blocks_by_hash: + raise ChainValidationError(f"Unknown parent block: {parent_hash}") + + self._blocks_by_hash[block_hash] = block + self._heights[block_hash] = block.header.block_height + + try: + 
candidate_path, candidate_state = self._replay_state_for_tip(block_hash) + except ChainValidationError: + self._blocks_by_hash.pop(block_hash, None) + self._heights.pop(block_hash, None) + raise + + parent_is_tip = parent_hash == self._tip_hash + candidate_height = len(candidate_path) - 1 + canonical_height = self.height + + if parent_is_tip and candidate_height == canonical_height + 1: + self._canonical_hashes.append(block_hash) + self._tip_hash = block_hash + self.state = candidate_state + return "extended" + + if candidate_height > canonical_height: + self._canonical_hashes = candidate_path + self._tip_hash = block_hash + self.state = candidate_state + return "reorg" + + return "stored_fork" + + def _replay_state_for_tip(self, tip_hash: str) -> tuple[list[str], State]: + path_hashes = self._path_from_genesis(tip_hash) + replay_state = self._genesis_state.copy() + replayed_headers = [self._blocks_by_hash[path_hashes[0]].header] + + for index, block_hash in enumerate(path_hashes[1:], start=1): + block = self._blocks_by_hash[block_hash] + parent_hash = path_hashes[index - 1] + parent_header = replayed_headers[-1] + + self._validate_link( + parent_hash=parent_hash, + parent_height=parent_header.block_height, + block=block, + ) + self._validate_consensus(block=block, parent_headers=replayed_headers) + + try: + replay_state.apply_block(block, block_reward=self.config.block_reward) + except StateTransitionError as exc: + raise ChainValidationError(f"State transition failed: {exc}") from exc + + replayed_headers.append(block.header) + + return path_hashes, replay_state + + def _path_from_genesis(self, tip_hash: str) -> list[str]: + if tip_hash not in self._blocks_by_hash: + raise ChainValidationError(f"Unknown block hash: {tip_hash}") + + path: list[str] = [] + seen: set[str] = set() + cursor = tip_hash + while True: + if cursor in seen: + raise ChainValidationError("Cycle detected in block ancestry") + seen.add(cursor) + path.append(cursor) + + if cursor == 
self._genesis_hash: + break + + parent_hash = self._blocks_by_hash[cursor].header.previous_hash + if parent_hash not in self._blocks_by_hash: + raise ChainValidationError( + f"Missing ancestor for block {cursor}: {parent_hash}" + ) + cursor = parent_hash + + path.reverse() + if path[0] != self._genesis_hash: + raise ChainValidationError("Candidate chain does not start at genesis") + return path + + def _validate_consensus(self, *, block: Block, parent_headers: list[BlockHeader]) -> None: + if not block.has_valid_merkle_root(): + raise ChainValidationError("Block merkle_root does not match transaction body") + + expected_target = compute_next_difficulty_target( + parent_headers, + adjustment_interval=self.config.difficulty_adjustment_interval, + target_block_time_seconds=self.config.target_block_time_seconds, + ) + if block.header.difficulty_target != expected_target: + raise ChainValidationError( + "Invalid difficulty target: " + f"expected {expected_target}, got {block.header.difficulty_target}" + ) + if not is_valid_pow(block.header): + raise ChainValidationError("Block does not satisfy Proof-of-Work target") + + @staticmethod + def _validate_link(*, parent_hash: str, parent_height: int, block: Block) -> None: + if block.header.previous_hash != parent_hash: + raise ChainValidationError("Block previous_hash does not match parent hash") + expected_height = parent_height + 1 + if block.header.block_height != expected_height: + raise ChainValidationError( + f"Invalid block height: expected {expected_height}, got {block.header.block_height}" + ) + + @staticmethod + def _validate_genesis(genesis_block: Block) -> None: + if genesis_block.header.block_height != 0: + raise ValueError("Genesis block height must be 0") + if genesis_block.header.previous_hash != GENESIS_PREVIOUS_HASH: + raise ValueError("Genesis previous_hash must be all zeros") + if genesis_block.transactions: + raise ValueError("Genesis block must not include transactions") From 
387adf3a0319365b9044ba241c1cd61a6d880feb Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 00:57:25 +0530 Subject: [PATCH 27/47] test: add chain extension and reorg coverage --- tests/test_chain.py | 196 +++++++++++++++++++++++++++++++++++++++++ tests/test_scaffold.py | 1 + 2 files changed, 197 insertions(+) create mode 100644 tests/test_chain.py diff --git a/tests/test_chain.py b/tests/test_chain.py new file mode 100644 index 0000000..0b50f5c --- /dev/null +++ b/tests/test_chain.py @@ -0,0 +1,196 @@ +"""Unit tests for chain management and fork resolution.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.transaction import create_coinbase_transaction + + +def _build_manager(*, block_reward: int = 50) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=block_reward, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _coinbase_block( + manager: ChainManager, + *, + parent: Block, + miner_address: str, + timestamp: int, + coinbase_amount: int | None = None, + difficulty_target: int | None = None, +) -> Block: + reward_amount = manager.config.block_reward if coinbase_amount is None else coinbase_amount + target = ( + manager.expected_next_difficulty(parent_hash=parent.hash().hex()) + if difficulty_target is None + else difficulty_target + ) + coinbase = create_coinbase_transaction( + miner_address=miner_address, + amount=reward_amount, + timestamp=timestamp, + ) + header = BlockHeader( + 
version=0, + previous_hash=parent.hash().hex(), + merkle_root="", + timestamp=timestamp, + difficulty_target=target, + nonce=0, + block_height=parent.header.block_height + 1, + ) + block = Block(header=header, transactions=[coinbase]) + block.update_header_merkle_root() + return block + + +def test_appends_valid_blocks_to_tip() -> None: + manager = _build_manager(block_reward=50) + miner = "11" * 20 + + block_1 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner, + timestamp=1_739_000_030, + ) + result_1 = manager.add_block(block_1) + assert result_1 == "extended" + assert manager.height == 1 + + block_2 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner, + timestamp=1_739_000_060, + ) + result_2 = manager.add_block(block_2) + assert result_2 == "extended" + assert manager.height == 2 + assert manager.tip_hash == block_2.hash().hex() + assert manager.state.get_account(miner).balance == 100 + + +def test_longer_fork_triggers_reorg_and_state_replay() -> None: + manager = _build_manager(block_reward=50) + miner_a = "11" * 20 + miner_b = "22" * 20 + + a1 = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address=miner_a, + timestamp=1_739_000_030, + ) + manager.add_block(a1) + + a2 = _coinbase_block( + manager, + parent=a1, + miner_address=miner_a, + timestamp=1_739_000_060, + ) + assert manager.add_block(a2) == "extended" + assert manager.state.get_account(miner_a).balance == 100 + + b2 = _coinbase_block( + manager, + parent=a1, + miner_address=miner_b, + timestamp=1_739_000_061, + ) + assert manager.add_block(b2) == "stored_fork" + + b3 = _coinbase_block( + manager, + parent=b2, + miner_address=miner_b, + timestamp=1_739_000_090, + ) + assert manager.add_block(b3) == "reorg" + assert manager.tip_hash == b3.hash().hex() + assert manager.height == 3 + assert manager.state.get_account(miner_a).balance == 50 + assert manager.state.get_account(miner_b).balance == 100 + + +def 
test_rejects_block_with_unknown_parent() -> None: + manager = _build_manager(block_reward=50) + coinbase = create_coinbase_transaction( + miner_address="33" * 20, + amount=50, + timestamp=1_739_000_030, + ) + block = Block( + header=BlockHeader( + version=0, + previous_hash="ff" * 32, + merkle_root="", + timestamp=1_739_000_030, + difficulty_target=MAX_TARGET, + nonce=0, + block_height=1, + ), + transactions=[coinbase], + ) + block.update_header_merkle_root() + + with pytest.raises(ChainValidationError, match="Unknown parent block"): + manager.add_block(block) + + +def test_rejects_invalid_coinbase_amount() -> None: + manager = _build_manager(block_reward=50) + + invalid_block = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address="44" * 20, + timestamp=1_739_000_030, + coinbase_amount=60, + ) + invalid_hash = invalid_block.hash().hex() + with pytest.raises(ChainValidationError, match="State transition failed"): + manager.add_block(invalid_block) + + assert manager.height == 0 + assert not manager.contains_block(invalid_hash) + + +def test_rejects_block_with_wrong_difficulty_target() -> None: + manager = _build_manager(block_reward=50) + expected = manager.expected_next_difficulty(parent_hash=manager.tip_hash) + wrong_target = expected - 1 + + invalid_block = _coinbase_block( + manager, + parent=manager.tip_block, + miner_address="55" * 20, + timestamp=1_739_000_030, + difficulty_target=wrong_target, + ) + + with pytest.raises(ChainValidationError, match="Invalid difficulty target"): + manager.add_block(invalid_block) diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py index 5c3d553..36d6d7c 100644 --- a/tests/test_scaffold.py +++ b/tests/test_scaffold.py @@ -17,6 +17,7 @@ "serialization", "merkle", "genesis", + "chain", ] From 834cbb0d327e2941e00017b6cf657a071ba6fdc9 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 01:23:10 +0530 Subject: [PATCH 28/47] feat: add candidate block construction for mining --- 
src/minichain/mining.py | 90 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 90 insertions(+) create mode 100644 src/minichain/mining.py diff --git a/src/minichain/mining.py b/src/minichain/mining.py new file mode 100644 index 0000000..140e391 --- /dev/null +++ b/src/minichain/mining.py @@ -0,0 +1,90 @@ +"""Block construction utilities for miners.""" + +from __future__ import annotations + +import time +from dataclasses import replace +from threading import Event + +from minichain.block import Block, BlockHeader +from minichain.chain import ChainManager +from minichain.consensus import mine_block_header +from minichain.mempool import Mempool +from minichain.transaction import ADDRESS_HEX_LENGTH, create_coinbase_transaction + + +class BlockConstructionError(ValueError): + """Raised when a candidate block cannot be constructed.""" + + +def build_candidate_block( + *, + chain_manager: ChainManager, + mempool: Mempool, + miner_address: str, + max_transactions: int, + timestamp: int | None = None, +) -> Block: + """Build a candidate block template from chain tip and mempool.""" + if max_transactions < 0: + raise BlockConstructionError("max_transactions must be non-negative") + if not _is_lower_hex(miner_address, ADDRESS_HEX_LENGTH): + raise BlockConstructionError("miner_address must be a 20-byte lowercase hex string") + + parent = chain_manager.tip_block + parent_hash = chain_manager.tip_hash + base_timestamp = int(time.time()) if timestamp is None else timestamp + if base_timestamp < 0: + raise BlockConstructionError("timestamp must be non-negative") + block_timestamp = max(base_timestamp, parent.header.timestamp + 1) + + selected_transactions = mempool.get_transactions_for_mining( + chain_manager.state, + limit=max_transactions, + current_time=block_timestamp, + ) + total_fees = sum(transaction.fee for transaction in selected_transactions) + coinbase_amount = chain_manager.config.block_reward + total_fees + coinbase = create_coinbase_transaction( + 
miner_address=miner_address, + amount=coinbase_amount, + timestamp=block_timestamp, + ) + + header = BlockHeader( + version=parent.header.version, + previous_hash=parent_hash, + merkle_root="", + timestamp=block_timestamp, + difficulty_target=chain_manager.expected_next_difficulty(parent_hash=parent_hash), + nonce=0, + block_height=parent.header.block_height + 1, + ) + candidate = Block(header=header, transactions=[coinbase, *selected_transactions]) + candidate.update_header_merkle_root() + return candidate + + +def mine_candidate_block( + *, + block_template: Block, + start_nonce: int = 0, + max_nonce: int = (1 << 64) - 1, + stop_event: Event | None = None, +) -> tuple[Block, bytes]: + """Search for a valid nonce and return a mined copy of the block.""" + nonce, digest = mine_block_header( + block_template.header, + start_nonce=start_nonce, + max_nonce=max_nonce, + stop_event=stop_event, + ) + mined_header = replace(block_template.header, nonce=nonce) + mined_block = Block(header=mined_header, transactions=list(block_template.transactions)) + return mined_block, digest + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) From 9a153acc01f7233ce933dbfc9e97c34658a10b47 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 01:23:14 +0530 Subject: [PATCH 29/47] test: cover block template assembly and mining flow --- tests/test_mining.py | 223 +++++++++++++++++++++++++++++++++++++++++ tests/test_scaffold.py | 1 + 2 files changed, 224 insertions(+) create mode 100644 tests/test_mining.py diff --git a/tests/test_mining.py b/tests/test_mining.py new file mode 100644 index 0000000..dde750b --- /dev/null +++ b/tests/test_mining.py @@ -0,0 +1,223 @@ +"""Unit tests for candidate block construction and mining flow.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, 
ChainManager +from minichain.consensus import MAX_TARGET, is_valid_pow +from minichain.crypto import derive_address, generate_key_pair +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.transaction import Transaction + + +def _build_manager( + *, + initial_balances: dict[str, int], + genesis_target: int = MAX_TARGET, +) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances=initial_balances, + timestamp=1_739_000_000, + difficulty_target=genesis_target, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def test_candidate_block_selects_by_fee_with_sender_nonce_ordering() -> None: + a_key, a_verify = generate_key_pair() + b_key, b_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender_a = derive_address(a_verify) + sender_b = derive_address(b_verify) + recipient = derive_address(recipient_verify) + + manager = _build_manager(initial_balances={sender_a: 100, sender_b: 100}) + mempool = Mempool() + + tx_a1 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=10, + nonce=1, + fee=10, + timestamp=1_739_000_010, + ) + tx_b0 = _signed_transaction( + sender_key=b_key, + sender_address=sender_b, + recipient=recipient, + amount=10, + nonce=0, + fee=8, + 
timestamp=1_739_000_011, + ) + tx_a0 = _signed_transaction( + sender_key=a_key, + sender_address=sender_a, + recipient=recipient, + amount=10, + nonce=0, + fee=1, + timestamp=1_739_000_012, + ) + + mempool.add_transaction(tx_a1, manager.state) + mempool.add_transaction(tx_b0, manager.state) + mempool.add_transaction(tx_a0, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="11" * 20, + max_transactions=3, + timestamp=1_739_000_030, + ) + + assert candidate.header.previous_hash == manager.tip_hash + assert candidate.header.block_height == manager.height + 1 + assert candidate.header.difficulty_target == manager.expected_next_difficulty() + assert candidate.transactions[0].is_coinbase() + assert [tx.fee for tx in candidate.transactions[1:]] == [8, 1, 10] + assert [tx.nonce for tx in candidate.transactions[1:] if tx.sender == sender_a] == [0, 1] + + total_fees = sum(tx.fee for tx in candidate.transactions[1:]) + assert candidate.transactions[0].amount == manager.config.block_reward + total_fees + assert candidate.has_valid_merkle_root() + + +def test_candidate_block_respects_max_transaction_limit() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + manager = _build_manager(initial_balances={sender: 200}) + mempool = Mempool() + + tx0 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=0, + fee=1, + timestamp=1_739_000_010, + ) + tx1 = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=5, + nonce=1, + fee=2, + timestamp=1_739_000_011, + ) + mempool.add_transaction(tx0, manager.state) + mempool.add_transaction(tx1, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + 
miner_address="22" * 20, + max_transactions=1, + timestamp=1_739_000_030, + ) + + assert len(candidate.transactions) == 2 + assert candidate.transactions[1].nonce == 0 + assert candidate.transactions[0].amount == manager.config.block_reward + tx0.fee + + +def test_mined_candidate_block_is_accepted_by_chain_manager() -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + manager = _build_manager(initial_balances={sender: 100}, genesis_target=1 << 252) + mempool = Mempool() + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=10, + nonce=0, + fee=2, + timestamp=1_739_000_010, + ) + mempool.add_transaction(tx, manager.state) + + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="33" * 20, + max_transactions=10, + timestamp=1_739_000_030, + ) + mined_block, _digest = mine_candidate_block( + block_template=candidate, + max_nonce=500_000, + ) + + assert is_valid_pow(mined_block.header) + result = manager.add_block(mined_block) + assert result == "extended" + assert manager.height == 1 + assert manager.state.get_account("33" * 20).balance == manager.config.block_reward + tx.fee + + +def test_candidate_block_timestamp_is_monotonic() -> None: + manager = _build_manager(initial_balances={}) + mempool = Mempool() + candidate = build_candidate_block( + chain_manager=manager, + mempool=mempool, + miner_address="44" * 20, + max_transactions=0, + timestamp=manager.tip_block.header.timestamp - 10, + ) + + assert candidate.header.timestamp == manager.tip_block.header.timestamp + 1 diff --git a/tests/test_scaffold.py b/tests/test_scaffold.py index 36d6d7c..cc0eb7c 100644 --- a/tests/test_scaffold.py +++ b/tests/test_scaffold.py @@ -18,6 +18,7 @@ "merkle", "genesis", "chain", + "mining", ] From 
420a7476ff68f6c4f49db0f7dbd18d6e4c56f92c Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 02:12:40 +0530 Subject: [PATCH 30/47] feat: implement sqlite persistence for blocks state and metadata --- src/minichain/storage.py | 281 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 280 insertions(+), 1 deletion(-) diff --git a/src/minichain/storage.py b/src/minichain/storage.py index 0b8f8ee..ffd1aab 100644 --- a/src/minichain/storage.py +++ b/src/minichain/storage.py @@ -1 +1,280 @@ -"""Persistent storage integration (to be implemented).""" +"""Persistent storage integration using SQLite.""" + +from __future__ import annotations + +import json +import sqlite3 +from dataclasses import asdict +from pathlib import Path + +from minichain.block import Block, BlockHeader +from minichain.state import Account, State +from minichain.transaction import Transaction + + +class StorageError(ValueError): + """Raised when persistence operations fail validation or constraints.""" + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +class SQLiteStorage: + """SQLite-backed block/state persistence.""" + + def __init__(self, db_path: str | Path) -> None: + self.db_path = str(db_path) + self._connection = sqlite3.connect(self.db_path) + self._connection.execute("PRAGMA foreign_keys = ON") + self._initialize_schema() + + def close(self) -> None: + self._connection.close() + + def __enter__(self) -> SQLiteStorage: + return self + + def __exit__(self, _exc_type: object, _exc: object, _tb: object) -> None: + self.close() + + def _initialize_schema(self) -> None: + self._connection.executescript( + """ + CREATE TABLE IF NOT EXISTS blocks ( + hash TEXT PRIMARY KEY, + height INTEGER NOT NULL UNIQUE, + version INTEGER NOT NULL, + previous_hash TEXT NOT NULL, + merkle_root TEXT NOT NULL, + timestamp INTEGER NOT NULL, + difficulty_target TEXT NOT NULL, + 
nonce INTEGER NOT NULL, + transactions_json TEXT NOT NULL + ); + + CREATE TABLE IF NOT EXISTS accounts ( + address TEXT PRIMARY KEY, + balance INTEGER NOT NULL, + nonce INTEGER NOT NULL + ); + + CREATE TABLE IF NOT EXISTS chain_metadata ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL + ); + """ + ) + self._connection.commit() + + def store_block(self, block: Block, *, connection: sqlite3.Connection | None = None) -> None: + """Persist a block by hash and height.""" + if connection is None: + with self._connection: + self.store_block(block, connection=self._connection) + return + + if not block.has_valid_merkle_root(): + raise StorageError("Block merkle_root does not match transactions") + + block_hash = block.hash().hex() + transactions_json = json.dumps( + [asdict(transaction) for transaction in block.transactions], + sort_keys=True, + separators=(",", ":"), + ) + conn = connection + + try: + conn.execute( + """ + INSERT INTO blocks ( + hash, height, version, previous_hash, merkle_root, + timestamp, difficulty_target, nonce, transactions_json + ) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?) + """, + ( + block_hash, + block.header.block_height, + block.header.version, + block.header.previous_hash, + block.header.merkle_root, + block.header.timestamp, + str(block.header.difficulty_target), + block.header.nonce, + transactions_json, + ), + ) + except sqlite3.IntegrityError as exc: + raise StorageError( + f"Block already exists or violates constraints: {block_hash}" + ) from exc + + def get_block_by_hash(self, block_hash: str) -> Block | None: + """Load a block by hash.""" + row = self._connection.execute( + """ + SELECT + height, version, previous_hash, merkle_root, timestamp, + difficulty_target, nonce, transactions_json + FROM blocks + WHERE hash = ? 
+ """, + (block_hash,), + ).fetchone() + if row is None: + return None + return self._row_to_block(row) + + def get_block_by_height(self, height: int) -> Block | None: + """Load a block by canonical height.""" + row = self._connection.execute( + """ + SELECT + height, version, previous_hash, merkle_root, timestamp, + difficulty_target, nonce, transactions_json + FROM blocks + WHERE height = ? + """, + (height,), + ).fetchone() + if row is None: + return None + return self._row_to_block(row) + + def save_state(self, state: State, *, connection: sqlite3.Connection | None = None) -> None: + """Persist all accounts as the current canonical state snapshot.""" + if connection is None: + with self._connection: + self.save_state(state, connection=self._connection) + return + + conn = connection + + for address, account in state.accounts.items(): + if not _is_lower_hex(address, 40): + raise StorageError(f"Invalid account address: {address}") + if account.balance < 0 or account.nonce < 0: + raise StorageError(f"Invalid account values for {address}") + + conn.execute("DELETE FROM accounts") + rows = [ + (address, account.balance, account.nonce) + for address, account in sorted(state.accounts.items()) + ] + conn.executemany( + "INSERT INTO accounts (address, balance, nonce) VALUES (?, ?, ?)", + rows, + ) + + def load_state(self) -> State: + """Load the latest persisted account snapshot.""" + state = State() + rows = self._connection.execute( + "SELECT address, balance, nonce FROM accounts ORDER BY address" + ).fetchall() + for address, balance, nonce in rows: + state.set_account(address, Account(balance=balance, nonce=nonce)) + return state + + def save_chain_metadata( + self, + *, + height: int, + head_hash: str, + connection: sqlite3.Connection | None = None, + ) -> None: + """Persist canonical chain metadata.""" + if connection is None: + with self._connection: + self.save_chain_metadata( + height=height, + head_hash=head_hash, + connection=self._connection, + ) + return + 
+ if height < 0: + raise StorageError("height must be non-negative") + if not _is_lower_hex(head_hash, 64): + raise StorageError("head_hash must be a 32-byte lowercase hex string") + + conn = connection + conn.execute( + """ + INSERT INTO chain_metadata (key, value) VALUES ('height', ?) + ON CONFLICT(key) DO UPDATE SET value = excluded.value + """, + (str(height),), + ) + conn.execute( + """ + INSERT INTO chain_metadata (key, value) VALUES ('head_hash', ?) + ON CONFLICT(key) DO UPDATE SET value = excluded.value + """, + (head_hash,), + ) + + def load_chain_metadata(self) -> dict[str, int | str] | None: + """Load canonical chain metadata (height and head hash).""" + rows = self._connection.execute( + "SELECT key, value FROM chain_metadata WHERE key IN ('height', 'head_hash')" + ).fetchall() + if not rows: + return None + kv = {key: value for key, value in rows} + if "height" not in kv or "head_hash" not in kv: + raise StorageError("Incomplete chain metadata in storage") + return {"height": int(kv["height"]), "head_hash": kv["head_hash"]} + + def persist_block_state_and_metadata( + self, + *, + block: Block, + state: State, + height: int | None = None, + head_hash: str | None = None, + ) -> None: + """Atomically persist block, state snapshot, and metadata.""" + resolved_height = block.header.block_height if height is None else height + resolved_head_hash = block.hash().hex() if head_hash is None else head_hash + + with self._connection: + self.store_block(block, connection=self._connection) + self.save_state(state, connection=self._connection) + self.save_chain_metadata( + height=resolved_height, + head_hash=resolved_head_hash, + connection=self._connection, + ) + + @staticmethod + def _row_to_block(row: sqlite3.Row | tuple[object, ...]) -> Block: + ( + height, + version, + previous_hash, + merkle_root, + timestamp, + difficulty_target, + nonce, + transactions_json, + ) = row + header = BlockHeader( + version=int(version), + previous_hash=str(previous_hash), + 
merkle_root=str(merkle_root), + timestamp=int(timestamp), + difficulty_target=int(difficulty_target), + nonce=int(nonce), + block_height=int(height), + ) + transaction_dicts = json.loads(str(transactions_json)) + transactions = [Transaction(**tx) for tx in transaction_dicts] + block = Block(header=header, transactions=transactions) + if not block.has_valid_merkle_root(): + raise StorageError("Corrupt block data: merkle_root mismatch") + return block From 6bdb5cd44406f46823278387937c472475c092ba Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 02:12:43 +0530 Subject: [PATCH 31/47] test: add sqlite roundtrip restart and atomicity coverage --- tests/test_storage.py | 180 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 180 insertions(+) create mode 100644 tests/test_storage.py diff --git a/tests/test_storage.py b/tests/test_storage.py new file mode 100644 index 0000000..22df41b --- /dev/null +++ b/tests/test_storage.py @@ -0,0 +1,180 @@ +"""Unit tests for SQLite persistence and transactional storage behavior.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block, BlockHeader +from minichain.crypto import derive_address, generate_key_pair +from minichain.state import Account, State +from minichain.storage import SQLiteStorage, StorageError +from minichain.transaction import Transaction, create_coinbase_transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def _block_with_transactions( + *, + previous_hash: str, + height: int, + timestamp: int, + transactions: list[Transaction], +) -> Block: + header = BlockHeader( + version=0, + previous_hash=previous_hash, + 
merkle_root="", + timestamp=timestamp, + difficulty_target=(1 << 255), + nonce=0, + block_height=height, + ) + block = Block(header=header, transactions=transactions) + block.update_header_merkle_root() + return block + + +def test_store_and_load_block_round_trip(tmp_path: pytest.TempPathFactory) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + miner_key, miner_verify = generate_key_pair() + _ = recipient_key + _ = miner_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + miner = derive_address(miner_verify) + + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=25, + nonce=0, + fee=2, + timestamp=1_739_990_001, + ) + coinbase = create_coinbase_transaction( + miner_address=miner, + amount=52, + timestamp=1_739_990_001, + ) + block = _block_with_transactions( + previous_hash="00" * 32, + height=1, + timestamp=1_739_990_001, + transactions=[coinbase, tx], + ) + + db_path = tmp_path / "chain.db" + with SQLiteStorage(db_path) as storage: + storage.store_block(block) + loaded_by_hash = storage.get_block_by_hash(block.hash().hex()) + loaded_by_height = storage.get_block_by_height(block.header.block_height) + + assert loaded_by_hash is not None + assert loaded_by_hash.hash() == block.hash() + assert loaded_by_hash.header.previous_hash == block.header.previous_hash + assert len(loaded_by_hash.transactions) == 2 + assert loaded_by_hash.transactions[0].is_coinbase() + assert loaded_by_hash.transactions[1].signature == tx.signature + assert loaded_by_height is not None + assert loaded_by_height.hash() == block.hash() + + +def test_state_and_metadata_persist_across_restart( + tmp_path: pytest.TempPathFactory, +) -> None: + db_path = tmp_path / "chain.db" + state = State() + state.set_account("11" * 20, Account(balance=100, nonce=2)) + state.set_account("22" * 20, Account(balance=50, nonce=0)) + head_hash = "ab" 
* 32 + + storage = SQLiteStorage(db_path) + storage.save_state(state) + storage.save_chain_metadata(height=7, head_hash=head_hash) + storage.close() + + reopened = SQLiteStorage(db_path) + loaded_state = reopened.load_state() + metadata = reopened.load_chain_metadata() + reopened.close() + + assert loaded_state.get_account("11" * 20).balance == 100 + assert loaded_state.get_account("11" * 20).nonce == 2 + assert loaded_state.get_account("22" * 20).balance == 50 + assert metadata == {"height": 7, "head_hash": head_hash} + + +def test_atomic_persist_rolls_back_on_metadata_failure( + tmp_path: pytest.TempPathFactory, +) -> None: + db_path = tmp_path / "chain.db" + with SQLiteStorage(db_path) as storage: + base_state = State() + base_state.set_account("aa" * 20, Account(balance=10, nonce=0)) + block_1 = _block_with_transactions( + previous_hash="00" * 32, + height=1, + timestamp=1_739_990_100, + transactions=[ + create_coinbase_transaction( + miner_address="bb" * 20, + amount=50, + timestamp=1_739_990_100, + ) + ], + ) + storage.persist_block_state_and_metadata(block=block_1, state=base_state) + + failing_state = State() + failing_state.set_account("cc" * 20, Account(balance=999, nonce=5)) + block_2 = _block_with_transactions( + previous_hash=block_1.hash().hex(), + height=2, + timestamp=1_739_990_130, + transactions=[ + create_coinbase_transaction( + miner_address="dd" * 20, + amount=50, + timestamp=1_739_990_130, + ) + ], + ) + + with pytest.raises(StorageError, match="head_hash"): + storage.persist_block_state_and_metadata( + block=block_2, + state=failing_state, + head_hash="invalid-hash", + ) + + assert storage.get_block_by_hash(block_2.hash().hex()) is None + loaded_state = storage.load_state() + metadata = storage.load_chain_metadata() + assert loaded_state.get_account("aa" * 20).balance == 10 + assert "cc" * 20 not in loaded_state.accounts + assert metadata == {"height": 1, "head_hash": block_1.hash().hex()} From d02843cffb64473e41d30dc5ebb74aae6cb1edb9 Mon Sep 
17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 03:09:19 +0530 Subject: [PATCH 32/47] feat: implement node lifecycle orchestration and persistence --- src/minichain/__main__.py | 26 +++- src/minichain/node.py | 267 +++++++++++++++++++++++++++++++++++++- 2 files changed, 289 insertions(+), 4 deletions(-) diff --git a/src/minichain/__main__.py b/src/minichain/__main__.py index 614289f..9d53b7d 100644 --- a/src/minichain/__main__.py +++ b/src/minichain/__main__.py @@ -3,20 +3,42 @@ from __future__ import annotations import argparse +from pathlib import Path -from minichain.node import start_node +from minichain.node import MiniChainNode, NodeConfig def build_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser(description="Run a MiniChain node.") parser.add_argument("--host", default="127.0.0.1", help="Host interface for the node") parser.add_argument("--port", default=7000, type=int, help="Port for the node") + parser.add_argument( + "--data-dir", + default=".minichain", + help="Directory for node data (sqlite db, chain state)", + ) + parser.add_argument( + "--miner-address", + default=None, + help="Optional 20-byte lowercase hex address used for mining rewards", + ) return parser def main() -> None: args = build_parser().parse_args() - start_node(host=args.host, port=args.port) + node = MiniChainNode( + NodeConfig( + data_dir=Path(args.data_dir), + miner_address=args.miner_address, + ) + ) + node.start() + try: + print(f"MiniChain node started on {args.host}:{args.port}") + print(f"chain_height={node.height} tip={node.tip_hash}") + finally: + node.stop() if __name__ == "__main__": diff --git a/src/minichain/node.py b/src/minichain/node.py index 8922753..fceb35b 100644 --- a/src/minichain/node.py +++ b/src/minichain/node.py @@ -2,7 +2,270 @@ from __future__ import annotations +from dataclasses import dataclass, field +from pathlib import Path + +from minichain.block import Block +from minichain.chain import ChainConfig, ChainManager, 
ChainValidationError +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool, MempoolValidationError +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.state import State +from minichain.storage import SQLiteStorage, StorageError +from minichain.transaction import ADDRESS_HEX_LENGTH, Transaction + + +class NodeError(ValueError): + """Raised when node lifecycle or orchestration operations fail.""" + + +@dataclass(frozen=True) +class NodeConfig: + """Runtime configuration for a MiniChain node.""" + + data_dir: Path | str + database_filename: str = "chain.sqlite3" + miner_address: str | None = None + max_block_transactions: int = 1_000 + mempool_max_size: int = 1_000 + mempool_max_age_seconds: int = 3_600 + genesis_config: GenesisConfig = field(default_factory=GenesisConfig) + chain_config: ChainConfig = field(default_factory=ChainConfig) + + def validate(self) -> None: + if self.max_block_transactions < 0: + raise NodeError("max_block_transactions must be non-negative") + if self.mempool_max_size <= 0: + raise NodeError("mempool_max_size must be positive") + if self.mempool_max_age_seconds <= 0: + raise NodeError("mempool_max_age_seconds must be positive") + if self.miner_address is not None and not _is_lower_hex( + self.miner_address, ADDRESS_HEX_LENGTH + ): + raise NodeError("miner_address must be a 20-byte lowercase hex string") + self.genesis_config.validate() + self.chain_config.validate() + + +class MiniChainNode: + """Top-level node that coordinates chain, mempool, mining, and storage.""" + + def __init__(self, config: NodeConfig) -> None: + self.config = config + self.config.validate() + + self._storage: SQLiteStorage | None = None + self._chain_manager: ChainManager | None = None + self._mempool: Mempool | None = None + self._running = False + + @property + def running(self) -> bool: + return self._running + + @property + def chain_manager(self) -> ChainManager: + if 
self._chain_manager is None: + raise NodeError("Node is not started") + return self._chain_manager + + @property + def mempool(self) -> Mempool: + if self._mempool is None: + raise NodeError("Node is not started") + return self._mempool + + @property + def storage(self) -> SQLiteStorage: + if self._storage is None: + raise NodeError("Node is not started") + return self._storage + + @property + def height(self) -> int: + return self.chain_manager.height + + @property + def tip_hash(self) -> str: + return self.chain_manager.tip_hash + + def start(self) -> None: + """Start node components and load or initialize persistent chain state.""" + if self._running: + return + + data_dir = Path(self.config.data_dir) + data_dir.mkdir(parents=True, exist_ok=True) + db_path = data_dir / self.config.database_filename + + storage = SQLiteStorage(db_path) + chain_manager = self._initialize_chain_manager(storage) + mempool = Mempool( + max_size=self.config.mempool_max_size, + max_age_seconds=self.config.mempool_max_age_seconds, + ) + + self._storage = storage + self._chain_manager = chain_manager + self._mempool = mempool + self._running = True + + def stop(self) -> None: + """Stop node components and close persistent resources.""" + if not self._running: + return + try: + if self._storage is not None: + self._storage.close() + finally: + self._storage = None + self._chain_manager = None + self._mempool = None + self._running = False + + def submit_transaction(self, transaction: Transaction) -> str: + """Validate and enqueue a transaction into the mempool.""" + self._require_started() + try: + return self.mempool.add_transaction(transaction, self.chain_manager.state) + except MempoolValidationError as exc: + raise NodeError(f"Transaction rejected by mempool: {exc}") from exc + + def accept_block(self, block: Block) -> str: + """Validate and apply a block; persist state on canonical updates.""" + self._require_started() + try: + result = self.chain_manager.add_block(block) + except 
ChainValidationError as exc: + raise NodeError(f"Block rejected: {exc}") from exc + + if result in {"extended", "reorg"}: + self.mempool.remove_confirmed_transactions(block.transactions, self.chain_manager.state) + self._persist_head() + + return result + + def mine_one_block( + self, + *, + timestamp: int | None = None, + max_nonce: int = (1 << 64) - 1, + max_transactions: int | None = None, + ) -> Block: + """Build, mine, and apply one block on top of the canonical tip.""" + self._require_started() + miner_address = self.config.miner_address + if miner_address is None: + raise NodeError("miner_address must be configured to mine blocks") + + limit = ( + self.config.max_block_transactions + if max_transactions is None + else max_transactions + ) + candidate = build_candidate_block( + chain_manager=self.chain_manager, + mempool=self.mempool, + miner_address=miner_address, + max_transactions=limit, + timestamp=timestamp, + ) + mined_block, _digest = mine_candidate_block(block_template=candidate, max_nonce=max_nonce) + result = self.accept_block(mined_block) + if result not in {"extended", "reorg"}: + raise NodeError(f"Mined block was not canonicalized: {result}") + return mined_block + + def _persist_head(self) -> None: + try: + self.storage.persist_block_state_and_metadata( + block=self.chain_manager.tip_block, + state=self.chain_manager.state, + height=self.chain_manager.height, + head_hash=self.chain_manager.tip_hash, + ) + except StorageError as exc: + raise NodeError(f"Failed to persist canonical head: {exc}") from exc + + def _initialize_chain_manager(self, storage: SQLiteStorage) -> ChainManager: + metadata = storage.load_chain_metadata() + genesis_block, genesis_state = create_genesis_state(self.config.genesis_config) + manager = ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=self.config.chain_config, + ) + + if metadata is None: + storage.persist_block_state_and_metadata( + block=manager.tip_block, + state=manager.state, 
+ height=0, + head_hash=manager.tip_hash, + ) + return manager + + stored_genesis = storage.get_block_by_height(0) + if stored_genesis is None: + raise NodeError("Storage metadata exists but genesis block is missing") + if stored_genesis.hash().hex() != manager.tip_hash: + raise NodeError("Stored genesis does not match configured genesis") + + target_height = int(metadata["height"]) + for height in range(1, target_height + 1): + block = storage.get_block_by_height(height) + if block is None: + raise NodeError(f"Missing persisted block at height {height}") + result = manager.add_block(block) + if result not in {"extended", "reorg"}: + raise NodeError( + f"Unexpected replay result at height {height}: {result}" + ) + + expected_head_hash = str(metadata["head_hash"]) + if manager.tip_hash != expected_head_hash: + raise NodeError( + "Persisted head hash mismatch: " + f"expected {expected_head_hash}, got {manager.tip_hash}" + ) + + persisted_state = storage.load_state() + if not _states_equal(persisted_state, manager.state): + raise NodeError("Persisted state does not match replayed canonical state") + + return manager + + def _require_started(self) -> None: + if not self._running: + raise NodeError("Node is not started") + def start_node(host: str, port: int) -> None: - """Start a MiniChain node (placeholder for Issue #20 integration).""" - print(f"MiniChain node scaffold started on {host}:{port}") + """Start a MiniChain node with local defaults and print its status.""" + data_dir = Path(".minichain") + default_config = NodeConfig(data_dir=data_dir) + node = MiniChainNode(default_config) + node.start() + try: + print(f"MiniChain node started on {host}:{port}") + print(f"chain_height={node.height} tip={node.tip_hash}") + finally: + node.stop() + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + +def _states_equal(left: State, right: State) -> bool: 
+ left_accounts = { + address: (account.balance, account.nonce) + for address, account in left.accounts.items() + } + right_accounts = { + address: (account.balance, account.nonce) + for address, account in right.accounts.items() + } + return left_accounts == right_accounts From d73589d3d65375e3272d0f32fca4d834b4683d28 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 03:09:25 +0530 Subject: [PATCH 33/47] test: add node startup mining and restart coverage --- tests/test_node.py | 164 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 164 insertions(+) create mode 100644 tests/test_node.py diff --git a/tests/test_node.py b/tests/test_node.py new file mode 100644 index 0000000..b46998f --- /dev/null +++ b/tests/test_node.py @@ -0,0 +1,164 @@ +"""Integration-style tests for node orchestration and persistence.""" + +from __future__ import annotations + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig +from minichain.crypto import derive_address, generate_key_pair +from minichain.genesis import GenesisConfig +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.node import MiniChainNode, NodeConfig +from minichain.storage import SQLiteStorage +from minichain.transaction import Transaction + + +def _signed_transaction( + *, + sender_key: object, + sender_address: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + tx = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + tx.sign(sender_key) + return tx + + +def test_node_start_initializes_and_persists_genesis( + tmp_path: pytest.TempPathFactory, +) -> None: + config = NodeConfig( + data_dir=tmp_path / "node-data", + genesis_config=GenesisConfig( + initial_balances={"aa" * 20: 123}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + 
) + node = MiniChainNode(config) + node.start() + try: + assert node.running + assert node.height == 0 + assert node.chain_manager.state.get_account("aa" * 20).balance == 123 + metadata = node.storage.load_chain_metadata() + assert metadata is not None + assert metadata["height"] == 0 + assert metadata["head_hash"] == node.tip_hash + finally: + node.stop() + + with SQLiteStorage((tmp_path / "node-data") / "chain.sqlite3") as storage: + assert storage.get_block_by_height(0) is not None + persisted_meta = storage.load_chain_metadata() + assert persisted_meta is not None + assert persisted_meta["height"] == 0 + + +def test_node_mine_block_then_reload_from_disk( + tmp_path: pytest.TempPathFactory, +) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + miner = "11" * 20 + + config = NodeConfig( + data_dir=tmp_path / "node-data", + miner_address=miner, + genesis_config=GenesisConfig( + initial_balances={sender: 200}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + ) + + node = MiniChainNode(config) + node.start() + try: + tx = _signed_transaction( + sender_key=sender_key, + sender_address=sender, + recipient=recipient, + amount=25, + nonce=0, + fee=3, + timestamp=1_739_000_010, + ) + node.submit_transaction(tx) + node.mine_one_block(max_nonce=500_000, timestamp=1_739_000_030) + + assert node.height == 1 + assert node.chain_manager.state.get_account(sender).balance == 172 + assert node.chain_manager.state.get_account(recipient).balance == 25 + assert node.chain_manager.state.get_account(miner).balance == 53 + finally: + node.stop() + + restarted = MiniChainNode(config) + restarted.start() + try: + assert restarted.height == 1 + assert restarted.chain_manager.state.get_account(sender).balance == 172 + assert 
restarted.chain_manager.state.get_account(recipient).balance == 25 + assert restarted.chain_manager.state.get_account(miner).balance == 53 + finally: + restarted.stop() + + +def test_accept_block_persists_chain_head( + tmp_path: pytest.TempPathFactory, +) -> None: + miner = "22" * 20 + config = NodeConfig( + data_dir=tmp_path / "node-data", + miner_address=miner, + genesis_config=GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=(1 << 255) - 1, + ), + chain_config=ChainConfig(block_reward=50), + ) + node = MiniChainNode(config) + node.start() + try: + candidate = build_candidate_block( + chain_manager=node.chain_manager, + mempool=node.mempool, + miner_address=miner, + max_transactions=0, + timestamp=1_739_000_030, + ) + mined_block, _digest = mine_candidate_block(block_template=candidate, max_nonce=500_000) + result = node.accept_block(mined_block) + assert result == "extended" + assert node.height == 1 + finally: + node.stop() + + reopened = MiniChainNode(config) + reopened.start() + try: + assert reopened.height == 1 + assert reopened.chain_manager.state.get_account(miner).balance == 50 + finally: + reopened.stop() From 9762f65e98a3eb842185f3cede94438b8b1d28c7 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 03:46:57 +0530 Subject: [PATCH 34/47] feat: implement multi-command cli interface --- src/minichain/__main__.py | 235 +++++++++++++++++++++++++++++++++++++- 1 file changed, 229 insertions(+), 6 deletions(-) diff --git a/src/minichain/__main__.py b/src/minichain/__main__.py index 9d53b7d..492fb1d 100644 --- a/src/minichain/__main__.py +++ b/src/minichain/__main__.py @@ -3,13 +3,24 @@ from __future__ import annotations import argparse +import json +import time +from dataclasses import asdict from pathlib import Path +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + generate_key_pair, + serialize_signing_key, + serialize_verify_key, +) from minichain.node import MiniChainNode, 
NodeConfig +from minichain.transaction import ADDRESS_HEX_LENGTH, Transaction def build_parser() -> argparse.ArgumentParser: - parser = argparse.ArgumentParser(description="Run a MiniChain node.") + parser = argparse.ArgumentParser(description="MiniChain CLI.") parser.add_argument("--host", default="127.0.0.1", help="Host interface for the node") parser.add_argument("--port", default=7000, type=int, help="Port for the node") parser.add_argument( @@ -22,24 +33,236 @@ def build_parser() -> argparse.ArgumentParser: default=None, help="Optional 20-byte lowercase hex address used for mining rewards", ) + subparsers = parser.add_subparsers(dest="command") + + subparsers.add_parser("start", help="Start node and print current status") + subparsers.add_parser("generate-key", help="Generate a new keypair and address") + + balance = subparsers.add_parser("balance", help="Query account balance and nonce") + balance.add_argument("--address", required=True, help="20-byte lowercase hex address") + + submit_tx = subparsers.add_parser("submit-tx", help="Submit a signed transfer transaction") + submit_tx.add_argument("--private-key", required=True, help="hex-encoded Ed25519 signing key") + submit_tx.add_argument("--recipient", required=True, help="20-byte lowercase hex address") + submit_tx.add_argument("--amount", required=True, type=int, help="transfer amount") + submit_tx.add_argument("--fee", default=1, type=int, help="transaction fee") + submit_tx.add_argument("--nonce", default=None, type=int, help="optional sender nonce") + submit_tx.add_argument( + "--mine-now", + action="store_true", + help="mine one block immediately after submission (default behavior)", + ) + submit_tx.add_argument( + "--no-mine-now", + action="store_false", + dest="mine_now", + help="do not mine immediately after submission", + ) + submit_tx.set_defaults(mine_now=True) + + block = subparsers.add_parser("block", help="Query a block by height or hash") + block_group = 
block.add_mutually_exclusive_group(required=True) + block_group.add_argument("--height", type=int, help="block height") + block_group.add_argument("--hash", dest="block_hash", help="block hash (hex)") + + mine = subparsers.add_parser("mine", help="Mine one or more blocks") + mine.add_argument("--count", default=1, type=int, help="number of blocks to mine") + mine.add_argument( + "--max-transactions", + default=None, + type=int, + help="max non-coinbase tx per block", + ) + + subparsers.add_parser("chain-info", help="Query chain height and canonical tip hash") return parser -def main() -> None: - args = build_parser().parse_args() +def main(argv: list[str] | None = None) -> None: + args = build_parser().parse_args(argv) + command = args.command or "start" + + if command == "generate-key": + _run_generate_key() + return + + miner_address = args.miner_address + if command == "submit-tx" and args.mine_now and miner_address is None: + inferred = _infer_sender_from_private_key(args.private_key) + miner_address = inferred + node = MiniChainNode( NodeConfig( data_dir=Path(args.data_dir), - miner_address=args.miner_address, + miner_address=miner_address, ) ) node.start() try: - print(f"MiniChain node started on {args.host}:{args.port}") - print(f"chain_height={node.height} tip={node.tip_hash}") + if command == "start": + print(f"MiniChain node started on {args.host}:{args.port}") + print(f"chain_height={node.height} tip={node.tip_hash}") + return + + if command == "balance": + _run_balance(node=node, address=args.address) + return + + if command == "chain-info": + _run_chain_info(node=node) + return + + if command == "block": + _run_block_query(node=node, height=args.height, block_hash=args.block_hash) + return + + if command == "submit-tx": + _run_submit_transaction( + node=node, + private_key_hex=args.private_key, + recipient=args.recipient, + amount=args.amount, + fee=args.fee, + nonce=args.nonce, + mine_now=args.mine_now, + ) + return + + if command == "mine": + 
_run_mine( + node=node, + count=args.count, + max_transactions=args.max_transactions, + ) + return + + raise ValueError(f"Unsupported command: {command}") finally: node.stop() +def _run_generate_key() -> None: + signing_key, verify_key = generate_key_pair() + private_key = serialize_signing_key(signing_key) + public_key = serialize_verify_key(verify_key) + address = derive_address(verify_key) + print(f"private_key={private_key}") + print(f"public_key={public_key}") + print(f"address={address}") + + +def _run_balance(*, node: MiniChainNode, address: str) -> None: + if not _is_lower_hex(address, ADDRESS_HEX_LENGTH): + raise ValueError("address must be a 20-byte lowercase hex string") + account = node.chain_manager.state.get_account(address) + print(f"address={address}") + print(f"balance={account.balance}") + print(f"nonce={account.nonce}") + + +def _run_chain_info(*, node: MiniChainNode) -> None: + print(f"height={node.height}") + print(f"tip_hash={node.tip_hash}") + + +def _run_block_query( + *, + node: MiniChainNode, + height: int | None, + block_hash: str | None, +) -> None: + if height is not None: + block = node.storage.get_block_by_height(height) + else: + if block_hash is None: + raise ValueError("block hash is required") + block = node.storage.get_block_by_hash(block_hash) + + if block is None: + print("block_not_found") + return + + payload = { + "hash": block.hash().hex(), + "header": asdict(block.header), + "transactions": [asdict(transaction) for transaction in block.transactions], + } + print(json.dumps(payload, sort_keys=True)) + + +def _run_submit_transaction( + *, + node: MiniChainNode, + private_key_hex: str, + recipient: str, + amount: int, + fee: int, + nonce: int | None, + mine_now: bool, +) -> None: + if amount < 0: + raise ValueError("amount must be non-negative") + if fee < 0: + raise ValueError("fee must be non-negative") + if not _is_lower_hex(recipient, ADDRESS_HEX_LENGTH): + raise ValueError("recipient must be a 20-byte lowercase hex 
string") + + signing_key = deserialize_signing_key(private_key_hex) + sender_address = derive_address(signing_key.verify_key) + sender_account = node.chain_manager.state.get_account(sender_address) + resolved_nonce = sender_account.nonce if nonce is None else nonce + if resolved_nonce < 0: + raise ValueError("nonce must be non-negative") + + transaction = Transaction( + sender=sender_address, + recipient=recipient, + amount=amount, + nonce=resolved_nonce, + fee=fee, + timestamp=int(time.time()), + ) + transaction.sign(signing_key) + + transaction_id = node.submit_transaction(transaction) + print(f"submitted_tx_id={transaction_id}") + print(f"sender={sender_address}") + print(f"recipient={recipient}") + + if not mine_now: + print("queued_in_mempool=true") + return + + mined_block = node.mine_one_block() + print(f"mined_block_height={mined_block.header.block_height}") + print(f"mined_block_hash={mined_block.hash().hex()}") + + +def _run_mine( + *, + node: MiniChainNode, + count: int, + max_transactions: int | None, +) -> None: + if count <= 0: + raise ValueError("count must be positive") + for index in range(1, count + 1): + block = node.mine_one_block(max_transactions=max_transactions) + print( + f"mined_block_{index}=height:{block.header.block_height},hash:{block.hash().hex()}" + ) + + +def _infer_sender_from_private_key(private_key_hex: str) -> str: + signing_key = deserialize_signing_key(private_key_hex) + return derive_address(signing_key.verify_key) + + +def _is_lower_hex(value: str, expected_length: int) -> bool: + if len(value) != expected_length: + return False + return all(ch in "0123456789abcdef" for ch in value) + + if __name__ == "__main__": main() From cd064d7c069ac9499abd003e6bfecce8c3574466 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 03:47:01 +0530 Subject: [PATCH 35/47] test: add cli command flow coverage --- tests/test_cli.py | 147 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 147 insertions(+) create mode 100644 
tests/test_cli.py diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..7aa710c --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,147 @@ +"""Unit tests for CLI command parsing and end-to-end command flow.""" + +from __future__ import annotations + +import json + +import pytest + +pytest.importorskip("nacl") + +from minichain.__main__ import build_parser, main +from minichain.crypto import ( + derive_address, + deserialize_signing_key, + deserialize_verify_key, + generate_key_pair, + serialize_signing_key, +) + + +def _parse_kv_lines(text: str) -> dict[str, str]: + pairs: dict[str, str] = {} + for line in text.strip().splitlines(): + if "=" in line: + key, value = line.split("=", 1) + pairs[key.strip()] = value.strip() + return pairs + + +def test_parser_defaults() -> None: + args = build_parser().parse_args([]) + assert args.host == "127.0.0.1" + assert args.port == 7000 + assert args.command is None + + +def test_generate_key_outputs_valid_material(capsys: pytest.CaptureFixture[str]) -> None: + main(["generate-key"]) + out = capsys.readouterr().out + values = _parse_kv_lines(out) + + assert "private_key" in values + assert "public_key" in values + assert "address" in values + + signing_key = deserialize_signing_key(values["private_key"]) + verify_key = deserialize_verify_key(values["public_key"]) + assert signing_key.verify_key == verify_key + assert derive_address(verify_key) == values["address"] + + +def test_mine_chain_info_and_balance_commands( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + miner_key, miner_verify = generate_key_pair() + _ = miner_key + miner_address = derive_address(miner_verify) + data_dir = tmp_path / "cli-node" + + main( + [ + "--data-dir", + str(data_dir), + "--miner-address", + miner_address, + "mine", + "--count", + "1", + ] + ) + mined_output = capsys.readouterr().out + assert "mined_block_1=height:1" in mined_output + + main(["--data-dir", str(data_dir), 
"chain-info"]) + chain_info = _parse_kv_lines(capsys.readouterr().out) + assert chain_info["height"] == "1" + assert len(chain_info["tip_hash"]) == 64 + + main(["--data-dir", str(data_dir), "balance", "--address", miner_address]) + balance_info = _parse_kv_lines(capsys.readouterr().out) + assert balance_info["address"] == miner_address + assert balance_info["balance"] == "50" + assert balance_info["nonce"] == "0" + + +def test_submit_tx_then_query_balances_and_block( + tmp_path: pytest.TempPathFactory, + capsys: pytest.CaptureFixture[str], +) -> None: + sender_key, sender_verify = generate_key_pair() + recipient_key, recipient_verify = generate_key_pair() + _ = recipient_key + + sender = derive_address(sender_verify) + recipient = derive_address(recipient_verify) + private_key_hex = serialize_signing_key(sender_key) + data_dir = tmp_path / "cli-node" + + main( + [ + "--data-dir", + str(data_dir), + "--miner-address", + sender, + "mine", + "--count", + "1", + ] + ) + _ = capsys.readouterr() + + main( + [ + "--data-dir", + str(data_dir), + "submit-tx", + "--private-key", + private_key_hex, + "--recipient", + recipient, + "--amount", + "10", + "--fee", + "2", + ] + ) + submit_output = _parse_kv_lines(capsys.readouterr().out) + assert "submitted_tx_id" in submit_output + assert submit_output["sender"] == sender + assert submit_output["recipient"] == recipient + assert submit_output["mined_block_height"] == "2" + + main(["--data-dir", str(data_dir), "balance", "--address", sender]) + sender_balance = _parse_kv_lines(capsys.readouterr().out) + assert sender_balance["balance"] == "90" + assert sender_balance["nonce"] == "1" + + main(["--data-dir", str(data_dir), "balance", "--address", recipient]) + recipient_balance = _parse_kv_lines(capsys.readouterr().out) + assert recipient_balance["balance"] == "10" + assert recipient_balance["nonce"] == "0" + + main(["--data-dir", str(data_dir), "block", "--height", "2"]) + block_payload = json.loads(capsys.readouterr().out.strip()) 
+ assert block_payload["header"]["block_height"] == 2 + assert len(block_payload["transactions"]) == 2 From 72b961ad63ae23679c40d28adf90fd7bd0ff8cdf Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 17:53:46 +0530 Subject: [PATCH 36/47] feat: implement peer discovery networking service --- src/minichain/network.py | 549 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 548 insertions(+), 1 deletion(-) diff --git a/src/minichain/network.py b/src/minichain/network.py index 7245a33..146bad9 100644 --- a/src/minichain/network.py +++ b/src/minichain/network.py @@ -1 +1,548 @@ -"""P2P networking layer built on py-libp2p (to be implemented).""" +"""Peer-to-peer networking and peer discovery for MiniChain.""" + +from __future__ import annotations + +import asyncio +import contextlib +import json +import secrets +import socket +import struct +import time +from dataclasses import dataclass, field +from typing import Any, Callable, Coroutine + +_LOCAL_DISCOVERY_REGISTRY: set["MiniChainNetwork"] = set() + + +class NetworkError(ValueError): + """Raised when networking configuration or message handling is invalid.""" + + +@dataclass(frozen=True) +class PeerAddress: + """Network address for a peer node.""" + + host: str + port: int + + def validate(self) -> None: + if not self.host: + raise NetworkError("peer host must be non-empty") + if not (0 <= self.port <= 65535): + raise NetworkError("peer port must be between 0 and 65535") + + @classmethod + def from_string(cls, value: str) -> PeerAddress: + if ":" not in value: + raise NetworkError("peer must be formatted as host:port") + host, port_text = value.rsplit(":", 1) + if not port_text.isdigit(): + raise NetworkError("peer port must be numeric") + peer = cls(host=host, port=int(port_text)) + peer.validate() + return peer + + +@dataclass(frozen=True) +class PeerInfo: + """Metadata tracked for a discovered peer.""" + + node_id: str + address: PeerAddress + discovered_via: str + last_seen: int + + 
+@dataclass(frozen=True) +class NetworkConfig: + """Runtime configuration for the MiniChain networking service.""" + + host: str = "127.0.0.1" + port: int = 0 + node_id: str | None = None + bootstrap_peers: tuple[PeerAddress, ...] = field(default_factory=tuple) + connect_timeout_seconds: float = 2.0 + enable_mdns: bool = True + mdns_group: str = "224.1.1.199" + mdns_port: int = 10099 + mdns_interval_seconds: float = 0.5 + + def validate(self) -> None: + if not self.host: + raise NetworkError("host must be non-empty") + if not (0 <= self.port <= 65535): + raise NetworkError("port must be between 0 and 65535") + if self.connect_timeout_seconds <= 0: + raise NetworkError("connect_timeout_seconds must be positive") + if not (0 <= self.mdns_port <= 65535): + raise NetworkError("mdns_port must be between 0 and 65535") + if self.mdns_interval_seconds <= 0: + raise NetworkError("mdns_interval_seconds must be positive") + for peer in self.bootstrap_peers: + peer.validate() + + +@dataclass +class _PeerConnection: + peer: PeerInfo + reader: asyncio.StreamReader + writer: asyncio.StreamWriter + task: asyncio.Task[None] | None = None + + +class _DiscoveryProtocol(asyncio.DatagramProtocol): + def __init__(self, callback: Callable[[bytes, tuple[str, int]], None]) -> None: + self._callback = callback + + def datagram_received(self, data: bytes, addr: tuple[str, int]) -> None: + self._callback(data, addr) + + +class MiniChainNetwork: + """Async TCP-based peer networking with bootstrap and multicast discovery.""" + + def __init__(self, config: NetworkConfig) -> None: + self.config = config + self.config.validate() + + self._node_id = self.config.node_id or secrets.token_hex(16) + self._server: asyncio.AbstractServer | None = None + self._connections: dict[str, _PeerConnection] = {} + self._known_peers: dict[str, PeerInfo] = {} + self._connecting_addresses: set[tuple[str, int]] = set() + self._background_tasks: set[asyncio.Task[None]] = set() + self._running = False + 
self._listen_port = self.config.port + + self._mdns_transport: asyncio.DatagramTransport | None = None + self._mdns_protocol: _DiscoveryProtocol | None = None + self._mdns_announce_task: asyncio.Task[None] | None = None + self._use_local_discovery = False + + @property + def node_id(self) -> str: + return self._node_id + + @property + def running(self) -> bool: + return self._running + + @property + def listen_host(self) -> str: + return self.config.host + + @property + def listen_port(self) -> int: + return self._listen_port + + def listen_address(self) -> PeerAddress: + return PeerAddress(host=self.listen_host, port=self.listen_port) + + def known_peers(self) -> list[PeerInfo]: + return sorted(self._known_peers.values(), key=lambda peer: peer.node_id) + + def connected_peer_ids(self) -> set[str]: + return set(self._connections) + + def is_connected_to(self, peer_id: str) -> bool: + return peer_id in self._connections + + async def wait_for_connected_peers(self, expected_count: int, *, timeout: float = 5.0) -> None: + if expected_count < 0: + raise NetworkError("expected_count must be non-negative") + if timeout <= 0: + raise NetworkError("timeout must be positive") + + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if len(self._connections) >= expected_count: + return + await asyncio.sleep(0.05) + raise TimeoutError( + f"Timed out waiting for {expected_count} peers; got {len(self._connections)}" + ) + + async def start(self) -> None: + """Start the TCP server, discovery tasks, and bootstrap connections.""" + if self._running: + return + + self._server = await asyncio.start_server( + self._handle_incoming_connection, + host=self.config.host, + port=self.config.port, + ) + sockets = self._server.sockets or [] + if not sockets: + raise NetworkError("failed to bind network server socket") + self._listen_port = int(sockets[0].getsockname()[1]) + self._running = True + + if self.config.enable_mdns: + await 
self._start_mdns_discovery() + + for peer in self.config.bootstrap_peers: + self._spawn(self.connect_to_peer(peer, discovered_via="bootstrap")) + + async def stop(self) -> None: + """Stop server, discovery, and all active peer connections.""" + if not self._running: + return + self._running = False + + if self._mdns_announce_task is not None: + self._mdns_announce_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await self._mdns_announce_task + self._mdns_announce_task = None + + if self._mdns_transport is not None: + self._mdns_transport.close() + self._mdns_transport = None + self._mdns_protocol = None + if self._use_local_discovery: + _LOCAL_DISCOVERY_REGISTRY.discard(self) + self._use_local_discovery = False + + if self._server is not None: + self._server.close() + await self._server.wait_closed() + self._server = None + + for peer_id in list(self._connections): + self._close_connection(peer_id) + + if self._background_tasks: + for task in list(self._background_tasks): + task.cancel() + await asyncio.gather(*self._background_tasks, return_exceptions=True) + self._background_tasks.clear() + + async def connect_to_peer(self, peer: PeerAddress, *, discovered_via: str) -> bool: + """Open a TCP connection to a peer and perform handshake.""" + peer.validate() + if peer == self.listen_address(): + return False + + address_key = (peer.host, peer.port) + if address_key in self._connecting_addresses: + return False + if any(connection.peer.address == peer for connection in self._connections.values()): + return False + + self._connecting_addresses.add(address_key) + try: + try: + connection = asyncio.open_connection(peer.host, peer.port) + reader, writer = await asyncio.wait_for( + connection, + timeout=self.config.connect_timeout_seconds, + ) + except (TimeoutError, OSError): + return False + + try: + await self._write_message(writer, self._hello_payload()) + message = await self._read_message(reader) + peer_info = self._peer_from_hello( + 
message=message, + fallback_host=peer.host, + discovered_via=discovered_via, + ) + if peer_info.node_id == self.node_id: + writer.close() + await writer.wait_closed() + return False + + if not self._register_connection(peer_info, reader, writer): + writer.close() + await writer.wait_closed() + return False + + await self._write_message(writer, self._peer_list_payload()) + self._start_peer_reader(peer_info.node_id) + return True + except Exception: + writer.close() + with contextlib.suppress(Exception): + await writer.wait_closed() + raise + finally: + self._connecting_addresses.discard(address_key) + + async def _handle_incoming_connection( + self, + reader: asyncio.StreamReader, + writer: asyncio.StreamWriter, + ) -> None: + try: + message = await self._read_message(reader) + peername = writer.get_extra_info("peername") + fallback_host = "127.0.0.1" if not peername else str(peername[0]) + peer_info = self._peer_from_hello( + message=message, + fallback_host=fallback_host, + discovered_via="incoming", + ) + if peer_info.node_id == self.node_id: + writer.close() + await writer.wait_closed() + return + + await self._write_message(writer, self._hello_payload()) + if not self._register_connection(peer_info, reader, writer): + writer.close() + await writer.wait_closed() + return + + await self._write_message(writer, self._peer_list_payload()) + self._start_peer_reader(peer_info.node_id) + except Exception: + writer.close() + with contextlib.suppress(Exception): + await writer.wait_closed() + + def _register_connection( + self, + peer: PeerInfo, + reader: asyncio.StreamReader, + writer: asyncio.StreamWriter, + ) -> bool: + existing = self._connections.get(peer.node_id) + if existing is not None: + return False + + self._known_peers[peer.node_id] = peer + self._connections[peer.node_id] = _PeerConnection( + peer=peer, + reader=reader, + writer=writer, + ) + return True + + def _start_peer_reader(self, peer_id: str) -> None: + connection = self._connections.get(peer_id) + 
if connection is None: + return + task = asyncio.create_task(self._peer_reader_loop(peer_id)) + connection.task = task + self._background_tasks.add(task) + task.add_done_callback(self._background_tasks.discard) + + async def _peer_reader_loop(self, peer_id: str) -> None: + connection = self._connections.get(peer_id) + if connection is None: + return + try: + while self._running: + message = await self._read_message(connection.reader, eof_ok=True) + if message is None: + break + await self._handle_peer_message(peer_id, message) + except Exception: + pass + finally: + self._close_connection(peer_id) + + async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) -> None: + message_type = message.get("type") + if message_type != "peers": + return + + peers = message.get("peers") + if not isinstance(peers, list): + raise NetworkError("peers message requires list payload") + + for candidate in peers: + if not isinstance(candidate, dict): + continue + host = candidate.get("host") + port = candidate.get("port") + if not isinstance(host, str) or not isinstance(port, int): + continue + peer = PeerAddress(host=host, port=port) + if peer == self.listen_address(): + continue + self._spawn( + self.connect_to_peer( + peer, + discovered_via=f"peer:{peer_id}", + ) + ) + + def _close_connection(self, peer_id: str) -> None: + connection = self._connections.pop(peer_id, None) + if connection is None: + return + + if connection.task is not None and not connection.task.done(): + connection.task.cancel() + connection.writer.close() + + async def _start_mdns_discovery(self) -> None: + loop = asyncio.get_running_loop() + try: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + if hasattr(socket, "SO_REUSEPORT"): + with contextlib.suppress(OSError): + sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + sock.bind(("", self.config.mdns_port)) + + membership = struct.pack( + 
"=4s4s", + socket.inet_aton(self.config.mdns_group), + socket.inet_aton("0.0.0.0"), + ) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, membership) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1) + sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 1) + + protocol = _DiscoveryProtocol(self._on_discovery_packet) + transport, _ = await loop.create_datagram_endpoint( + lambda: protocol, + sock=sock, + ) + self._mdns_transport = transport + self._mdns_protocol = protocol + except OSError: + self._use_local_discovery = True + _LOCAL_DISCOVERY_REGISTRY.add(self) + + self._mdns_announce_task = asyncio.create_task(self._announce_loop()) + self._background_tasks.add(self._mdns_announce_task) + self._mdns_announce_task.add_done_callback(self._background_tasks.discard) + + async def _announce_loop(self) -> None: + while self._running and (self._mdns_transport is not None or self._use_local_discovery): + payload = { + "service": "minichain", + "node_id": self.node_id, + "host": self.listen_host, + "port": self.listen_port, + } + encoded = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + if self._use_local_discovery: + for peer in list(_LOCAL_DISCOVERY_REGISTRY): + if peer is self: + continue + peer._on_discovery_packet(encoded, (self.listen_host, self.listen_port)) + elif self._mdns_transport is not None: + self._mdns_transport.sendto( + encoded, + (self.config.mdns_group, self.config.mdns_port), + ) + await asyncio.sleep(self.config.mdns_interval_seconds) + + def _on_discovery_packet(self, data: bytes, _addr: tuple[str, int]) -> None: + try: + payload = json.loads(data.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError): + return + if not isinstance(payload, dict): + return + if payload.get("service") != "minichain": + return + + node_id = payload.get("node_id") + host = payload.get("host") + port = payload.get("port") + if not isinstance(node_id, str) or not isinstance(host, str) or not 
isinstance(port, int): + return + if node_id == self.node_id: + return + + peer = PeerAddress(host=host, port=port) + info = PeerInfo( + node_id=node_id, + address=peer, + discovered_via="mdns", + last_seen=int(time.time()), + ) + existing = self._known_peers.get(node_id) + if existing is None: + self._known_peers[node_id] = info + self._spawn(self.connect_to_peer(peer, discovered_via="mdns")) + + def _spawn(self, coroutine: Coroutine[Any, Any, Any]) -> None: + task = asyncio.create_task(coroutine) + self._background_tasks.add(task) + task.add_done_callback(self._background_tasks.discard) + + def _hello_payload(self) -> dict[str, object]: + return { + "type": "hello", + "node_id": self.node_id, + "host": self.listen_host, + "port": self.listen_port, + } + + def _peer_list_payload(self) -> dict[str, object]: + unique_peers = { + (peer.address.host, peer.address.port) + for peer in self._known_peers.values() + } + unique_peers.update((peer.host, peer.port) for peer in self.config.bootstrap_peers) + unique_peers.discard((self.listen_host, self.listen_port)) + peers = [{"host": host, "port": port} for host, port in sorted(unique_peers)] + return {"type": "peers", "peers": peers} + + def _peer_from_hello( + self, + *, + message: dict[str, object], + fallback_host: str, + discovered_via: str, + ) -> PeerInfo: + if message.get("type") != "hello": + raise NetworkError("handshake message must be type=hello") + node_id = message.get("node_id") + host = message.get("host") + port = message.get("port") + if not isinstance(node_id, str): + raise NetworkError("handshake node_id must be a string") + if not isinstance(port, int): + raise NetworkError("handshake port must be an integer") + if not isinstance(host, str) or not host: + host = fallback_host + address = PeerAddress(host=host, port=port) + address.validate() + return PeerInfo( + node_id=node_id, + address=address, + discovered_via=discovered_via, + last_seen=int(time.time()), + ) + + async def _read_message( + self, + 
reader: asyncio.StreamReader, + *, + eof_ok: bool = False, + ) -> dict[str, object] | None: + line = await asyncio.wait_for( + reader.readline(), + timeout=self.config.connect_timeout_seconds, + ) + if not line: + if eof_ok: + return None + raise NetworkError("unexpected EOF while reading peer message") + + try: + payload = json.loads(line.decode("utf-8")) + except (UnicodeDecodeError, json.JSONDecodeError) as exc: + raise NetworkError("received malformed JSON message") from exc + if not isinstance(payload, dict): + raise NetworkError("message payload must be an object") + return payload + + async def _write_message( + self, + writer: asyncio.StreamWriter, + payload: dict[str, object], + ) -> None: + body = json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + writer.write(body + b"\n") + await writer.drain() From 6debee48d583418c1fdc322df448b9c8993dbbe9 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 17:53:50 +0530 Subject: [PATCH 37/47] test: add peer discovery integration coverage --- tests/test_network.py | 92 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 tests/test_network.py diff --git a/tests/test_network.py b/tests/test_network.py new file mode 100644 index 0000000..f2d6c6c --- /dev/null +++ b/tests/test_network.py @@ -0,0 +1,92 @@ +"""Integration tests for peer networking and discovery.""" + +from __future__ import annotations + +import asyncio +import socket + +from minichain.network import MiniChainNetwork, NetworkConfig + + +def test_bootstrap_peer_discovery() -> None: + async def scenario() -> None: + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-a", + enable_mdns=False, + ) + ) + await node_a.start() + + node_b = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-b", + enable_mdns=False, + bootstrap_peers=(node_a.listen_address(),), + ) + ) + await node_b.start() + + try: + await 
node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(1, timeout=3.0) + + assert node_a.is_connected_to("node-b") + assert node_b.is_connected_to("node-a") + finally: + await node_b.stop() + await node_a.stop() + + asyncio.run(scenario()) + + +def test_mdns_discovery_connects_two_nodes() -> None: + async def scenario() -> None: + mdns_port = _pick_free_udp_port() + config_a = NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-mdns-a", + enable_mdns=True, + mdns_group="224.1.1.199", + mdns_port=mdns_port, + mdns_interval_seconds=0.2, + ) + config_b = NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-mdns-b", + enable_mdns=True, + mdns_group="224.1.1.199", + mdns_port=mdns_port, + mdns_interval_seconds=0.2, + ) + node_a = MiniChainNetwork(config_a) + node_b = MiniChainNetwork(config_b) + await node_a.start() + await node_b.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=5.0) + await node_b.wait_for_connected_peers(1, timeout=5.0) + + assert node_a.is_connected_to("node-mdns-b") + assert node_b.is_connected_to("node-mdns-a") + finally: + await node_b.stop() + await node_a.stop() + + asyncio.run(scenario()) + + +def _pick_free_udp_port() -> int: + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + sock.bind(("127.0.0.1", 0)) + return int(sock.getsockname()[1]) + finally: + sock.close() From b5ed620fd7c0f34d9f1ad01c72fd192f011c64cb Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:01:19 +0530 Subject: [PATCH 38/47] feat: add transaction gossip protocol with dedup forwarding --- src/minichain/network.py | 121 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 119 insertions(+), 2 deletions(-) diff --git a/src/minichain/network.py b/src/minichain/network.py index 146bad9..6bfa81f 100644 --- a/src/minichain/network.py +++ b/src/minichain/network.py @@ -9,10 +9,14 @@ import socket import struct import time -from dataclasses import dataclass, field +from collections 
import deque +from dataclasses import asdict, dataclass, field from typing import Any, Callable, Coroutine +from minichain.transaction import Transaction + _LOCAL_DISCOVERY_REGISTRY: set["MiniChainNetwork"] = set() +TX_GOSSIP_PROTOCOL_ID = "/minichain/tx/1.0.0" class NetworkError(ValueError): @@ -67,6 +71,7 @@ class NetworkConfig: mdns_group: str = "224.1.1.199" mdns_port: int = 10099 mdns_interval_seconds: float = 0.5 + seen_tx_cache_size: int = 20_000 def validate(self) -> None: if not self.host: @@ -79,6 +84,8 @@ def validate(self) -> None: raise NetworkError("mdns_port must be between 0 and 65535") if self.mdns_interval_seconds <= 0: raise NetworkError("mdns_interval_seconds must be positive") + if self.seen_tx_cache_size <= 0: + raise NetworkError("seen_tx_cache_size must be positive") for peer in self.bootstrap_peers: peer.validate() @@ -119,6 +126,9 @@ def __init__(self, config: NetworkConfig) -> None: self._mdns_protocol: _DiscoveryProtocol | None = None self._mdns_announce_task: asyncio.Task[None] | None = None self._use_local_discovery = False + self._seen_transactions: set[str] = set() + self._seen_transaction_order: deque[str] = deque() + self._transaction_handler: Callable[[Transaction], bool] | None = None @property def node_id(self) -> str: @@ -148,6 +158,10 @@ def connected_peer_ids(self) -> set[str]: def is_connected_to(self, peer_id: str) -> bool: return peer_id in self._connections + def set_transaction_handler(self, handler: Callable[[Transaction], bool] | None) -> None: + """Register a local transaction validation/ingestion callback.""" + self._transaction_handler = handler + async def wait_for_connected_peers(self, expected_count: int, *, timeout: float = 5.0) -> None: if expected_count < 0: raise NetworkError("expected_count must be non-negative") @@ -163,6 +177,20 @@ async def wait_for_connected_peers(self, expected_count: int, *, timeout: float f"Timed out waiting for {expected_count} peers; got {len(self._connections)}" ) + async def 
submit_transaction(self, transaction: Transaction) -> bool: + """Validate and gossip a locally submitted transaction.""" + if not transaction.verify(): + raise NetworkError("cannot gossip invalid transaction") + tx_id = transaction.transaction_id().hex() + if not self._remember_seen_transaction(tx_id): + return False + if not self._accept_transaction(transaction): + return False + + message = self._transaction_payload(transaction, tx_id=tx_id) + await self._broadcast_message(message, exclude_peer_ids=set()) + return True + async def start(self) -> None: """Start the TCP server, discovery tasks, and bootstrap connections.""" if self._running: @@ -347,9 +375,14 @@ async def _peer_reader_loop(self, peer_id: str) -> None: async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) -> None: message_type = message.get("type") - if message_type != "peers": + if message_type == "peers": + await self._handle_peer_addresses(peer_id, message) + return + if message_type == "tx_gossip": + await self._handle_transaction_gossip(peer_id, message) return + async def _handle_peer_addresses(self, peer_id: str, message: dict[str, object]) -> None: peers = message.get("peers") if not isinstance(peers, list): raise NetworkError("peers message requires list payload") @@ -371,6 +404,39 @@ async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) - ) ) + async def _handle_transaction_gossip( + self, + source_peer_id: str, + message: dict[str, object], + ) -> None: + if message.get("protocol") != TX_GOSSIP_PROTOCOL_ID: + raise NetworkError("tx_gossip protocol id mismatch") + + payload = message.get("transaction") + if not isinstance(payload, dict): + raise NetworkError("tx_gossip transaction payload must be an object") + transaction = self._transaction_from_payload(payload) + if not transaction.verify(): + return + + announced_id = message.get("transaction_id") + if announced_id is not None and not isinstance(announced_id, str): + raise 
NetworkError("transaction_id must be a string") + + tx_id = transaction.transaction_id().hex() + if announced_id is not None and announced_id != tx_id: + return + if not self._remember_seen_transaction(tx_id): + return + if not self._accept_transaction(transaction): + return + + forward_payload = self._transaction_payload(transaction, tx_id=tx_id) + await self._broadcast_message( + forward_payload, + exclude_peer_ids={source_peer_id}, + ) + def _close_connection(self, peer_id: str) -> None: connection = self._connections.pop(peer_id, None) if connection is None: @@ -380,6 +446,24 @@ def _close_connection(self, peer_id: str) -> None: connection.task.cancel() connection.writer.close() + async def _broadcast_message( + self, + payload: dict[str, object], + *, + exclude_peer_ids: set[str], + ) -> None: + failed_peer_ids: list[str] = [] + for peer_id, connection in list(self._connections.items()): + if peer_id in exclude_peer_ids: + continue + try: + await self._write_message(connection.writer, payload) + except Exception: + failed_peer_ids.append(peer_id) + + for peer_id in failed_peer_ids: + self._close_connection(peer_id) + async def _start_mdns_discovery(self) -> None: loop = asyncio.get_running_loop() try: @@ -470,6 +554,25 @@ def _spawn(self, coroutine: Coroutine[Any, Any, Any]) -> None: self._background_tasks.add(task) task.add_done_callback(self._background_tasks.discard) + def _remember_seen_transaction(self, transaction_id: str) -> bool: + if transaction_id in self._seen_transactions: + return False + + self._seen_transactions.add(transaction_id) + self._seen_transaction_order.append(transaction_id) + while len(self._seen_transactions) > self.config.seen_tx_cache_size: + oldest = self._seen_transaction_order.popleft() + self._seen_transactions.discard(oldest) + return True + + def _accept_transaction(self, transaction: Transaction) -> bool: + if self._transaction_handler is None: + return True + try: + return bool(self._transaction_handler(transaction)) + 
except Exception: + return False + def _hello_payload(self) -> dict[str, object]: return { "type": "hello", @@ -488,6 +591,20 @@ def _peer_list_payload(self) -> dict[str, object]: peers = [{"host": host, "port": port} for host, port in sorted(unique_peers)] return {"type": "peers", "peers": peers} + def _transaction_payload(self, transaction: Transaction, *, tx_id: str) -> dict[str, object]: + return { + "type": "tx_gossip", + "protocol": TX_GOSSIP_PROTOCOL_ID, + "transaction_id": tx_id, + "transaction": asdict(transaction), + } + + def _transaction_from_payload(self, payload: dict[str, object]) -> Transaction: + try: + return Transaction(**payload) + except TypeError as exc: + raise NetworkError("invalid transaction payload shape") from exc + def _peer_from_hello( self, *, From 4aaa08e235e4fd539268d6bb6fcc2f74fb3464b2 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:01:27 +0530 Subject: [PATCH 39/47] test: add three-node transaction gossip propagation coverage --- tests/test_transaction_gossip.py | 103 +++++++++++++++++++++++++++++++ 1 file changed, 103 insertions(+) create mode 100644 tests/test_transaction_gossip.py diff --git a/tests/test_transaction_gossip.py b/tests/test_transaction_gossip.py new file mode 100644 index 0000000..ae95d5f --- /dev/null +++ b/tests/test_transaction_gossip.py @@ -0,0 +1,103 @@ +"""Integration tests for transaction gossip propagation.""" + +from __future__ import annotations + +import asyncio + +from minichain.crypto import derive_address, generate_key_pair +from minichain.network import MiniChainNetwork, NetworkConfig +from minichain.transaction import Transaction + + +def test_transaction_gossip_propagates_across_three_nodes() -> None: + async def scenario() -> None: + seen_by_node: dict[str, list[str]] = {"a": [], "b": [], "c": []} + + def make_handler(node_name: str): + def handler(transaction: Transaction) -> bool: + seen_by_node[node_name].append(transaction.transaction_id().hex()) + return True + + return 
handler + + node_b = MiniChainNetwork( + NetworkConfig(host="127.0.0.1", port=0, node_id="node-b", enable_mdns=False) + ) + node_b.set_transaction_handler(make_handler("b")) + await node_b.start() + + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-a", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_a.set_transaction_handler(make_handler("a")) + await node_a.start() + + node_c = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-c", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_c.set_transaction_handler(make_handler("c")) + await node_c.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(2, timeout=3.0) + await node_c.wait_for_connected_peers(1, timeout=3.0) + + transaction = _build_signed_transaction() + assert await node_a.submit_transaction(transaction) + + await _wait_until( + lambda: len(seen_by_node["b"]) == 1 and len(seen_by_node["c"]) == 1, + timeout=3.0, + ) + assert len(seen_by_node["a"]) == 1 + assert len(seen_by_node["b"]) == 1 + assert len(seen_by_node["c"]) == 1 + + assert not await node_a.submit_transaction(transaction) + await asyncio.sleep(0.2) + assert len(seen_by_node["a"]) == 1 + assert len(seen_by_node["b"]) == 1 + assert len(seen_by_node["c"]) == 1 + finally: + await node_c.stop() + await node_a.stop() + await node_b.stop() + + asyncio.run(scenario()) + + +def _build_signed_transaction() -> Transaction: + signing_key, verify_key = generate_key_pair() + sender = derive_address(verify_key) + transaction = Transaction( + sender=sender, + recipient="11" * 20, + amount=25, + nonce=0, + fee=1, + timestamp=1_700_000_000, + ) + transaction.sign(signing_key) + return transaction + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: 
+ if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout") From 59bc33ce15d3b6d532a8542632a5dc756e6fde2a Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:10:26 +0530 Subject: [PATCH 40/47] feat: add block propagation protocol with dedup forwarding --- src/minichain/network.py | 116 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 116 insertions(+) diff --git a/src/minichain/network.py b/src/minichain/network.py index 6bfa81f..9d2c4c7 100644 --- a/src/minichain/network.py +++ b/src/minichain/network.py @@ -13,10 +13,12 @@ from dataclasses import asdict, dataclass, field from typing import Any, Callable, Coroutine +from minichain.block import Block, BlockHeader from minichain.transaction import Transaction _LOCAL_DISCOVERY_REGISTRY: set["MiniChainNetwork"] = set() TX_GOSSIP_PROTOCOL_ID = "/minichain/tx/1.0.0" +BLOCK_GOSSIP_PROTOCOL_ID = "/minichain/block/1.0.0" class NetworkError(ValueError): @@ -72,6 +74,7 @@ class NetworkConfig: mdns_port: int = 10099 mdns_interval_seconds: float = 0.5 seen_tx_cache_size: int = 20_000 + seen_block_cache_size: int = 5_000 def validate(self) -> None: if not self.host: @@ -86,6 +89,8 @@ def validate(self) -> None: raise NetworkError("mdns_interval_seconds must be positive") if self.seen_tx_cache_size <= 0: raise NetworkError("seen_tx_cache_size must be positive") + if self.seen_block_cache_size <= 0: + raise NetworkError("seen_block_cache_size must be positive") for peer in self.bootstrap_peers: peer.validate() @@ -128,7 +133,10 @@ def __init__(self, config: NetworkConfig) -> None: self._use_local_discovery = False self._seen_transactions: set[str] = set() self._seen_transaction_order: deque[str] = deque() + self._seen_blocks: set[str] = set() + self._seen_block_order: deque[str] = deque() self._transaction_handler: Callable[[Transaction], bool] | None = None + self._block_handler: Callable[[Block], bool] | None = None @property def node_id(self) -> str: 
@@ -162,6 +170,10 @@ def set_transaction_handler(self, handler: Callable[[Transaction], bool] | None) """Register a local transaction validation/ingestion callback.""" self._transaction_handler = handler + def set_block_handler(self, handler: Callable[[Block], bool] | None) -> None: + """Register a local block validation/ingestion callback.""" + self._block_handler = handler + async def wait_for_connected_peers(self, expected_count: int, *, timeout: float = 5.0) -> None: if expected_count < 0: raise NetworkError("expected_count must be non-negative") @@ -191,6 +203,21 @@ async def submit_transaction(self, transaction: Transaction) -> bool: await self._broadcast_message(message, exclude_peer_ids=set()) return True + async def submit_block(self, block: Block) -> bool: + """Validate and gossip a locally mined or received canonical block.""" + if not block.has_valid_merkle_root(): + raise NetworkError("cannot gossip block with invalid merkle root") + + block_hash = block.hash().hex() + if not self._remember_seen_block(block_hash): + return False + if not self._accept_block(block): + return False + + message = self._block_payload(block, block_hash=block_hash) + await self._broadcast_message(message, exclude_peer_ids=set()) + return True + async def start(self) -> None: """Start the TCP server, discovery tasks, and bootstrap connections.""" if self._running: @@ -381,6 +408,9 @@ async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) - if message_type == "tx_gossip": await self._handle_transaction_gossip(peer_id, message) return + if message_type == "block_gossip": + await self._handle_block_gossip(peer_id, message) + return async def _handle_peer_addresses(self, peer_id: str, message: dict[str, object]) -> None: peers = message.get("peers") @@ -437,6 +467,39 @@ async def _handle_transaction_gossip( exclude_peer_ids={source_peer_id}, ) + async def _handle_block_gossip( + self, + source_peer_id: str, + message: dict[str, object], + ) -> None: + if 
message.get("protocol") != BLOCK_GOSSIP_PROTOCOL_ID: + raise NetworkError("block_gossip protocol id mismatch") + + payload = message.get("block") + if not isinstance(payload, dict): + raise NetworkError("block_gossip payload must be an object") + block = self._block_from_payload(payload) + if not block.has_valid_merkle_root(): + return + + announced_hash = message.get("block_hash") + if announced_hash is not None and not isinstance(announced_hash, str): + raise NetworkError("block_hash must be a string") + + block_hash = block.hash().hex() + if announced_hash is not None and announced_hash != block_hash: + return + if not self._remember_seen_block(block_hash): + return + if not self._accept_block(block): + return + + forward_payload = self._block_payload(block, block_hash=block_hash) + await self._broadcast_message( + forward_payload, + exclude_peer_ids={source_peer_id}, + ) + def _close_connection(self, peer_id: str) -> None: connection = self._connections.pop(peer_id, None) if connection is None: @@ -573,6 +636,25 @@ def _accept_transaction(self, transaction: Transaction) -> bool: except Exception: return False + def _remember_seen_block(self, block_hash: str) -> bool: + if block_hash in self._seen_blocks: + return False + + self._seen_blocks.add(block_hash) + self._seen_block_order.append(block_hash) + while len(self._seen_blocks) > self.config.seen_block_cache_size: + oldest = self._seen_block_order.popleft() + self._seen_blocks.discard(oldest) + return True + + def _accept_block(self, block: Block) -> bool: + if self._block_handler is None: + return True + try: + return bool(self._block_handler(block)) + except Exception: + return False + def _hello_payload(self) -> dict[str, object]: return { "type": "hello", @@ -605,6 +687,40 @@ def _transaction_from_payload(self, payload: dict[str, object]) -> Transaction: except TypeError as exc: raise NetworkError("invalid transaction payload shape") from exc + def _block_payload(self, block: Block, *, block_hash: str) -> 
dict[str, object]: + return { + "type": "block_gossip", + "protocol": BLOCK_GOSSIP_PROTOCOL_ID, + "block_hash": block_hash, + "block": { + "header": asdict(block.header), + "transactions": [asdict(transaction) for transaction in block.transactions], + }, + } + + def _block_from_payload(self, payload: dict[str, object]) -> Block: + header_payload = payload.get("header") + transactions_payload = payload.get("transactions") + if not isinstance(header_payload, dict): + raise NetworkError("block header payload must be an object") + if not isinstance(transactions_payload, list): + raise NetworkError("block transactions payload must be a list") + + try: + header = BlockHeader(**header_payload) + except TypeError as exc: + raise NetworkError("invalid block header payload shape") from exc + + transactions: list[Transaction] = [] + for transaction_payload in transactions_payload: + if not isinstance(transaction_payload, dict): + raise NetworkError("transaction entry must be an object") + try: + transactions.append(Transaction(**transaction_payload)) + except TypeError as exc: + raise NetworkError("invalid block transaction payload shape") from exc + return Block(header=header, transactions=transactions) + def _peer_from_hello( self, *, From 6f8c171fda4b871cb039665ae75ad1abda65af5a Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:10:29 +0530 Subject: [PATCH 41/47] test: add three-node block propagation integration coverage --- tests/test_block_gossip.py | 144 +++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) create mode 100644 tests/test_block_gossip.py diff --git a/tests/test_block_gossip.py b/tests/test_block_gossip.py new file mode 100644 index 0000000..cd12915 --- /dev/null +++ b/tests/test_block_gossip.py @@ -0,0 +1,144 @@ +"""Integration tests for block propagation across peers.""" + +from __future__ import annotations + +import asyncio + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, 
ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig + + +def test_block_gossip_propagates_and_applies_on_three_nodes() -> None: + async def scenario() -> None: + manager_a = _build_manager() + manager_b = _build_manager() + manager_c = _build_manager() + + accepted_hashes: dict[str, list[str]] = {"a": [], "b": [], "c": []} + + def make_block_handler(manager: ChainManager, node_name: str): + def handler(block) -> bool: + try: + result = manager.add_block(block) + except ChainValidationError: + return False + if result in {"extended", "reorg"}: + accepted_hashes[node_name].append(block.hash().hex()) + return True + return False + + return handler + + node_b = MiniChainNetwork( + NetworkConfig(host="127.0.0.1", port=0, node_id="node-block-b", enable_mdns=False) + ) + node_b.set_block_handler(make_block_handler(manager_b, "b")) + await node_b.start() + + node_a = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-block-a", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_a.set_block_handler(make_block_handler(manager_a, "a")) + await node_a.start() + + node_c = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-block-c", + enable_mdns=False, + bootstrap_peers=(node_b.listen_address(),), + ) + ) + node_c.set_block_handler(make_block_handler(manager_c, "c")) + await node_c.start() + + try: + await node_a.wait_for_connected_peers(1, timeout=3.0) + await node_b.wait_for_connected_peers(2, timeout=3.0) + await node_c.wait_for_connected_peers(1, timeout=3.0) + + candidate = build_candidate_block( + chain_manager=manager_a, + mempool=Mempool(), + miner_address="11" * 20, + max_transactions=0, + timestamp=1_739_000_030, 
+ ) + mined_block, _digest = mine_candidate_block( + block_template=candidate, + max_nonce=100_000, + ) + + assert await node_a.submit_block(mined_block) + + await _wait_until( + lambda: manager_b.height == 1 and manager_c.height == 1, + timeout=3.0, + ) + + expected_tip = mined_block.hash().hex() + assert manager_a.height == 1 + assert manager_b.height == 1 + assert manager_c.height == 1 + assert manager_a.tip_hash == expected_tip + assert manager_b.tip_hash == expected_tip + assert manager_c.tip_hash == expected_tip + + assert len(accepted_hashes["a"]) == 1 + assert len(accepted_hashes["b"]) == 1 + assert len(accepted_hashes["c"]) == 1 + + assert not await node_a.submit_block(mined_block) + await asyncio.sleep(0.2) + assert len(accepted_hashes["a"]) == 1 + assert len(accepted_hashes["b"]) == 1 + assert len(accepted_hashes["c"]) == 1 + finally: + await node_c.stop() + await node_a.stop() + await node_b.stop() + + asyncio.run(scenario()) + + +def _build_manager() -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout") From 7f9a9e31e4923f3264709e958d049186f25989d7 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:27:01 +0530 Subject: [PATCH 42/47] feat: add chain synchronization protocol with range sync --- src/minichain/network.py | 200 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 196 insertions(+), 4 deletions(-) diff --git 
a/src/minichain/network.py b/src/minichain/network.py index 9d2c4c7..6c52681 100644 --- a/src/minichain/network.py +++ b/src/minichain/network.py @@ -19,6 +19,7 @@ _LOCAL_DISCOVERY_REGISTRY: set["MiniChainNetwork"] = set() TX_GOSSIP_PROTOCOL_ID = "/minichain/tx/1.0.0" BLOCK_GOSSIP_PROTOCOL_ID = "/minichain/block/1.0.0" +SYNC_PROTOCOL_ID = "/minichain/sync/1.0.0" class NetworkError(ValueError): @@ -75,6 +76,7 @@ class NetworkConfig: mdns_interval_seconds: float = 0.5 seen_tx_cache_size: int = 20_000 seen_block_cache_size: int = 5_000 + sync_batch_size: int = 128 def validate(self) -> None: if not self.host: @@ -91,6 +93,8 @@ def validate(self) -> None: raise NetworkError("seen_tx_cache_size must be positive") if self.seen_block_cache_size <= 0: raise NetworkError("seen_block_cache_size must be positive") + if self.sync_batch_size <= 0: + raise NetworkError("sync_batch_size must be positive") for peer in self.bootstrap_peers: peer.validate() @@ -137,6 +141,11 @@ def __init__(self, config: NetworkConfig) -> None: self._seen_block_order: deque[str] = deque() self._transaction_handler: Callable[[Transaction], bool] | None = None self._block_handler: Callable[[Block], bool] | None = None + self._sync_height_getter: Callable[[], int] | None = None + self._sync_block_getter: Callable[[int], Block | None] | None = None + self._sync_block_applier: Callable[[Block], bool] | None = None + self._peer_advertised_heights: dict[str, int] = {} + self._sync_inflight: set[str] = set() @property def node_id(self) -> str: @@ -174,6 +183,18 @@ def set_block_handler(self, handler: Callable[[Block], bool] | None) -> None: """Register a local block validation/ingestion callback.""" self._block_handler = handler + def set_sync_handlers( + self, + *, + get_height: Callable[[], int] | None, + get_block_by_height: Callable[[int], Block | None] | None, + apply_block: Callable[[Block], bool] | None, + ) -> None: + """Register callbacks used by `/minichain/sync/1.0.0`.""" + 
self._sync_height_getter = get_height + self._sync_block_getter = get_block_by_height + self._sync_block_applier = apply_block + async def wait_for_connected_peers(self, expected_count: int, *, timeout: float = 5.0) -> None: if expected_count < 0: raise NetworkError("expected_count must be non-negative") @@ -189,6 +210,22 @@ async def wait_for_connected_peers(self, expected_count: int, *, timeout: float f"Timed out waiting for {expected_count} peers; got {len(self._connections)}" ) + async def wait_for_height(self, expected_height: int, *, timeout: float = 5.0) -> None: + """Wait until local sync height reaches at least `expected_height`.""" + if expected_height < 0: + raise NetworkError("expected_height must be non-negative") + if timeout <= 0: + raise NetworkError("timeout must be positive") + + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if self._local_chain_height() >= expected_height: + return + await asyncio.sleep(0.05) + raise TimeoutError( + f"Timed out waiting for height {expected_height}; got {self._local_chain_height()}" + ) + async def submit_transaction(self, transaction: Transaction) -> bool: """Validate and gossip a locally submitted transaction.""" if not transaction.verify(): @@ -267,6 +304,8 @@ async def stop(self) -> None: for peer_id in list(self._connections): self._close_connection(peer_id) + self._peer_advertised_heights.clear() + self._sync_inflight.clear() if self._background_tasks: for task in list(self._background_tasks): @@ -316,6 +355,7 @@ async def connect_to_peer(self, peer: PeerAddress, *, discovered_via: str) -> bo return False await self._write_message(writer, self._peer_list_payload()) + await self._send_sync_status(writer) self._start_peer_reader(peer_info.node_id) return True except Exception: @@ -352,6 +392,7 @@ async def _handle_incoming_connection( return await self._write_message(writer, self._peer_list_payload()) + await self._send_sync_status(writer) 
self._start_peer_reader(peer_info.node_id) except Exception: writer.close() @@ -411,6 +452,15 @@ async def _handle_peer_message(self, peer_id: str, message: dict[str, object]) - if message_type == "block_gossip": await self._handle_block_gossip(peer_id, message) return + if message_type == "sync_status": + await self._handle_sync_status(peer_id, message) + return + if message_type == "sync_request": + await self._handle_sync_request(peer_id, message) + return + if message_type == "sync_blocks": + await self._handle_sync_blocks(peer_id, message) + return async def _handle_peer_addresses(self, peer_id: str, message: dict[str, object]) -> None: peers = message.get("peers") @@ -500,11 +550,113 @@ async def _handle_block_gossip( exclude_peer_ids={source_peer_id}, ) + async def _handle_sync_status(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_status protocol id mismatch") + peer_height = message.get("height") + if not isinstance(peer_height, int): + raise NetworkError("sync_status height must be an integer") + if peer_height < 0: + raise NetworkError("sync_status height must be non-negative") + + self._peer_advertised_heights[peer_id] = peer_height + if peer_height > self._local_chain_height(): + self._spawn(self._request_missing_blocks(peer_id)) + + async def _handle_sync_request(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_request protocol id mismatch") + if self._sync_block_getter is None: + return + + from_height = message.get("from_height") + to_height = message.get("to_height") + if not isinstance(from_height, int) or not isinstance(to_height, int): + raise NetworkError("sync_request heights must be integers") + if from_height < 0 or to_height < from_height: + raise NetworkError("sync_request range is invalid") + + max_to_height = min(to_height, from_height + self.config.sync_batch_size - 1) 
+ blocks: list[Block] = [] + for height in range(from_height, max_to_height + 1): + block = self._sync_block_getter(height) + if block is None: + break + blocks.append(block) + + connection = self._connections.get(peer_id) + if connection is None: + return + response = self._sync_blocks_payload(start_height=from_height, blocks=blocks) + await self._write_message(connection.writer, response) + + async def _handle_sync_blocks(self, peer_id: str, message: dict[str, object]) -> None: + if message.get("protocol") != SYNC_PROTOCOL_ID: + raise NetworkError("sync_blocks protocol id mismatch") + if self._sync_block_applier is None: + self._sync_inflight.discard(peer_id) + return + + start_height = message.get("start_height") + payloads = message.get("blocks") + if not isinstance(start_height, int): + raise NetworkError("sync_blocks start_height must be an integer") + if not isinstance(payloads, list): + raise NetworkError("sync_blocks blocks must be a list") + + for entry in payloads: + if not isinstance(entry, dict): + raise NetworkError("sync_blocks entry must be an object") + block = self._block_from_payload(entry) + if not block.has_valid_merkle_root(): + self._sync_inflight.discard(peer_id) + return + if not self._sync_block_applier(block): + self._sync_inflight.discard(peer_id) + return + self._remember_seen_block(block.hash().hex()) + + if not payloads: + self._sync_inflight.discard(peer_id) + return + + self._sync_inflight.discard(peer_id) + if self._peer_advertised_heights.get(peer_id, -1) > self._local_chain_height(): + self._spawn(self._request_missing_blocks(peer_id)) + + async def _request_missing_blocks(self, peer_id: str) -> None: + if peer_id in self._sync_inflight: + return + remote_height = self._peer_advertised_heights.get(peer_id) + if remote_height is None: + return + + local_height = self._local_chain_height() + if remote_height <= local_height: + return + + connection = self._connections.get(peer_id) + if connection is None: + return + + from_height = 
local_height + 1 + to_height = min(remote_height, from_height + self.config.sync_batch_size - 1) + request = self._sync_request_payload(from_height=from_height, to_height=to_height) + self._sync_inflight.add(peer_id) + try: + await self._write_message(connection.writer, request) + except Exception: + self._sync_inflight.discard(peer_id) + self._close_connection(peer_id) + def _close_connection(self, peer_id: str) -> None: connection = self._connections.pop(peer_id, None) if connection is None: return + self._peer_advertised_heights.pop(peer_id, None) + self._sync_inflight.discard(peer_id) + if connection.task is not None and not connection.task.done(): connection.task.cancel() connection.writer.close() @@ -617,6 +769,19 @@ def _spawn(self, coroutine: Coroutine[Any, Any, Any]) -> None: self._background_tasks.add(task) task.add_done_callback(self._background_tasks.discard) + async def _send_sync_status(self, writer: asyncio.StreamWriter) -> None: + payload = self._sync_status_payload(height=self._local_chain_height()) + await self._write_message(writer, payload) + + def _local_chain_height(self) -> int: + if self._sync_height_getter is None: + return 0 + try: + height = int(self._sync_height_getter()) + except Exception: + return 0 + return max(0, height) + def _remember_seen_transaction(self, transaction_id: str) -> bool: if transaction_id in self._seen_transactions: return False @@ -673,6 +838,29 @@ def _peer_list_payload(self) -> dict[str, object]: peers = [{"host": host, "port": port} for host, port in sorted(unique_peers)] return {"type": "peers", "peers": peers} + def _sync_status_payload(self, *, height: int) -> dict[str, object]: + return { + "type": "sync_status", + "protocol": SYNC_PROTOCOL_ID, + "height": height, + } + + def _sync_request_payload(self, *, from_height: int, to_height: int) -> dict[str, object]: + return { + "type": "sync_request", + "protocol": SYNC_PROTOCOL_ID, + "from_height": from_height, + "to_height": to_height, + } + + def 
_sync_blocks_payload(self, *, start_height: int, blocks: list[Block]) -> dict[str, object]: + return { + "type": "sync_blocks", + "protocol": SYNC_PROTOCOL_ID, + "start_height": start_height, + "blocks": [self._encode_block(block) for block in blocks], + } + def _transaction_payload(self, transaction: Transaction, *, tx_id: str) -> dict[str, object]: return { "type": "tx_gossip", @@ -692,10 +880,14 @@ def _block_payload(self, block: Block, *, block_hash: str) -> dict[str, object]: "type": "block_gossip", "protocol": BLOCK_GOSSIP_PROTOCOL_ID, "block_hash": block_hash, - "block": { - "header": asdict(block.header), - "transactions": [asdict(transaction) for transaction in block.transactions], - }, + "block": self._encode_block(block), + } + + @staticmethod + def _encode_block(block: Block) -> dict[str, object]: + return { + "header": asdict(block.header), + "transactions": [asdict(transaction) for transaction in block.transactions], } def _block_from_payload(self, payload: dict[str, object]) -> Block: From 984cdda7c093af6bd111397661234a6cbe0f287e Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:27:07 +0530 Subject: [PATCH 43/47] test: add two-node chain catch-up sync coverage --- tests/test_chain_sync.py | 111 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 111 insertions(+) create mode 100644 tests/test_chain_sync.py diff --git a/tests/test_chain_sync.py b/tests/test_chain_sync.py new file mode 100644 index 0000000..42c3eaa --- /dev/null +++ b/tests/test_chain_sync.py @@ -0,0 +1,111 @@ +"""Integration tests for `/minichain/sync/1.0.0` chain synchronization.""" + +from __future__ import annotations + +import asyncio + +import pytest + +pytest.importorskip("nacl") + +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool +from minichain.mining import build_candidate_block, 
mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig + + +def test_chain_sync_catches_up_shorter_peer() -> None: + async def scenario() -> None: + source_manager = _build_manager() + target_manager = _build_manager() + _mine_blocks(source_manager, count=5) + + source_node = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-sync-source", + enable_mdns=False, + sync_batch_size=2, + ) + ) + source_node.set_sync_handlers( + get_height=lambda: source_manager.height, + get_block_by_height=source_manager.get_canonical_block_by_height, + apply_block=lambda _block: True, + ) + await source_node.start() + + target_node = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id="node-sync-target", + enable_mdns=False, + sync_batch_size=2, + bootstrap_peers=(source_node.listen_address(),), + ) + ) + target_node.set_sync_handlers( + get_height=lambda: target_manager.height, + get_block_by_height=target_manager.get_canonical_block_by_height, + apply_block=lambda block: _apply_block(target_manager, block), + ) + await target_node.start() + + try: + await source_node.wait_for_connected_peers(1, timeout=3.0) + await target_node.wait_for_connected_peers(1, timeout=3.0) + + await target_node.wait_for_height(source_manager.height, timeout=5.0) + assert target_manager.height == source_manager.height + assert target_manager.tip_hash == source_manager.tip_hash + finally: + await target_node.stop() + await source_node.stop() + + asyncio.run(scenario()) + + +def _build_manager() -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances={}, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, + ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=10, + target_block_time_seconds=30, + ), + ) + + +def _mine_blocks(manager: ChainManager, *, count: 
int) -> None: + for _ in range(count): + timestamp = manager.tip_block.header.timestamp + 30 + candidate = build_candidate_block( + chain_manager=manager, + mempool=Mempool(), + miner_address="aa" * 20, + max_transactions=0, + timestamp=timestamp, + ) + block, _digest = mine_candidate_block(block_template=candidate, max_nonce=0) + result = manager.add_block(block) + assert result == "extended" + + +def _apply_block(manager: ChainManager, block) -> bool: + try: + result = manager.add_block(block) + except ChainValidationError: + return False + return result in {"extended", "reorg", "duplicate"} From 1ee54624d759acbcb3aea83b5405a7cc8d8e535e Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:44:36 +0530 Subject: [PATCH 44/47] test: add multi-node convergence and fork reorg integration coverage --- tests/test_comprehensive_integration.py | 229 ++++++++++++++++++++++++ 1 file changed, 229 insertions(+) create mode 100644 tests/test_comprehensive_integration.py diff --git a/tests/test_comprehensive_integration.py b/tests/test_comprehensive_integration.py new file mode 100644 index 0000000..3993abe --- /dev/null +++ b/tests/test_comprehensive_integration.py @@ -0,0 +1,229 @@ +"""Comprehensive multi-node integration scenarios for v0.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass + +import pytest + +pytest.importorskip("nacl") + +from minichain.block import Block +from minichain.chain import ChainConfig, ChainManager, ChainValidationError +from minichain.consensus import MAX_TARGET +from minichain.genesis import GenesisConfig, create_genesis_state +from minichain.mempool import Mempool, MempoolValidationError +from minichain.mining import build_candidate_block, mine_candidate_block +from minichain.network import MiniChainNetwork, NetworkConfig, PeerAddress +from minichain.transaction import Transaction + + +@dataclass +class _IntegratedNode: + manager: ChainManager + mempool: Mempool + network: MiniChainNetwork 
+ + +def test_three_node_network_mining_converges() -> None: + async def scenario() -> None: + node_b = _build_node(node_id="node-int-b", bootstrap_peers=()) + await node_b.network.start() + + node_a = _build_node( + node_id="node-int-a", + bootstrap_peers=(node_b.network.listen_address(),), + ) + node_c = _build_node( + node_id="node-int-c", + bootstrap_peers=(node_b.network.listen_address(),), + ) + await node_a.network.start() + await node_c.network.start() + + nodes = [node_a, node_b, node_c] + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(2, timeout=3.0) + await node_c.network.wait_for_connected_peers(1, timeout=3.0) + + miners = ["11" * 20, "22" * 20, "33" * 20] + + for expected_height, (mining_node, miner_address) in enumerate( + zip(nodes, miners), + start=1, + ): + await _mine_and_broadcast(mining_node, miner_address) + await _wait_until( + lambda: all(node.manager.height >= expected_height for node in nodes), + timeout=5.0, + ) + await _wait_until( + lambda: len({node.manager.tip_hash for node in nodes}) == 1 + and all(node.manager.height == expected_height for node in nodes), + timeout=5.0, + ) + + assert all(node.manager.height == 3 for node in nodes) + assert len({node.manager.tip_hash for node in nodes}) == 1 + finally: + await node_c.network.stop() + await node_a.network.stop() + await node_b.network.stop() + + asyncio.run(scenario()) + + +def test_competing_blocks_trigger_fork_then_reorg_convergence() -> None: + async def scenario() -> None: + node_a = _build_node(node_id="node-fork-a", bootstrap_peers=()) + await node_a.network.start() + node_b = _build_node( + node_id="node-fork-b", + bootstrap_peers=(node_a.network.listen_address(),), + ) + await node_b.network.start() + + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(1, timeout=3.0) + + timestamp = node_a.manager.tip_block.header.timestamp + 30 + block_a = 
_build_mined_block(node_a, miner_address="44" * 20, timestamp=timestamp) + block_b = _build_mined_block(node_b, miner_address="55" * 20, timestamp=timestamp) + + await asyncio.gather( + _apply_and_broadcast_block(node_a, block_a), + _apply_and_broadcast_block(node_b, block_b), + ) + + await _wait_until( + lambda: node_a.manager.height == 1 and node_b.manager.height == 1, + timeout=3.0, + ) + assert node_a.manager.tip_hash != node_b.manager.tip_hash + + resolved = await _mine_and_broadcast(node_a, "44" * 20) + await _wait_until( + lambda: node_a.manager.height == 2 and node_b.manager.height == 2, + timeout=5.0, + ) + assert node_a.manager.tip_hash == resolved.hash().hex() + assert node_b.manager.tip_hash == resolved.hash().hex() + finally: + await node_b.network.stop() + await node_a.network.stop() + + asyncio.run(scenario()) + + +def _build_node( + *, + node_id: str, + bootstrap_peers: tuple[PeerAddress, ...], + initial_balances: dict[str, int] | None = None, +) -> _IntegratedNode: + manager = _build_manager(initial_balances=initial_balances or {}) + mempool = Mempool(max_size=200, max_age_seconds=3_600) + network = MiniChainNetwork( + NetworkConfig( + host="127.0.0.1", + port=0, + node_id=node_id, + enable_mdns=False, + bootstrap_peers=bootstrap_peers, + sync_batch_size=4, + ) + ) + node = _IntegratedNode(manager=manager, mempool=mempool, network=network) + + network.set_transaction_handler(lambda transaction: _accept_transaction(node, transaction)) + network.set_block_handler(lambda block: _apply_block(node, block)) + network.set_sync_handlers( + get_height=lambda: node.manager.height, + get_block_by_height=node.manager.get_canonical_block_by_height, + apply_block=lambda block: _apply_block(node, block), + ) + return node + + +def _build_manager(*, initial_balances: dict[str, int]) -> ChainManager: + genesis_block, genesis_state = create_genesis_state( + GenesisConfig( + initial_balances=initial_balances, + timestamp=1_739_000_000, + difficulty_target=MAX_TARGET, 
+ ) + ) + return ChainManager( + genesis_block=genesis_block, + genesis_state=genesis_state, + config=ChainConfig( + block_reward=50, + difficulty_adjustment_interval=1_000_000, + target_block_time_seconds=30, + ), + ) + + +def _build_mined_block( + node: _IntegratedNode, + *, + miner_address: str, + timestamp: int | None = None, +) -> Block: + candidate = build_candidate_block( + chain_manager=node.manager, + mempool=node.mempool, + miner_address=miner_address, + max_transactions=500, + timestamp=timestamp, + ) + block, _digest = mine_candidate_block(block_template=candidate, max_nonce=0) + return block + + +async def _mine_and_broadcast(node: _IntegratedNode, miner_address: str) -> Block: + next_timestamp = node.manager.tip_block.header.timestamp + 30 + block = _build_mined_block( + node, + miner_address=miner_address, + timestamp=next_timestamp, + ) + await _apply_and_broadcast_block(node, block) + return block + + +async def _apply_and_broadcast_block(node: _IntegratedNode, block: Block) -> None: + assert _apply_block(node, block) + sent = await node.network.submit_block(block) + assert sent + + +def _apply_block(node: _IntegratedNode, block: Block) -> bool: + try: + result = node.manager.add_block(block) + except ChainValidationError: + return False + + if result in {"extended", "reorg"}: + node.mempool.remove_confirmed_transactions(block.transactions, node.manager.state) + return result in {"extended", "reorg", "stored_fork", "duplicate"} + + +def _accept_transaction(node: _IntegratedNode, transaction: Transaction) -> bool: + try: + node.mempool.add_transaction(transaction, node.manager.state) + except MempoolValidationError: + return False + return True + + +async def _wait_until(predicate, *, timeout: float) -> None: + deadline = asyncio.get_running_loop().time() + timeout + while asyncio.get_running_loop().time() < deadline: + if predicate(): + return + await asyncio.sleep(0.05) + raise TimeoutError("condition was not met before timeout") From 
3ba5320fd0c458048f53d733b0fdbac55db483ea Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 18:45:39 +0530 Subject: [PATCH 45/47] test: add double-spend rejection propagation scenario --- tests/test_comprehensive_integration.py | 96 +++++++++++++++++++++++++ 1 file changed, 96 insertions(+) diff --git a/tests/test_comprehensive_integration.py b/tests/test_comprehensive_integration.py index 3993abe..f219006 100644 --- a/tests/test_comprehensive_integration.py +++ b/tests/test_comprehensive_integration.py @@ -12,6 +12,7 @@ from minichain.block import Block from minichain.chain import ChainConfig, ChainManager, ChainValidationError from minichain.consensus import MAX_TARGET +from minichain.crypto import derive_address, generate_key_pair from minichain.genesis import GenesisConfig, create_genesis_state from minichain.mempool import Mempool, MempoolValidationError from minichain.mining import build_candidate_block, mine_candidate_block @@ -118,6 +119,79 @@ async def scenario() -> None: asyncio.run(scenario()) +def test_double_spend_nonce_reuse_is_rejected_and_not_forwarded() -> None: + async def scenario() -> None: + signing_key, verify_key = generate_key_pair() + sender = derive_address(verify_key) + balances = {sender: 200} + + node_b = _build_node( + node_id="node-ds-b", + bootstrap_peers=(), + initial_balances=balances, + ) + await node_b.network.start() + node_a = _build_node( + node_id="node-ds-a", + bootstrap_peers=(node_b.network.listen_address(),), + initial_balances=balances, + ) + node_c = _build_node( + node_id="node-ds-c", + bootstrap_peers=(node_b.network.listen_address(),), + initial_balances=balances, + ) + await node_a.network.start() + await node_c.network.start() + + try: + await node_a.network.wait_for_connected_peers(1, timeout=3.0) + await node_b.network.wait_for_connected_peers(2, timeout=3.0) + await node_c.network.wait_for_connected_peers(1, timeout=3.0) + + tx_one = _signed_transaction( + signing_key=signing_key, + sender=sender, + 
recipient="77" * 20, + amount=25, + nonce=0, + fee=1, + timestamp=1_739_000_010, + ) + tx_two = _signed_transaction( + signing_key=signing_key, + sender=sender, + recipient="88" * 20, + amount=30, + nonce=0, + fee=1, + timestamp=1_739_000_011, + ) + + tx_one_id = tx_one.transaction_id().hex() + tx_two_id = tx_two.transaction_id().hex() + + assert await node_a.network.submit_transaction(tx_one) + await _wait_until( + lambda: node_a.mempool.contains(tx_one_id) + and node_b.mempool.contains(tx_one_id) + and node_c.mempool.contains(tx_one_id), + timeout=3.0, + ) + + assert not await node_a.network.submit_transaction(tx_two) + await asyncio.sleep(0.2) + assert not node_a.mempool.contains(tx_two_id) + assert not node_b.mempool.contains(tx_two_id) + assert not node_c.mempool.contains(tx_two_id) + finally: + await node_c.network.stop() + await node_a.network.stop() + await node_b.network.stop() + + asyncio.run(scenario()) + + def _build_node( *, node_id: str, @@ -220,6 +294,28 @@ def _accept_transaction(node: _IntegratedNode, transaction: Transaction) -> bool return True +def _signed_transaction( + *, + signing_key: object, + sender: str, + recipient: str, + amount: int, + nonce: int, + fee: int, + timestamp: int, +) -> Transaction: + transaction = Transaction( + sender=sender, + recipient=recipient, + amount=amount, + nonce=nonce, + fee=fee, + timestamp=timestamp, + ) + transaction.sign(signing_key) + return transaction + + async def _wait_until(predicate, *, timeout: float) -> None: deadline = asyncio.get_running_loop().time() + timeout while asyncio.get_running_loop().time() < deadline: From 7c72a444433b1573660ddd30d19de4e57446b0f3 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 19:07:39 +0530 Subject: [PATCH 46/47] chore: update cli verification flow in readme --- README.md | 158 ++++++++++++++++++++++++++++++++++++++++-------------- 1 file changed, 117 insertions(+), 41 deletions(-) diff --git a/README.md b/README.md index 7706e0a..ecee851 100644 --- 
a/README.md +++ b/README.md @@ -1,18 +1,25 @@ # MiniChain -MiniChain is a minimal, research-oriented blockchain implementation in Python. This repository currently contains the project scaffolding and development environment for the v0 core chain roadmap. - -## Current Status - -Issue #1 (project scaffolding) is implemented with: - -- Python package layout under `src/minichain` -- Placeholder component modules for: - - `crypto`, `transaction`, `block`, `state`, `mempool`, `consensus`, `network`, `storage`, `node` -- `pyproject.toml` project configuration -- `Makefile` for common developer tasks -- Basic CI workflow (`.github/workflows/ci.yml`) -- Baseline tests under `tests/` +MiniChain is a minimal, research-oriented blockchain implementation in Python. +It includes a full v0 core-chain pipeline with account-based state, PoW mining, +P2P propagation/sync primitives, SQLite persistence, and a CLI. + +## Implemented v0 Scope + +- Ed25519 identities and signatures (`src/minichain/crypto.py`) +- Deterministic serialization (`src/minichain/serialization.py`) +- Transactions, Merkle trees, blocks (`src/minichain/transaction.py`, `src/minichain/merkle.py`, `src/minichain/block.py`) +- Account state transition and chain manager with fork/reorg handling (`src/minichain/state.py`, `src/minichain/chain.py`) +- PoW mining and block construction (`src/minichain/consensus.py`, `src/minichain/mining.py`) +- Mempool with per-sender nonce queueing (`src/minichain/mempool.py`) +- SQLite persistence and restart recovery (`src/minichain/storage.py`) +- Node orchestration and CLI (`src/minichain/node.py`, `src/minichain/__main__.py`) +- Networking module with: + - peer discovery/bootstrap + - transaction gossip + - block propagation + - range-based chain synchronization + (`src/minichain/network.py`) ## Requirements @@ -27,45 +34,114 @@ python -m pip install --upgrade pip make dev-install ``` -If you also want networking dependencies: +Optional: ```bash python -m pip install -e 
.[network] ``` -## Common Commands +## Development Commands + +```bash +make test +make lint +make format +make start-node +``` + +## CLI End-to-End Verification + +Run this full flow in one shell session. ```bash -make test # run unit tests -make lint # run ruff checks -make format # format with ruff -make start-node # run scaffold node entrypoint +source .venv/bin/activate +export PYTHONPATH=src +rm -rf .demo ``` -## Run the Node Entrypoint +Generate 3 keypairs (miner, sender, recipient): ```bash -PYTHONPATH=src python -m minichain --host 127.0.0.1 --port 7000 +MINER_OUT=$(python -m minichain generate-key) +SENDER_OUT=$(python -m minichain generate-key) +RECIP_OUT=$(python -m minichain generate-key) + +MINER_ADDR=$(echo "$MINER_OUT" | awk -F= '/^address=/{print $2}') +SENDER_ADDR=$(echo "$SENDER_OUT" | awk -F= '/^address=/{print $2}') +SENDER_PK=$(echo "$SENDER_OUT" | awk -F= '/^private_key=/{print $2}') +RECIP_ADDR=$(echo "$RECIP_OUT" | awk -F= '/^address=/{print $2}') ``` -## Repository Layout - -```text -.github/workflows/ci.yml -src/minichain/ - __init__.py - __main__.py - crypto.py - transaction.py - block.py - state.py - mempool.py - consensus.py - network.py - storage.py - node.py -tests/ - test_scaffold.py -issues.md -architectureProposal.md +Sanity-check parsed values (must not be empty): + +```bash +echo "MINER_ADDR=$MINER_ADDR" +echo "SENDER_ADDR=$SENDER_ADDR" +echo "SENDER_PK_LEN=${#SENDER_PK}" +echo "RECIP_ADDR=$RECIP_ADDR" +``` + +Check startup/genesis status: + +```bash +python -m minichain --data-dir .demo start +python -m minichain --data-dir .demo chain-info +``` + +Mine rewards to sender and check balance: + +```bash +python -m minichain --data-dir .demo --miner-address "$SENDER_ADDR" mine --count 2 +python -m minichain --data-dir .demo balance --address "$SENDER_ADDR" ``` + +Submit transfer (auto-mines one block by default): + +```bash +python -m minichain --data-dir .demo submit-tx \ + --private-key "$SENDER_PK" \ + --recipient "$RECIP_ADDR" 
\ + --amount 7 \ + --fee 1 +``` + +Verify balances and blocks: + +```bash +python -m minichain --data-dir .demo balance --address "$SENDER_ADDR" +python -m minichain --data-dir .demo balance --address "$RECIP_ADDR" +python -m minichain --data-dir .demo chain-info +python -m minichain --data-dir .demo block --height 1 +python -m minichain --data-dir .demo block --height 2 +python -m minichain --data-dir .demo block --height 3 +``` + +Restart check (persistence): + +```bash +python -m minichain --data-dir .demo start +python -m minichain --data-dir .demo chain-info +``` + +Negative-path checks: + +```bash +python -m minichain --data-dir .demo balance --address bad || true +python -m minichain --data-dir .demo submit-tx \ + --private-key "$SENDER_PK" \ + --recipient "$RECIP_ADDR" \ + --amount -1 \ + --fee 1 || true +python -m minichain --data-dir .demo submit-tx \ + --private-key "$SENDER_PK" \ + --recipient "$RECIP_ADDR" \ + --amount 1 \ + --fee 1 \ + --nonce 0 || true +``` + +## Architecture and Roadmap Docs + +- `implementation.md`: implementation details for all components +- `docs/issues.md`: issue-by-issue roadmap +- `docs/architectureProposal.md`: full architecture proposal From e3f1c5ea718abe322b06beb78cdd53c2a226ccd0 Mon Sep 17 00:00:00 2001 From: Arunabha Date: Tue, 24 Feb 2026 19:07:47 +0530 Subject: [PATCH 47/47] chore: add detailed component implementation reference --- implementation.md | 195 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 195 insertions(+) create mode 100644 implementation.md diff --git a/implementation.md b/implementation.md new file mode 100644 index 0000000..a1811b5 --- /dev/null +++ b/implementation.md @@ -0,0 +1,195 @@ +# MiniChain Implementation Reference + +This document describes the current implementation of MiniChain v0 component by +component. + +## 1. 
Crypto Layer (`src/minichain/crypto.py`) + +- Uses PyNaCl (libsodium bindings) for: + - Ed25519 key generation/sign/verify + - BLAKE2b hashing +- Provides: + - `generate_key_pair()` + - `derive_address(verify_key)` (first 20 bytes of BLAKE2b digest) + - key serialization/deserialization helpers + - detached signature helpers (`sign_message`, `verify_signature`) +- Fails fast with actionable error if PyNaCl is missing. + +## 2. Canonical Serialization (`src/minichain/serialization.py`) + +- Deterministic UTF-8 JSON serialization with strict field sets/order. +- Rejects missing/extra fields to avoid consensus divergence. +- Consensus-critical helpers: + - `serialize_transaction(...)` + - `serialize_block_header(...)` + +## 3. Transaction Model (`src/minichain/transaction.py`) + +- `Transaction` fields: + - sender, recipient, amount, nonce, fee, timestamp + - signature, public_key +- Coinbase support: + - canonical coinbase sender constant + - `create_coinbase_transaction(...)` + - coinbase shape validation (no signature/public key, nonce=0, fee=0) +- Signing and verification: + - verifies signature bytes + - verifies sender matches derived address from included public key +- Deterministic `transaction_id()` includes signing payload + auth fields. + +## 4. Merkle Layer (`src/minichain/merkle.py`) + +- Computes deterministic Merkle root for transaction-id bytes. +- Handles empty and odd-length input sets deterministically. + +## 5. Block Layer (`src/minichain/block.py`) + +- `BlockHeader` contains version, previous hash, Merkle root, timestamp, + difficulty target, nonce, block height. +- Block hash is BLAKE2b over canonical header serialization. +- `Block` supports: + - transaction hash extraction + - computed/header Merkle root consistency checks + - coinbase placement and amount validation (`reward + fees`) + +## 6. 
Consensus / PoW (`src/minichain/consensus.py`) + +- PoW rule: `int(hash(header)) <= difficulty_target` +- Difficulty target bounds checked against 256-bit range. +- Retargeting: + - interval-based proportional adjustment + - bounded by 0.5x to 2x per interval +- Mining: + - nonce search over configured range + - optional interruption via `threading.Event` + +## 7. State Machine (`src/minichain/state.py`) + +- Account model: `{address -> (balance, nonce)}` +- Transaction application: + - validates sender/recipient + - enforces exact nonce progression + - enforces sufficient balance for `amount + fee` +- Block application: + - validates coinbase and non-coinbase tx list + - applies transfers atomically + - supports rollback behavior on failure + +## 8. Mempool (`src/minichain/mempool.py`) + +- Validates tx before acceptance (signature/identity/nonce/balance checks). +- Per-sender nonce queueing: + - tracks ready vs waiting nonces + - recomputes readiness against latest state +- Selection for mining: + - fee-prioritized across senders + - preserves per-sender nonce ordering +- Eviction: + - by staleness (age) + - by lowest fee when over capacity + +## 9. Chain Manager / Fork Choice (`src/minichain/chain.py`) + +- Tracks all known blocks and canonical path. +- Validates parent linkage, expected height, target, Merkle, and PoW. +- Replays candidate path from genesis state to evaluate fork validity. +- Fork choice: + - extend tip when direct valid successor + - reorg to longer valid chain + - store shorter forks without reorg + +## 10. Genesis (`src/minichain/genesis.py`) + +- Configurable genesis timestamp/target/version and initial balances. +- Builds genesis block with empty tx commitment. +- Applies initial account allocations into empty state. + +## 11. 
Mining Orchestration (`src/minichain/mining.py`) + +- `build_candidate_block(...)`: + - selects mempool txs + - computes fees + - inserts coinbase tx + - computes next target and Merkle root +- `mine_candidate_block(...)`: + - finds valid nonce + - returns mined block with immutable tx body copy + +## 12. Persistence (`src/minichain/storage.py`) + +- SQLite-backed storage for: + - blocks (by hash and by height) + - state snapshot (accounts) + - chain metadata (height, tip hash) +- Supports atomic persistence of block + state + metadata. +- On startup/recovery, node replays persisted chain and verifies snapshot + consistency. + +## 13. Network Layer (`src/minichain/network.py`) + +- Peer discovery and connectivity: + - TCP server/client handshake (`hello`) + - bootstrap peer connections + - peer exchange (`peers`) + - multicast mDNS-style announcements with local fallback +- Transaction gossip protocol: + - `/minichain/tx/1.0.0` + - dedup cache for seen transaction ids + - forwards valid unseen tx to peers (excluding source peer) +- Block propagation protocol: + - `/minichain/block/1.0.0` + - block payload serialization + Merkle/hash validation + - seen-block dedup and forwarding logic +- Sync protocol: + - `/minichain/sync/1.0.0` + - `sync_status` (height announce) + - `sync_request` (range by height) + - `sync_blocks` (batched block responses) + - catch-up loop with configurable batch size + +## 14. Node Orchestration (`src/minichain/node.py`) + +- `MiniChainNode` composes: + - chain manager + - mempool + - storage +- Startup: + - validate config + - open/create sqlite db + - load/create genesis + - replay persisted blocks and verify canonical state +- Runtime APIs: + - `submit_transaction(...)` + - `accept_block(...)` + - `mine_one_block(...)` +- Shutdown closes persistence cleanly. + +## 15. 
CLI (`src/minichain/__main__.py`) + +- Commands: + - `start` + - `generate-key` + - `balance --address` + - `submit-tx --private-key --recipient --amount --fee [--nonce]` + - `mine --count [--max-transactions]` + - `block --height | --hash` + - `chain-info` +- Each invocation runs as a command-style operation: + - starts node context + - performs action + - stops cleanly +- Persistence is controlled by `--data-dir`, so repeated commands share chain + state. + +## 16. Test Coverage (`tests/`) + +- Unit coverage spans all core modules. +- Integration coverage includes: + - peer discovery + - tx gossip + - block propagation + - chain sync + - comprehensive multi-node convergence, fork/reorg, and double-spend rejection + +Collectively, this forms a complete v0 educational blockchain implementation +with deterministic behavior and reproducible local testing via CLI.